Mirror of https://github.com/home-assistant/core.git (synced 2025-08-01 19:55:10 +02:00)

Commit: Merge branch 'dev' into AddClimate_MideaCCM15

.coveragerc

@@ -1509,7 +1509,6 @@ omit =
    homeassistant/components/yale_smart_alarm/alarm_control_panel.py
    homeassistant/components/yale_smart_alarm/binary_sensor.py
    homeassistant/components/yale_smart_alarm/button.py
    homeassistant/components/yale_smart_alarm/coordinator.py
    homeassistant/components/yale_smart_alarm/entity.py
    homeassistant/components/yale_smart_alarm/lock.py
    homeassistant/components/yalexs_ble/__init__.py
@@ -1524,6 +1523,9 @@ omit =
     homeassistant/components/yamaha_musiccast/select.py
     homeassistant/components/yamaha_musiccast/switch.py
     homeassistant/components/yandex_transport/sensor.py
+    homeassistant/components/yardian/__init__.py
+    homeassistant/components/yardian/coordinator.py
+    homeassistant/components/yardian/switch.py
     homeassistant/components/yeelightsunflower/light.py
     homeassistant/components/yi/camera.py
     homeassistant/components/yolink/__init__.py

.github/workflows/builder.yml (12 changes)

@@ -24,7 +24,7 @@ jobs:
       publish: ${{ steps.version.outputs.publish }}
     steps:
       - name: Checkout the repository
-        uses: actions/checkout@v3.5.3
+        uses: actions/checkout@v3.6.0
         with:
           fetch-depth: 0

@@ -56,7 +56,7 @@ jobs:
     if: github.repository_owner == 'home-assistant' && needs.init.outputs.publish == 'true'
     steps:
       - name: Checkout the repository
-        uses: actions/checkout@v3.5.3
+        uses: actions/checkout@v3.6.0

       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
         uses: actions/setup-python@v4.7.0
@@ -98,7 +98,7 @@ jobs:
         arch: ${{ fromJson(needs.init.outputs.architectures) }}
     steps:
       - name: Checkout the repository
-        uses: actions/checkout@v3.5.3
+        uses: actions/checkout@v3.6.0

       - name: Download nightly wheels of frontend
         if: needs.init.outputs.channel == 'dev'
@@ -254,7 +254,7 @@ jobs:
           - green
     steps:
       - name: Checkout the repository
-        uses: actions/checkout@v3.5.3
+        uses: actions/checkout@v3.6.0

       - name: Set build additional args
         run: |
@@ -293,7 +293,7 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - name: Checkout the repository
-        uses: actions/checkout@v3.5.3
+        uses: actions/checkout@v3.6.0

       - name: Initialize git
         uses: home-assistant/actions/helpers/git-init@master
@@ -331,7 +331,7 @@ jobs:
       id-token: write
     steps:
       - name: Checkout the repository
-        uses: actions/checkout@v3.5.3
+        uses: actions/checkout@v3.6.0

       - name: Install Cosign
         uses: sigstore/cosign-installer@v3.1.1

.github/workflows/ci.yaml (28 changes)

@@ -87,7 +87,7 @@ jobs:
     runs-on: ubuntu-22.04
     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@v3.5.3
+        uses: actions/checkout@v3.6.0
       - name: Generate partial Python venv restore key
         id: generate_python_cache_key
         run: >-
@@ -220,7 +220,7 @@ jobs:
       - info
     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@v3.5.3
+        uses: actions/checkout@v3.6.0
       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
         id: python
         uses: actions/setup-python@v4.7.0
@@ -265,7 +265,7 @@ jobs:
       - pre-commit
     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@v3.5.3
+        uses: actions/checkout@v3.6.0
       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
         uses: actions/setup-python@v4.7.0
         id: python
@@ -311,7 +311,7 @@ jobs:
       - pre-commit
     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@v3.5.3
+        uses: actions/checkout@v3.6.0
       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
         uses: actions/setup-python@v4.7.0
         id: python
@@ -360,7 +360,7 @@ jobs:
       - pre-commit
     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@v3.5.3
+        uses: actions/checkout@v3.6.0
       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
         uses: actions/setup-python@v4.7.0
         id: python
@@ -454,7 +454,7 @@ jobs:
         python-version: ${{ fromJSON(needs.info.outputs.python_versions) }}
     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@v3.5.3
+        uses: actions/checkout@v3.6.0
       - name: Set up Python ${{ matrix.python-version }}
         id: python
         uses: actions/setup-python@v4.7.0
@@ -522,7 +522,7 @@ jobs:
       - base
     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@v3.5.3
+        uses: actions/checkout@v3.6.0
       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
         id: python
         uses: actions/setup-python@v4.7.0
@@ -554,7 +554,7 @@ jobs:
       - base
     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@v3.5.3
+        uses: actions/checkout@v3.6.0
       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
         id: python
         uses: actions/setup-python@v4.7.0
@@ -587,7 +587,7 @@ jobs:
       - base
     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@v3.5.3
+        uses: actions/checkout@v3.6.0
       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
         id: python
         uses: actions/setup-python@v4.7.0
@@ -631,7 +631,7 @@ jobs:
       - base
     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@v3.5.3
+        uses: actions/checkout@v3.6.0
       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
         id: python
         uses: actions/setup-python@v4.7.0
@@ -713,7 +713,7 @@ jobs:
             bluez \
             ffmpeg
       - name: Check out code from GitHub
-        uses: actions/checkout@v3.5.3
+        uses: actions/checkout@v3.6.0
       - name: Set up Python ${{ matrix.python-version }}
         id: python
         uses: actions/setup-python@v4.7.0
@@ -865,7 +865,7 @@ jobs:
             ffmpeg \
             libmariadb-dev-compat
       - name: Check out code from GitHub
-        uses: actions/checkout@v3.5.3
+        uses: actions/checkout@v3.6.0
       - name: Set up Python ${{ matrix.python-version }}
         id: python
         uses: actions/setup-python@v4.7.0
@@ -989,7 +989,7 @@ jobs:
             ffmpeg \
             postgresql-server-dev-14
       - name: Check out code from GitHub
-        uses: actions/checkout@v3.5.3
+        uses: actions/checkout@v3.6.0
       - name: Set up Python ${{ matrix.python-version }}
         id: python
         uses: actions/setup-python@v4.7.0
@@ -1084,7 +1084,7 @@ jobs:
     timeout-minutes: 10
     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@v3.5.3
+        uses: actions/checkout@v3.6.0
       - name: Download all coverage artifacts
         uses: actions/download-artifact@v3
       - name: Upload coverage to Codecov (full coverage)

.github/workflows/translations.yml (2 changes)

@@ -19,7 +19,7 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - name: Checkout the repository
-        uses: actions/checkout@v3.5.3
+        uses: actions/checkout@v3.6.0

       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
         uses: actions/setup-python@v4.7.0

.github/workflows/wheels.yml (6 changes)

@@ -26,7 +26,7 @@ jobs:
       architectures: ${{ steps.info.outputs.architectures }}
     steps:
       - name: Checkout the repository
-        uses: actions/checkout@v3.5.3
+        uses: actions/checkout@v3.6.0

       - name: Get information
        id: info
@@ -84,7 +84,7 @@ jobs:
         arch: ${{ fromJson(needs.init.outputs.architectures) }}
     steps:
       - name: Checkout the repository
-        uses: actions/checkout@v3.5.3
+        uses: actions/checkout@v3.6.0

       - name: Download env_file
         uses: actions/download-artifact@v3
@@ -122,7 +122,7 @@ jobs:
         arch: ${{ fromJson(needs.init.outputs.architectures) }}
     steps:
       - name: Checkout the repository
-        uses: actions/checkout@v3.5.3
+        uses: actions/checkout@v3.6.0

       - name: Download env_file
         uses: actions/download-artifact@v3

.strict-typing

@@ -183,6 +183,7 @@ homeassistant.components.imap.*
homeassistant.components.input_button.*
homeassistant.components.input_select.*
homeassistant.components.integration.*
homeassistant.components.ipp.*
homeassistant.components.iqvia.*
homeassistant.components.isy994.*
homeassistant.components.jellyfin.*
@@ -325,6 +326,7 @@ homeassistant.components.tplink.*
 homeassistant.components.tplink_omada.*
 homeassistant.components.tractive.*
 homeassistant.components.tradfri.*
+homeassistant.components.trafikverket_camera.*
 homeassistant.components.trafikverket_ferry.*
 homeassistant.components.trafikverket_train.*
 homeassistant.components.trafikverket_weatherstation.*

CODEOWNERS

@@ -1302,6 +1302,8 @@ build.json @home-assistant/supervisor
 /tests/components/trace/ @home-assistant/core
 /homeassistant/components/tractive/ @Danielhiversen @zhulik @bieniu
 /tests/components/tractive/ @Danielhiversen @zhulik @bieniu
+/homeassistant/components/trafikverket_camera/ @gjohansson-ST
+/tests/components/trafikverket_camera/ @gjohansson-ST
 /homeassistant/components/trafikverket_ferry/ @gjohansson-ST
 /tests/components/trafikverket_ferry/ @gjohansson-ST
 /homeassistant/components/trafikverket_train/ @endor-force @gjohansson-ST
@@ -1444,6 +1446,7 @@ build.json @home-assistant/supervisor
 /tests/components/yamaha_musiccast/ @vigonotion @micha91
 /homeassistant/components/yandex_transport/ @rishatik92 @devbis
 /tests/components/yandex_transport/ @rishatik92 @devbis
+/homeassistant/components/yardian/ @h3l1o5
 /homeassistant/components/yeelight/ @zewelor @shenxn @starkillerOG @alexyao2015
 /tests/components/yeelight/ @zewelor @shenxn @starkillerOG @alexyao2015
 /homeassistant/components/yeelightsunflower/ @lindsaymarkward

homeassistant/brands/trafikverket.json

@@ -2,6 +2,7 @@
     "domain": "trafikverket",
     "name": "Trafikverket",
     "integrations": [
+        "trafikverket_camera",
         "trafikverket_ferry",
         "trafikverket_train",
         "trafikverket_weatherstation"

homeassistant/components/abode/camera.py

@@ -30,7 +30,7 @@ async def async_setup_entry(
     data: AbodeSystem = hass.data[DOMAIN]

     async_add_entities(
-        AbodeCamera(data, device, TIMELINE.CAPTURE_IMAGE)  # pylint: disable=no-member
+        AbodeCamera(data, device, TIMELINE.CAPTURE_IMAGE)
         for device in data.abode.get_devices(generic_type=CONST.TYPE_CAMERA)
     )

homeassistant/components/accuweather/weather.py

@@ -17,7 +17,8 @@ from homeassistant.components.weather import (
     ATTR_FORECAST_UV_INDEX,
     ATTR_FORECAST_WIND_BEARING,
     Forecast,
-    WeatherEntity,
+    SingleCoordinatorWeatherEntity,
+    WeatherEntityFeature,
 )
 from homeassistant.config_entries import ConfigEntry
 from homeassistant.const import (
@@ -27,9 +28,8 @@ from homeassistant.const import (
     UnitOfSpeed,
     UnitOfTemperature,
 )
-from homeassistant.core import HomeAssistant
+from homeassistant.core import HomeAssistant, callback
 from homeassistant.helpers.entity_platform import AddEntitiesCallback
-from homeassistant.helpers.update_coordinator import CoordinatorEntity
 from homeassistant.util.dt import utc_from_timestamp

 from . import AccuWeatherDataUpdateCoordinator
@@ -58,7 +58,7 @@ async def async_setup_entry(


 class AccuWeatherEntity(
-    CoordinatorEntity[AccuWeatherDataUpdateCoordinator], WeatherEntity
+    SingleCoordinatorWeatherEntity[AccuWeatherDataUpdateCoordinator]
 ):
     """Define an AccuWeather entity."""

@@ -76,6 +76,8 @@ class AccuWeatherEntity(
         self._attr_unique_id = coordinator.location_key
         self._attr_attribution = ATTRIBUTION
         self._attr_device_info = coordinator.device_info
+        if self.coordinator.forecast:
+            self._attr_supported_features = WeatherEntityFeature.FORECAST_DAILY

     @property
     def condition(self) -> str | None:
@@ -174,3 +176,8 @@ class AccuWeatherEntity(
             }
             for item in self.coordinator.data[ATTR_FORECAST]
         ]
+
+    @callback
+    def _async_forecast_daily(self) -> list[Forecast] | None:
+        """Return the daily forecast in native units."""
+        return self.forecast

homeassistant/components/aemet/__init__.py

@@ -1,7 +1,7 @@
 """The AEMET OpenData component."""
 import logging

-from aemet_opendata.interface import AEMET
+from aemet_opendata.interface import AEMET, ConnectionOptions

 from homeassistant.config_entries import ConfigEntry
 from homeassistant.const import CONF_API_KEY, CONF_LATITUDE, CONF_LONGITUDE, CONF_NAME
@@ -28,7 +28,8 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
     longitude = entry.data[CONF_LONGITUDE]
     station_updates = entry.options.get(CONF_STATION_UPDATES, True)

-    aemet = AEMET(aiohttp_client.async_get_clientsession(hass), api_key)
+    options = ConnectionOptions(api_key, station_updates)
+    aemet = AEMET(aiohttp_client.async_get_clientsession(hass), options)
     weather_coordinator = WeatherUpdateCoordinator(
         hass, aemet, latitude, longitude, station_updates
     )

homeassistant/components/aemet/config_flow.py

@@ -1,8 +1,8 @@
 """Config flow for AEMET OpenData."""
 from __future__ import annotations

-from aemet_opendata import AEMET
 from aemet_opendata.exceptions import AuthError
+from aemet_opendata.interface import AEMET, ConnectionOptions
 import voluptuous as vol

 from homeassistant import config_entries
@@ -40,10 +40,8 @@ class AemetConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
         await self.async_set_unique_id(f"{latitude}-{longitude}")
         self._abort_if_unique_id_configured()

-        aemet = AEMET(
-            aiohttp_client.async_get_clientsession(self.hass),
-            user_input[CONF_API_KEY],
-        )
+        options = ConnectionOptions(user_input[CONF_API_KEY], False)
+        aemet = AEMET(aiohttp_client.async_get_clientsession(self.hass), options)
         try:
             await aemet.get_conventional_observation_stations(False)
         except AuthError:

homeassistant/components/aemet/manifest.json

@@ -6,5 +6,5 @@
   "documentation": "https://www.home-assistant.io/integrations/aemet",
   "iot_class": "cloud_polling",
   "loggers": ["aemet_opendata"],
-  "requirements": ["AEMET-OpenData==0.3.0"]
+  "requirements": ["AEMET-OpenData==0.4.0"]
 }

homeassistant/components/aemet/weather.py

@@ -11,8 +11,8 @@ from homeassistant.components.weather import (
     ATTR_FORECAST_TIME,
     ATTR_FORECAST_WIND_BEARING,
     DOMAIN as WEATHER_DOMAIN,
-    CoordinatorWeatherEntity,
     Forecast,
+    SingleCoordinatorWeatherEntity,
     WeatherEntityFeature,
 )
 from homeassistant.config_entries import ConfigEntry
@@ -22,7 +22,7 @@ from homeassistant.const import (
     UnitOfSpeed,
     UnitOfTemperature,
 )
-from homeassistant.core import HomeAssistant
+from homeassistant.core import HomeAssistant, callback
 from homeassistant.helpers import entity_registry as er
 from homeassistant.helpers.entity_platform import AddEntitiesCallback

@@ -110,7 +110,7 @@ async def async_setup_entry(
     async_add_entities(entities, False)


-class AemetWeather(CoordinatorWeatherEntity[WeatherUpdateCoordinator]):
+class AemetWeather(SingleCoordinatorWeatherEntity[WeatherUpdateCoordinator]):
     """Implementation of an AEMET OpenData sensor."""

     _attr_attribution = ATTRIBUTION
@@ -160,11 +160,13 @@ class AemetWeather(SingleCoordinatorWeatherEntity[WeatherUpdateCoordinator]):
         """Return the forecast array."""
         return self._forecast(self._forecast_mode)

-    async def async_forecast_daily(self) -> list[Forecast]:
+    @callback
+    def _async_forecast_daily(self) -> list[Forecast]:
         """Return the daily forecast in native units."""
         return self._forecast(FORECAST_MODE_DAILY)

-    async def async_forecast_hourly(self) -> list[Forecast]:
+    @callback
+    def _async_forecast_hourly(self) -> list[Forecast]:
         """Return the hourly forecast in native units."""
         return self._forecast(FORECAST_MODE_HOURLY)

homeassistant/components/aemet/weather_update_coordinator.py

@@ -11,6 +11,7 @@ from aemet_opendata.const import (
     AEMET_ATTR_DAY,
     AEMET_ATTR_DIRECTION,
     AEMET_ATTR_ELABORATED,
+    AEMET_ATTR_FEEL_TEMPERATURE,
     AEMET_ATTR_FORECAST,
     AEMET_ATTR_HUMIDITY,
     AEMET_ATTR_ID,
@@ -32,7 +33,6 @@ from aemet_opendata.const import (
     AEMET_ATTR_STATION_TEMPERATURE,
     AEMET_ATTR_STORM_PROBABILITY,
     AEMET_ATTR_TEMPERATURE,
-    AEMET_ATTR_TEMPERATURE_FEELING,
     AEMET_ATTR_WIND,
     AEMET_ATTR_WIND_GUST,
     ATTR_DATA,
@@ -563,7 +563,7 @@ class WeatherUpdateCoordinator(DataUpdateCoordinator):
     @staticmethod
     def _get_temperature_feeling(day_data, hour):
         """Get temperature from weather data."""
-        val = get_forecast_hour_value(day_data[AEMET_ATTR_TEMPERATURE_FEELING], hour)
+        val = get_forecast_hour_value(day_data[AEMET_ATTR_FEEL_TEMPERATURE], hour)
         return format_int(val)

     def _get_town_id(self):

homeassistant/components/airthings_ble/sensor.py

@@ -166,7 +166,6 @@ class AirthingsSensor(
             name += f" ({identifier})"

         self._attr_unique_id = f"{name}_{entity_description.key}"
-        self._id = airthings_device.address
         self._attr_device_info = DeviceInfo(
             connections={
                 (

homeassistant/components/assist_pipeline/__init__.py

@@ -52,6 +52,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:

async def async_pipeline_from_audio_stream(
    hass: HomeAssistant,
    *,
    context: Context,
    event_callback: PipelineEventCallback,
    stt_metadata: stt.SpeechMetadata,

homeassistant/components/assist_pipeline/pipeline.py

@@ -49,6 +49,7 @@ from .error import (
     WakeWordDetectionError,
     WakeWordTimeoutError,
 )
+from .ring_buffer import RingBuffer
 from .vad import VoiceActivityTimeout, VoiceCommandSegmenter

 _LOGGER = logging.getLogger(__name__)
@@ -425,7 +426,6 @@ class PipelineRun:

     async def prepare_wake_word_detection(self) -> None:
         """Prepare wake-word-detection."""
-        # Need to add to pipeline store
         engine = wake_word.async_default_engine(self.hass)
         if engine is None:
             raise WakeWordDetectionError(
@@ -448,7 +448,7 @@
     async def wake_word_detection(
         self,
         stream: AsyncIterable[bytes],
-        audio_buffer: list[bytes],
+        audio_chunks_for_stt: list[bytes],
     ) -> wake_word.DetectionResult | None:
         """Run wake-word-detection portion of pipeline. Returns detection result."""
         metadata_dict = asdict(
@@ -484,46 +484,29 @@ class PipelineRun:
         # Use VAD to determine timeout
         wake_word_vad = VoiceActivityTimeout(wake_word_settings.timeout)

-        # Audio chunk buffer.
-        audio_bytes_to_buffer = int(
-            wake_word_settings.audio_seconds_to_buffer * 16000 * 2
-        )
-        audio_ring_buffer = b""
-
-        async def timestamped_stream() -> AsyncIterable[tuple[bytes, int]]:
-            """Yield audio with timestamps (milliseconds since start of stream)."""
-            nonlocal audio_ring_buffer
-
-            timestamp_ms = 0
-            async for chunk in stream:
-                yield chunk, timestamp_ms
-                timestamp_ms += (len(chunk) // 2) // 16  # milliseconds @ 16Khz
-
-                # Keeping audio right before wake word detection allows the
-                # voice command to be spoken immediately after the wake word.
-                if audio_bytes_to_buffer > 0:
-                    audio_ring_buffer += chunk
-                    if len(audio_ring_buffer) > audio_bytes_to_buffer:
-                        # A proper ring buffer would be far more efficient
-                        audio_ring_buffer = audio_ring_buffer[
-                            len(audio_ring_buffer) - audio_bytes_to_buffer :
-                        ]
-
-                if (wake_word_vad is not None) and (not wake_word_vad.process(chunk)):
-                    raise WakeWordTimeoutError(
-                        code="wake-word-timeout", message="Wake word was not detected"
-                    )
+        # Audio chunk buffer. This audio will be forwarded to speech-to-text
+        # after wake-word-detection.
+        num_audio_bytes_to_buffer = int(
+            wake_word_settings.audio_seconds_to_buffer * 16000 * 2  # 16-bit @ 16Khz
+        )
+        stt_audio_buffer: RingBuffer | None = None
+        if num_audio_bytes_to_buffer > 0:
+            stt_audio_buffer = RingBuffer(num_audio_bytes_to_buffer)

         try:
             # Detect wake word(s)
             result = await self.wake_word_provider.async_process_audio_stream(
-                timestamped_stream()
+                _wake_word_audio_stream(
+                    audio_stream=stream,
+                    stt_audio_buffer=stt_audio_buffer,
+                    wake_word_vad=wake_word_vad,
+                )
             )

-            if audio_ring_buffer:
+            if stt_audio_buffer is not None:
                 # All audio kept from right before the wake word was detected as
                 # a single chunk.
-                audio_buffer.append(audio_ring_buffer)
+                audio_chunks_for_stt.append(stt_audio_buffer.getvalue())
         except WakeWordTimeoutError:
             _LOGGER.debug("Timeout during wake word detection")
             raise
@@ -540,9 +523,14 @@
             wake_word_output: dict[str, Any] = {}
         else:
             if result.queued_audio:
-                # Add audio that was pending at detection
+                # Add audio that was pending at detection.
+                #
+                # Because detection occurs *after* the wake word was actually
+                # spoken, we need to make sure pending audio is forwarded to
+                # speech-to-text so the user does not have to pause before
+                # speaking the voice command.
                 for chunk_ts in result.queued_audio:
-                    audio_buffer.append(chunk_ts[0])
+                    audio_chunks_for_stt.append(chunk_ts[0])

             wake_word_output = asdict(result)
@@ -608,41 +596,12 @@
         )

         try:
-            segmenter = VoiceCommandSegmenter()
-
-            async def segment_stream(
-                stream: AsyncIterable[bytes],
-            ) -> AsyncGenerator[bytes, None]:
-                """Stop stream when voice command is finished."""
-                sent_vad_start = False
-                timestamp_ms = 0
-                async for chunk in stream:
-                    if not segmenter.process(chunk):
-                        # Silence detected at the end of voice command
-                        self.process_event(
-                            PipelineEvent(
-                                PipelineEventType.STT_VAD_END,
-                                {"timestamp": timestamp_ms},
-                            )
-                        )
-                        break
-
-                    if segmenter.in_command and (not sent_vad_start):
-                        # Speech detected at start of voice command
-                        self.process_event(
-                            PipelineEvent(
-                                PipelineEventType.STT_VAD_START,
-                                {"timestamp": timestamp_ms},
-                            )
-                        )
-                        sent_vad_start = True
-
-                    yield chunk
-                    timestamp_ms += (len(chunk) // 2) // 16  # milliseconds @ 16Khz
-
             # Transcribe audio stream
             result = await self.stt_provider.async_process_audio_stream(
-                metadata, segment_stream(stream)
+                metadata,
+                self._speech_to_text_stream(
+                    audio_stream=stream, stt_vad=VoiceCommandSegmenter()
+                ),
             )
         except Exception as src_error:
             _LOGGER.exception("Unexpected error during speech-to-text")
@@ -677,6 +636,42 @@

         return result.text

+    async def _speech_to_text_stream(
+        self,
+        audio_stream: AsyncIterable[bytes],
+        stt_vad: VoiceCommandSegmenter | None,
+        sample_rate: int = 16000,
+        sample_width: int = 2,
+    ) -> AsyncGenerator[bytes, None]:
+        """Yield audio chunks until VAD detects silence or speech-to-text completes."""
+        ms_per_sample = sample_rate // 1000
+        sent_vad_start = False
+        timestamp_ms = 0
+        async for chunk in audio_stream:
+            if stt_vad is not None:
+                if not stt_vad.process(chunk):
+                    # Silence detected at the end of voice command
+                    self.process_event(
+                        PipelineEvent(
+                            PipelineEventType.STT_VAD_END,
+                            {"timestamp": timestamp_ms},
+                        )
+                    )
+                    break
+
+                if stt_vad.in_command and (not sent_vad_start):
+                    # Speech detected at start of voice command
+                    self.process_event(
+                        PipelineEvent(
+                            PipelineEventType.STT_VAD_START,
+                            {"timestamp": timestamp_ms},
+                        )
+                    )
+                    sent_vad_start = True
+
+            yield chunk
+            timestamp_ms += (len(chunk) // sample_width) // ms_per_sample
+
     async def prepare_recognize_intent(self) -> None:
         """Prepare recognizing an intent."""
         agent_info = conversation.async_get_agent_info(
@@ -861,13 +856,14 @@
         """Run pipeline."""
         self.run.start()
         current_stage: PipelineStage | None = self.run.start_stage
-        audio_buffer: list[bytes] = []
+        stt_audio_buffer: list[bytes] = []

         try:
             if current_stage == PipelineStage.WAKE_WORD:
                 # wake-word-detection
                 assert self.stt_stream is not None
                 detect_result = await self.run.wake_word_detection(
-                    self.stt_stream, audio_buffer
+                    self.stt_stream, stt_audio_buffer
                 )
                 if detect_result is None:
                     # No wake word. Abort the rest of the pipeline.
@@ -882,19 +878,22 @@
             assert self.stt_metadata is not None
             assert self.stt_stream is not None

-            if audio_buffer:
-
-                async def buffered_stream() -> AsyncGenerator[bytes, None]:
-                    for chunk in audio_buffer:
+            stt_stream = self.stt_stream
+
+            if stt_audio_buffer:
+                # Send audio in the buffer first to speech-to-text, then move on to stt_stream.
+                # This is basically an async itertools.chain.
+                async def buffer_then_audio_stream() -> AsyncGenerator[bytes, None]:
+                    # Buffered audio
+                    for chunk in stt_audio_buffer:
                         yield chunk

+                    # Streamed audio
                     assert self.stt_stream is not None
                     async for chunk in self.stt_stream:
                         yield chunk

-                stt_stream = cast(AsyncIterable[bytes], buffered_stream())
-            else:
-                stt_stream = self.stt_stream
+                stt_stream = buffer_then_audio_stream()

             intent_input = await self.run.speech_to_text(
                 self.stt_metadata,
@@ -906,6 +905,7 @@
            tts_input = self.tts_input

            if current_stage == PipelineStage.INTENT:
                # intent-recognition
                assert intent_input is not None
                tts_input = await self.run.recognize_intent(
                    intent_input,
@@ -915,6 +915,7 @@
                current_stage = PipelineStage.TTS

            if self.run.end_stage != PipelineStage.INTENT:
                # text-to-speech
                if current_stage == PipelineStage.TTS:
                    assert tts_input is not None
                    await self.run.text_to_speech(tts_input)
@@ -999,6 +1000,36 @@
         await asyncio.gather(*prepare_tasks)


+async def _wake_word_audio_stream(
+    audio_stream: AsyncIterable[bytes],
+    stt_audio_buffer: RingBuffer | None,
+    wake_word_vad: VoiceActivityTimeout | None,
+    sample_rate: int = 16000,
+    sample_width: int = 2,
+) -> AsyncIterable[tuple[bytes, int]]:
+    """Yield audio chunks with timestamps (milliseconds since start of stream).
+
+    Adds audio to a ring buffer that will be forwarded to speech-to-text after
+    detection. Times out if VAD detects enough silence.
+    """
+    ms_per_sample = sample_rate // 1000
+    timestamp_ms = 0
+    async for chunk in audio_stream:
+        yield chunk, timestamp_ms
+        timestamp_ms += (len(chunk) // sample_width) // ms_per_sample
+
+        # Wake-word-detection occurs *after* the wake word was actually
+        # spoken. Keeping audio right before detection allows the voice
+        # command to be spoken immediately after the wake word.
+        if stt_audio_buffer is not None:
+            stt_audio_buffer.put(chunk)
+
+        if (wake_word_vad is not None) and (not wake_word_vad.process(chunk)):
+            raise WakeWordTimeoutError(
+                code="wake-word-timeout", message="Wake word was not detected"
+            )
+
+
 class PipelinePreferred(CollectionError):
     """Raised when attempting to delete the preferred pipelen."""
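
The millisecond bookkeeping above is plain integer arithmetic over 16-bit, 16 kHz mono audio. A quick worked check of the `(len(chunk) // sample_width) // ms_per_sample` expression (values illustrative, not from the commit):

    # 16-bit mono at 16 kHz: 2 bytes per sample, 16 samples per millisecond.
    sample_rate = 16000
    sample_width = 2
    ms_per_sample = sample_rate // 1000  # 16

    chunk = bytes(1024)                           # one 1024-byte audio chunk
    num_samples = len(chunk) // sample_width      # 512 samples
    duration_ms = num_samples // ms_per_sample    # 32 ms
    assert (len(chunk) // sample_width) // ms_per_sample == 32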

homeassistant/components/assist_pipeline/ring_buffer.py (new file, 57 lines)

@@ -0,0 +1,57 @@
"""Implementation of a ring buffer using bytearray."""


class RingBuffer:
    """Basic ring buffer using a bytearray.

    Not threadsafe.
    """

    def __init__(self, maxlen: int) -> None:
        """Initialize empty buffer."""
        self._buffer = bytearray(maxlen)
        self._pos = 0
        self._length = 0
        self._maxlen = maxlen

    @property
    def maxlen(self) -> int:
        """Return the maximum size of the buffer."""
        return self._maxlen

    @property
    def pos(self) -> int:
        """Return the current put position."""
        return self._pos

    def __len__(self) -> int:
        """Return the length of data stored in the buffer."""
        return self._length

    def put(self, data: bytes) -> None:
        """Put a chunk of data into the buffer, possibly wrapping around."""
        data_len = len(data)
        new_pos = self._pos + data_len
        if new_pos >= self._maxlen:
            # Split into two chunks
            num_bytes_1 = self._maxlen - self._pos
            num_bytes_2 = new_pos - self._maxlen

            self._buffer[self._pos : self._maxlen] = data[:num_bytes_1]
            self._buffer[:num_bytes_2] = data[num_bytes_1:]
            new_pos = new_pos - self._maxlen
        else:
            # Entire chunk fits at current position
            self._buffer[self._pos : self._pos + data_len] = data

        self._pos = new_pos
        self._length = min(self._maxlen, self._length + data_len)

    def getvalue(self) -> bytes:
        """Get bytes written to the buffer."""
        if (self._pos + self._length) <= self._maxlen:
            # Single chunk
            return bytes(self._buffer[: self._length])

        # Two chunks
        return bytes(self._buffer[self._pos :] + self._buffer[: self._pos])
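
A quick sketch of how this buffer behaves once writes wrap around (toy values, not part of the commit):

    from homeassistant.components.assist_pipeline.ring_buffer import RingBuffer

    buf = RingBuffer(4)
    buf.put(b"ab")         # fits at the current position: pos=2, len(buf)=2
    buf.put(b"cde")        # wraps: "cd" fills slots 2-3, "e" wraps to slot 0
    assert len(buf) == 4   # length is capped at maxlen
    assert buf.getvalue() == b"bcde"  # the most recent maxlen bytes, oldest first

The design trades the convenience of `bytes` slicing (the old "A proper ring buffer would be far more efficient" path in pipeline.py) for a fixed-size bytearray with no per-chunk reallocation.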

homeassistant/components/assist_pipeline/vad.py

@@ -1,12 +1,15 @@
 """Voice activity detection."""
 from __future__ import annotations

+from collections.abc import Iterable
 from dataclasses import dataclass, field
 from enum import StrEnum
+from typing import Final

 import webrtcvad

-_SAMPLE_RATE = 16000
+_SAMPLE_RATE: Final = 16000  # Hz
+_SAMPLE_WIDTH: Final = 2  # bytes


 class VadSensitivity(StrEnum):
@@ -29,6 +32,45 @@ class VadSensitivity(StrEnum):
         return 1.0


+class AudioBuffer:
+    """Fixed-sized audio buffer with variable internal length."""
+
+    def __init__(self, maxlen: int) -> None:
+        """Initialize buffer."""
+        self._buffer = bytearray(maxlen)
+        self._length = 0
+
+    @property
+    def length(self) -> int:
+        """Get number of bytes currently in the buffer."""
+        return self._length
+
+    def clear(self) -> None:
+        """Clear the buffer."""
+        self._length = 0
+
+    def append(self, data: bytes) -> None:
+        """Append bytes to the buffer, increasing the internal length."""
+        data_len = len(data)
+        if (self._length + data_len) > len(self._buffer):
+            raise ValueError("Length cannot be greater than buffer size")
+
+        self._buffer[self._length : self._length + data_len] = data
+        self._length += data_len
+
+    def bytes(self) -> bytes:
+        """Convert written portion of buffer to bytes."""
+        return bytes(self._buffer[: self._length])
+
+    def __len__(self) -> int:
+        """Get the number of bytes currently in the buffer."""
+        return self._length
+
+    def __bool__(self) -> bool:
+        """Return True if there are bytes in the buffer."""
+        return self._length > 0
+
+
 @dataclass
 class VoiceCommandSegmenter:
     """Segments an audio stream into voice commands using webrtcvad."""
@@ -36,7 +78,7 @@
     vad_mode: int = 3
     """Aggressiveness in filtering out non-speech. 3 is the most aggressive."""

-    vad_frames: int = 480  # 30 ms
+    vad_samples_per_chunk: int = 480  # 30 ms
     """Must be 10, 20, or 30 ms at 16Khz."""

     speech_seconds: float = 0.3
@@ -67,20 +109,23 @@
     """Seconds left before resetting start/stop time counters."""

     _vad: webrtcvad.Vad = None
-    _audio_buffer: bytes = field(default_factory=bytes)
-    _bytes_per_chunk: int = 480 * 2  # 16-bit samples
-    _seconds_per_chunk: float = 0.03  # 30 ms
+    _leftover_chunk_buffer: AudioBuffer = field(init=False)
+    _bytes_per_chunk: int = field(init=False)
+    _seconds_per_chunk: float = field(init=False)

     def __post_init__(self) -> None:
         """Initialize VAD."""
         self._vad = webrtcvad.Vad(self.vad_mode)
-        self._bytes_per_chunk = self.vad_frames * 2
-        self._seconds_per_chunk = self.vad_frames / _SAMPLE_RATE
+        self._bytes_per_chunk = self.vad_samples_per_chunk * _SAMPLE_WIDTH
+        self._seconds_per_chunk = self.vad_samples_per_chunk / _SAMPLE_RATE
+        self._leftover_chunk_buffer = AudioBuffer(
+            self.vad_samples_per_chunk * _SAMPLE_WIDTH
+        )
         self.reset()

     def reset(self) -> None:
         """Reset all counters and state."""
-        self._audio_buffer = b""
+        self._leftover_chunk_buffer.clear()
         self._speech_seconds_left = self.speech_seconds
         self._silence_seconds_left = self.silence_seconds
         self._timeout_seconds_left = self.timeout_seconds
@@ -92,27 +137,20 @@

         Returns False when command is done.
         """
-        self._audio_buffer += samples
-
-        # Process in 10, 20, or 30 ms chunks.
-        num_chunks = len(self._audio_buffer) // self._bytes_per_chunk
-        for chunk_idx in range(num_chunks):
-            chunk_offset = chunk_idx * self._bytes_per_chunk
-            chunk = self._audio_buffer[
-                chunk_offset : chunk_offset + self._bytes_per_chunk
-            ]
+        for chunk in chunk_samples(
+            samples, self._bytes_per_chunk, self._leftover_chunk_buffer
+        ):
             if not self._process_chunk(chunk):
                 self.reset()
                 return False

-        if num_chunks > 0:
-            # Remove from buffer
-            self._audio_buffer = self._audio_buffer[
-                num_chunks * self._bytes_per_chunk :
-            ]
-
         return True

+    @property
+    def audio_buffer(self) -> bytes:
+        """Get partial chunk in the audio buffer."""
+        return self._leftover_chunk_buffer.bytes()
+
     def _process_chunk(self, chunk: bytes) -> bool:
         """Process a single chunk of 16-bit 16Khz mono audio.
@@ -163,7 +201,7 @@ class VoiceActivityTimeout:
     vad_mode: int = 3
     """Aggressiveness in filtering out non-speech. 3 is the most aggressive."""

-    vad_frames: int = 480  # 30 ms
+    vad_samples_per_chunk: int = 480  # 30 ms
     """Must be 10, 20, or 30 ms at 16Khz."""

     _silence_seconds_left: float = 0.0
@@ -173,20 +211,23 @@
     """Seconds left before resetting start/stop time counters."""

     _vad: webrtcvad.Vad = None
-    _audio_buffer: bytes = field(default_factory=bytes)
-    _bytes_per_chunk: int = 480 * 2  # 16-bit samples
-    _seconds_per_chunk: float = 0.03  # 30 ms
+    _leftover_chunk_buffer: AudioBuffer = field(init=False)
+    _bytes_per_chunk: int = field(init=False)
+    _seconds_per_chunk: float = field(init=False)

     def __post_init__(self) -> None:
         """Initialize VAD."""
         self._vad = webrtcvad.Vad(self.vad_mode)
-        self._bytes_per_chunk = self.vad_frames * 2
-        self._seconds_per_chunk = self.vad_frames / _SAMPLE_RATE
+        self._bytes_per_chunk = self.vad_samples_per_chunk * _SAMPLE_WIDTH
+        self._seconds_per_chunk = self.vad_samples_per_chunk / _SAMPLE_RATE
+        self._leftover_chunk_buffer = AudioBuffer(
+            self.vad_samples_per_chunk * _SAMPLE_WIDTH
+        )
         self.reset()

     def reset(self) -> None:
         """Reset all counters and state."""
-        self._audio_buffer = b""
+        self._leftover_chunk_buffer.clear()
         self._silence_seconds_left = self.silence_seconds
         self._reset_seconds_left = self.reset_seconds
@@ -195,24 +236,12 @@

         Returns False when timeout is reached.
         """
-        self._audio_buffer += samples
-
-        # Process in 10, 20, or 30 ms chunks.
-        num_chunks = len(self._audio_buffer) // self._bytes_per_chunk
-        for chunk_idx in range(num_chunks):
-            chunk_offset = chunk_idx * self._bytes_per_chunk
-            chunk = self._audio_buffer[
-                chunk_offset : chunk_offset + self._bytes_per_chunk
-            ]
+        for chunk in chunk_samples(
+            samples, self._bytes_per_chunk, self._leftover_chunk_buffer
+        ):
             if not self._process_chunk(chunk):
                 return False

-        if num_chunks > 0:
-            # Remove from buffer
-            self._audio_buffer = self._audio_buffer[
-                num_chunks * self._bytes_per_chunk :
-            ]
-
         return True

     def _process_chunk(self, chunk: bytes) -> bool:
@@ -239,3 +268,37 @@
             )

         return True
+
+
+def chunk_samples(
+    samples: bytes,
+    bytes_per_chunk: int,
+    leftover_chunk_buffer: AudioBuffer,
+) -> Iterable[bytes]:
+    """Yield fixed-sized chunks from samples, keeping leftover bytes from previous call(s)."""
+
+    if (len(leftover_chunk_buffer) + len(samples)) < bytes_per_chunk:
+        # Extend leftover chunk, but not enough samples to complete it
+        leftover_chunk_buffer.append(samples)
+        return
+
+    next_chunk_idx = 0
+
+    if leftover_chunk_buffer:
+        # Add to leftover chunk from previous call(s).
+        bytes_to_copy = bytes_per_chunk - len(leftover_chunk_buffer)
+        leftover_chunk_buffer.append(samples[:bytes_to_copy])
+        next_chunk_idx = bytes_to_copy
+
+        # Process full chunk in buffer
+        yield leftover_chunk_buffer.bytes()
+        leftover_chunk_buffer.clear()
+
+    while next_chunk_idx < len(samples) - bytes_per_chunk + 1:
+        # Process full chunk
+        yield samples[next_chunk_idx : next_chunk_idx + bytes_per_chunk]
+        next_chunk_idx += bytes_per_chunk
+
+    # Capture leftover chunks
+    if rest_samples := samples[next_chunk_idx:]:
+        leftover_chunk_buffer.append(rest_samples)
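
How `chunk_samples` and `AudioBuffer` cooperate across calls, sketched with a 4-byte chunk size (toy values, not part of the commit):

    from homeassistant.components.assist_pipeline.vad import AudioBuffer, chunk_samples

    leftover = AudioBuffer(maxlen=4)

    # First call: one full 4-byte chunk is yielded; b"ef" is held over.
    assert list(chunk_samples(b"abcdef", 4, leftover)) == [b"abcd"]

    # Second call: the 2 leftover bytes are completed by the new samples.
    assert list(chunk_samples(b"gh", 4, leftover)) == [b"efgh"]

This is what lets the segmenter feed webrtcvad exact 10/20/30 ms frames even when callers deliver arbitrarily sized audio chunks.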

homeassistant/components/aten_pe/switch.py

@@ -4,7 +4,7 @@ from __future__ import annotations
 import logging
 from typing import Any

-from atenpdu import AtenPE, AtenPEError  # pylint: disable=import-error
+from atenpdu import AtenPE, AtenPEError
 import voluptuous as vol

 from homeassistant.components.switch import (

homeassistant/components/avea/light.py

@@ -3,7 +3,7 @@ from __future__ import annotations

 from typing import Any

-import avea  # pylint: disable=import-error
+import avea

 from homeassistant.components.light import (
     ATTR_BRIGHTNESS,

homeassistant/components/azure_service_bus/notify.py

@@ -4,13 +4,8 @@ from __future__ import annotations
 import json
 import logging

-# pylint: disable-next=import-error, no-name-in-module
 from azure.servicebus import ServiceBusMessage
-
-# pylint: disable-next=import-error, no-name-in-module
 from azure.servicebus.aio import ServiceBusClient, ServiceBusSender
-
-# pylint: disable-next=import-error, no-name-in-module
 from azure.servicebus.exceptions import (
     MessagingEntityNotFoundError,
     ServiceBusConnectionError,

homeassistant/components/baf/__init__.py

@@ -6,6 +6,7 @@ from asyncio import timeout

 from aiobafi6 import Device, Service
 from aiobafi6.discovery import PORT
+from aiobafi6.exceptions import DeviceUUIDMismatchError

 from homeassistant.config_entries import ConfigEntry
 from homeassistant.const import CONF_IP_ADDRESS, Platform
@@ -37,6 +38,10 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
     try:
         async with timeout(RUN_TIMEOUT):
             await device.async_wait_available()
+    except DeviceUUIDMismatchError as ex:
+        raise ConfigEntryNotReady(
+            f"Unexpected device found at {ip_address}; expected {entry.unique_id}, found {device.dns_sd_uuid}"
+        ) from ex
     except asyncio.TimeoutError as ex:
         run_future.cancel()
         raise ConfigEntryNotReady(f"Timed out connecting to {ip_address}") from ex

homeassistant/components/baf/manifest.json

@@ -5,7 +5,7 @@
   "config_flow": true,
   "documentation": "https://www.home-assistant.io/integrations/baf",
   "iot_class": "local_push",
-  "requirements": ["aiobafi6==0.8.2"],
+  "requirements": ["aiobafi6==0.9.0"],
   "zeroconf": [
     {
       "type": "_api._tcp.local.",

homeassistant/components/beewi_smartclim/sensor.py

@@ -1,7 +1,7 @@
 """Platform for beewi_smartclim integration."""
 from __future__ import annotations

-from beewi_smartclim import BeewiSmartClimPoller  # pylint: disable=import-error
+from beewi_smartclim import BeewiSmartClimPoller
 import voluptuous as vol

 from homeassistant.components.sensor import (

homeassistant/components/bloomsky/binary_sensor.py

@@ -49,7 +49,7 @@ def setup_platform(
 class BloomSkySensor(BinarySensorEntity):
     """Representation of a single binary sensor in a BloomSky device."""

-    def __init__(self, bs, device, sensor_name):  # pylint: disable=invalid-name
+    def __init__(self, bs, device, sensor_name):
         """Initialize a BloomSky binary sensor."""
         self._bloomsky = bs
         self._device_id = device["DeviceID"]

homeassistant/components/bloomsky/sensor.py

@@ -93,7 +93,7 @@ def setup_platform(
 class BloomSkySensor(SensorEntity):
     """Representation of a single sensor in a BloomSky device."""

-    def __init__(self, bs, device, sensor_name):  # pylint: disable=invalid-name
+    def __init__(self, bs, device, sensor_name):
         """Initialize a BloomSky sensor."""
         self._bloomsky = bs
         self._device_id = device["DeviceID"]

homeassistant/components/bluetooth/manifest.json

@@ -18,7 +18,7 @@
     "bleak-retry-connector==3.1.1",
     "bluetooth-adapters==0.16.0",
     "bluetooth-auto-recovery==1.2.1",
-    "bluetooth-data-tools==1.8.0",
-    "dbus-fast==1.93.0"
+    "bluetooth-data-tools==1.9.0",
+    "dbus-fast==1.94.0"
   ]
 }

homeassistant/components/bluetooth/update_coordinator.py

@@ -39,6 +39,7 @@ class BasePassiveBluetoothCoordinator(ABC):
         self.mode = mode
         self._last_unavailable_time = 0.0
         self._last_name = address
+        self._available = async_address_present(hass, address, connectable)

     @callback
     def async_start(self) -> CALLBACK_TYPE:
@@ -85,7 +86,17 @@ class BasePassiveBluetoothCoordinator(ABC):
     @property
     def available(self) -> bool:
         """Return if the device is available."""
-        return async_address_present(self.hass, self.address, self.connectable)
+        return self._available
+
+    @callback
+    def _async_handle_bluetooth_event_internal(
+        self,
+        service_info: BluetoothServiceInfoBleak,
+        change: BluetoothChange,
+    ) -> None:
+        """Handle a bluetooth event."""
+        self._available = True
+        self._async_handle_bluetooth_event(service_info, change)

     @callback
     def _async_start(self) -> None:
@@ -93,7 +104,7 @@ class BasePassiveBluetoothCoordinator(ABC):
         self._on_stop.append(
             async_register_callback(
                 self.hass,
-                self._async_handle_bluetooth_event,
+                self._async_handle_bluetooth_event_internal,
                 BluetoothCallbackMatcher(
                     address=self.address, connectable=self.connectable
                 ),
@@ -123,3 +134,4 @@ class BasePassiveBluetoothCoordinator(ABC):
         """Handle the device going unavailable."""
         self._last_unavailable_time = service_info.time
         self._last_name = service_info.name
+        self._available = False

homeassistant/components/bluetooth/wrappers.py

@@ -199,7 +199,7 @@ class HaBleakClientWrapper(BleakClient):
     when an integration does this.
     """

-    def __init__(  # pylint: disable=super-init-not-called, keyword-arg-before-vararg
+    def __init__(  # pylint: disable=super-init-not-called
         self,
         address_or_ble_device: str | BLEDevice,
         disconnected_callback: Callable[[BleakClient], None] | None = None,

homeassistant/components/bluetooth_tracker/device_tracker.py

@@ -7,7 +7,7 @@ from datetime import datetime, timedelta
 import logging
 from typing import Final

-import bluetooth  # pylint: disable=import-error
+import bluetooth
 from bt_proximity import BluetoothRSSI
 import voluptuous as vol


homeassistant/components/browser/__init__.py

@@ -16,8 +16,7 @@ SERVICE_BROWSE_URL = "browse_url"

 SERVICE_BROWSE_URL_SCHEMA = vol.Schema(
     {
-        # pylint: disable-next=no-value-for-parameter
-        vol.Required(ATTR_URL, default=ATTR_URL_DEFAULT): vol.Url()
+        vol.Required(ATTR_URL, default=ATTR_URL_DEFAULT): vol.Url(),
     }
 )


homeassistant/components/bsblan/manifest.json

@@ -7,5 +7,5 @@
   "integration_type": "device",
   "iot_class": "local_polling",
   "loggers": ["bsblan"],
-  "requirements": ["python-bsblan==0.5.11"]
+  "requirements": ["python-bsblan==0.5.14"]
 }

homeassistant/components/caldav/calendar.py

@@ -43,7 +43,6 @@ OFFSET = "!!"

 PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
     {
-        # pylint: disable=no-value-for-parameter
         vol.Required(CONF_URL): vol.Url(),
         vol.Optional(CONF_CALENDARS, default=[]): vol.All(cv.ensure_list, [cv.string]),
         vol.Inclusive(CONF_USERNAME, "authentication"): cv.string,

homeassistant/components/calendar/__init__.py

@@ -20,10 +20,12 @@ from homeassistant.components.websocket_api.connection import ActiveConnection
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import STATE_OFF, STATE_ON
from homeassistant.core import (
    CALLBACK_TYPE,
    HomeAssistant,
    ServiceCall,
    ServiceResponse,
    SupportsResponse,
    callback,
)
from homeassistant.exceptions import HomeAssistantError
import homeassistant.helpers.config_validation as cv
@@ -34,6 +36,7 @@ from homeassistant.helpers.config_validation import (  # noqa: F401
 )
 from homeassistant.helpers.entity import Entity
 from homeassistant.helpers.entity_component import EntityComponent
+from homeassistant.helpers.event import async_track_point_in_time
 from homeassistant.helpers.template import DATE_STR_FORMAT
 from homeassistant.helpers.typing import ConfigType
 from homeassistant.util import dt as dt_util
@@ -478,6 +481,8 @@ def is_offset_reached(
 class CalendarEntity(Entity):
     """Base class for calendar event entities."""

+    _alarm_unsubs: list[CALLBACK_TYPE] = []
+
     @property
     def event(self) -> CalendarEvent | None:
         """Return the next upcoming event."""
@@ -513,6 +518,48 @@ class CalendarEntity(Entity):

         return STATE_OFF

+    @callback
+    def async_write_ha_state(self) -> None:
+        """Write the state to the state machine.
+
+        This sets up listeners to handle state transitions for start or end of
+        the current or upcoming event.
+        """
+        super().async_write_ha_state()
+
+        for unsub in self._alarm_unsubs:
+            unsub()
+
+        now = dt_util.now()
+        event = self.event
+        if event is None or now >= event.end_datetime_local:
+            return
+
+        @callback
+        def update(_: datetime.datetime) -> None:
+            """Run when the active or upcoming event starts or ends."""
+            self._async_write_ha_state()
+
+        if now < event.start_datetime_local:
+            self._alarm_unsubs.append(
+                async_track_point_in_time(
+                    self.hass,
+                    update,
+                    event.start_datetime_local,
+                )
+            )
+        self._alarm_unsubs.append(
+            async_track_point_in_time(self.hass, update, event.end_datetime_local)
+        )
+
+    async def async_will_remove_from_hass(self) -> None:
+        """Run when entity will be removed from hass.
+
+        To be extended by integrations.
+        """
+        for unsub in self._alarm_unsubs:
+            unsub()
+
     async def async_get_events(
         self,
         hass: HomeAssistant,
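
With this override in place, a calendar entity gets its on/off transitions scheduled for it: every state write registers point-in-time listeners for the upcoming event's start and end. A minimal sketch of an entity relying on that behavior (hypothetical integration code; `MyCalendar` and its event source are illustrative):

    from homeassistant.components.calendar import CalendarEntity, CalendarEvent

    class MyCalendar(CalendarEntity):
        """Calendar entity; the base class now re-fires state at event boundaries."""

        def __init__(self) -> None:
            self._event: CalendarEvent | None = None

        @property
        def event(self) -> CalendarEvent | None:
            # Read on every state write; used to schedule the start/end alarms.
            return self._event

        async def async_get_events(self, hass, start_date, end_date):
            """Return events in the requested window (stub for this sketch)."""
            return [self._event] if self._event else []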

homeassistant/components/camera/__init__.py

@@ -168,9 +168,14 @@ async def _async_get_image(
     """
     with suppress(asyncio.CancelledError, asyncio.TimeoutError):
         async with asyncio.timeout(timeout):
-            if image_bytes := await camera.async_camera_image(
-                width=width, height=height
-            ):
+            image_bytes = (
+                await _async_get_stream_image(
+                    camera, width=width, height=height, wait_for_next_keyframe=False
+                )
+                if camera.use_stream_for_stills
+                else await camera.async_camera_image(width=width, height=height)
+            )
+            if image_bytes:
                 content_type = camera.content_type
                 image = Image(content_type, image_bytes)
                 if (
@@ -205,6 +210,21 @@ async def async_get_image(
     return await _async_get_image(camera, timeout, width, height)


+async def _async_get_stream_image(
+    camera: Camera,
+    width: int | None = None,
+    height: int | None = None,
+    wait_for_next_keyframe: bool = False,
+) -> bytes | None:
+    if not camera.stream and camera.supported_features & SUPPORT_STREAM:
+        camera.stream = await camera.async_create_stream()
+    if camera.stream:
+        return await camera.stream.async_get_image(
+            width=width, height=height, wait_for_next_keyframe=wait_for_next_keyframe
+        )
+    return None
+
+
 @bind_hass
 async def async_get_stream_source(hass: HomeAssistant, entity_id: str) -> str | None:
     """Fetch the stream source for a camera entity."""
@@ -360,6 +380,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
     await component.async_setup(config)

     async def preload_stream(_event: Event) -> None:
+        """Load stream prefs and start stream if preload_stream is True."""
         for camera in list(component.entities):
             stream_prefs = await prefs.get_dynamic_stream_settings(camera.entity_id)
             if not stream_prefs.preload_stream:
@@ -459,6 +480,11 @@ class Camera(Entity):
             return self._attr_entity_picture
         return ENTITY_IMAGE_URL.format(self.entity_id, self.access_tokens[-1])

+    @property
+    def use_stream_for_stills(self) -> bool:
+        """Whether or not to use stream to generate stills."""
+        return False
+
     @property
     def supported_features(self) -> CameraEntityFeature:
         """Flag supported features."""
@@ -926,7 +952,12 @@ async def async_handle_snapshot_service(
             f"Cannot write `{snapshot_file}`, no access to path; `allowlist_external_dirs` may need to be adjusted in `configuration.yaml`"
         )

-    image = await camera.async_camera_image()
+    async with asyncio.timeout(CAMERA_IMAGE_TIMEOUT):
+        image = (
+            await _async_get_stream_image(camera, wait_for_next_keyframe=True)
+            if camera.use_stream_for_stills
+            else await camera.async_camera_image()
+        )

     if image is None:
         return
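
A camera integration opts into the new still-from-stream path by overriding the `use_stream_for_stills` property; image requests and snapshots then go through `_async_get_stream_image` instead of `async_camera_image`. A minimal sketch (hypothetical entity; the RTSP URL is an example):

    from homeassistant.components.camera import Camera, CameraEntityFeature

    class MyStreamCamera(Camera):
        """Camera that sources still images from its stream."""

        _attr_supported_features = CameraEntityFeature.STREAM

        @property
        def use_stream_for_stills(self) -> bool:
            # Route async_get_image() and the snapshot service through the
            # stream, creating it on demand if needed.
            return True

        async def stream_source(self) -> str | None:
            """Return the source of the stream."""
            return "rtsp://192.0.2.10/live"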

homeassistant/components/cloud/manifest.json

@@ -8,5 +8,5 @@
   "integration_type": "system",
   "iot_class": "cloud_push",
   "loggers": ["hass_nabucasa"],
-  "requirements": ["hass-nabucasa==0.69.0"]
+  "requirements": ["hass-nabucasa==0.70.0"]
 }

homeassistant/components/color_extractor/__init__.py

@@ -81,7 +81,7 @@ async def async_setup(hass: HomeAssistant, hass_config: ConfigType) -> bool:
         except UnidentifiedImageError as ex:
             _LOGGER.error(
                 "Bad image from %s '%s' provided, are you sure it's an image? %s",
-                image_type,  # pylint: disable=used-before-assignment
+                image_type,
                 image_reference,
                 ex,
             )

@@ -29,7 +29,7 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback
 from homeassistant.helpers.event import async_track_time_interval
 from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue
 from homeassistant.helpers.template import Template
-from homeassistant.helpers.template_entity import ManualTriggerEntity
+from homeassistant.helpers.trigger_template_entity import ManualTriggerEntity
 from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
 from homeassistant.util import dt as dt_util

@@ -30,7 +30,7 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback
 from homeassistant.helpers.event import async_track_time_interval
 from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue
 from homeassistant.helpers.template import Template
-from homeassistant.helpers.template_entity import ManualTriggerEntity
+from homeassistant.helpers.trigger_template_entity import ManualTriggerEntity
 from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
 from homeassistant.util import dt as dt_util, slugify

@@ -35,7 +35,7 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback
 from homeassistant.helpers.event import async_track_time_interval
 from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue
 from homeassistant.helpers.template import Template
-from homeassistant.helpers.template_entity import (
+from homeassistant.helpers.trigger_template_entity import (
     CONF_AVAILABILITY,
     CONF_PICTURE,
     ManualTriggerSensorEntity,
@@ -32,7 +32,7 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback
 from homeassistant.helpers.event import async_track_time_interval
 from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue
 from homeassistant.helpers.template import Template
-from homeassistant.helpers.template_entity import ManualTriggerEntity
+from homeassistant.helpers.trigger_template_entity import ManualTriggerEntity
 from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
 from homeassistant.util import dt as dt_util, slugify


homeassistant/components/config/config_entries.py

@@ -143,7 +143,6 @@ class ConfigManagerFlowIndexView(FlowManagerIndexView):
     )
     async def post(self, request):
         """Handle a POST request."""
-        # pylint: disable=no-value-for-parameter
         try:
             return await super().post(request)
         except DependencyError as exc:
@@ -175,7 +174,6 @@ class ConfigManagerFlowResourceView(FlowManagerResourceView):
     )
     async def post(self, request, flow_id):
         """Handle a POST request."""
-        # pylint: disable=no-value-for-parameter
         return await super().post(request, flow_id)

     def _prepare_result_json(self, result):
@@ -212,7 +210,6 @@ class OptionManagerFlowIndexView(FlowManagerIndexView):

         handler in request is entry_id.
         """
-        # pylint: disable=no-value-for-parameter
         return await super().post(request)


@@ -234,7 +231,6 @@ class OptionManagerFlowResourceView(FlowManagerResourceView):
     )
     async def post(self, request, flow_id):
         """Handle a POST request."""
-        # pylint: disable=no-value-for-parameter
         return await super().post(request, flow_id)


homeassistant/components/conversation/default_agent.py

@@ -54,9 +54,7 @@ _DEFAULT_ERROR_TEXT = "Sorry, I couldn't understand that"
 _ENTITY_REGISTRY_UPDATE_FIELDS = ["aliases", "name", "original_name"]

 REGEX_TYPE = type(re.compile(""))
-TRIGGER_CALLBACK_TYPE = Callable[  # pylint: disable=invalid-name
-    [str, RecognizeResult], Awaitable[str | None]
-]
+TRIGGER_CALLBACK_TYPE = Callable[[str, RecognizeResult], Awaitable[str | None]]


 def json_load(fp: IO[str]) -> JsonObjectType:
@@ -8,8 +8,8 @@ import logging
import time
from typing import TYPE_CHECKING, Any, Concatenate, ParamSpec, TypeVar

from bluepy.btle import BTLEException  # pylint: disable=import-error
import decora  # pylint: disable=import-error
from bluepy.btle import BTLEException
import decora
import voluptuous as vol

from homeassistant import util
@@ -4,7 +4,6 @@ from __future__ import annotations
import logging
from typing import Any

# pylint: disable=import-error
from decora_wifi import DecoraWiFiSession
from decora_wifi.models.person import Person
from decora_wifi.models.residence import Residence
@@ -26,6 +26,7 @@ COMPONENTS_WITH_CONFIG_ENTRY_DEMO_PLATFORM = [
    Platform.BINARY_SENSOR,
    Platform.BUTTON,
    Platform.CAMERA,
    Platform.CALENDAR,
    Platform.CLIMATE,
    Platform.COVER,
    Platform.DATE,

@@ -54,7 +55,6 @@ COMPONENTS_WITH_DEMO_PLATFORM = [
    Platform.MAILBOX,
    Platform.NOTIFY,
    Platform.IMAGE_PROCESSING,
    Platform.CALENDAR,
    Platform.DEVICE_TRACKER,
    Platform.WEATHER,
]
@@ -1,23 +1,22 @@
"""Demo platform that has two fake binary sensors."""
"""Demo platform that has two fake calendars."""
from __future__ import annotations

import datetime

from homeassistant.components.calendar import CalendarEntity, CalendarEvent
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
import homeassistant.util.dt as dt_util


def setup_platform(
async def async_setup_entry(
    hass: HomeAssistant,
    config: ConfigType,
    add_entities: AddEntitiesCallback,
    discovery_info: DiscoveryInfoType | None = None,
    config_entry: ConfigEntry,
    async_add_entities: AddEntitiesCallback,
) -> None:
    """Set up the Demo Calendar platform."""
    add_entities(
    """Set up the Demo Calendar config entry."""
    async_add_entities(
        [
            DemoCalendar(calendar_data_future(), "Calendar 1"),
            DemoCalendar(calendar_data_current(), "Calendar 2"),
@@ -106,7 +106,7 @@ class DemoLight(LightEntity):
        state: bool,
        available: bool = False,
        brightness: int = 180,
        ct: int | None = None,  # pylint: disable=invalid-name
        ct: int | None = None,
        effect_list: list[str] | None = None,
        effect: str | None = None,
        hs_color: tuple[int, int] | None = None,
@@ -415,9 +415,7 @@ class DHCPWatcher(WatcherBase):
        """Start watching for dhcp packets."""
        # Local import because importing from scapy has side effects such as opening
        # sockets
        from scapy import (  # pylint: disable=import-outside-toplevel,unused-import  # noqa: F401
            arch,
        )
        from scapy import arch  # pylint: disable=import-outside-toplevel  # noqa: F401
        from scapy.layers.dhcp import DHCP  # pylint: disable=import-outside-toplevel
        from scapy.layers.inet import IP  # pylint: disable=import-outside-toplevel
        from scapy.layers.l2 import Ether  # pylint: disable=import-outside-toplevel
@@ -65,7 +65,7 @@ class DigitalOceanBinarySensor(BinarySensorEntity):

    _attr_attribution = ATTRIBUTION

    def __init__(self, do, droplet_id):  # pylint: disable=invalid-name
    def __init__(self, do, droplet_id):
        """Initialize a new Digital Ocean sensor."""
        self._digital_ocean = do
        self._droplet_id = droplet_id

@@ -63,7 +63,7 @@ class DigitalOceanSwitch(SwitchEntity):

    _attr_attribution = ATTRIBUTION

    def __init__(self, do, droplet_id):  # pylint: disable=invalid-name
    def __init__(self, do, droplet_id):
        """Initialize a new Digital Ocean sensor."""
        self._digital_ocean = do
        self._droplet_id = droplet_id
@@ -51,7 +51,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
        discovergy_data.meters = await discovergy_data.api_client.meters()
    except discovergyError.InvalidLogin as err:
        raise ConfigEntryAuthFailed("Invalid email or password") from err
    except Exception as err:  # pylint: disable=broad-except
    except Exception as err:
        raise ConfigEntryNotReady(
            "Unexpected error while while getting meters"
        ) from err
@@ -8,14 +8,11 @@ from pydiscovergy.models import Meter

from homeassistant.components.diagnostics import async_redact_data
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_EMAIL, CONF_PASSWORD, CONF_UNIQUE_ID
from homeassistant.core import HomeAssistant

from . import DiscovergyData
from .const import DOMAIN

TO_REDACT_CONFIG_ENTRY = {CONF_EMAIL, CONF_PASSWORD, CONF_UNIQUE_ID, "title"}

TO_REDACT_METER = {
    "serial_number",
    "full_serial_number",

@@ -44,7 +41,6 @@ async def async_get_config_entry_diagnostics(
        last_readings[meter.meter_id] = asdict(coordinator.data)

    return {
        "entry": async_redact_data(entry.as_dict(), TO_REDACT_CONFIG_ENTRY),
        "meters": flattened_meter,
        "readings": last_readings,
    }
@@ -3,7 +3,7 @@ from __future__ import annotations

import io

import face_recognition  # pylint: disable=import-error
import face_recognition

from homeassistant.components.image_processing import ImageProcessingFaceEntity
from homeassistant.const import ATTR_LOCATION, CONF_ENTITY_ID, CONF_NAME, CONF_SOURCE

@@ -11,7 +11,6 @@ from homeassistant.core import HomeAssistant, split_entity_id
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType

# pylint: disable=unused-import
from homeassistant.components.image_processing import (  # noqa: F401, isort:skip
    PLATFORM_SCHEMA,
)
@@ -4,7 +4,6 @@ from __future__ import annotations
import io
import logging

# pylint: disable=import-error
import face_recognition
import voluptuous as vol
@@ -509,7 +509,7 @@ async def async_setup_entry(
        if stop_listener and (
            hass.state == CoreState.not_running or hass.is_running
        ):
            stop_listener()  # pylint: disable=not-callable
            stop_listener()

        if transport:
            transport.close()
@@ -138,6 +138,6 @@ def async_track_time_interval_backoff(
    def remove_listener() -> None:
        """Remove interval listener."""
        if remove:
            remove()  # pylint: disable=not-callable
            remove()

    return remove_listener
@@ -7,7 +7,6 @@ from __future__ import annotations

import logging

# pylint: disable=import-error
from beacontools import BeaconScanner, EddystoneFilter, EddystoneTLMFrame
import voluptuous as vol
@@ -46,16 +46,18 @@ class ElectricKiwiHOPSensorEntityDescription(
def _check_and_move_time(hop: Hop, time: str) -> datetime:
    """Return the time a day forward if HOP end_time is in the past."""
    date_time = datetime.combine(
        datetime.today(),
        dt_util.start_of_local_day(),
        datetime.strptime(time, "%I:%M %p").time(),
    ).astimezone(dt_util.DEFAULT_TIME_ZONE)
        dt_util.DEFAULT_TIME_ZONE,
    )

    end_time = datetime.combine(
        datetime.today(),
        dt_util.start_of_local_day(),
        datetime.strptime(hop.end.end_time, "%I:%M %p").time(),
    ).astimezone(dt_util.DEFAULT_TIME_ZONE)
        dt_util.DEFAULT_TIME_ZONE,
    )

    if end_time < datetime.now().astimezone(dt_util.DEFAULT_TIME_ZONE):
    if end_time < dt_util.now():
        return date_time + timedelta(days=1)
    return date_time
@@ -225,7 +225,7 @@ class Config:
    @callback
    def _clear_exposed_cache(self, event: EventType[EventStateChangedData]) -> None:
        """Clear the cache of exposed states."""
        self.get_exposed_states.cache_clear()  # pylint: disable=no-member
        self.get_exposed_states.cache_clear()

    def is_state_exposed(self, state: State) -> bool:
        """Cache determine if an entity should be exposed on the emulated bridge."""
@@ -50,6 +50,7 @@ async def async_get_config_entry_diagnostics(
            "highest_price_time": coordinator.data.energy_today.highest_price_time,
            "lowest_price_time": coordinator.data.energy_today.lowest_price_time,
            "percentage_of_max": coordinator.data.energy_today.pct_of_max_price,
            "hours_priced_equal_or_lower": coordinator.data.energy_today.hours_priced_equal_or_lower,
        },
        "gas": {
            "current_hour_price": get_gas_price(coordinator.data, 0),
@@ -6,5 +6,5 @@
  "documentation": "https://www.home-assistant.io/integrations/energyzero",
  "iot_class": "cloud_polling",
  "quality_scale": "platinum",
  "requirements": ["energyzero==0.4.1"]
  "requirements": ["energyzero==0.5.0"]
}
@@ -13,7 +13,13 @@ from homeassistant.components.sensor import (
    SensorStateClass,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CURRENCY_EURO, PERCENTAGE, UnitOfEnergy, UnitOfVolume
from homeassistant.const import (
    CURRENCY_EURO,
    PERCENTAGE,
    UnitOfEnergy,
    UnitOfTime,
    UnitOfVolume,
)
from homeassistant.core import HomeAssistant
from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo
from homeassistant.helpers.entity_platform import AddEntitiesCallback

@@ -114,6 +120,14 @@ SENSORS: tuple[EnergyZeroSensorEntityDescription, ...] = (
        icon="mdi:percent",
        value_fn=lambda data: data.energy_today.pct_of_max_price,
    ),
    EnergyZeroSensorEntityDescription(
        key="hours_priced_equal_or_lower",
        translation_key="hours_priced_equal_or_lower",
        service_type="today_energy",
        native_unit_of_measurement=UnitOfTime.HOURS,
        icon="mdi:clock",
        value_fn=lambda data: data.energy_today.hours_priced_equal_or_lower,
    ),
)
@@ -37,9 +37,6 @@
      },
      "hours_priced_equal_or_lower": {
        "name": "Hours priced equal or lower than current - today"
      },
      "hours_priced_equal_or_higher": {
        "name": "Hours priced equal or higher than current - today"
      }
    }
  }
@@ -22,8 +22,8 @@ from homeassistant.components.weather import (
    ATTR_FORECAST_PRECIPITATION_PROBABILITY,
    ATTR_FORECAST_TIME,
    DOMAIN as WEATHER_DOMAIN,
    CoordinatorWeatherEntity,
    Forecast,
    SingleCoordinatorWeatherEntity,
    WeatherEntityFeature,
)
from homeassistant.config_entries import ConfigEntry

@@ -33,7 +33,7 @@ from homeassistant.const import (
    UnitOfSpeed,
    UnitOfTemperature,
)
from homeassistant.core import HomeAssistant
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers import entity_registry as er
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.util import dt as dt_util

@@ -86,7 +86,7 @@ def _calculate_unique_id(config_entry_unique_id: str | None, hourly: bool) -> st
    return f"{config_entry_unique_id}{'-hourly' if hourly else '-daily'}"


class ECWeather(CoordinatorWeatherEntity):
class ECWeather(SingleCoordinatorWeatherEntity):
    """Representation of a weather condition."""

    _attr_has_entity_name = True

@@ -182,11 +182,13 @@ class ECWeather(CoordinatorWeatherEntity):
        """Return the forecast array."""
        return get_forecast(self.ec_data, self._hourly)

    async def async_forecast_daily(self) -> list[Forecast] | None:
    @callback
    def _async_forecast_daily(self) -> list[Forecast] | None:
        """Return the daily forecast in native units."""
        return get_forecast(self.ec_data, False)

    async def async_forecast_hourly(self) -> list[Forecast] | None:
    @callback
    def _async_forecast_hourly(self) -> list[Forecast] | None:
        """Return the hourly forecast in native units."""
        return get_forecast(self.ec_data, True)
@@ -4,7 +4,7 @@ from __future__ import annotations
import logging
from typing import Any

import eq3bt as eq3  # pylint: disable=import-error
import eq3bt as eq3
import voluptuous as vol

from homeassistant.components.climate import (
@@ -51,9 +51,7 @@ CCCD_INDICATE_BYTES = b"\x02\x00"
DEFAULT_MAX_WRITE_WITHOUT_RESPONSE = DEFAULT_MTU - GATT_HEADER_SIZE
_LOGGER = logging.getLogger(__name__)

_WrapFuncType = TypeVar(  # pylint: disable=invalid-name
    "_WrapFuncType", bound=Callable[..., Any]
)
_WrapFuncType = TypeVar("_WrapFuncType", bound=Callable[..., Any])


def mac_to_int(address: str) -> int:
@@ -4,7 +4,7 @@ from __future__ import annotations
from collections.abc import Callable
import functools
import math
from typing import Any, Generic, TypeVar, cast  # pylint: disable=unused-import
from typing import Any, Generic, TypeVar, cast

from aioesphomeapi import (
    EntityCategory as EsphomeEntityCategory,
@@ -181,7 +181,6 @@ class EsphomeLight(EsphomeEntity[LightInfo, LightState], LightEntity):
        try_keep_current_mode = False

        if (rgbw_ha := kwargs.get(ATTR_RGBW_COLOR)) is not None:
            # pylint: disable-next=invalid-name
            *rgb, w = tuple(x / 255 for x in rgbw_ha)  # type: ignore[assignment]
            color_bri = max(rgb)
            # normalize rgb

@@ -194,7 +193,6 @@ class EsphomeLight(EsphomeEntity[LightInfo, LightState], LightEntity):
        try_keep_current_mode = False

        if (rgbww_ha := kwargs.get(ATTR_RGBWW_COLOR)) is not None:
            # pylint: disable-next=invalid-name
            *rgb, cw, ww = tuple(x / 255 for x in rgbww_ha)  # type: ignore[assignment]
            color_bri = max(rgb)
            # normalize rgb
@@ -17,7 +17,7 @@
  "requirements": [
    "async_interrupt==1.1.1",
    "aioesphomeapi==16.0.1",
    "bluetooth-data-tools==1.8.0",
    "bluetooth-data-tools==1.9.0",
    "esphome-dashboard-api==1.2.3"
  ],
  "zeroconf": ["_esphomelib._tcp.local."]
@@ -134,7 +134,6 @@ class EufyHomeLight(LightEntity):
        """Turn the specified light on."""
        brightness = kwargs.get(ATTR_BRIGHTNESS)
        colortemp = kwargs.get(ATTR_COLOR_TEMP)
        # pylint: disable-next=invalid-name
        hs = kwargs.get(ATTR_HS_COLOR)

        if brightness is not None:
@@ -17,7 +17,7 @@ import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.event import async_track_time_interval
from homeassistant.helpers.storage import Store
from homeassistant.helpers.typing import ConfigType
from homeassistant.util.dt import utc_from_timestamp
from homeassistant.util import dt as dt_util

_LOGGER = getLogger(__name__)

@@ -207,7 +207,7 @@ class FeedManager:
            self._firstrun = False
        else:
            # Set last entry timestamp as epoch time if not available
            self._last_entry_timestamp = datetime.utcfromtimestamp(0).timetuple()
            self._last_entry_timestamp = dt_util.utc_from_timestamp(0).timetuple()
        for entry in self._feed.entries:
            if (
                self._firstrun

@@ -286,6 +286,6 @@ class StoredData:
    def _async_save_data(self) -> dict[str, str]:
        """Save feed data to storage."""
        return {
            feed_id: utc_from_timestamp(timegm(struct_utc)).isoformat()
            feed_id: dt_util.utc_from_timestamp(timegm(struct_utc)).isoformat()
            for feed_id, struct_utc in self._data.items()
        }
@@ -102,9 +102,7 @@ class FileSizeCoordinator(DataUpdateCoordinator):
            raise UpdateFailed(f"Can not retrieve file statistics {error}") from error

        size = statinfo.st_size
        last_updated = datetime.utcfromtimestamp(statinfo.st_mtime).replace(
            tzinfo=dt_util.UTC
        )
        last_updated = dt_util.utc_from_timestamp(statinfo.st_mtime)

        _LOGGER.debug("size %s, last updated %s", size, last_updated)
        data: dict[str, int | float | datetime] = {
@@ -3,7 +3,7 @@ from __future__ import annotations

from dataclasses import dataclass, field

from gardena_bluetooth.const import Valve
from gardena_bluetooth.const import Sensor, Valve
from gardena_bluetooth.parse import CharacteristicBool

from homeassistant.components.binary_sensor import (

@@ -26,6 +26,11 @@ class GardenaBluetoothBinarySensorEntityDescription(BinarySensorEntityDescriptio

    char: CharacteristicBool = field(default_factory=lambda: CharacteristicBool(""))

    @property
    def context(self) -> set[str]:
        """Context needed for update coordinator."""
        return {self.char.uuid}


DESCRIPTIONS = (
    GardenaBluetoothBinarySensorEntityDescription(

@@ -35,6 +40,13 @@ DESCRIPTIONS = (
        entity_category=EntityCategory.DIAGNOSTIC,
        char=Valve.connected_state,
    ),
    GardenaBluetoothBinarySensorEntityDescription(
        key=Sensor.connected_state.uuid,
        translation_key="sensor_connected_state",
        device_class=BinarySensorDeviceClass.CONNECTIVITY,
        entity_category=EntityCategory.DIAGNOSTIC,
        char=Sensor.connected_state,
    ),
)


@@ -44,7 +56,7 @@ async def async_setup_entry(
    """Set up binary sensor based on a config entry."""
    coordinator: Coordinator = hass.data[DOMAIN][entry.entry_id]
    entities = [
        GardenaBluetoothBinarySensor(coordinator, description)
        GardenaBluetoothBinarySensor(coordinator, description, description.context)
        for description in DESCRIPTIONS
        if description.key in coordinator.characteristics
    ]
@@ -22,6 +22,11 @@ class GardenaBluetoothButtonEntityDescription(ButtonEntityDescription):

    char: CharacteristicBool = field(default_factory=lambda: CharacteristicBool(""))

    @property
    def context(self) -> set[str]:
        """Context needed for update coordinator."""
        return {self.char.uuid}


DESCRIPTIONS = (
    GardenaBluetoothButtonEntityDescription(

@@ -40,7 +45,7 @@ async def async_setup_entry(
    """Set up button based on a config entry."""
    coordinator: Coordinator = hass.data[DOMAIN][entry.entry_id]
    entities = [
        GardenaBluetoothButton(coordinator, description)
        GardenaBluetoothButton(coordinator, description, description.context)
        for description in DESCRIPTIONS
        if description.key in coordinator.characteristics
    ]
@@ -117,18 +117,25 @@ class GardenaBluetoothEntity(CoordinatorEntity[Coordinator]):
    @property
    def available(self) -> bool:
        """Return if entity is available."""
        return super().available and bluetooth.async_address_present(
        return (
            self.coordinator.last_update_success
            and bluetooth.async_address_present(
                self.hass, self.coordinator.address, True
            )
            and self._attr_available
        )


class GardenaBluetoothDescriptorEntity(GardenaBluetoothEntity):
    """Coordinator entity for entities with entity description."""

    def __init__(
        self, coordinator: Coordinator, description: EntityDescription
        self,
        coordinator: Coordinator,
        description: EntityDescription,
        context: set[str],
    ) -> None:
        """Initialize description entity."""
        super().__init__(coordinator, {description.key})
        super().__init__(coordinator, context)
        self._attr_unique_id = f"{coordinator.address}-{description.key}"
        self.entity_description = description
@@ -3,8 +3,9 @@ from __future__ import annotations

from dataclasses import dataclass, field

from gardena_bluetooth.const import DeviceConfiguration, Valve
from gardena_bluetooth.const import DeviceConfiguration, Sensor, Valve
from gardena_bluetooth.parse import (
    Characteristic,
    CharacteristicInt,
    CharacteristicLong,
    CharacteristicUInt16,

@@ -16,7 +17,7 @@ from homeassistant.components.number import (
    NumberMode,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import EntityCategory, UnitOfTime
from homeassistant.const import PERCENTAGE, EntityCategory, UnitOfTime
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddEntitiesCallback

@@ -35,6 +36,15 @@ class GardenaBluetoothNumberEntityDescription(NumberEntityDescription):
    char: CharacteristicInt | CharacteristicUInt16 | CharacteristicLong = field(
        default_factory=lambda: CharacteristicInt("")
    )
    connected_state: Characteristic | None = None

    @property
    def context(self) -> set[str]:
        """Context needed for update coordinator."""
        data = {self.char.uuid}
        if self.connected_state:
            data.add(self.connected_state.uuid)
        return data


DESCRIPTIONS = (

@@ -81,6 +91,18 @@ DESCRIPTIONS = (
        entity_category=EntityCategory.CONFIG,
        char=DeviceConfiguration.seasonal_adjust,
    ),
    GardenaBluetoothNumberEntityDescription(
        key=Sensor.threshold.uuid,
        translation_key="sensor_threshold",
        native_unit_of_measurement=PERCENTAGE,
        mode=NumberMode.BOX,
        native_min_value=0.0,
        native_max_value=100.0,
        native_step=1.0,
        entity_category=EntityCategory.CONFIG,
        char=Sensor.threshold,
        connected_state=Sensor.connected_state,
    ),
)


@@ -90,7 +112,7 @@ async def async_setup_entry(
    """Set up entity based on a config entry."""
    coordinator: Coordinator = hass.data[DOMAIN][entry.entry_id]
    entities: list[NumberEntity] = [
        GardenaBluetoothNumber(coordinator, description)
        GardenaBluetoothNumber(coordinator, description, description.context)
        for description in DESCRIPTIONS
        if description.key in coordinator.characteristics
    ]

@@ -110,6 +132,12 @@ class GardenaBluetoothNumber(GardenaBluetoothDescriptorEntity, NumberEntity):
            self._attr_native_value = None
        else:
            self._attr_native_value = float(data)

        if char := self.entity_description.connected_state:
            self._attr_available = bool(self.coordinator.get_cached(char))
        else:
            self._attr_available = True

        super()._handle_coordinator_update()

    async def async_set_native_value(self, value: float) -> None:
@@ -4,7 +4,7 @@ from __future__ import annotations
from dataclasses import dataclass, field
from datetime import UTC, datetime, timedelta

from gardena_bluetooth.const import Battery, Valve
from gardena_bluetooth.const import Battery, Sensor, Valve
from gardena_bluetooth.parse import Characteristic

from homeassistant.components.sensor import (

@@ -32,6 +32,15 @@ class GardenaBluetoothSensorEntityDescription(SensorEntityDescription):
    """Description of entity."""

    char: Characteristic = field(default_factory=lambda: Characteristic(""))
    connected_state: Characteristic | None = None

    @property
    def context(self) -> set[str]:
        """Context needed for update coordinator."""
        data = {self.char.uuid}
        if self.connected_state:
            data.add(self.connected_state.uuid)
        return data


DESCRIPTIONS = (

@@ -51,6 +60,40 @@ DESCRIPTIONS = (
        native_unit_of_measurement=PERCENTAGE,
        char=Battery.battery_level,
    ),
    GardenaBluetoothSensorEntityDescription(
        key=Sensor.battery_level.uuid,
        translation_key="sensor_battery_level",
        state_class=SensorStateClass.MEASUREMENT,
        device_class=SensorDeviceClass.BATTERY,
        entity_category=EntityCategory.DIAGNOSTIC,
        native_unit_of_measurement=PERCENTAGE,
        char=Sensor.battery_level,
        connected_state=Sensor.connected_state,
    ),
    GardenaBluetoothSensorEntityDescription(
        key=Sensor.value.uuid,
        state_class=SensorStateClass.MEASUREMENT,
        device_class=SensorDeviceClass.MOISTURE,
        native_unit_of_measurement=PERCENTAGE,
        char=Sensor.value,
        connected_state=Sensor.connected_state,
    ),
    GardenaBluetoothSensorEntityDescription(
        key=Sensor.type.uuid,
        translation_key="sensor_type",
        entity_category=EntityCategory.DIAGNOSTIC,
        char=Sensor.type,
        connected_state=Sensor.connected_state,
    ),
    GardenaBluetoothSensorEntityDescription(
        key=Sensor.measurement_timestamp.uuid,
        translation_key="sensor_measurement_timestamp",
        state_class=SensorStateClass.MEASUREMENT,
        device_class=SensorDeviceClass.TIMESTAMP,
        entity_category=EntityCategory.DIAGNOSTIC,
        char=Sensor.measurement_timestamp,
        connected_state=Sensor.connected_state,
    ),
)


@@ -60,7 +103,7 @@ async def async_setup_entry(
    """Set up Gardena Bluetooth sensor based on a config entry."""
    coordinator: Coordinator = hass.data[DOMAIN][entry.entry_id]
    entities: list[GardenaBluetoothEntity] = [
        GardenaBluetoothSensor(coordinator, description)
        GardenaBluetoothSensor(coordinator, description, description.context)
        for description in DESCRIPTIONS
        if description.key in coordinator.characteristics
    ]

@@ -81,6 +124,12 @@ class GardenaBluetoothSensor(GardenaBluetoothDescriptorEntity, SensorEntity):
                tzinfo=dt_util.get_time_zone(self.hass.config.time_zone)
            )
            self._attr_native_value = value

        if char := self.entity_description.connected_state:
            self._attr_available = bool(self.coordinator.get_cached(char))
        else:
            self._attr_available = True

        super()._handle_coordinator_update()
@@ -23,6 +23,9 @@
    "binary_sensor": {
      "valve_connected_state": {
        "name": "Valve connection"
      },
      "sensor_connected_state": {
        "name": "Sensor connection"
      }
    },
    "button": {

@@ -45,12 +48,24 @@
      },
      "seasonal_adjust": {
        "name": "Seasonal adjust"
      },
      "sensor_threshold": {
        "name": "Sensor threshold"
      }
    },
    "sensor": {
      "activation_reason": {
        "name": "Activation reason"
      },
      "sensor_battery_level": {
        "name": "Sensor battery"
      },
      "sensor_type": {
        "name": "Sensor type"
      },
      "sensor_measurement_timestamp": {
        "name": "Sensor timestamp"
      },
      "remaining_open_timestamp": {
        "name": "Valve closing"
      }
@@ -172,15 +172,16 @@ class GenericCamera(Camera):
        self._last_url = None
        self._last_image = None

    @property
    def use_stream_for_stills(self) -> bool:
        """Whether or not to use stream to generate stills."""
        return not self._still_image_url

    async def async_camera_image(
        self, width: int | None = None, height: int | None = None
    ) -> bytes | None:
        """Return a still image response from the camera."""
        if not self._still_image_url:
            if not self.stream:
                await self.async_create_stream()
            if self.stream:
                return await self.stream.async_get_image(width, height)
            return None
        try:
            url = self._still_image_url.async_render(parse_result=False)
@@ -36,7 +36,7 @@ from homeassistant.components.calendar import (
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_DEVICE_ID, CONF_ENTITIES, CONF_NAME, CONF_OFFSET
from homeassistant.core import HomeAssistant, ServiceCall, callback
from homeassistant.core import HomeAssistant, ServiceCall
from homeassistant.exceptions import HomeAssistantError, PlatformNotReady
from homeassistant.helpers import entity_platform, entity_registry as er
from homeassistant.helpers.entity import generate_entity_id

@@ -383,7 +383,6 @@ class GoogleCalendarEntity(
        self._event: CalendarEvent | None = None
        self._attr_name = data[CONF_NAME].capitalize()
        self._offset = data.get(CONF_OFFSET, DEFAULT_CONF_OFFSET)
        self._offset_value: timedelta | None = None
        self.entity_id = entity_id
        self._attr_unique_id = unique_id
        self._attr_entity_registry_enabled_default = entity_enabled
@@ -392,17 +391,6 @@ class GoogleCalendarEntity(
            CalendarEntityFeature.CREATE_EVENT | CalendarEntityFeature.DELETE_EVENT
        )

    @property
    def should_poll(self) -> bool:
        """Enable polling for the entity.

        The coordinator is not used by multiple entities, but instead
        is used to poll the calendar API at a separate interval from the
        entity state updates itself which happen more frequently (e.g. to
        fire an alarm when the next event starts).
        """
        return True

    @property
    def extra_state_attributes(self) -> dict[str, bool]:
        """Return the device state attributes."""

@@ -411,16 +399,16 @@ class GoogleCalendarEntity(
    @property
    def offset_reached(self) -> bool:
        """Return whether or not the event offset was reached."""
        if self._event and self._offset_value:
            return is_offset_reached(
                self._event.start_datetime_local, self._offset_value
            )
        (event, offset_value) = self._event_with_offset()
        if event is not None and offset_value is not None:
            return is_offset_reached(event.start_datetime_local, offset_value)
        return False

    @property
    def event(self) -> CalendarEvent | None:
        """Return the next upcoming event."""
        return self._event
        (event, _) = self._event_with_offset()
        return event

    def _event_filter(self, event: Event) -> bool:
        """Return True if the event is visible."""

@@ -435,12 +423,10 @@ class GoogleCalendarEntity(
        # We do not ask for an update with async_add_entities()
        # because it will update disabled entities. This is started as a
        # task to let if sync in the background without blocking startup
        async def refresh() -> None:
            await self.coordinator.async_request_refresh()
            self._apply_coordinator_update()

        self.coordinator.config_entry.async_create_background_task(
            self.hass, refresh(), "google.calendar-refresh"
            self.hass,
            self.coordinator.async_request_refresh(),
            "google.calendar-refresh",
        )

    async def async_get_events(

@@ -453,8 +439,10 @@ class GoogleCalendarEntity(
            for event in filter(self._event_filter, result_items)
        ]

    def _apply_coordinator_update(self) -> None:
        """Copy state from the coordinator to this entity."""
    def _event_with_offset(
        self,
    ) -> tuple[CalendarEvent | None, timedelta | None]:
        """Get the calendar event and offset if any."""
        if api_event := next(
            filter(
                self._event_filter,
@@ -462,27 +450,13 @@ class GoogleCalendarEntity(
            ),
            None,
        ):
            self._event = _get_calendar_event(api_event)
            (self._event.summary, self._offset_value) = extract_offset(
                self._event.summary, self._offset
            event = _get_calendar_event(api_event)
            if self._offset:
                (event.summary, offset_value) = extract_offset(
                    event.summary, self._offset
                )
        else:
            self._event = None

    @callback
    def _handle_coordinator_update(self) -> None:
        """Handle updated data from the coordinator."""
        self._apply_coordinator_update()
        super()._handle_coordinator_update()

    async def async_update(self) -> None:
        """Disable update behavior.

        This relies on the coordinator callback update to write home assistant
        state with the next calendar event. This update is a no-op as no new data
        fetch is needed to evaluate the state to determine if the next event has
        started, handled by CalendarEntity parent class.
        """
            return event, offset_value
        return None, None

    async def async_create_event(self, **kwargs: Any) -> None:
        """Add a new event to calendar."""
@@ -147,6 +147,6 @@ def async_enable_report_state(hass: HomeAssistant, google_config: AbstractConfig
    def unsub_all():
        unsub()
        if unsub_pending:
            unsub_pending()  # pylint: disable=not-callable
            unsub_pending()

    return unsub_all
@@ -60,9 +60,7 @@ class OAuth2FlowHandler(

    def _get_profile() -> str:
        """Get profile from inside the executor."""
        users = build(  # pylint: disable=no-member
            "gmail", "v1", credentials=credentials
        ).users()
        users = build("gmail", "v1", credentials=credentials).users()
        return users.getProfile(userId="me").execute()["emailAddress"]

    credentials = Credentials(data[CONF_TOKEN][CONF_ACCESS_TOKEN])
@@ -3,7 +3,7 @@ from __future__ import annotations

from abc import abstractmethod
import asyncio
from collections.abc import Collection, Iterable
from collections.abc import Callable, Collection, Iterable, Mapping
from contextvars import ContextVar
import logging
from typing import Any, Protocol, cast

@@ -473,9 +473,60 @@ class GroupEntity(Entity):
    """Representation of a Group of entities."""

    _attr_should_poll = False
    _entity_ids: list[str]

    @callback
    def async_start_preview(
        self,
        preview_callback: Callable[[str, Mapping[str, Any]], None],
    ) -> CALLBACK_TYPE:
        """Render a preview."""

        for entity_id in self._entity_ids:
            if (state := self.hass.states.get(entity_id)) is None:
                continue
            self.async_update_supported_features(entity_id, state)

        @callback
        def async_state_changed_listener(
            event: EventType[EventStateChangedData] | None,
        ) -> None:
            """Handle child updates."""
            self.async_update_group_state()
            if event:
                self.async_update_supported_features(
                    event.data["entity_id"], event.data["new_state"]
                )
            preview_callback(*self._async_generate_attributes())

        async_state_changed_listener(None)
        return async_track_state_change_event(
            self.hass, self._entity_ids, async_state_changed_listener
        )

    async def async_added_to_hass(self) -> None:
        """Register listeners."""
        for entity_id in self._entity_ids:
            if (state := self.hass.states.get(entity_id)) is None:
                continue
            self.async_update_supported_features(entity_id, state)

        @callback
        def async_state_changed_listener(
            event: EventType[EventStateChangedData],
        ) -> None:
            """Handle child updates."""
            self.async_set_context(event.context)
            self.async_update_supported_features(
                event.data["entity_id"], event.data["new_state"]
            )
            self.async_defer_or_update_ha_state()

        self.async_on_remove(
            async_track_state_change_event(
                self.hass, self._entity_ids, async_state_changed_listener
            )
        )

        async def _update_at_start(_: HomeAssistant) -> None:
            self.async_update_group_state()

@@ -493,9 +544,18 @@ class GroupEntity(Entity):
        self.async_write_ha_state()

    @abstractmethod
    @callback
    def async_update_group_state(self) -> None:
        """Abstract method to update the entity."""

    @callback
    def async_update_supported_features(
        self,
        entity_id: str,
        new_state: State | None,
    ) -> None:
        """Update dictionaries with supported features."""


class Group(Entity):
    """Track a group of entity ids."""
@@ -1,7 +1,6 @@
"""Platform allowing several binary sensor to be grouped into one binary sensor."""
from __future__ import annotations

from collections.abc import Callable, Mapping
from typing import Any

import voluptuous as vol

@@ -24,14 +23,10 @@ from homeassistant.const import (
    STATE_UNAVAILABLE,
    STATE_UNKNOWN,
)
from homeassistant.core import CALLBACK_TYPE, HomeAssistant, callback
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers import config_validation as cv, entity_registry as er
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.event import (
    EventStateChangedData,
    async_track_state_change_event,
)
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType, EventType
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType

from . import GroupEntity

@@ -92,6 +87,20 @@ async def async_setup_entry(
    )


@callback
def async_create_preview_binary_sensor(
    name: str, validated_config: dict[str, Any]
) -> BinarySensorGroup:
    """Create a preview sensor."""
    return BinarySensorGroup(
        None,
        name,
        None,
        validated_config[CONF_ENTITIES],
        validated_config[CONF_ALL],
    )


class BinarySensorGroup(GroupEntity, BinarySensorEntity):
    """Representation of a BinarySensorGroup."""

@@ -116,45 +125,6 @@ class BinarySensorGroup(GroupEntity, BinarySensorEntity):
        if mode:
            self.mode = all

    @callback
    def async_start_preview(
        self,
        preview_callback: Callable[[str, Mapping[str, Any]], None],
    ) -> CALLBACK_TYPE:
        """Render a preview."""

        @callback
        def async_state_changed_listener(
            event: EventType[EventStateChangedData] | None,
        ) -> None:
            """Handle child updates."""
            self.async_update_group_state()
            preview_callback(*self._async_generate_attributes())

        async_state_changed_listener(None)
        return async_track_state_change_event(
            self.hass, self._entity_ids, async_state_changed_listener
        )

    async def async_added_to_hass(self) -> None:
        """Register callbacks."""

        @callback
        def async_state_changed_listener(
            event: EventType[EventStateChangedData],
        ) -> None:
            """Handle child updates."""
            self.async_set_context(event.context)
            self.async_defer_or_update_ha_state()

        self.async_on_remove(
            async_track_state_change_event(
                self.hass, self._entity_ids, async_state_changed_listener
            )
        )

        await super().async_added_to_hass()

    @callback
    def async_update_group_state(self) -> None:
        """Query all members and determine the binary sensor group state."""
@@ -3,7 +3,7 @@ from __future__ import annotations

from collections.abc import Callable, Coroutine, Mapping
from functools import partial
from typing import Any, Literal, cast
from typing import Any, cast

import voluptuous as vol

@@ -21,10 +21,17 @@ from homeassistant.helpers.schema_config_entry_flow import (
    entity_selector_without_own_entities,
)

from . import DOMAIN
from .binary_sensor import CONF_ALL, BinarySensorGroup
from . import DOMAIN, GroupEntity
from .binary_sensor import CONF_ALL, async_create_preview_binary_sensor
from .const import CONF_HIDE_MEMBERS, CONF_IGNORE_NON_NUMERIC
from .sensor import SensorGroup
from .cover import async_create_preview_cover
from .event import async_create_preview_event
from .fan import async_create_preview_fan
from .light import async_create_preview_light
from .lock import async_create_preview_lock
from .media_player import MediaPlayerGroup, async_create_preview_media_player
from .sensor import async_create_preview_sensor
from .switch import async_create_preview_switch

_STATISTIC_MEASURES = [
    "min",

@@ -122,7 +129,7 @@ SENSOR_CONFIG_SCHEMA = basic_group_config_schema(


async def light_switch_options_schema(
    domain: str, handler: SchemaCommonFlowHandler
    domain: str, handler: SchemaCommonFlowHandler | None
) -> vol.Schema:
    """Generate options schema."""
    return (await basic_group_options_schema(domain, handler)).extend(

@@ -137,6 +144,7 @@ async def light_switch_options_schema(
GROUP_TYPES = [
    "binary_sensor",
    "cover",
    "event",
    "fan",
    "light",
    "lock",

@@ -171,36 +179,47 @@ CONFIG_FLOW = {
    "user": SchemaFlowMenuStep(GROUP_TYPES),
    "binary_sensor": SchemaFlowFormStep(
        BINARY_SENSOR_CONFIG_SCHEMA,
        preview="group",
        validate_user_input=set_group_type("binary_sensor"),
        preview="group_binary_sensor",
    ),
    "cover": SchemaFlowFormStep(
        basic_group_config_schema("cover"),
        preview="group",
        validate_user_input=set_group_type("cover"),
    ),
    "event": SchemaFlowFormStep(
        basic_group_config_schema("event"),
        preview="group",
        validate_user_input=set_group_type("event"),
    ),
    "fan": SchemaFlowFormStep(
        basic_group_config_schema("fan"),
        preview="group",
        validate_user_input=set_group_type("fan"),
    ),
    "light": SchemaFlowFormStep(
        basic_group_config_schema("light"),
        preview="group",
        validate_user_input=set_group_type("light"),
    ),
    "lock": SchemaFlowFormStep(
        basic_group_config_schema("lock"),
        preview="group",
        validate_user_input=set_group_type("lock"),
    ),
    "media_player": SchemaFlowFormStep(
        basic_group_config_schema("media_player"),
        preview="group",
        validate_user_input=set_group_type("media_player"),
    ),
    "sensor": SchemaFlowFormStep(
        SENSOR_CONFIG_SCHEMA,
        preview="group",
        validate_user_input=set_group_type("sensor"),
        preview="group_sensor",
    ),
    "switch": SchemaFlowFormStep(
        basic_group_config_schema("switch"),
        preview="group",
        validate_user_input=set_group_type("switch"),
    ),
}
@@ -210,20 +229,57 @@ OPTIONS_FLOW = {
    "init": SchemaFlowFormStep(next_step=choose_options_step),
    "binary_sensor": SchemaFlowFormStep(
        binary_sensor_options_schema,
        preview="group_binary_sensor",
        preview="group",
    ),
    "cover": SchemaFlowFormStep(
        partial(basic_group_options_schema, "cover"),
        preview="group",
    ),
    "event": SchemaFlowFormStep(
        partial(basic_group_options_schema, "event"),
        preview="group",
    ),
    "fan": SchemaFlowFormStep(
        partial(basic_group_options_schema, "fan"),
        preview="group",
    ),
    "light": SchemaFlowFormStep(
        partial(light_switch_options_schema, "light"),
        preview="group",
    ),
    "lock": SchemaFlowFormStep(
        partial(basic_group_options_schema, "lock"),
        preview="group",
    ),
    "cover": SchemaFlowFormStep(partial(basic_group_options_schema, "cover")),
    "fan": SchemaFlowFormStep(partial(basic_group_options_schema, "fan")),
    "light": SchemaFlowFormStep(partial(light_switch_options_schema, "light")),
    "lock": SchemaFlowFormStep(partial(basic_group_options_schema, "lock")),
    "media_player": SchemaFlowFormStep(
        partial(basic_group_options_schema, "media_player")
        partial(basic_group_options_schema, "media_player"),
        preview="group",
    ),
    "sensor": SchemaFlowFormStep(
        partial(sensor_options_schema, "sensor"),
        preview="group_sensor",
        preview="group",
    ),
    "switch": SchemaFlowFormStep(partial(light_switch_options_schema, "switch")),
    "switch": SchemaFlowFormStep(
        partial(light_switch_options_schema, "switch"),
        preview="group",
    ),
}

PREVIEW_OPTIONS_SCHEMA: dict[str, vol.Schema] = {}

CREATE_PREVIEW_ENTITY: dict[
    str,
    Callable[[str, dict[str, Any]], GroupEntity | MediaPlayerGroup],
] = {
    "binary_sensor": async_create_preview_binary_sensor,
    "cover": async_create_preview_cover,
    "event": async_create_preview_event,
    "fan": async_create_preview_fan,
    "light": async_create_preview_light,
    "lock": async_create_preview_lock,
    "media_player": async_create_preview_media_player,
    "sensor": async_create_preview_sensor,
    "switch": async_create_preview_switch,
}


@@ -261,12 +317,20 @@ class GroupConfigFlowHandler(SchemaConfigFlowHandler, domain=DOMAIN):
        )
        _async_hide_members(hass, options[CONF_ENTITIES], hidden_by)

    @callback
    @staticmethod
    def async_setup_preview(hass: HomeAssistant) -> None:
    async def async_setup_preview(hass: HomeAssistant) -> None:
        """Set up preview WS API."""
        websocket_api.async_register_command(hass, ws_preview_sensor)
        websocket_api.async_register_command(hass, ws_preview_binary_sensor)
        for group_type, form_step in OPTIONS_FLOW.items():
            if group_type not in GROUP_TYPES:
                continue
            schema = cast(
                Callable[
                    [SchemaCommonFlowHandler | None], Coroutine[Any, Any, vol.Schema]
                ],
                form_step.schema,
            )
            PREVIEW_OPTIONS_SCHEMA[group_type] = await schema(None)
        websocket_api.async_register_command(hass, ws_start_preview)


def _async_hide_members(
@@ -282,127 +346,50 @@ def _async_hide_members(
        registry.async_update_entity(entity_id, hidden_by=hidden_by)


@websocket_api.websocket_command(
    {
        vol.Required("type"): "group/start_preview",
        vol.Required("flow_id"): str,
        vol.Required("flow_type"): vol.Any("config_flow", "options_flow"),
        vol.Required("user_input"): dict,
    }
)
@callback
def _async_handle_ws_preview(
def ws_start_preview(
    hass: HomeAssistant,
    connection: websocket_api.ActiveConnection,
    msg: dict[str, Any],
    config_schema: vol.Schema,
    options_schema: vol.Schema,
    create_preview_entity: Callable[
        [Literal["config_flow", "options_flow"], str, dict[str, Any]],
        BinarySensorGroup | SensorGroup,
    ],
) -> None:
    """Generate a preview."""
    if msg["flow_type"] == "config_flow":
        validated = config_schema(msg["user_input"])
        flow_status = hass.config_entries.flow.async_get(msg["flow_id"])
        group_type = flow_status["step_id"]
        form_step = cast(SchemaFlowFormStep, CONFIG_FLOW[group_type])
        schema = cast(vol.Schema, form_step.schema)
        validated = schema(msg["user_input"])
        name = validated["name"]
    else:
        validated = options_schema(msg["user_input"])
        flow_status = hass.config_entries.options.async_get(msg["flow_id"])
        config_entry = hass.config_entries.async_get_entry(flow_status["handler"])
        if not config_entry:
            raise HomeAssistantError
        group_type = config_entry.options["group_type"]
        name = config_entry.options["name"]
        validated = PREVIEW_OPTIONS_SCHEMA[group_type](msg["user_input"])

    @callback
    def async_preview_updated(state: str, attributes: Mapping[str, Any]) -> None:
        """Forward config entry state events to websocket."""
        connection.send_message(
            websocket_api.event_message(
                msg["id"], {"state": state, "attributes": attributes}
                msg["id"], {"attributes": attributes, "state": state}
            )
        )

    preview_entity = create_preview_entity(msg["flow_type"], name, validated)
    preview_entity = CREATE_PREVIEW_ENTITY[group_type](name, validated)
    preview_entity.hass = hass

    connection.send_result(msg["id"])
    connection.subscriptions[msg["id"]] = preview_entity.async_start_preview(
        async_preview_updated
    )


@websocket_api.websocket_command(
    {
        vol.Required("type"): "group/binary_sensor/start_preview",
        vol.Required("flow_id"): str,
        vol.Required("flow_type"): vol.Any("config_flow", "options_flow"),
        vol.Required("user_input"): dict,
    }
)
@websocket_api.async_response
async def ws_preview_binary_sensor(
    hass: HomeAssistant, connection: websocket_api.ActiveConnection, msg: dict[str, Any]
) -> None:
    """Generate a preview."""

    def create_preview_binary_sensor(
        flow_type: Literal["config_flow", "options_flow"],
        name: str,
        validated_config: dict[str, Any],
    ) -> BinarySensorGroup:
        """Create a preview sensor."""
        return BinarySensorGroup(
            None,
            name,
            None,
            validated_config[CONF_ENTITIES],
            validated_config[CONF_ALL],
        )

    _async_handle_ws_preview(
        hass,
        connection,
        msg,
        BINARY_SENSOR_CONFIG_SCHEMA,
        await binary_sensor_options_schema(None),
        create_preview_binary_sensor,
    )


@websocket_api.websocket_command(
    {
        vol.Required("type"): "group/sensor/start_preview",
        vol.Required("flow_id"): str,
        vol.Required("flow_type"): vol.Any("config_flow", "options_flow"),
        vol.Required("user_input"): dict,
    }
)
@websocket_api.async_response
async def ws_preview_sensor(
    hass: HomeAssistant, connection: websocket_api.ActiveConnection, msg: dict[str, Any]
) -> None:
    """Generate a preview."""

    def create_preview_sensor(
        flow_type: Literal["config_flow", "options_flow"],
        name: str,
        validated_config: dict[str, Any],
    ) -> SensorGroup:
        """Create a preview sensor."""
        ignore_non_numeric = (
            False
            if flow_type == "config_flow"
            else validated_config[CONF_IGNORE_NON_NUMERIC]
        )
        return SensorGroup(
            None,
            name,
            validated_config[CONF_ENTITIES],
            ignore_non_numeric,
            validated_config[CONF_TYPE],
            None,
            None,
            None,
        )

    _async_handle_ws_preview(
        hass,
        connection,
        msg,
        SENSOR_CONFIG_SCHEMA,
        await sensor_options_schema("sensor", None),
        create_preview_sensor,
    )
@@ -41,11 +41,7 @@ from homeassistant.const import (
from homeassistant.core import HomeAssistant, State, callback
from homeassistant.helpers import config_validation as cv, entity_registry as er
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.event import (
    EventStateChangedData,
    async_track_state_change_event,
)
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType, EventType
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType

from . import GroupEntity
from .util import attribute_equal, reduce_attribute

@@ -100,6 +96,18 @@ async def async_setup_entry(
    )


@callback
def async_create_preview_cover(
    name: str, validated_config: dict[str, Any]
) -> CoverGroup:
    """Create a preview sensor."""
    return CoverGroup(
        None,
        name,
        validated_config[CONF_ENTITIES],
    )


class CoverGroup(GroupEntity, CoverEntity):
    """Representation of a CoverGroup."""

@@ -112,7 +120,7 @@ class CoverGroup(GroupEntity, CoverEntity):

    def __init__(self, unique_id: str | None, name: str, entities: list[str]) -> None:
        """Initialize a CoverGroup entity."""
        self._entities = entities
        self._entity_ids = entities
        self._covers: dict[str, set[str]] = {
            KEY_OPEN_CLOSE: set(),
            KEY_STOP: set(),

@@ -128,21 +136,11 @@ class CoverGroup(GroupEntity, CoverEntity):
        self._attr_extra_state_attributes = {ATTR_ENTITY_ID: entities}
        self._attr_unique_id = unique_id

    @callback
    def _update_supported_features_event(
        self, event: EventType[EventStateChangedData]
    ) -> None:
        self.async_set_context(event.context)
        self.async_update_supported_features(
            event.data["entity_id"], event.data["new_state"]
        )

    @callback
    def async_update_supported_features(
        self,
        entity_id: str,
        new_state: State | None,
        update_state: bool = True,
    ) -> None:
        """Update dictionaries with supported features."""
        if not new_state:
@@ -150,8 +148,6 @@ class CoverGroup(GroupEntity, CoverEntity):
|
||||
values.discard(entity_id)
|
||||
for values in self._tilts.values():
|
||||
values.discard(entity_id)
|
||||
if update_state:
|
||||
self.async_defer_or_update_ha_state()
|
||||
return
|
||||
|
||||
features = new_state.attributes.get(ATTR_SUPPORTED_FEATURES, 0)
|
||||
@@ -182,25 +178,6 @@ class CoverGroup(GroupEntity, CoverEntity):
|
||||
else:
|
||||
self._tilts[KEY_POSITION].discard(entity_id)
|
||||
|
||||
if update_state:
|
||||
self.async_defer_or_update_ha_state()
|
||||
|
||||
async def async_added_to_hass(self) -> None:
|
||||
"""Register listeners."""
|
||||
for entity_id in self._entities:
|
||||
if (new_state := self.hass.states.get(entity_id)) is None:
|
||||
continue
|
||||
self.async_update_supported_features(
|
||||
entity_id, new_state, update_state=False
|
||||
)
|
||||
self.async_on_remove(
|
||||
async_track_state_change_event(
|
||||
self.hass, self._entities, self._update_supported_features_event
|
||||
)
|
||||
)
|
||||
|
||||
await super().async_added_to_hass()
|
||||
|
||||
async def async_open_cover(self, **kwargs: Any) -> None:
|
||||
"""Move the covers up."""
|
||||
data = {ATTR_ENTITY_ID: self._covers[KEY_OPEN_CLOSE]}
|
||||
@@ -278,7 +255,7 @@ class CoverGroup(GroupEntity, CoverEntity):
|
||||
|
||||
states = [
|
||||
state.state
|
||||
for entity_id in self._entities
|
||||
for entity_id in self._entity_ids
|
||||
if (state := self.hass.states.get(entity_id)) is not None
|
||||
]
|
||||
|
||||
@@ -292,7 +269,7 @@ class CoverGroup(GroupEntity, CoverEntity):
|
||||
self._attr_is_closed = True
|
||||
self._attr_is_closing = False
|
||||
self._attr_is_opening = False
|
||||
for entity_id in self._entities:
|
||||
for entity_id in self._entity_ids:
|
||||
if not (state := self.hass.states.get(entity_id)):
|
||||
continue
|
||||
if state.state == STATE_OPEN:
|
||||
@@ -347,7 +324,7 @@ class CoverGroup(GroupEntity, CoverEntity):
|
||||
self._attr_supported_features = supported_features
|
||||
|
||||
if not self._attr_assumed_state:
|
||||
for entity_id in self._entities:
|
||||
for entity_id in self._entity_ids:
|
||||
if (state := self.hass.states.get(entity_id)) is None:
|
||||
continue
|
||||
if state and state.attributes.get(ATTR_ASSUMED_STATE):
|
||||
|
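A reading note on the hunks above: the rename from self._entities to self._entity_ids, together with the removal of the per-platform _update_supported_features_event and async_added_to_hass boilerplate, suggests the shared GroupEntity base class now owns the member list and the state-change subscription under one canonical attribute name. The same pattern repeats below for the fan, light, lock, switch, and sensor groups. A minimal, self-contained sketch of that ownership shift (hypothetical classes and names, not Home Assistant source):

# Sketch only: the base registers one listener for all members, so
# subclasses stop wiring their own listeners in async_added_to_hass.
from collections.abc import Callable

Unsub = Callable[[], None]
Subscribe = Callable[[list[str], Callable[[str], None]], Unsub]

class GroupBaseSketch:
    def __init__(self, entity_ids: list[str]) -> None:
        self._entity_ids = entity_ids  # one shared name the base relies on
        self._unsub: Unsub | None = None

    def added_to_hass(self, subscribe: Subscribe) -> None:
        # Registered once here instead of repeated in every subclass.
        self._unsub = subscribe(self._entity_ids, self.on_member_change)

    def on_member_change(self, entity_id: str) -> None:
        raise NotImplementedError

class CoverGroupSketch(GroupBaseSketch):
    def on_member_change(self, entity_id: str) -> None:
        print(f"recompute group state after {entity_id} changed")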
193
homeassistant/components/group/event.py
Normal file
@@ -0,0 +1,193 @@
+"""Platform allowing several event entities to be grouped into one event."""
+from __future__ import annotations
+
+import itertools
+from typing import Any
+
+import voluptuous as vol
+
+from homeassistant.components.event import (
+    ATTR_EVENT_TYPE,
+    ATTR_EVENT_TYPES,
+    DOMAIN,
+    PLATFORM_SCHEMA,
+    EventEntity,
+)
+from homeassistant.config_entries import ConfigEntry
+from homeassistant.const import (
+    ATTR_DEVICE_CLASS,
+    ATTR_ENTITY_ID,
+    ATTR_FRIENDLY_NAME,
+    CONF_ENTITIES,
+    CONF_NAME,
+    CONF_UNIQUE_ID,
+    STATE_UNAVAILABLE,
+    STATE_UNKNOWN,
+)
+from homeassistant.core import HomeAssistant, callback
+from homeassistant.helpers import config_validation as cv, entity_registry as er
+from homeassistant.helpers.entity_platform import AddEntitiesCallback
+from homeassistant.helpers.event import (
+    EventStateChangedData,
+    async_track_state_change_event,
+)
+from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType, EventType
+
+from . import GroupEntity
+
+DEFAULT_NAME = "Event group"
+
+# No limit on parallel updates to enable a group calling another group
+PARALLEL_UPDATES = 0
+
+PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
+    {
+        vol.Required(CONF_ENTITIES): cv.entities_domain(DOMAIN),
+        vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
+        vol.Optional(CONF_UNIQUE_ID): cv.string,
+    }
+)
+
+
+async def async_setup_platform(
+    _: HomeAssistant,
+    config: ConfigType,
+    async_add_entities: AddEntitiesCallback,
+    __: DiscoveryInfoType | None = None,
+) -> None:
+    """Set up the event group platform."""
+    async_add_entities(
+        [
+            EventGroup(
+                config.get(CONF_UNIQUE_ID),
+                config[CONF_NAME],
+                config[CONF_ENTITIES],
+            )
+        ]
+    )
+
+
+async def async_setup_entry(
+    hass: HomeAssistant,
+    config_entry: ConfigEntry,
+    async_add_entities: AddEntitiesCallback,
+) -> None:
+    """Initialize event group config entry."""
+    registry = er.async_get(hass)
+    entities = er.async_validate_entity_ids(
+        registry, config_entry.options[CONF_ENTITIES]
+    )
+    async_add_entities(
+        [
+            EventGroup(
+                config_entry.entry_id,
+                config_entry.title,
+                entities,
+            )
+        ]
+    )
+
+
+@callback
+def async_create_preview_event(
+    name: str, validated_config: dict[str, Any]
+) -> EventGroup:
+    """Create a preview event."""
+    return EventGroup(
+        None,
+        name,
+        validated_config[CONF_ENTITIES],
+    )
+
+
+class EventGroup(GroupEntity, EventEntity):
+    """Representation of an event group."""
+
+    _attr_available = False
+    _attr_should_poll = False
+
+    def __init__(
+        self,
+        unique_id: str | None,
+        name: str,
+        entity_ids: list[str],
+    ) -> None:
+        """Initialize an event group."""
+        self._entity_ids = entity_ids
+        self._attr_name = name
+        self._attr_extra_state_attributes = {ATTR_ENTITY_ID: entity_ids}
+        self._attr_unique_id = unique_id
+        self._attr_event_types = []
+
+    async def async_added_to_hass(self) -> None:
+        """Register callbacks."""
+
+        @callback
+        def async_state_changed_listener(
+            event: EventType[EventStateChangedData],
+        ) -> None:
+            """Handle child updates."""
+            if not self.hass.is_running:
+                return
+
+            self.async_set_context(event.context)
+
+            # Update all properties of the group
+            self.async_update_group_state()
+
+            # Re-fire if one of the members fires an event, but only
+            # if the original state was not unavailable or unknown.
+            if (
+                (old_state := event.data["old_state"])
+                and old_state.state not in (STATE_UNAVAILABLE, STATE_UNKNOWN)
+                and (new_state := event.data["new_state"])
+                and new_state.state not in (STATE_UNAVAILABLE, STATE_UNKNOWN)
+                and (event_type := new_state.attributes.get(ATTR_EVENT_TYPE))
+            ):
+                event_attributes = new_state.attributes.copy()
+
+                # We should not propagate the event properties as
+                # fired event attributes.
+                del event_attributes[ATTR_EVENT_TYPE]
+                del event_attributes[ATTR_EVENT_TYPES]
+                event_attributes.pop(ATTR_DEVICE_CLASS, None)
+                event_attributes.pop(ATTR_FRIENDLY_NAME, None)
+
+                # Fire the group event
+                self._trigger_event(event_type, event_attributes)
+
+            self.async_write_ha_state()
+
+        self.async_on_remove(
+            async_track_state_change_event(
+                self.hass, self._entity_ids, async_state_changed_listener
+            )
+        )
+
+        await super().async_added_to_hass()
+
+    @callback
+    def async_update_group_state(self) -> None:
+        """Query all members and determine the event group properties."""
+        states = [
+            state
+            for entity_id in self._entity_ids
+            if (state := self.hass.states.get(entity_id)) is not None
+        ]
+
+        # None of the members are available
+        if not states:
+            self._attr_available = False
+            return
+
+        # Gather and combine all possible event types from all entities
+        self._attr_event_types = list(
+            set(
+                itertools.chain.from_iterable(
+                    state.attributes.get(ATTR_EVENT_TYPES, []) for state in states
+                )
+            )
+        )
+
+        # Set group as unavailable if all members are unavailable or missing
+        self._attr_available = any(state.state != STATE_UNAVAILABLE for state in states)
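The async_update_group_state method above unions the event_types of every member so the group advertises everything any member can fire. A standalone illustration of that merge (the entity IDs and event type names are made up for the example):

import itertools

# Each inner list stands in for one member's ATTR_EVENT_TYPES attribute.
member_event_types = [
    ["pressed", "double_pressed"],  # e.g. event.front_doorbell
    ["pressed", "held"],            # e.g. event.back_doorbell
]
group_event_types = list(set(itertools.chain.from_iterable(member_event_types)))
# Contains 'pressed', 'double_pressed' and 'held' exactly once each, in
# arbitrary order because a set is used for de-duplication.
print(sorted(group_event_types))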
@@ -38,11 +38,7 @@ from homeassistant.const import (
 from homeassistant.core import HomeAssistant, State, callback
 from homeassistant.helpers import config_validation as cv, entity_registry as er
 from homeassistant.helpers.entity_platform import AddEntitiesCallback
-from homeassistant.helpers.event import (
-    EventStateChangedData,
-    async_track_state_change_event,
-)
-from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType, EventType
+from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType

 from . import GroupEntity
 from .util import (
@@ -100,6 +96,16 @@ async def async_setup_entry(
     async_add_entities([FanGroup(config_entry.entry_id, config_entry.title, entities)])


+@callback
+def async_create_preview_fan(name: str, validated_config: dict[str, Any]) -> FanGroup:
+    """Create a preview fan."""
+    return FanGroup(
+        None,
+        name,
+        validated_config[CONF_ENTITIES],
+    )
+
+
 class FanGroup(GroupEntity, FanEntity):
     """Representation of a FanGroup."""

@@ -108,7 +114,7 @@ class FanGroup(GroupEntity, FanEntity):

     def __init__(self, unique_id: str | None, name: str, entities: list[str]) -> None:
         """Initialize a FanGroup entity."""
-        self._entities = entities
+        self._entity_ids = entities
         self._fans: dict[int, set[str]] = {flag: set() for flag in SUPPORTED_FLAGS}
         self._percentage = None
         self._oscillating = None
@@ -144,21 +150,11 @@ class FanGroup(GroupEntity, FanEntity):
         """Return whether or not the fan is currently oscillating."""
         return self._oscillating
-
-    @callback
-    def _update_supported_features_event(
-        self, event: EventType[EventStateChangedData]
-    ) -> None:
-        self.async_set_context(event.context)
-        self.async_update_supported_features(
-            event.data["entity_id"], event.data["new_state"]
-        )

     @callback
     def async_update_supported_features(
         self,
         entity_id: str,
         new_state: State | None,
-        update_state: bool = True,
     ) -> None:
         """Update dictionaries with supported features."""
         if not new_state:
@@ -172,25 +168,6 @@ class FanGroup(GroupEntity, FanEntity):
         else:
             self._fans[feature].discard(entity_id)

-        if update_state:
-            self.async_defer_or_update_ha_state()
-
-    async def async_added_to_hass(self) -> None:
-        """Register listeners."""
-        for entity_id in self._entities:
-            if (new_state := self.hass.states.get(entity_id)) is None:
-                continue
-            self.async_update_supported_features(
-                entity_id, new_state, update_state=False
-            )
-        self.async_on_remove(
-            async_track_state_change_event(
-                self.hass, self._entities, self._update_supported_features_event
-            )
-        )
-
-        await super().async_added_to_hass()
-
     async def async_set_percentage(self, percentage: int) -> None:
         """Set the speed of the fan, as a percentage."""
         if percentage == 0:
@@ -250,7 +227,7 @@ class FanGroup(GroupEntity, FanEntity):
         await self.hass.services.async_call(
             DOMAIN,
             service,
-            {ATTR_ENTITY_ID: self._entities},
+            {ATTR_ENTITY_ID: self._entity_ids},
             blocking=True,
             context=self._context,
         )
@@ -275,7 +252,7 @@ class FanGroup(GroupEntity, FanEntity):

         states = [
             state
-            for entity_id in self._entities
+            for entity_id in self._entity_ids
             if (state := self.hass.states.get(entity_id)) is not None
         ]
         self._attr_assumed_state |= not states_equal(states)
@@ -47,11 +47,7 @@ from homeassistant.const import (
 from homeassistant.core import HomeAssistant, callback
 from homeassistant.helpers import config_validation as cv, entity_registry as er
 from homeassistant.helpers.entity_platform import AddEntitiesCallback
-from homeassistant.helpers.event import (
-    EventStateChangedData,
-    async_track_state_change_event,
-)
-from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType, EventType
+from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType

 from . import GroupEntity
 from .util import find_state_attributes, mean_tuple, reduce_attribute
@@ -114,6 +110,19 @@ async def async_setup_entry(
     )


+@callback
+def async_create_preview_light(
+    name: str, validated_config: dict[str, Any]
+) -> LightGroup:
+    """Create a preview light."""
+    return LightGroup(
+        None,
+        name,
+        validated_config[CONF_ENTITIES],
+        validated_config.get(CONF_ALL, False),
+    )
+
+
 FORWARDED_ATTRIBUTES = frozenset(
     {
         ATTR_BRIGHTNESS,
@@ -153,25 +162,6 @@ class LightGroup(GroupEntity, LightEntity):
         if mode:
             self.mode = all
-
-    async def async_added_to_hass(self) -> None:
-        """Register callbacks."""
-
-        @callback
-        def async_state_changed_listener(
-            event: EventType[EventStateChangedData],
-        ) -> None:
-            """Handle child updates."""
-            self.async_set_context(event.context)
-            self.async_defer_or_update_ha_state()
-
-        self.async_on_remove(
-            async_track_state_change_event(
-                self.hass, self._entity_ids, async_state_changed_listener
-            )
-        )
-
-        await super().async_added_to_hass()

     async def async_turn_on(self, **kwargs: Any) -> None:
         """Forward the turn_on command to all lights in the light group."""
         data = {
@@ -31,11 +31,7 @@ from homeassistant.const import (
 from homeassistant.core import HomeAssistant, callback
 from homeassistant.helpers import config_validation as cv, entity_registry as er
 from homeassistant.helpers.entity_platform import AddEntitiesCallback
-from homeassistant.helpers.event import (
-    EventStateChangedData,
-    async_track_state_change_event,
-)
-from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType, EventType
+from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType

 from . import GroupEntity

@@ -94,6 +90,16 @@ async def async_setup_entry(
     )


+@callback
+def async_create_preview_lock(name: str, validated_config: dict[str, Any]) -> LockGroup:
+    """Create a preview lock."""
+    return LockGroup(
+        None,
+        name,
+        validated_config[CONF_ENTITIES],
+    )
+
+
 class LockGroup(GroupEntity, LockEntity):
     """Representation of a lock group."""

@@ -114,25 +120,6 @@ class LockGroup(GroupEntity, LockEntity):
         self._attr_extra_state_attributes = {ATTR_ENTITY_ID: entity_ids}
         self._attr_unique_id = unique_id
-
-    async def async_added_to_hass(self) -> None:
-        """Register callbacks."""
-
-        @callback
-        def async_state_changed_listener(
-            event: EventType[EventStateChangedData],
-        ) -> None:
-            """Handle child updates."""
-            self.async_set_context(event.context)
-            self.async_defer_or_update_ha_state()
-
-        self.async_on_remove(
-            async_track_state_change_event(
-                self.hass, self._entity_ids, async_state_changed_listener
-            )
-        )
-
-        await super().async_added_to_hass()

     async def async_lock(self, **kwargs: Any) -> None:
         """Forward the lock command to all locks in the group."""
         data = {ATTR_ENTITY_ID: self._entity_ids}
@@ -1,7 +1,7 @@
 """Platform allowing several media players to be grouped into one media player."""
 from __future__ import annotations

-from collections.abc import Mapping
+from collections.abc import Callable, Mapping
 from contextlib import suppress
 from typing import Any

@@ -44,7 +44,7 @@ from homeassistant.const import (
     STATE_UNAVAILABLE,
     STATE_UNKNOWN,
 )
-from homeassistant.core import HomeAssistant, State, callback
+from homeassistant.core import CALLBACK_TYPE, HomeAssistant, State, callback
 from homeassistant.helpers import config_validation as cv, entity_registry as er
 from homeassistant.helpers.entity_platform import AddEntitiesCallback
 from homeassistant.helpers.event import (
@@ -107,6 +107,18 @@ async def async_setup_entry(
     )


+@callback
+def async_create_preview_media_player(
+    name: str, validated_config: dict[str, Any]
+) -> MediaPlayerGroup:
+    """Create a preview media player."""
+    return MediaPlayerGroup(
+        None,
+        name,
+        validated_config[CONF_ENTITIES],
+    )
+
+
 class MediaPlayerGroup(MediaPlayerEntity):
     """Representation of a Media Group."""

@@ -139,7 +151,8 @@ class MediaPlayerGroup(MediaPlayerEntity):
         self.async_update_supported_features(
             event.data["entity_id"], event.data["new_state"]
         )
-        self.async_update_state()
+        self.async_update_group_state()
+        self.async_write_ha_state()

     @callback
     def async_update_supported_features(
@@ -208,6 +221,26 @@ class MediaPlayerGroup(MediaPlayerEntity):
         else:
             self._features[KEY_ENQUEUE].discard(entity_id)

+    @callback
+    def async_start_preview(
+        self,
+        preview_callback: Callable[[str, Mapping[str, Any]], None],
+    ) -> CALLBACK_TYPE:
+        """Render a preview."""
+
+        @callback
+        def async_state_changed_listener(
+            event: EventType[EventStateChangedData] | None,
+        ) -> None:
+            """Handle child updates."""
+            self.async_update_group_state()
+            preview_callback(*self._async_generate_attributes())
+
+        async_state_changed_listener(None)
+        return async_track_state_change_event(
+            self.hass, self._entities, async_state_changed_listener
+        )
+
     async def async_added_to_hass(self) -> None:
         """Register listeners."""
         for entity_id in self._entities:
@@ -216,7 +249,8 @@ class MediaPlayerGroup(MediaPlayerEntity):
         async_track_state_change_event(
             self.hass, self._entities, self.async_on_state_change
         )
-        self.async_update_state()
+        self.async_update_group_state()
+        self.async_write_ha_state()

     @property
     def name(self) -> str:
@@ -391,7 +425,7 @@ class MediaPlayerGroup(MediaPlayerEntity):
         await self.async_set_volume_level(max(0, volume_level - 0.1))

     @callback
-    def async_update_state(self) -> None:
+    def async_update_group_state(self) -> None:
         """Query all members and determine the media group state."""
         states = [
             state.state
@@ -455,4 +489,3 @@ class MediaPlayerGroup(MediaPlayerEntity):
             supported_features |= MediaPlayerEntityFeature.MEDIA_ENQUEUE

         self._attr_supported_features = supported_features
-        self.async_write_ha_state()
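The async_start_preview method added above follows a push-on-subscribe pattern: it invokes the listener once immediately (with None as the event) so the preview renders without waiting for a state change, then returns the tracker's unsubscribe callback. A minimal mimic of that contract, with hypothetical names and no Home Assistant dependencies:

from collections.abc import Callable
from typing import Any

PreviewCallback = Callable[[str, dict[str, Any]], None]

class PreviewSourceSketch:
    """Toy stand-in for an entity that can render config-flow previews."""

    def __init__(self) -> None:
        self._listeners: list[PreviewCallback] = []
        self._state = "idle"

    def start_preview(self, preview_callback: PreviewCallback) -> Callable[[], None]:
        self._listeners.append(preview_callback)
        preview_callback(self._state, {})  # immediate first render
        return lambda: self._listeners.remove(preview_callback)

source = PreviewSourceSketch()
unsub = source.start_preview(lambda state, attrs: print("preview:", state, attrs))
unsub()  # config flow closed: stop receiving preview updates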
@@ -1,7 +1,7 @@
 """Platform allowing several sensors to be grouped into one sensor to provide numeric combinations."""
 from __future__ import annotations

-from collections.abc import Callable, Mapping
+from collections.abc import Callable
 from datetime import datetime
 import logging
 import statistics
@@ -33,19 +33,10 @@ from homeassistant.const import (
     STATE_UNAVAILABLE,
     STATE_UNKNOWN,
 )
-from homeassistant.core import CALLBACK_TYPE, HomeAssistant, State, callback
+from homeassistant.core import HomeAssistant, State, callback
 from homeassistant.helpers import config_validation as cv, entity_registry as er
 from homeassistant.helpers.entity_platform import AddEntitiesCallback
-from homeassistant.helpers.event import (
-    EventStateChangedData,
-    async_track_state_change_event,
-)
-from homeassistant.helpers.typing import (
-    ConfigType,
-    DiscoveryInfoType,
-    EventType,
-    StateType,
-)
+from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType, StateType

 from . import GroupEntity
 from .const import CONF_IGNORE_NON_NUMERIC
@@ -145,6 +136,23 @@ async def async_setup_entry(
     )


+@callback
+def async_create_preview_sensor(
+    name: str, validated_config: dict[str, Any]
+) -> SensorGroup:
+    """Create a preview sensor."""
+    return SensorGroup(
+        None,
+        name,
+        validated_config[CONF_ENTITIES],
+        validated_config.get(CONF_IGNORE_NON_NUMERIC, False),
+        validated_config[CONF_TYPE],
+        None,
+        None,
+        None,
+    )
+
+
 def calc_min(
     sensor_values: list[tuple[str, float, State]]
 ) -> tuple[dict[str, str | None], float | None]:
@@ -303,45 +311,6 @@ class SensorGroup(GroupEntity, SensorEntity):
         self._state_incorrect: set[str] = set()
         self._extra_state_attribute: dict[str, Any] = {}
-
-    @callback
-    def async_start_preview(
-        self,
-        preview_callback: Callable[[str, Mapping[str, Any]], None],
-    ) -> CALLBACK_TYPE:
-        """Render a preview."""
-
-        @callback
-        def async_state_changed_listener(
-            event: EventType[EventStateChangedData] | None,
-        ) -> None:
-            """Handle child updates."""
-            self.async_update_group_state()
-            preview_callback(*self._async_generate_attributes())
-
-        async_state_changed_listener(None)
-        return async_track_state_change_event(
-            self.hass, self._entity_ids, async_state_changed_listener
-        )
-
-    async def async_added_to_hass(self) -> None:
-        """Register callbacks."""
-
-        @callback
-        def async_state_changed_listener(
-            event: EventType[EventStateChangedData],
-        ) -> None:
-            """Handle child updates."""
-            self.async_set_context(event.context)
-            self.async_defer_or_update_ha_state()
-
-        self.async_on_remove(
-            async_track_state_change_event(
-                self.hass, self._entity_ids, async_state_changed_listener
-            )
-        )
-
-        await super().async_added_to_hass()

     @callback
     def async_update_group_state(self) -> None:
         """Query all members and determine the sensor group state."""
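For context on the calc_min signature visible above: each reducer takes (entity_id, value, State) triples from the members and returns the winning attributes plus the numeric result. A simplified, hedged sketch of a min-style reducer (the real helper also threads State objects and attribute dicts through, which this example drops):

def calc_min_sketch(
    sensor_values: list[tuple[str, float]]
) -> tuple[str | None, float | None]:
    """Return the entity id and value of the smallest member (simplified)."""
    if not sensor_values:
        return None, None
    entity_id, value = min(sensor_values, key=lambda pair: pair[1])
    return entity_id, value

assert calc_min_sketch([("sensor.a", 3.0), ("sensor.b", 1.5)]) == ("sensor.b", 1.5)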
@@ -8,6 +8,7 @@
       "menu_options": {
         "binary_sensor": "Binary sensor group",
         "cover": "Cover group",
+        "event": "Event group",
         "fan": "Fan group",
         "light": "Light group",
         "lock": "Lock group",
@@ -34,6 +35,14 @@
           "name": "[%key:component::group::config::step::binary_sensor::data::name%]"
         }
       },
+      "event": {
+        "title": "[%key:component::group::config::step::user::title%]",
+        "data": {
+          "entities": "[%key:component::group::config::step::binary_sensor::data::entities%]",
+          "hide_members": "[%key:component::group::config::step::binary_sensor::data::hide_members%]",
+          "name": "[%key:component::group::config::step::binary_sensor::data::name%]"
+        }
+      },
       "fan": {
         "title": "[%key:component::group::config::step::user::title%]",
         "data": {
@@ -22,11 +22,7 @@ from homeassistant.const import (
 from homeassistant.core import HomeAssistant, callback
 from homeassistant.helpers import config_validation as cv, entity_registry as er
 from homeassistant.helpers.entity_platform import AddEntitiesCallback
-from homeassistant.helpers.event import (
-    EventStateChangedData,
-    async_track_state_change_event,
-)
-from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType, EventType
+from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType

 from . import GroupEntity

@@ -89,6 +85,19 @@ async def async_setup_entry(
     )


+@callback
+def async_create_preview_switch(
+    name: str, validated_config: dict[str, Any]
+) -> SwitchGroup:
+    """Create a preview switch."""
+    return SwitchGroup(
+        None,
+        name,
+        validated_config[CONF_ENTITIES],
+        validated_config.get(CONF_ALL, False),
+    )
+
+
 class SwitchGroup(GroupEntity, SwitchEntity):
     """Representation of a switch group."""

@@ -112,25 +121,6 @@ class SwitchGroup(GroupEntity, SwitchEntity):
         if mode:
             self.mode = all
-
-    async def async_added_to_hass(self) -> None:
-        """Register callbacks."""
-
-        @callback
-        def async_state_changed_listener(
-            event: EventType[EventStateChangedData],
-        ) -> None:
-            """Handle child updates."""
-            self.async_set_context(event.context)
-            self.async_defer_or_update_ha_state()
-
-        self.async_on_remove(
-            async_track_state_change_event(
-                self.hass, self._entity_ids, async_state_changed_listener
-            )
-        )
-
-        await super().async_added_to_hass()

     async def async_turn_on(self, **kwargs: Any) -> None:
         """Forward the turn_on command to all switches in the group."""
         data = {ATTR_ENTITY_ID: self._entity_ids}
@@ -505,7 +505,6 @@ def setup_platform(
     joined_path = os.path.join(gtfs_dir, sqlite_file)
     gtfs = pygtfs.Schedule(joined_path)

-    # pylint: disable=no-member
     if not gtfs.feeds:
         pygtfs.append_feed(gtfs, os.path.join(gtfs_dir, data))

@@ -82,7 +82,6 @@ NO_STORE = re.compile(
     r"|app/entrypoint.js"
     r")$"
 )
-# pylint: enable=implicit-str-concat
 # fmt: on

 RESPONSE_HEADERS_FILTER = {
@@ -41,7 +41,6 @@ SCHEMA_WEBSOCKET_EVENT = vol.Schema(
 )

 # Endpoints needed for ingress can't require admin because addons can set `panel_admin: false`
-# pylint: disable=implicit-str-concat
 # fmt: off
 WS_NO_ADMIN_ENDPOINTS = re.compile(
     r"^(?:"
@@ -50,7 +49,6 @@ WS_NO_ADMIN_ENDPOINTS = re.compile(
     r")$"  # noqa: ISC001
 )
 # fmt: on
-# pylint: enable=implicit-str-concat

 _LOGGER: logging.Logger = logging.getLogger(__package__)

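The implicit-str-concat suppressions being dropped in the two hunks above guarded the deliberate use of adjacent string literals, which Python concatenates at compile time; with the targeted "# noqa: ISC001" marker on the closing fragment, the blanket pylint toggles are apparently redundant. An illustration of the construct itself (pattern fragments taken from the code shown above):

import re

# Adjacent literals concatenate at compile time; handy for long regexes,
# but easy to trigger by accident, which is what the linters police.
PATTERN = re.compile(
    r"^(?:"
    r"|app/entrypoint.js"
    r")$"
)
assert PATTERN.match("app/entrypoint.js")
assert PATTERN.match("")  # the leading "|" also allows the empty string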
Some files were not shown because too many files have changed in this diff.