forked from home-assistant/core
Compare commits
66 Commits
2022.5.0b6...2022.5.3
| SHA1 |
|---|
| b5bd154e87 |
| 534eef0b76 |
| d477546e76 |
| f88c643e1c |
| 3ee32e22c1 |
| 9f1d996d95 |
| 51ba02f141 |
| f817caa7fc |
| fccad81227 |
| 34cbf26e2f |
| 5b4764351d |
| b2721d6596 |
| b18d64fdac |
| 6abc51b363 |
| abe1f8e862 |
| 4a7710572c |
| dc3e421b3b |
| 2fffac02a3 |
| 46a36adf26 |
| c54e236416 |
| 97c7d40d8a |
| 4ae596fef2 |
| e2ae62ea95 |
| 9a4ce19aff |
| 61a3873d09 |
| 9f8111cabe |
| aa69e7646f |
| f2a07254a4 |
| b8dccbbbf3 |
| 6ccd707a65 |
| c7b24c45ba |
| 61a6d13d79 |
| 7be5eed25c |
| 5a5cde690f |
| 52333bb720 |
| 0bac48864f |
| 4196c4e81c |
| d458ac0239 |
| 4e431274ea |
| 87a8a82040 |
| 4e331c331f |
| deec879a4b |
| efa931f698 |
| d6e3325ea7 |
| 44d17a80c3 |
| bf17bd55fd |
| d525aad87e |
| 444a56341b |
| aa0335408a |
| 0890f4e514 |
| 461ebcc835 |
| 269c71d2fb |
| 23738d5e91 |
| ad5c2cdf8f |
| a175943187 |
| 9aed63f2d8 |
| 707aa5f684 |
| c9eca40336 |
| 989fa4274b |
| f4f5ba93b5 |
| d69a08bdf9 |
| 6301873d89 |
| 8252ba82d1 |
| 1f4e9effd8 |
| 1a2a061c19 |
| 19bff35437 |
@@ -1320,6 +1320,9 @@ omit =
homeassistant/components/twitter/notify.py
homeassistant/components/ubus/device_tracker.py
homeassistant/components/ue_smart_radio/media_player.py
homeassistant/components/ukraine_alarm/__init__.py
homeassistant/components/ukraine_alarm/const.py
homeassistant/components/ukraine_alarm/binary_sensor.py
homeassistant/components/unifiled/*
homeassistant/components/upb/__init__.py
homeassistant/components/upb/const.py

@@ -1070,6 +1070,8 @@ build.json @home-assistant/supervisor
/tests/components/twentemilieu/ @frenck
/homeassistant/components/twinkly/ @dr1rrb @Robbie1221
/tests/components/twinkly/ @dr1rrb @Robbie1221
/homeassistant/components/ukraine_alarm/ @PaulAnnekov
/tests/components/ukraine_alarm/ @PaulAnnekov
/homeassistant/components/unifi/ @Kane610
/tests/components/unifi/ @Kane610
/homeassistant/components/unifiled/ @florisvdk

@@ -123,7 +123,7 @@ class AirzoneClimate(AirzoneZoneEntity, ClimateEntity):
}
_LOGGER.debug("update_hvac_params=%s", _params)
try:
await self.coordinator.airzone.put_hvac(_params)
await self.coordinator.airzone.set_hvac_parameters(_params)
except AirzoneError as error:
raise HomeAssistantError(
f"Failed to set zone {self.name}: {error}"

@@ -3,7 +3,7 @@
"name": "Airzone",
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/airzone",
"requirements": ["aioairzone==0.4.2"],
"requirements": ["aioairzone==0.4.3"],
"codeowners": ["@Noltari"],
"iot_class": "local_polling",
"loggers": ["aioairzone"]
@@ -7,6 +7,7 @@ from contextlib import asynccontextmanager, suppress
from dataclasses import dataclass
from datetime import datetime, timedelta
import logging
import threading
from typing import Any

import aiohttp
@@ -30,15 +31,14 @@ from homeassistant.const import (
CONF_USERNAME,
ENTITY_MATCH_ALL,
ENTITY_MATCH_NONE,
EVENT_HOMEASSISTANT_STOP,
HTTP_BASIC_AUTHENTICATION,
Platform,
)
from homeassistant.core import Event, HomeAssistant, ServiceCall, callback
from homeassistant.core import HomeAssistant, ServiceCall
from homeassistant.exceptions import Unauthorized, UnknownUser
from homeassistant.helpers import discovery
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.dispatcher import async_dispatcher_send
from homeassistant.helpers.dispatcher import async_dispatcher_send, dispatcher_send
from homeassistant.helpers.event import async_track_time_interval
from homeassistant.helpers.service import async_extract_entity_ids
from homeassistant.helpers.typing import ConfigType
@@ -144,10 +144,13 @@ class AmcrestChecker(ApiWrapper):
self._hass = hass
self._wrap_name = name
self._wrap_errors = 0
self._wrap_lock = asyncio.Lock()
self._wrap_lock = threading.Lock()
self._async_wrap_lock = asyncio.Lock()
self._wrap_login_err = False
self._wrap_event_flag = asyncio.Event()
self._wrap_event_flag = threading.Event()
self._wrap_event_flag.set()
self._async_wrap_event_flag = asyncio.Event()
self._async_wrap_event_flag.set()
self._unsub_recheck: Callable[[], None] | None = None
super().__init__(
host,
@@ -164,12 +167,18 @@ class AmcrestChecker(ApiWrapper):
return self._wrap_errors <= MAX_ERRORS and not self._wrap_login_err

@property
def available_flag(self) -> asyncio.Event:
def available_flag(self) -> threading.Event:
"""Return event flag that indicates if camera's API is responding."""
return self._wrap_event_flag

@property
def async_available_flag(self) -> asyncio.Event:
"""Return event flag that indicates if camera's API is responding."""
return self._async_wrap_event_flag

def _start_recovery(self) -> None:
self._wrap_event_flag.clear()
self.available_flag.clear()
self.async_available_flag.clear()
async_dispatcher_send(
self._hass, service_signal(SERVICE_UPDATE, self._wrap_name)
)
@@ -177,9 +186,22 @@ class AmcrestChecker(ApiWrapper):
self._hass, self._wrap_test_online, RECHECK_INTERVAL
)

def command(self, *args: Any, **kwargs: Any) -> Any:
"""amcrest.ApiWrapper.command wrapper to catch errors."""
try:
ret = super().command(*args, **kwargs)
except LoginError as ex:
self._handle_offline(ex)
raise
except AmcrestError:
self._handle_error()
raise
self._set_online()
return ret

async def async_command(self, *args: Any, **kwargs: Any) -> httpx.Response:
"""amcrest.ApiWrapper.command wrapper to catch errors."""
async with self._command_wrapper():
async with self._async_command_wrapper():
ret = await super().async_command(*args, **kwargs)
return ret

@@ -188,35 +210,47 @@ class AmcrestChecker(ApiWrapper):
self, *args: Any, **kwargs: Any
) -> AsyncIterator[httpx.Response]:
"""amcrest.ApiWrapper.command wrapper to catch errors."""
async with self._command_wrapper():
async with self._async_command_wrapper():
async with super().async_stream_command(*args, **kwargs) as ret:
yield ret

@asynccontextmanager
async def _command_wrapper(self) -> AsyncIterator[None]:
async def _async_command_wrapper(self) -> AsyncIterator[None]:
try:
yield
except LoginError as ex:
async with self._wrap_lock:
was_online = self.available
was_login_err = self._wrap_login_err
self._wrap_login_err = True
if not was_login_err:
_LOGGER.error("%s camera offline: Login error: %s", self._wrap_name, ex)
if was_online:
self._start_recovery()
async with self._async_wrap_lock:
self._handle_offline(ex)
raise
except AmcrestError:
async with self._wrap_lock:
was_online = self.available
errs = self._wrap_errors = self._wrap_errors + 1
offline = not self.available
_LOGGER.debug("%s camera errs: %i", self._wrap_name, errs)
if was_online and offline:
_LOGGER.error("%s camera offline: Too many errors", self._wrap_name)
self._start_recovery()
async with self._async_wrap_lock:
self._handle_error()
raise
async with self._wrap_lock:
async with self._async_wrap_lock:
self._set_online()

def _handle_offline(self, ex: Exception) -> None:
with self._wrap_lock:
was_online = self.available
was_login_err = self._wrap_login_err
self._wrap_login_err = True
if not was_login_err:
_LOGGER.error("%s camera offline: Login error: %s", self._wrap_name, ex)
if was_online:
self._start_recovery()

def _handle_error(self) -> None:
with self._wrap_lock:
was_online = self.available
errs = self._wrap_errors = self._wrap_errors + 1
offline = not self.available
_LOGGER.debug("%s camera errs: %i", self._wrap_name, errs)
if was_online and offline:
_LOGGER.error("%s camera offline: Too many errors", self._wrap_name)
self._start_recovery()

def _set_online(self) -> None:
with self._wrap_lock:
was_offline = not self.available
self._wrap_errors = 0
self._wrap_login_err = False
@@ -225,7 +259,8 @@ class AmcrestChecker(ApiWrapper):
self._unsub_recheck()
self._unsub_recheck = None
_LOGGER.error("%s camera back online", self._wrap_name)
self._wrap_event_flag.set()
self.available_flag.set()
self.async_available_flag.set()
async_dispatcher_send(
self._hass, service_signal(SERVICE_UPDATE, self._wrap_name)
)
@@ -237,18 +272,18 @@ class AmcrestChecker(ApiWrapper):
await self.async_current_time


async def _monitor_events(
def _monitor_events(
hass: HomeAssistant,
name: str,
api: AmcrestChecker,
event_codes: set[str],
) -> None:
while True:
await api.available_flag.wait()
api.available_flag.wait()
try:
async for code, payload in api.async_event_actions("All"):
for code, payload in api.event_actions("All"):
event_data = {"camera": name, "event": code, "payload": payload}
hass.bus.async_fire("amcrest", event_data)
hass.bus.fire("amcrest", event_data)
if code in event_codes:
signal = service_signal(SERVICE_EVENT, name, code)
start = any(
@@ -256,18 +291,32 @@ async def _monitor_events(
for key, val in payload.items()
)
_LOGGER.debug("Sending signal: '%s': %s", signal, start)
async_dispatcher_send(hass, signal, start)
dispatcher_send(hass, signal, start)
except AmcrestError as error:
_LOGGER.warning(
"Error while processing events from %s camera: %r", name, error
)


def _start_event_monitor(
hass: HomeAssistant,
name: str,
api: AmcrestChecker,
event_codes: set[str],
) -> None:
thread = threading.Thread(
target=_monitor_events,
name=f"Amcrest {name}",
args=(hass, name, api, event_codes),
daemon=True,
)
thread.start()


async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
"""Set up the Amcrest IP Camera component."""
hass.data.setdefault(DATA_AMCREST, {DEVICES: {}, CAMERAS: []})

monitor_tasks = []
for device in config[DOMAIN]:
name: str = device[CONF_NAME]
username: str = device[CONF_USERNAME]
@@ -328,9 +377,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
and sensor.event_code is not None
}

monitor_tasks.append(
asyncio.create_task(_monitor_events(hass, name, api, event_codes))
)
_start_event_monitor(hass, name, api, event_codes)

if sensors:
hass.async_create_task(
@@ -354,13 +401,6 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
)
)

@callback
def cancel_monitors(event: Event) -> None:
for monitor_task in monitor_tasks:
monitor_task.cancel()

hass.bus.async_listen(EVENT_HOMEASSISTANT_STOP, cancel_monitors)

if not hass.data[DATA_AMCREST][DEVICES]:
return False
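The Amcrest hunks above move event monitoring from an asyncio task to a daemon thread, so every synchronization primitive now exists twice: a `threading.Lock`/`threading.Event` pair for code running in worker threads and an asyncio pair for code running on the event loop. A minimal standalone sketch of that split (illustrative names, not the integration's code; the thread-to-loop hop goes through `call_soon_threadsafe`, which is the safe way to touch an `asyncio.Event` from another thread):

```python
import asyncio
import threading


class AvailabilityFlags:
    """Keep one flag per world: threads block on thread_flag, coroutines await async_flag."""

    def __init__(self, loop: asyncio.AbstractEventLoop) -> None:
        self._loop = loop
        self.thread_flag = threading.Event()
        self.async_flag = asyncio.Event()

    def set_online(self) -> None:
        # May be called from any thread; the asyncio flag is only touched via the loop.
        self.thread_flag.set()
        self._loop.call_soon_threadsafe(self.async_flag.set)


def monitor(flags: AvailabilityFlags) -> None:
    # Daemon thread, mirroring _monitor_events: a blocking wait is fine here.
    flags.thread_flag.wait()
    print("monitor thread: camera available, start polling events")


async def main() -> None:
    flags = AvailabilityFlags(asyncio.get_running_loop())
    threading.Thread(target=monitor, args=(flags,), daemon=True).start()
    flags.set_online()
    await flags.async_flag.wait()  # non-blocking wait on the event-loop side
    print("event loop: camera available")
    await asyncio.sleep(0.1)  # give the daemon thread a moment to print


asyncio.run(main())
```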
@@ -79,7 +79,8 @@ SUPPORT_APPLE_TV = (
SUPPORT_FEATURE_MAPPING = {
FeatureName.PlayUrl: MediaPlayerEntityFeature.BROWSE_MEDIA
| MediaPlayerEntityFeature.PLAY_MEDIA,
FeatureName.StreamFile: MediaPlayerEntityFeature.PLAY_MEDIA,
FeatureName.StreamFile: MediaPlayerEntityFeature.BROWSE_MEDIA
| MediaPlayerEntityFeature.PLAY_MEDIA,
FeatureName.Pause: MediaPlayerEntityFeature.PAUSE,
FeatureName.Play: MediaPlayerEntityFeature.PLAY,
FeatureName.SetPosition: MediaPlayerEntityFeature.SEEK,
@@ -282,23 +283,20 @@ class AppleTvMediaPlayer(AppleTVEntity, MediaPlayerEntity):
if media_type == MEDIA_TYPE_APP:
await self.atv.apps.launch_app(media_id)

is_media_source_id = media_source.is_media_source_id(media_id)
if media_source.is_media_source_id(media_id):
play_item = await media_source.async_resolve_media(self.hass, media_id)
media_id = play_item.url
media_type = MEDIA_TYPE_MUSIC

if (
not is_media_source_id
and self._is_feature_available(FeatureName.StreamFile)
and (await is_streamable(media_id) or media_type == MEDIA_TYPE_MUSIC)
media_id = async_process_play_media_url(self.hass, media_id)

if self._is_feature_available(FeatureName.StreamFile) and (
media_type == MEDIA_TYPE_MUSIC or await is_streamable(media_id)
):
_LOGGER.debug("Streaming %s via RAOP", media_id)
await self.atv.stream.stream_file(media_id)

if self._is_feature_available(FeatureName.PlayUrl):
if is_media_source_id:
play_item = await media_source.async_resolve_media(self.hass, media_id)
media_id = play_item.url

media_id = async_process_play_media_url(self.hass, media_id)

elif self._is_feature_available(FeatureName.PlayUrl):
_LOGGER.debug("Playing %s via AirPlay", media_id)
await self.atv.stream.play_url(media_id)
else:
@@ -397,9 +395,12 @@ class AppleTvMediaPlayer(AppleTVEntity, MediaPlayerEntity):
media_content_id=None,
) -> BrowseMedia:
"""Implement the websocket media browsing helper."""
# If we can't stream URLs, we can't browse media.
# In that case the `BROWSE_MEDIA` feature was added because of AppList/LaunchApp
if not self._is_feature_available(FeatureName.PlayUrl):
if media_content_id == "apps" or (
# If we can't stream files or URLs, we can't browse media.
# In that case the `BROWSE_MEDIA` feature was added because of AppList/LaunchApp
not self._is_feature_available(FeatureName.PlayUrl)
and not self._is_feature_available(FeatureName.StreamFile)
):
return build_app_list(self._app_list)

if self._app_list:

@@ -4,6 +4,7 @@ from __future__ import annotations
from datetime import timedelta
import logging

import async_timeout
from brother import Brother, DictToObj, SnmpError, UnsupportedModel
import pysnmp.hlapi.asyncio as SnmpEngine

@@ -76,7 +77,8 @@ class BrotherDataUpdateCoordinator(DataUpdateCoordinator):
async def _async_update_data(self) -> DictToObj:
"""Update data via library."""
try:
data = await self.brother.async_update()
async with async_timeout.timeout(20):
data = await self.brother.async_update()
except (ConnectionError, SnmpError, UnsupportedModel) as error:
raise UpdateFailed(error) from error
return data
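The Brother coordinator hunk above wraps the SNMP update in `async_timeout.timeout(20)`, so a stalled device surfaces as `asyncio.TimeoutError`, which Home Assistant's update coordinator treats as a failed refresh, instead of hanging the update forever. A small sketch of the pattern (the sleep stands in for `brother.async_update()`, and the limit is shortened to 1 second so the example finishes quickly):

```python
import asyncio

import async_timeout


async def fetch_with_timeout() -> str:
    try:
        async with async_timeout.timeout(1):  # the integration uses 20 seconds
            await asyncio.sleep(5)  # simulated stalled SNMP request
            return "data"
    except asyncio.TimeoutError:
        return "update timed out"


print(asyncio.run(fetch_with_timeout()))  # -> update timed out
```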
@@ -144,10 +144,11 @@ class CanaryCamera(CoordinatorEntity[CanaryDataUpdateCoordinator], Camera):
if self._live_stream_session is None:
return None

stream = CameraMjpeg(self._ffmpeg.binary)
await stream.open_camera(
self._live_stream_session.live_stream_url, extra_cmd=self._ffmpeg_arguments
live_stream_url = await self.hass.async_add_executor_job(
getattr, self._live_stream_session, "live_stream_url"
)
stream = CameraMjpeg(self._ffmpeg.binary)
await stream.open_camera(live_stream_url, extra_cmd=self._ffmpeg_arguments)

try:
stream_reader = await stream.get_reader()

@@ -2,7 +2,7 @@
"domain": "canary",
"name": "Canary",
"documentation": "https://www.home-assistant.io/integrations/canary",
"requirements": ["py-canary==0.5.1"],
"requirements": ["py-canary==0.5.2"],
"dependencies": ["ffmpeg"],
"codeowners": [],
"config_flow": true,

@@ -58,7 +58,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Set up Cast from a config entry."""
await home_assistant_cast.async_setup_ha_cast(hass, entry)
hass.config_entries.async_setup_platforms(entry, PLATFORMS)
hass.data[DOMAIN] = {}
hass.data[DOMAIN] = {"cast_platform": {}, "unknown_models": {}}
await async_process_integration_platforms(hass, DOMAIN, _register_cast_platform)
return True

@@ -107,7 +107,7 @@ async def _register_cast_platform(
or not hasattr(platform, "async_play_media")
):
raise HomeAssistantError(f"Invalid cast platform {platform}")
hass.data[DOMAIN][integration_domain] = platform
hass.data[DOMAIN]["cast_platform"][integration_domain] = platform


async def async_remove_entry(hass: HomeAssistant, entry: ConfigEntry) -> None:

@@ -34,7 +34,7 @@ def discover_chromecast(
_LOGGER.error("Discovered chromecast without uuid %s", info)
return

info = info.fill_out_missing_chromecast_info()
info = info.fill_out_missing_chromecast_info(hass)
_LOGGER.debug("Discovered new or updated chromecast %s", info)

dispatcher_send(hass, SIGNAL_CAST_DISCOVERED, info)

@@ -15,8 +15,11 @@ from pychromecast import dial
from pychromecast.const import CAST_TYPE_GROUP
from pychromecast.models import CastInfo

from homeassistant.core import HomeAssistant
from homeassistant.helpers import aiohttp_client

from .const import DOMAIN

_LOGGER = logging.getLogger(__name__)

_PLS_SECTION_PLAYLIST = "playlist"
@@ -47,18 +50,50 @@ class ChromecastInfo:
"""Return the UUID."""
return self.cast_info.uuid

def fill_out_missing_chromecast_info(self) -> ChromecastInfo:
def fill_out_missing_chromecast_info(self, hass: HomeAssistant) -> ChromecastInfo:
"""Return a new ChromecastInfo object with missing attributes filled in.

Uses blocking HTTP / HTTPS.
"""
cast_info = self.cast_info
if self.cast_info.cast_type is None or self.cast_info.manufacturer is None:
# Manufacturer and cast type is not available in mDNS data, get it over http
cast_info = dial.get_cast_type(
cast_info,
zconf=ChromeCastZeroconf.get_zeroconf(),
)
unknown_models = hass.data[DOMAIN]["unknown_models"]
if self.cast_info.model_name not in unknown_models:
# Manufacturer and cast type is not available in mDNS data, get it over http
cast_info = dial.get_cast_type(
cast_info,
zconf=ChromeCastZeroconf.get_zeroconf(),
)
unknown_models[self.cast_info.model_name] = (
cast_info.cast_type,
cast_info.manufacturer,
)

report_issue = (
"create a bug report at "
"https://github.com/home-assistant/core/issues?q=is%3Aopen+is%3Aissue"
"+label%3A%22integration%3A+cast%22"
)

_LOGGER.info(
"Fetched cast details for unknown model '%s' manufacturer: '%s', type: '%s'. Please %s",
cast_info.model_name,
cast_info.manufacturer,
cast_info.cast_type,
report_issue,
)
else:
cast_type, manufacturer = unknown_models[self.cast_info.model_name]
cast_info = CastInfo(
cast_info.services,
cast_info.uuid,
cast_info.model_name,
cast_info.friendly_name,
cast_info.host,
cast_info.port,
cast_type,
manufacturer,
)

if not self.is_audio_group or self.is_dynamic_group is not None:
# We have all information, no need to check HTTP API.

@@ -3,7 +3,7 @@
"name": "Google Cast",
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/cast",
"requirements": ["pychromecast==12.0.0"],
"requirements": ["pychromecast==12.1.1"],
"after_dependencies": [
"cloud",
"http",

@@ -535,7 +535,7 @@ class CastMediaPlayerEntity(CastDevice, MediaPlayerEntity):
"""Generate root node."""
children = []
# Add media browsers
for platform in self.hass.data[CAST_DOMAIN].values():
for platform in self.hass.data[CAST_DOMAIN]["cast_platform"].values():
children.extend(
await platform.async_get_media_browser_root_object(
self.hass, self._chromecast.cast_type
@@ -587,7 +587,7 @@ class CastMediaPlayerEntity(CastDevice, MediaPlayerEntity):
if media_content_id is None:
return await self._async_root_payload(content_filter)

for platform in self.hass.data[CAST_DOMAIN].values():
for platform in self.hass.data[CAST_DOMAIN]["cast_platform"].values():
browse_media = await platform.async_browse_media(
self.hass,
media_content_type,
@@ -646,7 +646,7 @@ class CastMediaPlayerEntity(CastDevice, MediaPlayerEntity):
return

# Try the cast platforms
for platform in self.hass.data[CAST_DOMAIN].values():
for platform in self.hass.data[CAST_DOMAIN]["cast_platform"].values():
result = await platform.async_play_media(
self.hass, self.entity_id, self._chromecast, media_type, media_id
)
@@ -2,7 +2,7 @@
"domain": "compensation",
"name": "Compensation",
"documentation": "https://www.home-assistant.io/integrations/compensation",
"requirements": ["numpy==1.21.4"],
"requirements": ["numpy==1.21.6"],
"codeowners": ["@Petro31"],
"iot_class": "calculated"
}

@@ -7,6 +7,7 @@ from urllib.parse import urlparse
from devolo_home_control_api.devices.zwave import Zwave
from devolo_home_control_api.homecontrol import HomeControl

from homeassistant.components.sensor import SensorDeviceClass
from homeassistant.helpers.entity import DeviceInfo, Entity

from .const import DOMAIN
@@ -71,7 +72,11 @@ class DevoloDeviceEntity(Entity):

def _generic_message(self, message: tuple) -> None:
"""Handle generic messages."""
if len(message) == 3 and message[2] == "battery_level":
if (
len(message) == 3
and message[2] == "battery_level"
and self.device_class == SensorDeviceClass.BATTERY
):
self._value = message[1]
elif len(message) == 3 and message[2] == "status":
# Maybe the API wants to tell us, that the device went on- or offline.

@@ -4,7 +4,9 @@
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/devolo_home_network",
"requirements": ["devolo-plc-api==0.7.1"],
"zeroconf": ["_dvl-deviceapi._tcp.local."],
"zeroconf": [
{ "type": "_dvl-deviceapi._tcp.local.", "properties": { "MT": "*" } }
],
"codeowners": ["@2Fake", "@Shutgun"],
"quality_scale": "platinum",
"iot_class": "local_polling",

@@ -10,10 +10,7 @@ from fiblary3.client.v4.client import (
Client as FibaroClientV4,
StateHandler as StateHandlerV4,
)
from fiblary3.client.v5.client import (
Client as FibaroClientV5,
StateHandler as StateHandlerV5,
)
from fiblary3.client.v5.client import StateHandler as StateHandlerV5
from fiblary3.common.exceptions import HTTPException
import voluptuous as vol

@@ -141,18 +138,12 @@ class FibaroController:
should do that only when you use the FibaroController for login test as only
the login and info API's are equal throughout the different versions.
"""
if (
serial_number is None
or serial_number.upper().startswith("HC2")
or serial_number.upper().startswith("HCL")
):
self._client = FibaroClientV4(
config[CONF_URL], config[CONF_USERNAME], config[CONF_PASSWORD]
)
else:
self._client = FibaroClientV5(
config[CONF_URL], config[CONF_USERNAME], config[CONF_PASSWORD]
)

# Only use V4 API as it works better even for HC3, after the library is fixed, we should
# add here support for the newer library version V5 again.
self._client = FibaroClientV4(
config[CONF_URL], config[CONF_USERNAME], config[CONF_PASSWORD]
)

self._scene_map = None
# Whether to import devices from plugins
@@ -185,6 +185,6 @@ class FibaroLight(FibaroDevice, LightEntity):
rgbw_list = [int(i) for i in rgbw_s.split(",")][:4]

if self._attr_color_mode == ColorMode.RGB:
self._attr_rgb_color = tuple(*rgbw_list[:3])
self._attr_rgb_color = tuple(rgbw_list[:3])
else:
self._attr_rgbw_color = tuple(rgbw_list)
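The one-character Fibaro fix above is worth spelling out: `tuple(*seq)` unpacks the list into separate positional arguments, and `tuple()` accepts only a single iterable, so the old line raised a `TypeError` for every RGB light. A quick illustration with made-up channel values:

```python
rgbw_list = [255, 128, 0, 60]

print(tuple(rgbw_list[:3]))  # (255, 128, 0) - the intended RGB tuple
try:
    tuple(*rgbw_list[:3])  # equivalent to tuple(255, 128, 0)
except TypeError as err:
    print(err)  # tuple expected at most 1 argument, got 3
```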
@@ -6,7 +6,11 @@ import logging
import voluptuous as vol

from homeassistant.components.climate import PLATFORM_SCHEMA, ClimateEntity
from homeassistant.components.climate.const import ClimateEntityFeature, HVACMode
from homeassistant.components.climate.const import (
ClimateEntityFeature,
HVACAction,
HVACMode,
)
from homeassistant.components.modbus import get_hub
from homeassistant.components.modbus.const import (
CALL_TYPE_REGISTER_HOLDING,
@@ -69,9 +73,7 @@ class Flexit(ClimateEntity):
self._target_temperature = None
self._current_temperature = None
self._current_fan_mode = None
self._current_operation = None
self._fan_modes = ["Off", "Low", "Medium", "High"]
self._current_operation = None
self._filter_hours = None
self._filter_alarm = None
self._heat_recovery = None
@@ -124,15 +126,15 @@ class Flexit(ClimateEntity):
)

if self._heating:
self._current_operation = "Heating"
self._attr_hvac_action = HVACAction.HEATING
elif self._cooling:
self._current_operation = "Cooling"
self._attr_hvac_action = HVACAction.COOLING
elif self._heat_recovery:
self._current_operation = "Recovering"
self._attr_hvac_action = HVACAction.IDLE
elif actual_air_speed:
self._current_operation = "Fan Only"
self._attr_hvac_action = HVACAction.FAN
else:
self._current_operation = "Off"
self._attr_hvac_action = HVACAction.OFF

@property
def extra_state_attributes(self):
@@ -175,7 +177,7 @@ class Flexit(ClimateEntity):
@property
def hvac_mode(self):
"""Return current operation ie. heat, cool, idle."""
return self._current_operation
return HVACMode.COOL

@property
def hvac_modes(self) -> list[str]:

@@ -2,7 +2,7 @@
"domain": "frontend",
"name": "Home Assistant Frontend",
"documentation": "https://www.home-assistant.io/integrations/frontend",
"requirements": ["home-assistant-frontend==20220502.0"],
"requirements": ["home-assistant-frontend==20220504.1"],
"dependencies": [
"api",
"auth",

@@ -3,7 +3,7 @@
"name": "Glances",
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/glances",
"requirements": ["glances_api==0.3.4"],
"requirements": ["glances_api==0.3.5"],
"codeowners": ["@engrbm87"],
"iot_class": "local_polling",
"loggers": ["glances_api"]

@@ -30,7 +30,6 @@ async def system_health_info(hass: HomeAssistant):
healthy = {
"type": "failed",
"error": "Unhealthy",
"more_info": "/hassio/system",
}

if supervisor_info.get("supported"):
@@ -39,7 +38,6 @@ async def system_health_info(hass: HomeAssistant):
supported = {
"type": "failed",
"error": "Unsupported",
"more_info": "/hassio/system",
}

information = {
@@ -63,7 +61,6 @@ async def system_health_info(hass: HomeAssistant):
information["version_api"] = system_health.async_check_can_reach_url(
hass,
f"https://version.home-assistant.io/{info.get('channel')}.json",
"/hassio/system",
)

information["installed_addons"] = ", ".join(

@@ -467,7 +467,7 @@ class OptionsFlowHandler(config_entries.OptionsFlow):
entity_filter = self.hk_options.get(CONF_FILTER, {})
entities = entity_filter.get(CONF_INCLUDE_ENTITIES, [])
all_supported_entities = _async_get_matching_entities(
self.hass, domains, include_entity_category=True
self.hass, domains, include_entity_category=True, include_hidden=True
)
# In accessory mode we can only have one
default_value = next(
@@ -508,7 +508,7 @@ class OptionsFlowHandler(config_entries.OptionsFlow):
entities = entity_filter.get(CONF_INCLUDE_ENTITIES, [])

all_supported_entities = _async_get_matching_entities(
self.hass, domains, include_entity_category=True
self.hass, domains, include_entity_category=True, include_hidden=True
)
if not entities:
entities = entity_filter.get(CONF_EXCLUDE_ENTITIES, [])
@@ -646,12 +646,13 @@ def _exclude_by_entity_registry(
ent_reg: entity_registry.EntityRegistry,
entity_id: str,
include_entity_category: bool,
include_hidden: bool,
) -> bool:
"""Filter out hidden entities and ones with entity category (unless specified)."""
return bool(
(entry := ent_reg.async_get(entity_id))
and (
entry.hidden_by is not None
(not include_hidden and entry.hidden_by is not None)
or (not include_entity_category and entry.entity_category is not None)
)
)
@@ -661,6 +662,7 @@ def _async_get_matching_entities(
hass: HomeAssistant,
domains: list[str] | None = None,
include_entity_category: bool = False,
include_hidden: bool = False,
) -> dict[str, str]:
"""Fetch all entities or entities in the given domains."""
ent_reg = entity_registry.async_get(hass)
@@ -671,7 +673,7 @@ def _async_get_matching_entities(
key=lambda item: item.entity_id,
)
if not _exclude_by_entity_registry(
ent_reg, state.entity_id, include_entity_category
ent_reg, state.entity_id, include_entity_category, include_hidden
)
}

@@ -49,7 +49,7 @@ async def async_get_device_config(hass, config_entry):
with suppress(AttributeError):
await devices[address].async_status()

load_aldb = devices.modem.aldb.read_write_mode == ReadWriteMode.UNKNOWN
load_aldb = 2 if devices.modem.aldb.read_write_mode == ReadWriteMode.UNKNOWN else 1
await devices.async_load(id_devices=1, load_modem_aldb=load_aldb)
for addr in devices:
device = devices[addr]

@@ -3,7 +3,7 @@
"name": "IQVIA",
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/iqvia",
"requirements": ["numpy==1.21.4", "pyiqvia==2022.04.0"],
"requirements": ["numpy==1.21.6", "pyiqvia==2022.04.0"],
"codeowners": ["@bachya"],
"iot_class": "cloud_polling",
"loggers": ["pyiqvia"]

@@ -3,7 +3,7 @@
"name": "KNX",
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/knx",
"requirements": ["xknx==0.21.1"],
"requirements": ["xknx==0.21.2"],
"codeowners": ["@Julius2342", "@farmio", "@marvin-w"],
"quality_scale": "platinum",
"iot_class": "local_push",

@@ -38,6 +38,7 @@ from .const import (
CONF_CA_CERTS,
CONF_CERTFILE,
CONF_KEYFILE,
CONFIG_URL,
DOMAIN,
LUTRON_CASETA_BUTTON_EVENT,
MANUFACTURER,
@@ -306,13 +307,15 @@ class LutronCasetaDevice(Entity):
self._device = device
self._smartbridge = bridge
self._bridge_device = bridge_device
if "serial" not in self._device:
return
info = DeviceInfo(
identifiers={(DOMAIN, self.serial)},
manufacturer=MANUFACTURER,
model=f"{device['model']} ({device['type']})",
name=self.name,
via_device=(DOMAIN, self._bridge_device["serial"]),
configuration_url="https://device-login.lutron.com",
configuration_url=CONFIG_URL,
)
area, _ = _area_and_name_from_name(device["name"])
if area != UNASSIGNED_AREA:

@@ -6,11 +6,14 @@ from homeassistant.components.binary_sensor import (
BinarySensorEntity,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import ATTR_SUGGESTED_AREA
from homeassistant.core import HomeAssistant
from homeassistant.helpers.device_registry import DeviceEntryType
from homeassistant.helpers.entity import DeviceInfo
from homeassistant.helpers.entity_platform import AddEntitiesCallback

from . import DOMAIN as CASETA_DOMAIN, LutronCasetaDevice
from .const import BRIDGE_DEVICE, BRIDGE_LEAP
from . import DOMAIN as CASETA_DOMAIN, LutronCasetaDevice, _area_and_name_from_name
from .const import BRIDGE_DEVICE, BRIDGE_LEAP, CONFIG_URL, MANUFACTURER, UNASSIGNED_AREA


async def async_setup_entry(
@@ -39,6 +42,23 @@ async def async_setup_entry(
class LutronOccupancySensor(LutronCasetaDevice, BinarySensorEntity):
"""Representation of a Lutron occupancy group."""

def __init__(self, device, bridge, bridge_device):
"""Init an occupancy sensor."""
super().__init__(device, bridge, bridge_device)
info = DeviceInfo(
identifiers={(CASETA_DOMAIN, self.unique_id)},
manufacturer=MANUFACTURER,
model="Lutron Occupancy",
name=self.name,
via_device=(CASETA_DOMAIN, self._bridge_device["serial"]),
configuration_url=CONFIG_URL,
entry_type=DeviceEntryType.SERVICE,
)
area, _ = _area_and_name_from_name(device["name"])
if area != UNASSIGNED_AREA:
info[ATTR_SUGGESTED_AREA] = area
self._attr_device_info = info

@property
def device_class(self):
"""Flag supported features."""
@@ -65,16 +85,6 @@ class LutronOccupancySensor(LutronCasetaDevice, BinarySensorEntity):
"""Return a unique identifier."""
return f"occupancygroup_{self.device_id}"

@property
def device_info(self):
"""Return the device info.

Sensor entities are aggregated from one or more physical
sensors by each room. Therefore, there shouldn't be devices
related to any sensor entities.
"""
return None

@property
def extra_state_attributes(self):
"""Return the state attributes."""

@@ -35,3 +35,5 @@ CONF_SUBTYPE = "subtype"
BRIDGE_TIMEOUT = 35

UNASSIGNED_AREA = "Unassigned"

CONFIG_URL = "https://device-login.lutron.com"
@@ -27,15 +27,18 @@ from .const import DOMAIN


@dataclass
class MeaterSensorEntityDescription(SensorEntityDescription):
"""Describes meater sensor entity."""
class MeaterSensorEntityDescriptionMixin:
"""Mixin for MeaterSensorEntityDescription."""

available: Callable[
[MeaterProbe | None], bool | type[NotImplementedError]
] = lambda x: NotImplementedError
value: Callable[
[MeaterProbe], datetime | float | str | None | type[NotImplementedError]
] = lambda x: NotImplementedError
available: Callable[[MeaterProbe | None], bool]
value: Callable[[MeaterProbe], datetime | float | str | None]


@dataclass
class MeaterSensorEntityDescription(
SensorEntityDescription, MeaterSensorEntityDescriptionMixin
):
"""Describes meater sensor entity."""


def _elapsed_time_to_timestamp(probe: MeaterProbe) -> datetime | None:
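The Meater hunk above swaps the `NotImplementedError`-returning lambda defaults for a required-keys mixin. The trick is dataclass field ordering: fields without defaults must precede fields with defaults, and fields are collected in reverse MRO order, so listing the mixin last in the bases puts its required fields first. A generic sketch of the pattern (names are illustrative, not the Meater code):

```python
from __future__ import annotations

from collections.abc import Callable
from dataclasses import dataclass


@dataclass
class EntityDescription:
    """Stand-in for SensorEntityDescription: has fields with defaults."""

    key: str
    name: str | None = None


@dataclass
class RequiredKeysMixin:
    """Required fields with no defaults live in the mixin."""

    value_fn: Callable[[float], float]


@dataclass
class MySensorDescription(EntityDescription, RequiredKeysMixin):
    """Combined description: value_fn is now mandatory at construction time."""


desc = MySensorDescription(key="power", value_fn=lambda x: x * 2)
print(desc.value_fn(21), desc.key, desc.name)  # 42 power None
# MySensorDescription(key="power") would raise TypeError: missing 'value_fn'
```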
@@ -108,7 +111,8 @@ SENSOR_TYPES = (
available=lambda probe: probe is not None and probe.cook is not None,
value=lambda probe: probe.cook.peak_temperature if probe.cook else None,
),
# Time since the start of cook in seconds. Default: 0.
# Remaining time in seconds. When unknown/calculating default is used. Default: -1
# Exposed as a TIMESTAMP sensor where the timestamp is current time + remaining time.
MeaterSensorEntityDescription(
key="cook_time_remaining",
device_class=SensorDeviceClass.TIMESTAMP,
@@ -116,7 +120,8 @@ SENSOR_TYPES = (
available=lambda probe: probe is not None and probe.cook is not None,
value=_remaining_time_to_timestamp,
),
# Remaining time in seconds. When unknown/calculating default is used. Default: -1
# Time since the start of cook in seconds. Default: 0. Exposed as a TIMESTAMP sensor
# where the timestamp is current time - elapsed time.
MeaterSensorEntityDescription(
key="cook_time_elapsed",
device_class=SensorDeviceClass.TIMESTAMP,
@@ -141,7 +146,7 @@ async def async_setup_entry(
if not coordinator.last_update_success:
return

devices = coordinator.data
devices: dict[str, MeaterProbe] = coordinator.data
entities = []
known_probes: set = hass.data[DOMAIN]["known_probes"]
@@ -31,6 +31,15 @@ def valid_topic(value: Any) -> str:
)
if "\0" in value:
raise vol.Invalid("MQTT topic name/filter must not contain null character.")
if any(char <= "\u001F" for char in value):
raise vol.Invalid("MQTT topic name/filter must not contain control characters.")
if any("\u007f" <= char <= "\u009F" for char in value):
raise vol.Invalid("MQTT topic name/filter must not contain control characters.")
if any("\ufdd0" <= char <= "\ufdef" for char in value):
raise vol.Invalid("MQTT topic name/filter must not contain non-characters.")
if any((ord(char) & 0xFFFF) in (0xFFFE, 0xFFFF) for char in value):
raise vol.Invalid("MQTT topic name/filter must not contain noncharacters.")

return value
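The MQTT hunk above extends `valid_topic` beyond the null-character check: the MQTT specification forbids U+0000 in topic names and recommends rejecting control characters (U+0001–U+001F, U+007F–U+009F) and Unicode noncharacters. A standalone sketch of the same checks (with `ValueError` in place of `vol.Invalid` so it runs without voluptuous):

```python
def check_topic(value: str) -> str:
    """Reject topics with characters the MQTT spec disallows or discourages."""
    if "\0" in value:
        raise ValueError("must not contain null character")
    if any(char <= "\u001f" for char in value) or any(
        "\u007f" <= char <= "\u009f" for char in value
    ):
        raise ValueError("must not contain control characters")
    if any("\ufdd0" <= char <= "\ufdef" for char in value) or any(
        (ord(char) & 0xFFFF) in (0xFFFE, 0xFFFF) for char in value
    ):
        raise ValueError("must not contain noncharacters")
    return value


print(check_topic("home/living_room/temperature"))  # passes unchanged
try:
    check_topic("home/bad\x1btopic")  # ESC (U+001B) is a control character
except ValueError as err:
    print(err)  # must not contain control characters
```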
@@ -3,7 +3,7 @@
"name": "Nettigo Air Monitor",
"documentation": "https://www.home-assistant.io/integrations/nam",
"codeowners": ["@bieniu"],
"requirements": ["nettigo-air-monitor==1.2.2"],
"requirements": ["nettigo-air-monitor==1.2.3"],
"zeroconf": [
{
"type": "_http._tcp.local.",

@@ -19,6 +19,7 @@ from homeassistant.const import (
)
from homeassistant.core import callback
from homeassistant.data_entry_flow import FlowResult
from homeassistant.util.network import is_ipv4_address

from .const import (
CONF_CONSIDER_HOME,
@@ -129,6 +130,9 @@ class NetgearFlowHandler(config_entries.ConfigFlow, domain=DOMAIN):
hostname = cast(str, hostname)
updated_data[CONF_HOST] = hostname

if not is_ipv4_address(str(hostname)):
return self.async_abort(reason="not_ipv4_address")

_LOGGER.debug("Netgear ssdp discovery info: %s", discovery_info)

await self.async_set_unique_id(discovery_info.upnp[ssdp.ATTR_UPNP_SERIAL])

@@ -2,7 +2,7 @@
"domain": "netgear",
"name": "NETGEAR",
"documentation": "https://www.home-assistant.io/integrations/netgear",
"requirements": ["pynetgear==0.9.4"],
"requirements": ["pynetgear==0.10.0"],
"codeowners": ["@hacf-fr", "@Quentame", "@starkillerOG"],
"iot_class": "local_polling",
"config_flow": true,

@@ -9,7 +9,7 @@
}
],
"documentation": "https://www.home-assistant.io/integrations/oncue",
"requirements": ["aiooncue==0.3.2"],
"requirements": ["aiooncue==0.3.4"],
"codeowners": ["@bdraco"],
"iot_class": "cloud_polling",
"loggers": ["aiooncue"]

@@ -2,7 +2,7 @@
"domain": "opencv",
"name": "OpenCV",
"documentation": "https://www.home-assistant.io/integrations/opencv",
"requirements": ["numpy==1.21.4", "opencv-python-headless==4.5.2.54"],
"requirements": ["numpy==1.21.6", "opencv-python-headless==4.5.2.54"],
"codeowners": [],
"iot_class": "local_push"
}

@@ -9,7 +9,7 @@ from homeassistant.components import cloud
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_API_KEY, Platform
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
from homeassistant.helpers import config_validation as cv

from .const import CONF_CLOUDHOOK_URL, CONF_MANUAL_RUN_MINS, CONF_WEBHOOK_ID, DOMAIN
@@ -73,6 +73,10 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
# Get the API user
try:
await person.async_setup(hass)
except ConfigEntryAuthFailed as error:
# Reauth is not yet implemented
_LOGGER.error("Authentication failed: %s", error)
return False
except ConnectTimeout as error:
_LOGGER.error("Could not reach the Rachio API: %s", error)
raise ConfigEntryNotReady from error

@@ -8,6 +8,7 @@ import voluptuous as vol

from homeassistant.const import EVENT_HOMEASSISTANT_STOP
from homeassistant.core import ServiceCall
from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
from homeassistant.helpers import config_validation as cv

from .const import (
@@ -125,12 +126,18 @@ class RachioPerson:
rachio = self.rachio

response = rachio.person.info()
assert int(response[0][KEY_STATUS]) == HTTPStatus.OK, "API key error"
if is_invalid_auth_code(int(response[0][KEY_STATUS])):
raise ConfigEntryAuthFailed(f"API key error: {response}")
if int(response[0][KEY_STATUS]) != HTTPStatus.OK:
raise ConfigEntryNotReady(f"API Error: {response}")
self._id = response[1][KEY_ID]

# Use user ID to get user data
data = rachio.person.get(self._id)
assert int(data[0][KEY_STATUS]) == HTTPStatus.OK, "User ID error"
if is_invalid_auth_code(int(data[0][KEY_STATUS])):
raise ConfigEntryAuthFailed(f"User ID error: {data}")
if int(data[0][KEY_STATUS]) != HTTPStatus.OK:
raise ConfigEntryNotReady(f"API Error: {data}")
self.username = data[1][KEY_USERNAME]
devices = data[1][KEY_DEVICES]
for controller in devices:
@@ -297,3 +304,11 @@ class RachioIro:
"""Resume paused watering on this controller."""
self.rachio.device.resume_zone_run(self.controller_id)
_LOGGER.debug("Resuming watering on %s", self)


def is_invalid_auth_code(http_status_code):
"""HTTP status codes that mean invalid auth."""
if http_status_code in (HTTPStatus.UNAUTHORIZED, HTTPStatus.FORBIDDEN):
return True

return False
@@ -79,6 +79,7 @@ from .const import (
)
from .executor import DBInterruptibleThreadPoolExecutor
from .models import (
SCHEMA_VERSION,
Base,
Events,
StateAttributes,
@@ -634,6 +635,7 @@ class Recorder(threading.Thread):
self.entity_filter = entity_filter
self.exclude_t = exclude_t

self.schema_version = 0
self._commits_without_expire = 0
self._old_states: dict[str, States] = {}
self._state_attributes_ids: LRU = LRU(STATE_ATTRIBUTES_ID_CACHE_SIZE)
@@ -973,6 +975,8 @@ class Recorder(threading.Thread):
self.hass.add_job(self.async_connection_failed)
return

self.schema_version = current_version

schema_is_current = migration.schema_is_current(current_version)
if schema_is_current:
self._setup_run()
@@ -994,6 +998,7 @@ class Recorder(threading.Thread):
# with startup which is also cpu intensive
if not schema_is_current:
if self._migrate_schema_and_setup_run(current_version):
self.schema_version = SCHEMA_VERSION
if not self._event_listener:
# If the schema migration takes so long that the end
# queue watcher safety kicks in because MAX_QUEUE_BACKLOG

@@ -116,7 +116,7 @@ def query_and_join_attributes(
# If we in the process of migrating schema we do
# not want to join the state_attributes table as we
# do not know if it will be there yet
if recorder.get_instance(hass).migration_in_progress:
if recorder.get_instance(hass).schema_version < 25:
return QUERY_STATES_PRE_SCHEMA_25, False
# Finally if no migration is in progress and no_attributes
# was not requested, we query both attributes columns and
@@ -146,7 +146,7 @@ def bake_query_and_join_attributes(
# If we in the process of migrating schema we do
# not want to join the state_attributes table as we
# do not know if it will be there yet
if recorder.get_instance(hass).migration_in_progress:
if recorder.get_instance(hass).schema_version < 25:
if include_last_updated:
return (
bakery(lambda session: session.query(*QUERY_STATES_PRE_SCHEMA_25)),

@@ -3,8 +3,6 @@
"name": "SABnzbd",
"documentation": "https://www.home-assistant.io/integrations/sabnzbd",
"requirements": ["pysabnzbd==1.1.1"],
"dependencies": ["configurator"],
"after_dependencies": ["discovery"],
"codeowners": ["@shaiu"],
"iot_class": "local_polling",
"config_flow": true,

@@ -14,8 +14,10 @@ from . import DOMAIN, SIGNAL_SABNZBD_UPDATED
from ...config_entries import ConfigEntry
from ...const import DATA_GIGABYTES, DATA_MEGABYTES, DATA_RATE_MEGABYTES_PER_SECOND
from ...core import HomeAssistant
from ...helpers.device_registry import DeviceEntryType
from ...helpers.entity import DeviceInfo
from ...helpers.entity_platform import AddEntitiesCallback
from .const import KEY_API_DATA, KEY_NAME
from .const import DEFAULT_NAME, KEY_API_DATA, KEY_NAME


@dataclass
@@ -30,13 +32,15 @@ class SabnzbdSensorEntityDescription(SensorEntityDescription, SabnzbdRequiredKey
"""Describes Sabnzbd sensor entity."""


SPEED_KEY = "kbpersec"

SENSOR_TYPES: tuple[SabnzbdSensorEntityDescription, ...] = (
SabnzbdSensorEntityDescription(
key="status",
name="Status",
),
SabnzbdSensorEntityDescription(
key="kbpersec",
key=SPEED_KEY,
name="Speed",
native_unit_of_measurement=DATA_RATE_MEGABYTES_PER_SECOND,
state_class=SensorStateClass.MEASUREMENT,
@@ -127,9 +131,16 @@ class SabnzbdSensor(SensorEntity):
self, sabnzbd_api_data, client_name, description: SabnzbdSensorEntityDescription
):
"""Initialize the sensor."""
unique_id = description.key
self._attr_unique_id = unique_id
self.entity_description = description
self._sabnzbd_api = sabnzbd_api_data
self._attr_name = f"{client_name} {description.name}"
self._attr_device_info = DeviceInfo(
entry_type=DeviceEntryType.SERVICE,
identifiers={(DOMAIN, DOMAIN)},
name=DEFAULT_NAME,
)

async def async_added_to_hass(self):
"""Call when entity about to be added to hass."""
@@ -145,7 +156,7 @@ class SabnzbdSensor(SensorEntity):
self.entity_description.key
)

if self.entity_description.key == "speed":
if self.entity_description.key == SPEED_KEY:
self._attr_native_value = round(float(self._attr_native_value) / 1024, 1)
elif "size" in self.entity_description.key:
self._attr_native_value = round(float(self._attr_native_value), 2)
@@ -30,7 +30,12 @@ import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.debounce import Debouncer
from homeassistant.helpers.typing import ConfigType

from .bridge import SamsungTVBridge, async_get_device_info, mac_from_device_info
from .bridge import (
SamsungTVBridge,
async_get_device_info,
mac_from_device_info,
model_requires_encryption,
)
from .const import (
CONF_ON_ACTION,
CONF_SESSION_ID,
@@ -214,11 +219,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
return True


def _model_requires_encryption(model: str | None) -> bool:
"""H and J models need pairing with PIN."""
return model is not None and len(model) > 4 and model[4] in ("H", "J")


async def _async_create_bridge_with_updated_data(
hass: HomeAssistant, entry: ConfigEntry
) -> SamsungTVBridge:
@@ -279,7 +279,7 @@ async def _async_create_bridge_with_updated_data(
LOGGER.info("Updated model to %s for %s", model, host)
updated_data[CONF_MODEL] = model

if _model_requires_encryption(model) and method != METHOD_ENCRYPTED_WEBSOCKET:
if model_requires_encryption(model) and method != METHOD_ENCRYPTED_WEBSOCKET:
LOGGER.info(
"Detected model %s for %s. Some televisions from H and J series use "
"an encrypted protocol but you are using %s which may not be supported",

@@ -85,6 +85,11 @@ def mac_from_device_info(info: dict[str, Any]) -> str | None:
return None


def model_requires_encryption(model: str | None) -> bool:
"""H and J models need pairing with PIN."""
return model is not None and len(model) > 4 and model[4] in ("H", "J")


async def async_get_device_info(
hass: HomeAssistant,
host: str,
@@ -99,17 +104,19 @@
port,
info,
)
encrypted_bridge = SamsungTVEncryptedBridge(
hass, METHOD_ENCRYPTED_WEBSOCKET, host, ENCRYPTED_WEBSOCKET_PORT
)
result = await encrypted_bridge.async_try_connect()
if result != RESULT_CANNOT_CONNECT:
return (
result,
ENCRYPTED_WEBSOCKET_PORT,
METHOD_ENCRYPTED_WEBSOCKET,
info,
# Check the encrypted port if the model requires encryption
if model_requires_encryption(info.get("device", {}).get("modelName")):
encrypted_bridge = SamsungTVEncryptedBridge(
hass, METHOD_ENCRYPTED_WEBSOCKET, host, ENCRYPTED_WEBSOCKET_PORT
)
result = await encrypted_bridge.async_try_connect()
if result != RESULT_CANNOT_CONNECT:
return (
result,
ENCRYPTED_WEBSOCKET_PORT,
METHOD_ENCRYPTED_WEBSOCKET,
info,
)
return RESULT_SUCCESS, port, METHOD_WEBSOCKET, info

# Try legacy port
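The `model_requires_encryption()` helper added above keys off the fifth character of the model string, which encodes the series; with it, `async_get_device_info` only probes the encrypted-websocket port for H- and J-series sets. A quick check against hypothetical model strings:

```python
from __future__ import annotations


def model_requires_encryption(model: str | None) -> bool:
    """H and J models need pairing with PIN."""
    return model is not None and len(model) > 4 and model[4] in ("H", "J")


print(model_requires_encryption("UE48H6400"))   # True  - H series (hypothetical example)
print(model_requires_encryption("UE48MU6400"))  # False - M series
print(model_requires_encryption(None))          # False - no model known yet
```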
@@ -42,6 +42,7 @@ from homeassistant.helpers.script import (
|
||||
CONF_MAX,
|
||||
CONF_MAX_EXCEEDED,
|
||||
Script,
|
||||
script_stack_cv,
|
||||
)
|
||||
from homeassistant.helpers.service import async_set_service_schema
|
||||
from homeassistant.helpers.trace import trace_get, trace_path
|
||||
@@ -398,10 +399,14 @@ class ScriptEntity(ToggleEntity, RestoreEntity):
|
||||
return
|
||||
|
||||
# Caller does not want to wait for called script to finish so let script run in
|
||||
# separate Task. However, wait for first state change so we can guarantee that
|
||||
# it is written to the State Machine before we return.
|
||||
# separate Task. Make a new empty script stack; scripts are allowed to
|
||||
# recursively turn themselves on when not waiting.
|
||||
script_stack_cv.set([])
|
||||
|
||||
self._changed.clear()
|
||||
self.hass.async_create_task(coro)
|
||||
# Wait for first state change so we can guarantee that
|
||||
# it is written to the State Machine before we return.
|
||||
await self._changed.wait()
|
||||
|
||||
async def _async_run(self, variables, context):
|
||||
|
||||
@@ -4,7 +4,7 @@
|
||||
"config_flow": true,
|
||||
"iot_class": "local_push",
|
||||
"documentation": "https://www.home-assistant.io/integrations/slimproto",
|
||||
"requirements": ["aioslimproto==1.0.0"],
|
||||
"requirements": ["aioslimproto==2.0.1"],
|
||||
"codeowners": ["@marcelveldt"],
|
||||
"after_dependencies": ["media_source"]
|
||||
}
|
||||
|
||||
@@ -118,7 +118,7 @@ class SlimProtoPlayer(MediaPlayerEntity):
|
||||
EventType.PLAYER_CONNECTED,
|
||||
EventType.PLAYER_DISCONNECTED,
|
||||
EventType.PLAYER_NAME_RECEIVED,
|
||||
EventType.PLAYER_RPC_EVENT,
|
||||
EventType.PLAYER_CLI_EVENT,
|
||||
),
|
||||
player_filter=self.player.player_id,
|
||||
)
|
||||
@@ -205,7 +205,7 @@ class SlimProtoPlayer(MediaPlayerEntity):
|
||||
if event.type == EventType.PLAYER_CONNECTED:
|
||||
# player reconnected, update our player object
|
||||
self.player = self.slimserver.get_player(event.player_id)
|
||||
if event.type == EventType.PLAYER_RPC_EVENT:
|
||||
if event.type == EventType.PLAYER_CLI_EVENT:
|
||||
# rpc event from player such as a button press,
|
||||
# forward on the eventbus for others to handle
|
||||
dev_id = self.registry_entry.device_id if self.registry_entry else None
|
||||
|
||||
@@ -7,8 +7,14 @@ from homeassistant.core import HomeAssistant
|
||||
from .const import PLATFORMS
|
||||
|
||||
|
||||
async def async_update_listener(hass: HomeAssistant, entry: ConfigEntry) -> None:
|
||||
"""Update listener for options."""
|
||||
await hass.config_entries.async_reload(entry.entry_id)
|
||||
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
"""Set up SQL from a config entry."""
|
||||
entry.async_on_unload(entry.add_update_listener(async_update_listener))
|
||||
|
||||
hass.config_entries.async_setup_platforms(entry, PLATFORMS)
|
||||
|
||||
|
||||
@@ -26,7 +26,9 @@ DATA_SCHEMA = vol.Schema(
|
||||
vol.Required(CONF_NAME, default="Select SQL Query"): selector.TextSelector(),
|
||||
vol.Optional(CONF_DB_URL): selector.TextSelector(),
|
||||
vol.Required(CONF_COLUMN_NAME): selector.TextSelector(),
|
||||
vol.Required(CONF_QUERY): selector.TextSelector(),
|
||||
vol.Required(CONF_QUERY): selector.TextSelector(
|
||||
selector.TextSelectorConfig(multiline=True)
|
||||
),
|
||||
vol.Optional(CONF_UNIT_OF_MEASUREMENT): selector.TextSelector(),
|
||||
vol.Optional(CONF_VALUE_TEMPLATE): selector.TemplateSelector(),
|
||||
}
|
||||
@@ -165,7 +167,14 @@ class SQLOptionsFlowHandler(config_entries.OptionsFlow):
|
||||
except ValueError:
|
||||
errors["query"] = "query_invalid"
|
||||
else:
|
||||
return self.async_create_entry(title="", data=user_input)
|
||||
return self.async_create_entry(
|
||||
title="",
|
||||
data={
|
||||
CONF_NAME: self.entry.title,
|
||||
**self.entry.options,
|
||||
**user_input,
|
||||
},
|
||||
)
|
||||
|
||||
return self.async_show_form(
|
||||
step_id="init",
|
||||
@@ -180,7 +189,9 @@ class SQLOptionsFlowHandler(config_entries.OptionsFlow):
|
||||
vol.Required(
|
||||
CONF_QUERY,
|
||||
description={"suggested_value": self.entry.options[CONF_QUERY]},
|
||||
): selector.TextSelector(),
|
||||
): selector.TextSelector(
|
||||
selector.TextSelectorConfig(multiline=True)
|
||||
),
|
||||
vol.Required(
|
||||
CONF_COLUMN_NAME,
|
||||
description={
|
||||
|
||||
@@ -17,8 +17,9 @@ from homeassistant.const import (
|
||||
CONF_ICON_TEMPLATE,
|
||||
CONF_NAME,
|
||||
EVENT_HOMEASSISTANT_START,
|
||||
STATE_UNKNOWN,
|
||||
)
|
||||
from homeassistant.core import CoreState, Event, callback
|
||||
from homeassistant.core import CoreState, Event, State, callback
|
||||
from homeassistant.exceptions import TemplateError
|
||||
import homeassistant.helpers.config_validation as cv
|
||||
from homeassistant.helpers.entity import Entity
|
||||
@@ -251,13 +252,28 @@ class TemplateEntity(Entity):
|
||||
self._entity_picture_template = config.get(CONF_PICTURE)
|
||||
self._friendly_name_template = config.get(CONF_NAME)
|
||||
|
||||
class DummyState(State):
|
||||
"""None-state for template entities not yet added to the state machine."""
|
||||
|
||||
def __init__(self) -> None:
|
||||
"""Initialize a new state."""
|
||||
super().__init__("unknown.unknown", STATE_UNKNOWN)
|
||||
self.entity_id = None # type: ignore[assignment]
|
||||
|
||||
@property
|
||||
def name(self) -> str:
|
||||
"""Name of this state."""
|
||||
return "<None>"
|
||||
|
||||
variables = {"this": DummyState()}
|
||||
|
||||
# Try to render the name as it can influence the entity ID
|
||||
self._attr_name = fallback_name
|
||||
if self._friendly_name_template:
|
||||
self._friendly_name_template.hass = hass
|
||||
with contextlib.suppress(TemplateError):
|
||||
self._attr_name = self._friendly_name_template.async_render(
|
||||
parse_result=False
|
||||
variables=variables, parse_result=False
|
||||
)
|
||||
|
||||
# Templates will not render while the entity is unavailable, try to render the
|
||||
@@ -266,13 +282,15 @@ class TemplateEntity(Entity):
|
||||
self._entity_picture_template.hass = hass
|
||||
with contextlib.suppress(TemplateError):
|
||||
self._attr_entity_picture = self._entity_picture_template.async_render(
|
||||
parse_result=False
|
||||
variables=variables, parse_result=False
|
||||
)
|
||||
|
||||
if self._icon_template:
|
||||
self._icon_template.hass = hass
|
||||
with contextlib.suppress(TemplateError):
|
||||
self._attr_icon = self._icon_template.async_render(parse_result=False)
|
||||
self._attr_icon = self._icon_template.async_render(
|
||||
variables=variables, parse_result=False
|
||||
)
|
||||
|
||||
@callback
|
||||
def _update_available(self, result):
|
||||
@@ -373,10 +391,10 @@ class TemplateEntity(Entity):
|
||||
template_var_tups: list[TrackTemplate] = []
|
||||
has_availability_template = False
|
||||
|
||||
values = {"this": TemplateStateFromEntityId(self.hass, self.entity_id)}
|
||||
variables = {"this": TemplateStateFromEntityId(self.hass, self.entity_id)}
|
||||
|
||||
for template, attributes in self._template_attrs.items():
|
||||
template_var_tup = TrackTemplate(template, values)
|
||||
template_var_tup = TrackTemplate(template, variables)
|
||||
is_availability_template = False
|
||||
for attribute in attributes:
|
||||
# pylint: disable-next=protected-access
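The hunks above pass a `variables` mapping containing a `this` state into `async_render`, so name, icon and picture templates that reference `this` no longer fail before the entity has a real state. A rough sketch of the idea, assuming a running `hass` instance is available:

```python
# Rough sketch: render a template with a stand-in `this` state before the
# entity exists in the state machine (assumes `hass` is an initialized instance).
from homeassistant.const import STATE_UNKNOWN
from homeassistant.core import State
from homeassistant.helpers.template import Template

placeholder = State("unknown.unknown", STATE_UNKNOWN)  # stands in for `this`
name_template = Template("{{ 'Sensor ' ~ this.state }}", hass)
rendered = name_template.async_render(
    variables={"this": placeholder}, parse_result=False
)
```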
@@ -6,7 +6,7 @@
|
||||
"tensorflow==2.5.0",
|
||||
"tf-models-official==2.5.0",
|
||||
"pycocotools==2.0.1",
|
||||
"numpy==1.21.4",
|
||||
"numpy==1.21.6",
|
||||
"pillow==9.1.0"
|
||||
],
|
||||
"codeowners": [],
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
"domain": "totalconnect",
|
||||
"name": "Total Connect",
|
||||
"documentation": "https://www.home-assistant.io/integrations/totalconnect",
|
||||
"requirements": ["total_connect_client==2022.3"],
|
||||
"requirements": ["total_connect_client==2022.5"],
|
||||
"dependencies": [],
|
||||
"codeowners": ["@austinmroczek"],
|
||||
"config_flow": true,
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
"domain": "trend",
|
||||
"name": "Trend",
|
||||
"documentation": "https://www.home-assistant.io/integrations/trend",
|
||||
"requirements": ["numpy==1.21.4"],
|
||||
"requirements": ["numpy==1.21.6"],
|
||||
"codeowners": [],
|
||||
"quality_scale": "internal",
|
||||
"iot_class": "local_push"
|
||||
|
||||
homeassistant/components/ukraine_alarm/__init__.py (new file, 79 lines)
@@ -0,0 +1,79 @@
"""The ukraine_alarm component."""
from __future__ import annotations

from datetime import timedelta
import logging
from typing import Any

import aiohttp
from aiohttp import ClientSession
from ukrainealarm.client import Client

from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_API_KEY, CONF_REGION
from homeassistant.core import HomeAssistant
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed

from .const import ALERT_TYPES, DOMAIN, PLATFORMS

_LOGGER = logging.getLogger(__name__)

UPDATE_INTERVAL = timedelta(seconds=10)


async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    """Set up Ukraine Alarm as config entry."""
    api_key = entry.data[CONF_API_KEY]
    region_id = entry.data[CONF_REGION]

    websession = async_get_clientsession(hass)

    coordinator = UkraineAlarmDataUpdateCoordinator(
        hass, websession, api_key, region_id
    )
    await coordinator.async_config_entry_first_refresh()

    hass.data.setdefault(DOMAIN, {})[entry.entry_id] = coordinator

    hass.config_entries.async_setup_platforms(entry, PLATFORMS)

    return True


async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    """Unload a config entry."""
    if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS):
        hass.data[DOMAIN].pop(entry.entry_id)

    return unload_ok


class UkraineAlarmDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]):
    """Class to manage fetching Ukraine Alarm API."""

    def __init__(
        self,
        hass: HomeAssistant,
        session: ClientSession,
        api_key: str,
        region_id: str,
    ) -> None:
        """Initialize."""
        self.region_id = region_id
        self.ukrainealarm = Client(session, api_key)

        super().__init__(hass, _LOGGER, name=DOMAIN, update_interval=UPDATE_INTERVAL)

    async def _async_update_data(self) -> dict[str, Any]:
        """Update data via library."""
        try:
            res = await self.ukrainealarm.get_alerts(self.region_id)
        except aiohttp.ClientError as error:
            raise UpdateFailed(f"Error fetching alerts from API: {error}") from error

        current = {alert_type: False for alert_type in ALERT_TYPES}
        for alert in res[0]["activeAlerts"]:
            current[alert["type"]] = True

        return current
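The coordinator above reduces the API response to a simple per-alert-type boolean map. A standalone sketch of the same reduction using the ukrainealarm client directly; the API key and region id are placeholders you would need to supply:

```python
# Standalone sketch of the coordinator's update step (API key/region are placeholders).
import asyncio

import aiohttp
from ukrainealarm.client import Client

ALERT_TYPES = {"UNKNOWN", "AIR", "ARTILLERY", "URBAN_FIGHTS"}


async def fetch_alert_flags(api_key: str, region_id: str) -> dict[str, bool]:
    """Return {alert_type: active?} for one region, like _async_update_data."""
    async with aiohttp.ClientSession() as session:
        res = await Client(session, api_key).get_alerts(region_id)
    flags = {alert_type: False for alert_type in ALERT_TYPES}
    for alert in res[0]["activeAlerts"]:
        flags[alert["type"]] = True
    return flags


if __name__ == "__main__":
    print(asyncio.run(fetch_alert_flags("YOUR_API_KEY", "123")))
```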
homeassistant/components/ukraine_alarm/binary_sensor.py (new file, 106 lines)
@@ -0,0 +1,106 @@
"""binary sensors for Ukraine Alarm integration."""
from __future__ import annotations

from homeassistant.components.binary_sensor import (
    BinarySensorDeviceClass,
    BinarySensorEntity,
    BinarySensorEntityDescription,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_NAME
from homeassistant.core import HomeAssistant
from homeassistant.helpers.device_registry import DeviceEntryType
from homeassistant.helpers.entity import DeviceInfo
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.update_coordinator import CoordinatorEntity

from . import UkraineAlarmDataUpdateCoordinator
from .const import (
    ALERT_TYPE_AIR,
    ALERT_TYPE_ARTILLERY,
    ALERT_TYPE_UNKNOWN,
    ALERT_TYPE_URBAN_FIGHTS,
    ATTRIBUTION,
    DOMAIN,
    MANUFACTURER,
)

BINARY_SENSOR_TYPES: tuple[BinarySensorEntityDescription, ...] = (
    BinarySensorEntityDescription(
        key=ALERT_TYPE_UNKNOWN,
        name="Unknown",
        device_class=BinarySensorDeviceClass.SAFETY,
    ),
    BinarySensorEntityDescription(
        key=ALERT_TYPE_AIR,
        name="Air",
        device_class=BinarySensorDeviceClass.SAFETY,
        icon="mdi:cloud",
    ),
    BinarySensorEntityDescription(
        key=ALERT_TYPE_URBAN_FIGHTS,
        name="Urban Fights",
        device_class=BinarySensorDeviceClass.SAFETY,
        icon="mdi:pistol",
    ),
    BinarySensorEntityDescription(
        key=ALERT_TYPE_ARTILLERY,
        name="Artillery",
        device_class=BinarySensorDeviceClass.SAFETY,
        icon="mdi:tank",
    ),
)


async def async_setup_entry(
    hass: HomeAssistant,
    config_entry: ConfigEntry,
    async_add_entities: AddEntitiesCallback,
) -> None:
    """Set up Ukraine Alarm binary sensor entities based on a config entry."""
    name = config_entry.data[CONF_NAME]
    coordinator = hass.data[DOMAIN][config_entry.entry_id]

    async_add_entities(
        UkraineAlarmSensor(
            name,
            config_entry.unique_id,
            description,
            coordinator,
        )
        for description in BINARY_SENSOR_TYPES
    )


class UkraineAlarmSensor(
    CoordinatorEntity[UkraineAlarmDataUpdateCoordinator], BinarySensorEntity
):
    """Class for a Ukraine Alarm binary sensor."""

    _attr_attribution = ATTRIBUTION

    def __init__(
        self,
        name,
        unique_id,
        description: BinarySensorEntityDescription,
        coordinator: UkraineAlarmDataUpdateCoordinator,
    ) -> None:
        """Initialize the sensor."""
        super().__init__(coordinator)

        self.entity_description = description

        self._attr_name = f"{name} {description.name}"
        self._attr_unique_id = f"{unique_id}-{description.key}".lower()
        self._attr_device_info = DeviceInfo(
            entry_type=DeviceEntryType.SERVICE,
            identifiers={(DOMAIN, unique_id)},
            manufacturer=MANUFACTURER,
            name=name,
        )

    @property
    def is_on(self) -> bool | None:
        """Return true if the binary sensor is on."""
        return self.coordinator.data.get(self.entity_description.key, None)
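Each description's `key` doubles as the lookup key into the coordinator's data dict, so `is_on` is a plain `dict.get`. A tiny illustration with sample data, no Home Assistant runtime needed:

```python
# Plain-Python illustration of the is_on lookup: description.key -> coordinator data.
coordinator_data = {"UNKNOWN": False, "AIR": True, "ARTILLERY": False, "URBAN_FIGHTS": False}
sensor_keys = ["UNKNOWN", "AIR", "URBAN_FIGHTS", "ARTILLERY"]

for key in sensor_keys:
    # Mirrors UkraineAlarmSensor.is_on: a missing key yields None (state unknown).
    print(key, coordinator_data.get(key, None))
```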
homeassistant/components/ukraine_alarm/config_flow.py (new file, 154 lines)
@@ -0,0 +1,154 @@
|
||||
"""Config flow for Ukraine Alarm."""
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
|
||||
import aiohttp
|
||||
from ukrainealarm.client import Client
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant import config_entries
|
||||
from homeassistant.const import CONF_API_KEY, CONF_NAME, CONF_REGION
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
|
||||
from .const import DOMAIN
|
||||
|
||||
|
||||
class UkraineAlarmConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
|
||||
"""Config flow for Ukraine Alarm."""
|
||||
|
||||
VERSION = 1
|
||||
|
||||
def __init__(self):
|
||||
"""Initialize a new UkraineAlarmConfigFlow."""
|
||||
self.api_key = None
|
||||
self.states = None
|
||||
self.selected_region = None
|
||||
|
||||
async def async_step_user(self, user_input=None):
|
||||
"""Handle a flow initialized by the user."""
|
||||
errors = {}
|
||||
|
||||
if user_input is not None:
|
||||
websession = async_get_clientsession(self.hass)
|
||||
try:
|
||||
regions = await Client(
|
||||
websession, user_input[CONF_API_KEY]
|
||||
).get_regions()
|
||||
except aiohttp.ClientResponseError as ex:
|
||||
errors["base"] = "invalid_api_key" if ex.status == 401 else "unknown"
|
||||
except aiohttp.ClientConnectionError:
|
||||
errors["base"] = "cannot_connect"
|
||||
except aiohttp.ClientError:
|
||||
errors["base"] = "unknown"
|
||||
except asyncio.TimeoutError:
|
||||
errors["base"] = "timeout"
|
||||
|
||||
if not errors and not regions:
|
||||
errors["base"] = "unknown"
|
||||
|
||||
if not errors:
|
||||
self.api_key = user_input[CONF_API_KEY]
|
||||
self.states = regions["states"]
|
||||
return await self.async_step_state()
|
||||
|
||||
schema = vol.Schema(
|
||||
{
|
||||
vol.Required(CONF_API_KEY): str,
|
||||
}
|
||||
)
|
||||
|
||||
return self.async_show_form(
|
||||
step_id="user",
|
||||
data_schema=schema,
|
||||
description_placeholders={"api_url": "https://api.ukrainealarm.com/"},
|
||||
errors=errors,
|
||||
last_step=False,
|
||||
)
|
||||
|
||||
async def async_step_state(self, user_input=None):
|
||||
"""Handle user-chosen state."""
|
||||
return await self._handle_pick_region("state", "district", user_input)
|
||||
|
||||
async def async_step_district(self, user_input=None):
|
||||
"""Handle user-chosen district."""
|
||||
return await self._handle_pick_region("district", "community", user_input)
|
||||
|
||||
async def async_step_community(self, user_input=None):
|
||||
"""Handle user-chosen community."""
|
||||
return await self._handle_pick_region("community", None, user_input, True)
|
||||
|
||||
async def _handle_pick_region(
|
||||
self, step_id: str, next_step: str | None, user_input, last_step=False
|
||||
):
|
||||
"""Handle picking a (sub)region."""
|
||||
if self.selected_region:
|
||||
source = self.selected_region["regionChildIds"]
|
||||
else:
|
||||
source = self.states
|
||||
|
||||
if user_input is not None:
|
||||
# Only offer to browse subchildren if picked region wasn't the previously picked one
|
||||
if (
|
||||
not self.selected_region
|
||||
or user_input[CONF_REGION] != self.selected_region["regionId"]
|
||||
):
|
||||
self.selected_region = _find(source, user_input[CONF_REGION])
|
||||
|
||||
if next_step and self.selected_region["regionChildIds"]:
|
||||
return await getattr(self, f"async_step_{next_step}")()
|
||||
|
||||
return await self._async_finish_flow()
|
||||
|
||||
regions = {}
|
||||
if self.selected_region:
|
||||
regions[self.selected_region["regionId"]] = self.selected_region[
|
||||
"regionName"
|
||||
]
|
||||
|
||||
regions.update(_make_regions_object(source))
|
||||
|
||||
schema = vol.Schema(
|
||||
{
|
||||
vol.Required(CONF_REGION): vol.In(regions),
|
||||
}
|
||||
)
|
||||
|
||||
return self.async_show_form(
|
||||
step_id=step_id, data_schema=schema, last_step=last_step
|
||||
)
|
||||
|
||||
async def _async_finish_flow(self):
|
||||
"""Finish the setup."""
|
||||
await self.async_set_unique_id(self.selected_region["regionId"])
|
||||
self._abort_if_unique_id_configured()
|
||||
|
||||
return self.async_create_entry(
|
||||
title=self.selected_region["regionName"],
|
||||
data={
|
||||
CONF_API_KEY: self.api_key,
|
||||
CONF_REGION: self.selected_region["regionId"],
|
||||
CONF_NAME: self.selected_region["regionName"],
|
||||
},
|
||||
)
|
||||
|
||||
|
||||
def _find(regions, region_id):
|
||||
return next((region for region in regions if region["regionId"] == region_id), None)
|
||||
|
||||
|
||||
def _make_regions_object(regions):
|
||||
regions_list = []
|
||||
for region in regions:
|
||||
regions_list.append(
|
||||
{
|
||||
"id": region["regionId"],
|
||||
"name": region["regionName"],
|
||||
}
|
||||
)
|
||||
regions_list = sorted(regions_list, key=lambda region: region["name"].lower())
|
||||
regions_object = {}
|
||||
for region in regions_list:
|
||||
regions_object[region["id"]] = region["name"]
|
||||
|
||||
return regions_object
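`_find` and `_make_regions_object` above operate on plain region dicts, so their behaviour can be seen without a config flow. A small usage sketch with made-up region data and condensed but equivalent versions of the two helpers:

```python
# Usage sketch for the region helpers above, with made-up region data.
regions = [
    {"regionId": "2", "regionName": "Kyiv", "regionChildIds": []},
    {"regionId": "1", "regionName": "Chernihiv", "regionChildIds": []},
]


def _find(regions, region_id):
    return next((region for region in regions if region["regionId"] == region_id), None)


def _make_regions_object(regions):
    ordered = sorted(regions, key=lambda region: region["regionName"].lower())
    return {region["regionId"]: region["regionName"] for region in ordered}


print(_find(regions, "2"))            # the Kyiv dict
print(_make_regions_object(regions))  # {'1': 'Chernihiv', '2': 'Kyiv'}, sorted by name
```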
homeassistant/components/ukraine_alarm/const.py (new file, 19 lines)
@@ -0,0 +1,19 @@
"""Consts for the Ukraine Alarm."""
from __future__ import annotations

from homeassistant.const import Platform

DOMAIN = "ukraine_alarm"
ATTRIBUTION = "Data provided by Ukraine Alarm"
MANUFACTURER = "Ukraine Alarm"
ALERT_TYPE_UNKNOWN = "UNKNOWN"
ALERT_TYPE_AIR = "AIR"
ALERT_TYPE_ARTILLERY = "ARTILLERY"
ALERT_TYPE_URBAN_FIGHTS = "URBAN_FIGHTS"
ALERT_TYPES = {
    ALERT_TYPE_UNKNOWN,
    ALERT_TYPE_AIR,
    ALERT_TYPE_ARTILLERY,
    ALERT_TYPE_URBAN_FIGHTS,
}
PLATFORMS = [Platform.BINARY_SENSOR]
homeassistant/components/ukraine_alarm/manifest.json (new file, 9 lines)
@@ -0,0 +1,9 @@
{
  "domain": "ukraine_alarm",
  "name": "Ukraine Alarm",
  "config_flow": true,
  "documentation": "https://www.home-assistant.io/integrations/ukraine_alarm",
  "requirements": ["ukrainealarm==0.0.1"],
  "codeowners": ["@PaulAnnekov"],
  "iot_class": "cloud_polling"
}
homeassistant/components/ukraine_alarm/strings.json (new file, 39 lines)
@@ -0,0 +1,39 @@
|
||||
{
|
||||
"config": {
|
||||
"abort": {
|
||||
"already_configured": "[%key:common::config_flow::abort::already_configured_location%]"
|
||||
},
|
||||
"error": {
|
||||
"invalid_api_key": "[%key:common::config_flow::error::invalid_api_key%]",
|
||||
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
|
||||
"unknown": "[%key:common::config_flow::error::unknown%]",
|
||||
"timeout": "[%key:common::config_flow::error::timeout_connect%]"
|
||||
},
|
||||
"step": {
|
||||
"user": {
|
||||
"data": {
|
||||
"api_key": "[%key:common::config_flow::data::api_key%]"
|
||||
},
|
||||
"description": "Set up the Ukraine Alarm integration. To generate an API key go to {api_url}"
|
||||
},
|
||||
"state": {
|
||||
"data": {
|
||||
"region": "Region"
|
||||
},
|
||||
"description": "Choose state to monitor"
|
||||
},
|
||||
"district": {
|
||||
"data": {
|
||||
"region": "[%key:component::ukraine_alarm::config::step::state::data::region%]"
|
||||
},
|
||||
"description": "If you want to monitor not only state, choose its specific district"
|
||||
},
|
||||
"community": {
|
||||
"data": {
|
||||
"region": "[%key:component::ukraine_alarm::config::step::state::data::region%]"
|
||||
},
|
||||
"description": "If you want to monitor not only state and district, choose its specific community"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
homeassistant/components/ukraine_alarm/translations/en.json (new file, 28 lines)
@@ -0,0 +1,28 @@
|
||||
{
|
||||
"config": {
|
||||
"step": {
|
||||
"user": {
|
||||
"description": "Set up the Ukraine Alarm integration. To generate an API key go to {api_url}",
|
||||
"title": "Ukraine Alarm"
|
||||
},
|
||||
"state": {
|
||||
"data": {
|
||||
"region": "Region"
|
||||
},
|
||||
"description": "Choose state to monitor"
|
||||
},
|
||||
"district": {
|
||||
"data": {
|
||||
"region": "Region"
|
||||
},
|
||||
"description": "If you want to monitor not only state, choose its specific district"
|
||||
},
|
||||
"community": {
|
||||
"data": {
|
||||
"region": "Region"
|
||||
},
|
||||
"description": "If you want to monitor not only state and district, choose its specific community"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
homeassistant/components/ukraine_alarm/translations/ru.json (new file, 28 lines)
@@ -0,0 +1,28 @@
|
||||
{
|
||||
"config": {
|
||||
"step": {
|
||||
"user": {
|
||||
"description": "\u041d\u0430\u0441\u0442\u0440\u043e\u0439\u043a\u0430 Home Assistant \u0434\u043b\u044f\u0020\u0438\u043d\u0442\u0435\u0433\u0440\u0430\u0446\u0438\u0438\u0020\u0441 Ukraine Alarm. \u0414\u043b\u044f\u0020\u043f\u043e\u043b\u0443\u0447\u0435\u043d\u0438\u044f\u0020\u043a\u043b\u044e\u0447\u0430 API, \u043f\u0435\u0440\u0435\u0439\u0434\u0438\u0442\u0435\u0020\u043d\u0430 {api_url}.",
|
||||
"title": "Ukraine Alarm"
|
||||
},
|
||||
"state": {
|
||||
"data": {
|
||||
"region": "\u0420\u0435\u0433\u0438\u043e\u043d"
|
||||
},
|
||||
"description": "\u0412\u044b\u0431\u0435\u0440\u0438\u0442\u0435\u0020\u043e\u0431\u043b\u0430\u0441\u0442\u044c\u0020\u0434\u043b\u044f\u0020\u043c\u043e\u043d\u0438\u0442\u043e\u0440\u0438\u043d\u0433\u0430"
|
||||
},
|
||||
"district": {
|
||||
"data": {
|
||||
"region": "\u0420\u0435\u0433\u0438\u043e\u043d"
|
||||
},
|
||||
"description": "\u0415\u0441\u043b\u0438\u0020\u0432\u044b\u0020\u0436\u0435\u043b\u0430\u0435\u0442\u0435\u0020\u043c\u043e\u043d\u0438\u0442\u043e\u0440\u0438\u0442\u044c\u0020\u043d\u0435\u0020\u0442\u043e\u043b\u044c\u043a\u043e\u0020\u043e\u0431\u043b\u0430\u0441\u0442\u044c\u002c\u0020\u0432\u044b\u0431\u0435\u0440\u0438\u0442\u0435\u0020\u0435\u0451\u0020\u0440\u0430\u0439\u043e\u043d"
|
||||
},
|
||||
"community": {
|
||||
"data": {
|
||||
"region": "\u0420\u0435\u0433\u0438\u043e\u043d"
|
||||
},
|
||||
"description": "\u0415\u0441\u043b\u0438\u0020\u0432\u044b\u0020\u0436\u0435\u043b\u0430\u0435\u0442\u0435\u0020\u043c\u043e\u043d\u0438\u0442\u043e\u0440\u0438\u0442\u044c\u0020\u043d\u0435\u0020\u0442\u043e\u043b\u044c\u043a\u043e\u0020\u043e\u0431\u043b\u0430\u0441\u0442\u044c\u0020\u0438\u0020\u0440\u0430\u0439\u043e\u043d\u002c\u0020\u0432\u044b\u0431\u0435\u0440\u0438\u0442\u0435\u0020\u0435\u0451\u0020\u0433\u0440\u043e\u043c\u0430\u0434\u0443"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
homeassistant/components/ukraine_alarm/translations/uk.json (new file, 28 lines)
@@ -0,0 +1,28 @@
|
||||
{
|
||||
"config": {
|
||||
"step": {
|
||||
"user": {
|
||||
"description": "\u041d\u0430\u043b\u0430\u0448\u0442\u0443\u0439\u0442\u0435 Home Assistant \u0434\u043b\u044f\u0020\u0456\u043d\u0442\u0435\u0433\u0440\u0430\u0446\u0456\u0457\u0020\u0437 Ukraine Alarm. \u0414\u043b\u044f\u0020\u043e\u0442\u0440\u0438\u043c\u0430\u043d\u043d\u044f\u0020\u043a\u043b\u044e\u0447\u0430 API, \u043f\u0435\u0440\u0435\u0439\u0434\u0456\u0442\u044c\u0020\u043d\u0430 {api_url}.",
|
||||
"title": "Ukraine Alarm"
|
||||
},
|
||||
"state": {
|
||||
"data": {
|
||||
"region": "\u0420\u0435\u0433\u0456\u043e\u043d"
|
||||
},
|
||||
"description": "\u041e\u0431\u0435\u0440\u0456\u0442\u044c\u0020\u043e\u0431\u043b\u0430\u0441\u0442\u044c\u0020\u0434\u043b\u044f\u0020\u043c\u043e\u043d\u0456\u0442\u043e\u0440\u0438\u043d\u0433\u0443"
|
||||
},
|
||||
"district": {
|
||||
"data": {
|
||||
"region": "\u0420\u0435\u0433\u0456\u043e\u043d"
|
||||
},
|
||||
"description": "\u042f\u043a\u0449\u043e\u0020\u0432\u0438\u0020\u0431\u0430\u0436\u0430\u0454\u0442\u0435\u0020\u043c\u043e\u043d\u0456\u0442\u043e\u0440\u0438\u0442\u0438\u0020\u043d\u0435\u0020\u043b\u0438\u0448\u0435\u0020\u043e\u0431\u043b\u0430\u0441\u0442\u044c\u002c\u0020\u043e\u0431\u0435\u0440\u0456\u0442\u044c\u0020\u0457\u0457\u0020\u0440\u0430\u0439\u043e\u043d"
|
||||
},
|
||||
"community": {
|
||||
"data": {
|
||||
"region": "\u0420\u0435\u0433\u0456\u043e\u043d"
|
||||
},
|
||||
"description": "\u042f\u043a\u0449\u043e\u0020\u0432\u0438\u0020\u0431\u0430\u0436\u0430\u0454\u0442\u0435\u0020\u043c\u043e\u043d\u0456\u0442\u043e\u0440\u0438\u0442\u0438\u0020\u043d\u0435\u0020\u0442\u0456\u043b\u044c\u043a\u0438\u0020\u043e\u0431\u043b\u0430\u0441\u0442\u044c\u0020\u0442\u0430\u0020\u0440\u0430\u0439\u043e\u043d\u002c\u0020\u043e\u0431\u0435\u0440\u0456\u0442\u044c\u0020\u0457\u0457\u0020\u0433\u0440\u043e\u043c\u0430\u0434\u0443"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -20,10 +20,20 @@ _LOGGER = logging.getLogger(__name__)
|
||||
|
||||
DEV_TYPE_TO_HA = {
|
||||
"LV-PUR131S": "fan",
|
||||
"LV-RH131S": "fan", # Alt ID Model LV-PUR131S
|
||||
"Core200S": "fan",
|
||||
"LAP-C201S-AUSR": "fan", # Alt ID Model Core200S
|
||||
"LAP-C202S-WUSR": "fan", # Alt ID Model Core200S
|
||||
"Core300S": "fan",
|
||||
"LAP-C301S-WJP": "fan", # Alt ID Model Core300S
|
||||
"Core400S": "fan",
|
||||
"LAP-C401S-WJP": "fan", # Alt ID Model Core400S
|
||||
"LAP-C401S-WUSR": "fan", # Alt ID Model Core400S
|
||||
"LAP-C401S-WAAA": "fan", # Alt ID Model Core400S
|
||||
"Core600S": "fan",
|
||||
"LAP-C601S-WUS": "fan", # Alt ID Model Core600S
|
||||
"LAP-C601S-WUSR": "fan", # Alt ID Model Core600S
|
||||
"LAP-C601S-WEU": "fan", # Alt ID Model Core600S
|
||||
}
|
||||
|
||||
FAN_MODE_AUTO = "auto"
|
||||
@@ -31,17 +41,37 @@ FAN_MODE_SLEEP = "sleep"
|
||||
|
||||
PRESET_MODES = {
|
||||
"LV-PUR131S": [FAN_MODE_AUTO, FAN_MODE_SLEEP],
|
||||
"LV-RH131S": [FAN_MODE_AUTO, FAN_MODE_SLEEP], # Alt ID Model LV-PUR131S
|
||||
"Core200S": [FAN_MODE_SLEEP],
|
||||
"LAP-C201S-AUSR": [FAN_MODE_SLEEP], # Alt ID Model Core200S
|
||||
"LAP-C202S-WUSR": [FAN_MODE_SLEEP], # Alt ID Model Core200S
|
||||
"Core300S": [FAN_MODE_AUTO, FAN_MODE_SLEEP],
|
||||
"LAP-C301S-WJP": [FAN_MODE_AUTO, FAN_MODE_SLEEP], # Alt ID Model Core300S
|
||||
"Core400S": [FAN_MODE_AUTO, FAN_MODE_SLEEP],
|
||||
"LAP-C401S-WJP": [FAN_MODE_AUTO, FAN_MODE_SLEEP], # Alt ID Model Core400S
|
||||
"LAP-C401S-WUSR": [FAN_MODE_AUTO, FAN_MODE_SLEEP], # Alt ID Model Core400S
|
||||
"LAP-C401S-WAAA": [FAN_MODE_AUTO, FAN_MODE_SLEEP], # Alt ID Model Core400S
|
||||
"Core600S": [FAN_MODE_AUTO, FAN_MODE_SLEEP],
|
||||
"LAP-C601S-WUS": [FAN_MODE_AUTO, FAN_MODE_SLEEP], # Alt ID Model Core600S
|
||||
"LAP-C601S-WUSR": [FAN_MODE_AUTO, FAN_MODE_SLEEP], # Alt ID Model Core600S
|
||||
"LAP-C601S-WEU": [FAN_MODE_AUTO, FAN_MODE_SLEEP], # Alt ID Model Core600S
|
||||
}
|
||||
SPEED_RANGE = { # off is not included
|
||||
"LV-PUR131S": (1, 3),
|
||||
"LV-RH131S": (1, 3), # ALt ID Model LV-PUR131S
|
||||
"Core200S": (1, 3),
|
||||
"LAP-C201S-AUSR": (1, 3), # ALt ID Model Core200S
|
||||
"LAP-C202S-WUSR": (1, 3), # ALt ID Model Core200S
|
||||
"Core300S": (1, 3),
|
||||
"LAP-C301S-WJP": (1, 3), # ALt ID Model Core300S
|
||||
"Core400S": (1, 4),
|
||||
"LAP-C401S-WJP": (1, 4), # ALt ID Model Core400S
|
||||
"LAP-C401S-WUSR": (1, 4), # ALt ID Model Core400S
|
||||
"LAP-C401S-WAAA": (1, 4), # ALt ID Model Core400S
|
||||
"Core600S": (1, 4),
|
||||
"LAP-C601S-WUS": (1, 4), # ALt ID Model Core600S
|
||||
"LAP-C601S-WUSR": (1, 4), # ALt ID Model Core600S
|
||||
"LAP-C601S-WEU": (1, 4), # ALt ID Model Core600S
|
||||
}
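`SPEED_RANGE` maps each model to its (min, max) discrete speed levels, and the fan entity converts between Home Assistant's 0-100 percentage and those levels. A sketch of that conversion using the standard percentage helpers, shown outside any entity class; how the VeSync platform wires these calls in is not reproduced here:

```python
# Sketch: converting between HA fan percentage and a device's discrete speed levels.
from homeassistant.util.percentage import (
    percentage_to_ranged_value,
    ranged_value_to_percentage,
)

SPEED_RANGE = (1, 4)  # e.g. a Core400S: speeds 1..4, "off" not included

# 75% on a 4-speed fan maps to level 3
level = round(percentage_to_ranged_value(SPEED_RANGE, 75))
# level 2 maps back to 50%
percent = ranged_value_to_percentage(SPEED_RANGE, 2)
print(level, percent)  # 3 50
```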
|
||||
|
||||
|
||||
|
||||
@@ -308,6 +308,22 @@ class OnOffChannel(ZigbeeChannel):
|
||||
"""Return cached value of on/off attribute."""
|
||||
return self.cluster.get("on_off")
|
||||
|
||||
async def turn_on(self) -> bool:
|
||||
"""Turn the on off cluster on."""
|
||||
result = await self.on()
|
||||
if isinstance(result, Exception) or result[1] is not Status.SUCCESS:
|
||||
return False
|
||||
self.cluster.update_attribute(self.ON_OFF, t.Bool.true)
|
||||
return True
|
||||
|
||||
async def turn_off(self) -> bool:
|
||||
"""Turn the on off cluster off."""
|
||||
result = await self.off()
|
||||
if isinstance(result, Exception) or result[1] is not Status.SUCCESS:
|
||||
return False
|
||||
self.cluster.update_attribute(self.ON_OFF, t.Bool.false)
|
||||
return True
|
||||
|
||||
@callback
|
||||
def cluster_command(self, tsn, command_id, args):
|
||||
"""Handle commands received to this cluster."""
|
||||
|
||||
@@ -64,15 +64,15 @@ class Switch(ZhaEntity, SwitchEntity):
|
||||
|
||||
async def async_turn_on(self, **kwargs) -> None:
|
||||
"""Turn the entity on."""
|
||||
result = await self._on_off_channel.on()
|
||||
if isinstance(result, Exception) or result[1] is not Status.SUCCESS:
|
||||
result = await self._on_off_channel.turn_on()
|
||||
if not result:
|
||||
return
|
||||
self.async_write_ha_state()
|
||||
|
||||
async def async_turn_off(self, **kwargs) -> None:
|
||||
"""Turn the entity off."""
|
||||
result = await self._on_off_channel.off()
|
||||
if isinstance(result, Exception) or result[1] is not Status.SUCCESS:
|
||||
result = await self._on_off_channel.turn_off()
|
||||
if not result:
|
||||
return
|
||||
self.async_write_ha_state()
|
||||
|
||||
|
||||
@@ -7,7 +7,7 @@ from .backports.enum import StrEnum
|
||||
|
||||
MAJOR_VERSION: Final = 2022
|
||||
MINOR_VERSION: Final = 5
|
||||
PATCH_VERSION: Final = "0b6"
|
||||
PATCH_VERSION: Final = "3"
|
||||
__short_version__: Final = f"{MAJOR_VERSION}.{MINOR_VERSION}"
|
||||
__version__: Final = f"{__short_version__}.{PATCH_VERSION}"
|
||||
REQUIRED_PYTHON_VER: Final[tuple[int, int, int]] = (3, 9, 0)
|
||||
|
||||
@@ -366,6 +366,7 @@ FLOWS = {
|
||||
"twentemilieu",
|
||||
"twilio",
|
||||
"twinkly",
|
||||
"ukraine_alarm",
|
||||
"unifi",
|
||||
"unifiprotect",
|
||||
"upb",
|
||||
|
||||
@@ -103,7 +103,10 @@ ZEROCONF = {
|
||||
"domain": "devolo_home_control"
|
||||
},
|
||||
{
|
||||
"domain": "devolo_home_network"
|
||||
"domain": "devolo_home_network",
|
||||
"properties": {
|
||||
"MT": "*"
|
||||
}
|
||||
}
|
||||
],
|
||||
"_easylink._tcp.local.": [
|
||||
|
||||
@@ -711,6 +711,10 @@ class EntityRegistry:
|
||||
if not valid_entity_id(entity["entity_id"]):
|
||||
continue
|
||||
|
||||
# We removed this in 2022.5. Remove this check in 2023.1.
|
||||
if entity["entity_category"] == "system":
|
||||
entity["entity_category"] = None
|
||||
|
||||
entities[entity["entity_id"]] = RegistryEntry(
|
||||
area_id=entity["area_id"],
|
||||
capabilities=entity["capabilities"],
|
||||
|
||||
@@ -205,6 +205,10 @@ async def trace_action(hass, script_run, stop, variables):
|
||||
except _AbortScript as ex:
|
||||
trace_element.set_error(ex.__cause__ or ex)
|
||||
raise ex
|
||||
except _ConditionFail as ex:
|
||||
# Clear errors which may have been set when evaluating the condition
|
||||
trace_element.set_error(None)
|
||||
raise ex
|
||||
except _StopScript as ex:
|
||||
raise ex
|
||||
except Exception as ex:
|
||||
@@ -325,11 +329,19 @@ async def async_validate_action_config(
|
||||
return config
|
||||
|
||||
|
||||
class _AbortScript(Exception):
|
||||
class _HaltScript(Exception):
|
||||
"""Throw if script needs to stop executing."""
|
||||
|
||||
|
||||
class _AbortScript(_HaltScript):
|
||||
"""Throw if script needs to abort because of an unexpected error."""
|
||||
|
||||
|
||||
class _StopScript(Exception):
|
||||
class _ConditionFail(_HaltScript):
|
||||
"""Throw if script needs to stop because a condition evaluated to False."""
|
||||
|
||||
|
||||
class _StopScript(_HaltScript):
|
||||
"""Throw if script needs to stop."""
|
||||
|
||||
|
||||
@@ -393,16 +405,18 @@ class _ScriptRun:
|
||||
await self._async_step(log_exceptions=False)
|
||||
else:
|
||||
script_execution_set("finished")
|
||||
except _StopScript:
|
||||
script_execution_set("finished")
|
||||
# Let the _StopScript bubble up if this is a sub-script
|
||||
if not self._script.top_level:
|
||||
raise
|
||||
except _AbortScript:
|
||||
script_execution_set("aborted")
|
||||
# Let the _AbortScript bubble up if this is a sub-script
|
||||
if not self._script.top_level:
|
||||
raise
|
||||
except _ConditionFail:
|
||||
script_execution_set("aborted")
|
||||
except _StopScript:
|
||||
script_execution_set("finished")
|
||||
# Let the _StopScript bubble up if this is a sub-script
|
||||
if not self._script.top_level:
|
||||
raise
|
||||
except Exception:
|
||||
script_execution_set("error")
|
||||
raise
|
||||
@@ -425,6 +439,7 @@ class _ScriptRun:
|
||||
self._log(
|
||||
"Skipped disabled step %s", self._action.get(CONF_ALIAS, action)
|
||||
)
|
||||
trace_set_result(enabled=False)
|
||||
return
|
||||
|
||||
try:
|
||||
@@ -450,7 +465,7 @@ class _ScriptRun:
|
||||
def _handle_exception(
|
||||
self, exception: Exception, continue_on_error: bool, log_exceptions: bool
|
||||
) -> None:
|
||||
if not isinstance(exception, (_AbortScript, _StopScript)) and log_exceptions:
|
||||
if not isinstance(exception, _HaltScript) and log_exceptions:
|
||||
self._log_exception(exception)
|
||||
|
||||
if not continue_on_error:
|
||||
@@ -726,7 +741,7 @@ class _ScriptRun:
|
||||
self._log("Test condition %s: %s", self._script.last_action, check)
|
||||
trace_update_result(result=check)
|
||||
if not check:
|
||||
raise _AbortScript
|
||||
raise _ConditionFail
|
||||
|
||||
def _test_conditions(self, conditions, name, condition_path=None):
|
||||
if condition_path is None:
|
||||
@@ -1133,13 +1148,14 @@ class Script:
|
||||
domain: str,
|
||||
*,
|
||||
# Used in "Running <running_description>" log message
|
||||
running_description: str | None = None,
|
||||
change_listener: Callable[..., Any] | None = None,
|
||||
script_mode: str = DEFAULT_SCRIPT_MODE,
|
||||
max_runs: int = DEFAULT_MAX,
|
||||
max_exceeded: str = DEFAULT_MAX_EXCEEDED,
|
||||
logger: logging.Logger | None = None,
|
||||
copy_variables: bool = False,
|
||||
log_exceptions: bool = True,
|
||||
logger: logging.Logger | None = None,
|
||||
max_exceeded: str = DEFAULT_MAX_EXCEEDED,
|
||||
max_runs: int = DEFAULT_MAX,
|
||||
running_description: str | None = None,
|
||||
script_mode: str = DEFAULT_SCRIPT_MODE,
|
||||
top_level: bool = True,
|
||||
variables: ScriptVariables | None = None,
|
||||
) -> None:
|
||||
@@ -1192,6 +1208,7 @@ class Script:
|
||||
self._variables_dynamic = template.is_complex(variables)
|
||||
if self._variables_dynamic:
|
||||
template.attach(hass, variables)
|
||||
self._copy_variables_on_run = copy_variables
|
||||
|
||||
@property
|
||||
def change_listener(self) -> Callable[..., Any] | None:
|
||||
@@ -1454,7 +1471,10 @@ class Script:
|
||||
|
||||
variables["context"] = context
|
||||
else:
|
||||
variables = cast(dict, run_variables)
|
||||
if self._copy_variables_on_run:
|
||||
variables = cast(dict, copy(run_variables))
|
||||
else:
|
||||
variables = cast(dict, run_variables)
|
||||
|
||||
# Prevent non-allowed recursive calls which will cause deadlocks when we try to
|
||||
# stop (restart) or wait for (queued) our own script run.
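With `copy_variables=True`, each run gets its own shallow copy of the run variables, so one parallel branch's writes do not leak into its siblings. A small illustration of why the copy matters:

```python
# Why parallel branches get a shallow copy of the variables dict.
from copy import copy

run_variables = {"count": 1}

shared = run_variables        # without copy_variables: same dict object
branch = copy(run_variables)  # with copy_variables: independent top-level dict

branch["count"] = 99
print(run_variables["count"])  # 1, the original is untouched
shared["count"] = 99
print(run_variables["count"])  # 99, writes through the shared reference leak
```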
|
||||
@@ -1671,6 +1691,7 @@ class Script:
|
||||
max_runs=self.max_runs,
|
||||
logger=self._logger,
|
||||
top_level=False,
|
||||
copy_variables=True,
|
||||
)
|
||||
parallel_script.change_listener = partial(
|
||||
self._chain_change_listener, parallel_script
|
||||
|
||||
@@ -5,11 +5,13 @@ from collections.abc import Callable, Sequence
|
||||
from typing import Any, TypedDict, cast
|
||||
|
||||
import voluptuous as vol
|
||||
import yaml
|
||||
|
||||
from homeassistant.backports.enum import StrEnum
|
||||
from homeassistant.const import CONF_MODE, CONF_UNIT_OF_MEASUREMENT
|
||||
from homeassistant.core import split_entity_id, valid_entity_id
|
||||
from homeassistant.util import decorator
|
||||
from homeassistant.util.yaml.dumper import represent_odict
|
||||
|
||||
from . import config_validation as cv
|
||||
|
||||
@@ -611,8 +613,8 @@ class NumberSelector(Selector):
|
||||
vol.Coerce(float), vol.Range(min=1e-3)
|
||||
),
|
||||
vol.Optional(CONF_UNIT_OF_MEASUREMENT): str,
|
||||
vol.Optional(CONF_MODE, default=NumberSelectorMode.SLIDER): vol.Coerce(
|
||||
NumberSelectorMode
|
||||
vol.Optional(CONF_MODE, default=NumberSelectorMode.SLIDER): vol.All(
|
||||
vol.Coerce(NumberSelectorMode), lambda val: val.value
|
||||
),
|
||||
}
|
||||
),
|
||||
@@ -702,7 +704,9 @@ class SelectSelector(Selector):
|
||||
vol.Required("options"): vol.All(vol.Any([str], [select_option])),
|
||||
vol.Optional("multiple", default=False): cv.boolean,
|
||||
vol.Optional("custom_value", default=False): cv.boolean,
|
||||
vol.Optional("mode"): vol.Coerce(SelectSelectorMode),
|
||||
vol.Optional("mode"): vol.All(
|
||||
vol.Coerce(SelectSelectorMode), lambda val: val.value
|
||||
),
|
||||
}
|
||||
)
|
||||
|
||||
@@ -825,7 +829,9 @@ class TextSelector(Selector):
|
||||
vol.Optional("suffix"): str,
|
||||
# The "type" controls the input field in the browser, the resulting
|
||||
# data can be any string so we don't validate it.
|
||||
vol.Optional("type"): vol.Coerce(TextSelectorType),
|
||||
vol.Optional("type"): vol.All(
|
||||
vol.Coerce(TextSelectorType), lambda val: val.value
|
||||
),
|
||||
}
|
||||
)
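The selector schemas now wrap the enum coercion in `vol.All(..., lambda val: val.value)` so the validated config stores a plain string instead of an enum member, which keeps it straightforward to serialize. A small voluptuous example of the difference, using a stand-in enum rather than the real selector types:

```python
# Difference between bare vol.Coerce(Enum) and coercing then taking .value.
from enum import Enum

import voluptuous as vol


class Mode(str, Enum):
    BOX = "box"
    SLIDER = "slider"


as_enum = vol.Schema(vol.Coerce(Mode))
as_value = vol.Schema(vol.All(vol.Coerce(Mode), lambda val: val.value))

print(repr(as_enum("box")))   # <Mode.BOX: 'box'>, an enum member
print(repr(as_value("box")))  # 'box', a plain serializable string
```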
|
||||
|
||||
@@ -881,3 +887,11 @@ class TimeSelector(Selector):
|
||||
"""Validate the passed selection."""
|
||||
cv.time(data)
|
||||
return cast(str, data)
|
||||
|
||||
|
||||
yaml.SafeDumper.add_representer(
|
||||
Selector,
|
||||
lambda dumper, value: represent_odict(
|
||||
dumper, "tag:yaml.org,2002:map", value.serialize()
|
||||
),
|
||||
)
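Registering a representer on `yaml.SafeDumper` is what lets `yaml.safe_dump` serialize `Selector` objects via their `serialize()` dict. The same mechanism in isolation, with a toy class standing in for `Selector` and the simpler `represent_dict` instead of Home Assistant's ordered-dict helper:

```python
# Registering a SafeDumper representer so safe_dump can handle a custom class.
import yaml


class Selector:
    """Toy stand-in: real selectors expose their config via serialize()."""

    def __init__(self, config: dict) -> None:
        self.config = config

    def serialize(self) -> dict:
        return {"selector": self.config}


yaml.SafeDumper.add_representer(
    Selector,
    lambda dumper, value: dumper.represent_dict(value.serialize()),
)

print(yaml.safe_dump({"trigger_event": Selector({"text": {}})}))
# trigger_event:
#   selector:
#     text: {}
```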
|
||||
|
||||
@@ -850,7 +850,8 @@ class TemplateStateFromEntityId(TemplateStateBase):
|
||||
@property
|
||||
def _state(self) -> State: # type: ignore[override] # mypy issue 4125
|
||||
state = self._hass.states.get(self._entity_id)
|
||||
assert state
|
||||
if not state:
|
||||
state = State(self._entity_id, STATE_UNKNOWN)
|
||||
return state
|
||||
|
||||
def __repr__(self) -> str:
|
||||
|
||||
@@ -15,7 +15,7 @@ ciso8601==2.2.0
|
||||
cryptography==36.0.2
|
||||
fnvhash==0.1.0
|
||||
hass-nabucasa==0.54.0
|
||||
home-assistant-frontend==20220502.0
|
||||
home-assistant-frontend==20220504.1
|
||||
httpx==0.22.0
|
||||
ifaddr==0.1.7
|
||||
jinja2==3.1.1
|
||||
@@ -51,6 +51,7 @@ httplib2>=0.19.0
|
||||
# upgrades intentionally. It is a large package to build from source and we
|
||||
# want to ensure we have wheels built.
|
||||
grpcio==1.45.0
|
||||
grpcio-status==1.45.0
|
||||
|
||||
# libcst >=0.4.0 requires a newer Rust than we currently have available,
|
||||
# thus our wheels builds fail. This pins it to the last working version,
|
||||
|
||||
@@ -110,7 +110,7 @@ aio_geojson_nsw_rfs_incidents==0.4
|
||||
aio_georss_gdacs==0.7
|
||||
|
||||
# homeassistant.components.airzone
|
||||
aioairzone==0.4.2
|
||||
aioairzone==0.4.3
|
||||
|
||||
# homeassistant.components.ambient_station
|
||||
aioambient==2021.11.0
|
||||
@@ -211,7 +211,7 @@ aionotify==0.2.0
|
||||
aionotion==3.0.2
|
||||
|
||||
# homeassistant.components.oncue
|
||||
aiooncue==0.3.2
|
||||
aiooncue==0.3.4
|
||||
|
||||
# homeassistant.components.acmeda
|
||||
aiopulse==0.4.3
|
||||
@@ -244,7 +244,7 @@ aiosenz==1.0.0
|
||||
aioshelly==2.0.0
|
||||
|
||||
# homeassistant.components.slimproto
|
||||
aioslimproto==1.0.0
|
||||
aioslimproto==2.0.1
|
||||
|
||||
# homeassistant.components.steamist
|
||||
aiosteamist==0.3.1
|
||||
@@ -723,7 +723,7 @@ gios==2.1.0
|
||||
gitterpy==0.1.7
|
||||
|
||||
# homeassistant.components.glances
|
||||
glances_api==0.3.4
|
||||
glances_api==0.3.5
|
||||
|
||||
# homeassistant.components.goalzero
|
||||
goalzero==0.2.1
|
||||
@@ -819,7 +819,7 @@ hole==0.7.0
|
||||
holidays==0.13
|
||||
|
||||
# homeassistant.components.frontend
|
||||
home-assistant-frontend==20220502.0
|
||||
home-assistant-frontend==20220504.1
|
||||
|
||||
# homeassistant.components.home_connect
|
||||
homeconnect==0.7.0
|
||||
@@ -1065,7 +1065,7 @@ netdisco==3.0.0
|
||||
netmap==0.7.0.2
|
||||
|
||||
# homeassistant.components.nam
|
||||
nettigo-air-monitor==1.2.2
|
||||
nettigo-air-monitor==1.2.3
|
||||
|
||||
# homeassistant.components.neurio_energy
|
||||
neurio==0.3.1
|
||||
@@ -1111,7 +1111,7 @@ numato-gpio==0.10.0
|
||||
# homeassistant.components.opencv
|
||||
# homeassistant.components.tensorflow
|
||||
# homeassistant.components.trend
|
||||
numpy==1.21.4
|
||||
numpy==1.21.6
|
||||
|
||||
# homeassistant.components.oasa_telematics
|
||||
oasatelematics==0.3
|
||||
@@ -1287,7 +1287,7 @@ pushover_complete==1.1.1
|
||||
pvo==0.2.2
|
||||
|
||||
# homeassistant.components.canary
|
||||
py-canary==0.5.1
|
||||
py-canary==0.5.2
|
||||
|
||||
# homeassistant.components.cpuspeed
|
||||
py-cpuinfo==8.0.0
|
||||
@@ -1399,7 +1399,7 @@ pycfdns==1.2.2
|
||||
pychannels==1.0.0
|
||||
|
||||
# homeassistant.components.cast
|
||||
pychromecast==12.0.0
|
||||
pychromecast==12.1.1
|
||||
|
||||
# homeassistant.components.pocketcasts
|
||||
pycketcasts==1.0.0
|
||||
@@ -1664,7 +1664,7 @@ pymyq==3.1.4
|
||||
pymysensors==0.22.1
|
||||
|
||||
# homeassistant.components.netgear
|
||||
pynetgear==0.9.4
|
||||
pynetgear==0.10.0
|
||||
|
||||
# homeassistant.components.netio
|
||||
pynetio==0.1.9.1
|
||||
@@ -2316,7 +2316,7 @@ tololib==0.1.0b3
|
||||
toonapi==0.2.1
|
||||
|
||||
# homeassistant.components.totalconnect
|
||||
total_connect_client==2022.3
|
||||
total_connect_client==2022.5
|
||||
|
||||
# homeassistant.components.tplink_lte
|
||||
tp-connected==0.0.4
|
||||
@@ -2342,6 +2342,9 @@ twitchAPI==2.5.2
|
||||
# homeassistant.components.rainforest_eagle
|
||||
uEagle==0.0.2
|
||||
|
||||
# homeassistant.components.ukraine_alarm
|
||||
ukrainealarm==0.0.1
|
||||
|
||||
# homeassistant.components.unifiprotect
|
||||
unifi-discovery==1.1.2
|
||||
|
||||
@@ -2445,7 +2448,7 @@ xbox-webapi==2.0.11
|
||||
xboxapi==2.0.1
|
||||
|
||||
# homeassistant.components.knx
|
||||
xknx==0.21.1
|
||||
xknx==0.21.2
|
||||
|
||||
# homeassistant.components.bluesound
|
||||
# homeassistant.components.fritz
|
||||
|
||||
@@ -94,7 +94,7 @@ aio_geojson_nsw_rfs_incidents==0.4
|
||||
aio_georss_gdacs==0.7
|
||||
|
||||
# homeassistant.components.airzone
|
||||
aioairzone==0.4.2
|
||||
aioairzone==0.4.3
|
||||
|
||||
# homeassistant.components.ambient_station
|
||||
aioambient==2021.11.0
|
||||
@@ -177,7 +177,7 @@ aionanoleaf==0.2.0
|
||||
aionotion==3.0.2
|
||||
|
||||
# homeassistant.components.oncue
|
||||
aiooncue==0.3.2
|
||||
aiooncue==0.3.4
|
||||
|
||||
# homeassistant.components.acmeda
|
||||
aiopulse==0.4.3
|
||||
@@ -210,7 +210,7 @@ aiosenz==1.0.0
|
||||
aioshelly==2.0.0
|
||||
|
||||
# homeassistant.components.slimproto
|
||||
aioslimproto==1.0.0
|
||||
aioslimproto==2.0.1
|
||||
|
||||
# homeassistant.components.steamist
|
||||
aiosteamist==0.3.1
|
||||
@@ -514,7 +514,7 @@ getmac==0.8.2
|
||||
gios==2.1.0
|
||||
|
||||
# homeassistant.components.glances
|
||||
glances_api==0.3.4
|
||||
glances_api==0.3.5
|
||||
|
||||
# homeassistant.components.goalzero
|
||||
goalzero==0.2.1
|
||||
@@ -580,7 +580,7 @@ hole==0.7.0
|
||||
holidays==0.13
|
||||
|
||||
# homeassistant.components.frontend
|
||||
home-assistant-frontend==20220502.0
|
||||
home-assistant-frontend==20220504.1
|
||||
|
||||
# homeassistant.components.home_connect
|
||||
homeconnect==0.7.0
|
||||
@@ -727,7 +727,7 @@ netdisco==3.0.0
|
||||
netmap==0.7.0.2
|
||||
|
||||
# homeassistant.components.nam
|
||||
nettigo-air-monitor==1.2.2
|
||||
nettigo-air-monitor==1.2.3
|
||||
|
||||
# homeassistant.components.nexia
|
||||
nexia==0.9.13
|
||||
@@ -755,7 +755,7 @@ numato-gpio==0.10.0
|
||||
# homeassistant.components.opencv
|
||||
# homeassistant.components.tensorflow
|
||||
# homeassistant.components.trend
|
||||
numpy==1.21.4
|
||||
numpy==1.21.6
|
||||
|
||||
# homeassistant.components.google
|
||||
oauth2client==4.1.3
|
||||
@@ -865,7 +865,7 @@ pushbullet.py==0.11.0
|
||||
pvo==0.2.2
|
||||
|
||||
# homeassistant.components.canary
|
||||
py-canary==0.5.1
|
||||
py-canary==0.5.2
|
||||
|
||||
# homeassistant.components.cpuspeed
|
||||
py-cpuinfo==8.0.0
|
||||
@@ -938,7 +938,7 @@ pybotvac==0.0.23
|
||||
pycfdns==1.2.2
|
||||
|
||||
# homeassistant.components.cast
|
||||
pychromecast==12.0.0
|
||||
pychromecast==12.1.1
|
||||
|
||||
# homeassistant.components.climacell
|
||||
pyclimacell==0.18.2
|
||||
@@ -1116,7 +1116,7 @@ pymyq==3.1.4
|
||||
pymysensors==0.22.1
|
||||
|
||||
# homeassistant.components.netgear
|
||||
pynetgear==0.9.4
|
||||
pynetgear==0.10.0
|
||||
|
||||
# homeassistant.components.nina
|
||||
pynina==0.1.8
|
||||
@@ -1501,7 +1501,7 @@ tololib==0.1.0b3
|
||||
toonapi==0.2.1
|
||||
|
||||
# homeassistant.components.totalconnect
|
||||
total_connect_client==2022.3
|
||||
total_connect_client==2022.5
|
||||
|
||||
# homeassistant.components.transmission
|
||||
transmissionrpc==0.11
|
||||
@@ -1524,6 +1524,9 @@ twitchAPI==2.5.2
|
||||
# homeassistant.components.rainforest_eagle
|
||||
uEagle==0.0.2
|
||||
|
||||
# homeassistant.components.ukraine_alarm
|
||||
ukrainealarm==0.0.1
|
||||
|
||||
# homeassistant.components.unifiprotect
|
||||
unifi-discovery==1.1.2
|
||||
|
||||
@@ -1597,7 +1600,7 @@ wolf_smartset==0.1.11
|
||||
xbox-webapi==2.0.11
|
||||
|
||||
# homeassistant.components.knx
|
||||
xknx==0.21.1
|
||||
xknx==0.21.2
|
||||
|
||||
# homeassistant.components.bluesound
|
||||
# homeassistant.components.fritz
|
||||
|
||||
@@ -68,6 +68,7 @@ httplib2>=0.19.0
|
||||
# upgrades intentionally. It is a large package to build from source and we
|
||||
# want to ensure we have wheels built.
|
||||
grpcio==1.45.0
|
||||
grpcio-status==1.45.0
|
||||
|
||||
# libcst >=0.4.0 requires a newer Rust than we currently have available,
|
||||
# thus our wheels builds fail. This pins it to the last working version,
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
[metadata]
|
||||
name = homeassistant
|
||||
version = 2022.5.0b6
|
||||
version = 2022.5.3
|
||||
author = The Home Assistant Authors
|
||||
author_email = hello@home-assistant.io
|
||||
license = Apache-2.0
|
||||
|
||||
@@ -147,7 +147,7 @@ async def test_airzone_climate_turn_on_off(hass: HomeAssistant) -> None:
|
||||
]
|
||||
}
|
||||
with patch(
|
||||
"homeassistant.components.airzone.AirzoneLocalApi.http_request",
|
||||
"homeassistant.components.airzone.AirzoneLocalApi.put_hvac",
|
||||
return_value=HVAC_MOCK,
|
||||
):
|
||||
await hass.services.async_call(
|
||||
@@ -172,7 +172,7 @@ async def test_airzone_climate_turn_on_off(hass: HomeAssistant) -> None:
|
||||
]
|
||||
}
|
||||
with patch(
|
||||
"homeassistant.components.airzone.AirzoneLocalApi.http_request",
|
||||
"homeassistant.components.airzone.AirzoneLocalApi.put_hvac",
|
||||
return_value=HVAC_MOCK,
|
||||
):
|
||||
await hass.services.async_call(
|
||||
@@ -204,7 +204,7 @@ async def test_airzone_climate_set_hvac_mode(hass: HomeAssistant) -> None:
|
||||
]
|
||||
}
|
||||
with patch(
|
||||
"homeassistant.components.airzone.AirzoneLocalApi.http_request",
|
||||
"homeassistant.components.airzone.AirzoneLocalApi.put_hvac",
|
||||
return_value=HVAC_MOCK,
|
||||
):
|
||||
await hass.services.async_call(
|
||||
@@ -230,7 +230,7 @@ async def test_airzone_climate_set_hvac_mode(hass: HomeAssistant) -> None:
|
||||
]
|
||||
}
|
||||
with patch(
|
||||
"homeassistant.components.airzone.AirzoneLocalApi.http_request",
|
||||
"homeassistant.components.airzone.AirzoneLocalApi.put_hvac",
|
||||
return_value=HVAC_MOCK_2,
|
||||
):
|
||||
await hass.services.async_call(
|
||||
@@ -263,7 +263,7 @@ async def test_airzone_climate_set_hvac_slave_error(hass: HomeAssistant) -> None
|
||||
await async_init_integration(hass)
|
||||
|
||||
with patch(
|
||||
"homeassistant.components.airzone.AirzoneLocalApi.http_request",
|
||||
"homeassistant.components.airzone.AirzoneLocalApi.put_hvac",
|
||||
return_value=HVAC_MOCK,
|
||||
), pytest.raises(HomeAssistantError):
|
||||
await hass.services.async_call(
|
||||
@@ -296,7 +296,7 @@ async def test_airzone_climate_set_temp(hass: HomeAssistant) -> None:
|
||||
await async_init_integration(hass)
|
||||
|
||||
with patch(
|
||||
"homeassistant.components.airzone.AirzoneLocalApi.http_request",
|
||||
"homeassistant.components.airzone.AirzoneLocalApi.put_hvac",
|
||||
return_value=HVAC_MOCK,
|
||||
):
|
||||
await hass.services.async_call(
|
||||
|
||||
@@ -1474,6 +1474,7 @@ async def test_blueprint_automation(hass, calls):
|
||||
"input": {
|
||||
"trigger_event": "blueprint_event",
|
||||
"service_to_call": "test.automation",
|
||||
"a_number": 5,
|
||||
},
|
||||
}
|
||||
}
|
||||
@@ -1499,6 +1500,7 @@ async def test_blueprint_automation_bad_config(hass, caplog):
|
||||
"input": {
|
||||
"trigger_event": "blueprint_event",
|
||||
"service_to_call": {"dict": "not allowed"},
|
||||
"a_number": 5,
|
||||
},
|
||||
}
|
||||
}
|
||||
@@ -1791,18 +1793,12 @@ async def test_recursive_automation(hass: HomeAssistant, automation_mode, caplog
|
||||
)
|
||||
|
||||
service_called = asyncio.Event()
|
||||
service_called_late = []
|
||||
|
||||
async def async_service_handler(service):
|
||||
if service.service == "automation_done":
|
||||
service_called.set()
|
||||
if service.service == "automation_started_late":
|
||||
service_called_late.append(service)
|
||||
|
||||
hass.services.async_register("test", "automation_done", async_service_handler)
|
||||
hass.services.async_register(
|
||||
"test", "automation_started_late", async_service_handler
|
||||
)
|
||||
|
||||
hass.bus.async_fire("trigger_automation")
|
||||
await asyncio.wait_for(service_called.wait(), 1)
|
||||
|
||||
@@ -198,7 +198,8 @@ async def test_fetch_blueprint_from_github_url(hass, aioclient_mock, url):
|
||||
assert imported_blueprint.blueprint.domain == "automation"
|
||||
assert imported_blueprint.blueprint.inputs == {
|
||||
"service_to_call": None,
|
||||
"trigger_event": None,
|
||||
"trigger_event": {"selector": {"text": {}}},
|
||||
"a_number": {"selector": {"number": {"mode": "box", "step": 1.0}}},
|
||||
}
|
||||
assert imported_blueprint.suggested_filename == "balloob/motion_light"
|
||||
assert imported_blueprint.blueprint.metadata["source_url"] == url
|
||||
|
||||
@@ -30,7 +30,11 @@ async def test_list_blueprints(hass, hass_ws_client):
|
||||
"test_event_service.yaml": {
|
||||
"metadata": {
|
||||
"domain": "automation",
|
||||
"input": {"service_to_call": None, "trigger_event": None},
|
||||
"input": {
|
||||
"service_to_call": None,
|
||||
"trigger_event": {"selector": {"text": {}}},
|
||||
"a_number": {"selector": {"number": {"mode": "box", "step": 1.0}}},
|
||||
},
|
||||
"name": "Call service based on event",
|
||||
},
|
||||
},
|
||||
@@ -89,7 +93,11 @@ async def test_import_blueprint(hass, aioclient_mock, hass_ws_client):
|
||||
"blueprint": {
|
||||
"metadata": {
|
||||
"domain": "automation",
|
||||
"input": {"service_to_call": None, "trigger_event": None},
|
||||
"input": {
|
||||
"service_to_call": None,
|
||||
"trigger_event": {"selector": {"text": {}}},
|
||||
"a_number": {"selector": {"number": {"mode": "box", "step": 1.0}}},
|
||||
},
|
||||
"name": "Call service based on event",
|
||||
"source_url": "https://github.com/balloob/home-assistant-config/blob/main/blueprints/automation/motion_light.yaml",
|
||||
},
|
||||
@@ -123,7 +131,7 @@ async def test_save_blueprint(hass, aioclient_mock, hass_ws_client):
|
||||
assert msg["success"]
|
||||
assert write_mock.mock_calls
|
||||
assert write_mock.call_args[0] == (
|
||||
"blueprint:\n name: Call service based on event\n domain: automation\n input:\n trigger_event:\n service_to_call:\n source_url: https://github.com/balloob/home-assistant-config/blob/main/blueprints/automation/motion_light.yaml\ntrigger:\n platform: event\n event_type: !input 'trigger_event'\naction:\n service: !input 'service_to_call'\n entity_id: light.kitchen\n",
|
||||
"blueprint:\n name: Call service based on event\n domain: automation\n input:\n trigger_event:\n selector:\n text: {}\n service_to_call:\n a_number:\n selector:\n number:\n mode: box\n step: 1.0\n source_url: https://github.com/balloob/home-assistant-config/blob/main/blueprints/automation/motion_light.yaml\ntrigger:\n platform: event\n event_type: !input 'trigger_event'\naction:\n service: !input 'service_to_call'\n entity_id: light.kitchen\n",
|
||||
)
|
||||
|
||||
|
||||
|
||||
@@ -14,6 +14,13 @@ def get_multizone_status_mock():
|
||||
return mock
|
||||
|
||||
|
||||
@pytest.fixture()
|
||||
def get_cast_type_mock():
|
||||
"""Mock pychromecast dial."""
|
||||
mock = MagicMock(spec_set=pychromecast.dial.get_cast_type)
|
||||
return mock
|
||||
|
||||
|
||||
@pytest.fixture()
|
||||
def castbrowser_mock():
|
||||
"""Mock pychromecast CastBrowser."""
|
||||
@@ -43,6 +50,7 @@ def cast_mock(
|
||||
mz_mock,
|
||||
quick_play_mock,
|
||||
castbrowser_mock,
|
||||
get_cast_type_mock,
|
||||
get_chromecast_mock,
|
||||
get_multizone_status_mock,
|
||||
):
|
||||
@@ -52,6 +60,9 @@ def cast_mock(
|
||||
with patch(
|
||||
"homeassistant.components.cast.discovery.pychromecast.discovery.CastBrowser",
|
||||
castbrowser_mock,
|
||||
), patch(
|
||||
"homeassistant.components.cast.helpers.dial.get_cast_type",
|
||||
get_cast_type_mock,
|
||||
), patch(
|
||||
"homeassistant.components.cast.helpers.dial.get_multizone_status",
|
||||
get_multizone_status_mock,
|
||||
|
||||
@@ -64,6 +64,8 @@ FAKE_MDNS_SERVICE = pychromecast.discovery.ServiceInfo(
|
||||
pychromecast.const.SERVICE_TYPE_MDNS, "the-service"
|
||||
)
|
||||
|
||||
UNDEFINED = object()
|
||||
|
||||
|
||||
def get_fake_chromecast(info: ChromecastInfo):
|
||||
"""Generate a Fake Chromecast object with the specified arguments."""
|
||||
@@ -74,7 +76,14 @@ def get_fake_chromecast(info: ChromecastInfo):
|
||||
|
||||
|
||||
def get_fake_chromecast_info(
|
||||
host="192.168.178.42", port=8009, service=None, uuid: UUID | None = FakeUUID
|
||||
*,
|
||||
host="192.168.178.42",
|
||||
port=8009,
|
||||
service=None,
|
||||
uuid: UUID | None = FakeUUID,
|
||||
cast_type=UNDEFINED,
|
||||
manufacturer=UNDEFINED,
|
||||
model_name=UNDEFINED,
|
||||
):
|
||||
"""Generate a Fake ChromecastInfo with the specified arguments."""
|
||||
|
||||
@@ -82,16 +91,22 @@ def get_fake_chromecast_info(
|
||||
service = pychromecast.discovery.ServiceInfo(
|
||||
pychromecast.const.SERVICE_TYPE_HOST, (host, port)
|
||||
)
|
||||
if cast_type is UNDEFINED:
|
||||
cast_type = CAST_TYPE_GROUP if port != 8009 else CAST_TYPE_CHROMECAST
|
||||
if manufacturer is UNDEFINED:
|
||||
manufacturer = "Nabu Casa"
|
||||
if model_name is UNDEFINED:
|
||||
model_name = "Chromecast"
|
||||
return ChromecastInfo(
|
||||
cast_info=pychromecast.models.CastInfo(
|
||||
services={service},
|
||||
uuid=uuid,
|
||||
model_name="Chromecast",
|
||||
model_name=model_name,
friendly_name="Speaker",
host=host,
port=port,
cast_type=CAST_TYPE_GROUP if port != 8009 else CAST_TYPE_CHROMECAST,
manufacturer="Nabu Casa",
cast_type=cast_type,
manufacturer=manufacturer,
)
)

@@ -342,6 +357,92 @@ async def test_internal_discovery_callback_fill_out_group(
get_multizone_status_mock.assert_called_once()


async def test_internal_discovery_callback_fill_out_cast_type_manufacturer(
hass, get_cast_type_mock, caplog
):
"""Test internal discovery automatically filling out information."""
discover_cast, _, _ = await async_setup_cast_internal_discovery(hass)
info = get_fake_chromecast_info(
host="host1",
port=8009,
service=FAKE_MDNS_SERVICE,
cast_type=None,
manufacturer=None,
)
info2 = get_fake_chromecast_info(
host="host1",
port=8009,
service=FAKE_MDNS_SERVICE,
cast_type=None,
manufacturer=None,
model_name="Model 101",
)
zconf = get_fake_zconf(host="host1", port=8009)
full_info = attr.evolve(
info,
cast_info=pychromecast.discovery.CastInfo(
services=info.cast_info.services,
uuid=FakeUUID,
model_name="Chromecast",
friendly_name="Speaker",
host=info.cast_info.host,
port=info.cast_info.port,
cast_type="audio",
manufacturer="TrollTech",
),
is_dynamic_group=None,
)
full_info2 = attr.evolve(
info2,
cast_info=pychromecast.discovery.CastInfo(
services=info.cast_info.services,
uuid=FakeUUID,
model_name="Model 101",
friendly_name="Speaker",
host=info.cast_info.host,
port=info.cast_info.port,
cast_type="cast",
manufacturer="Cyberdyne Systems",
),
is_dynamic_group=None,
)

get_cast_type_mock.assert_not_called()
get_cast_type_mock.return_value = full_info.cast_info

with patch(
"homeassistant.components.cast.discovery.ChromeCastZeroconf.get_zeroconf",
return_value=zconf,
):
signal = MagicMock()

async_dispatcher_connect(hass, "cast_discovered", signal)
discover_cast(FAKE_MDNS_SERVICE, info)
await hass.async_block_till_done()

# when called with incomplete info, it should use HTTP to get missing
get_cast_type_mock.assert_called_once()
assert get_cast_type_mock.call_count == 1
discover = signal.mock_calls[0][1][0]
assert discover == full_info
assert "Fetched cast details for unknown model 'Chromecast'" in caplog.text

# Call again, the model name should be fetched from cache
discover_cast(FAKE_MDNS_SERVICE, info)
await hass.async_block_till_done()
assert get_cast_type_mock.call_count == 1 # No additional calls
discover = signal.mock_calls[1][1][0]
assert discover == full_info

# Call for another model, need to call HTTP again
get_cast_type_mock.return_value = full_info2.cast_info
discover_cast(FAKE_MDNS_SERVICE, info2)
await hass.async_block_till_done()
assert get_cast_type_mock.call_count == 2
discover = signal.mock_calls[2][1][0]
assert discover == full_info2


async def test_stop_discovery_called_on_stop(hass, castbrowser_mock):
"""Test pychromecast.stop_discovery called on shutdown."""
# start_discovery should be called with empty config

@@ -98,16 +98,13 @@ async def test_hassio_system_health_with_issues(hass, aioclient_mock):

assert info["healthy"] == {
"error": "Unhealthy",
"more_info": "/hassio/system",
"type": "failed",
}
assert info["supported"] == {
"error": "Unsupported",
"more_info": "/hassio/system",
"type": "failed",
}
assert info["version_api"] == {
"error": "unreachable",
"more_info": "/hassio/system",
"type": "failed",
}

@@ -30,6 +30,7 @@ from homeassistant.components.homekit.const import (
MANUFACTURER,
SERV_ACCESSORY_INFO,
)
from homeassistant.components.homekit.util import format_version
from homeassistant.const import (
ATTR_BATTERY_CHARGING,
ATTR_BATTERY_LEVEL,
@@ -165,7 +166,7 @@ async def test_home_accessory(hass, hk_driver):
serv.get_characteristic(CHAR_SERIAL_NUMBER).value
== "light.accessory_that_exceeds_the_maximum_maximum_maximum_maximum"
)
assert hass_version.startswith(
assert format_version(hass_version).startswith(
serv.get_characteristic(CHAR_FIRMWARE_REVISION).value
)

@@ -217,7 +218,7 @@ async def test_accessory_with_missing_basic_service_info(hass, hk_driver):
assert serv.get_characteristic(CHAR_MANUFACTURER).value == "Home Assistant Sensor"
assert serv.get_characteristic(CHAR_MODEL).value == "Sensor"
assert serv.get_characteristic(CHAR_SERIAL_NUMBER).value == entity_id
assert hass_version.startswith(
assert format_version(hass_version).startswith(
serv.get_characteristic(CHAR_FIRMWARE_REVISION).value
)
assert isinstance(acc.to_HAP(), dict)
@@ -247,7 +248,7 @@ async def test_accessory_with_hardware_revision(hass, hk_driver):
assert serv.get_characteristic(CHAR_MANUFACTURER).value == "Home Assistant Sensor"
assert serv.get_characteristic(CHAR_MODEL).value == "Sensor"
assert serv.get_characteristic(CHAR_SERIAL_NUMBER).value == entity_id
assert hass_version.startswith(
assert format_version(hass_version).startswith(
serv.get_characteristic(CHAR_FIRMWARE_REVISION).value
)
assert serv.get_characteristic(CHAR_HARDWARE_REVISION).value == "1.2.3"
@@ -692,7 +693,7 @@ def test_home_bridge(hk_driver):
serv = bridge.services[0] # SERV_ACCESSORY_INFO
assert serv.display_name == SERV_ACCESSORY_INFO
assert serv.get_characteristic(CHAR_NAME).value == BRIDGE_NAME
assert hass_version.startswith(
assert format_version(hass_version).startswith(
serv.get_characteristic(CHAR_FIRMWARE_REVISION).value
)
assert serv.get_characteristic(CHAR_MANUFACTURER).value == MANUFACTURER

@@ -1504,7 +1504,7 @@ async def test_options_flow_exclude_mode_skips_hidden_entities(


@patch(f"{PATH_HOMEKIT}.async_port_is_available", return_value=True)
async def test_options_flow_include_mode_skips_hidden_entities(
async def test_options_flow_include_mode_allows_hidden_entities(
port_mock, hass, mock_get_source_ip, hk_driver, mock_async_zeroconf, entity_reg
):
"""Ensure include mode does not offer hidden entities."""
@@ -1558,24 +1558,28 @@ async def test_options_flow_include_mode_skips_hidden_entities(
assert _get_schema_default(result2["data_schema"].schema, "entities") == []

# sonos_hidden_switch.entity_id is a hidden entity
# so it should not be selectable since it will always be excluded
with pytest.raises(voluptuous.error.MultipleInvalid):
await hass.config_entries.options.async_configure(
result2["flow_id"],
user_input={"entities": [sonos_hidden_switch.entity_id]},
)

result4 = await hass.config_entries.options.async_configure(
# we allow it to be selected in include mode only
result3 = await hass.config_entries.options.async_configure(
result2["flow_id"],
user_input={"entities": ["media_player.tv", "switch.other"]},
user_input={
"entities": [
sonos_hidden_switch.entity_id,
"media_player.tv",
"switch.other",
]
},
)
assert result4["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
assert result3["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
assert config_entry.options == {
"mode": "bridge",
"filter": {
"exclude_domains": [],
"exclude_entities": [],
"include_domains": [],
"include_entities": ["media_player.tv", "switch.other"],
"include_entities": [
sonos_hidden_switch.entity_id,
"media_player.tv",
"switch.other",
],
},
}

@@ -522,11 +522,29 @@ def test_validate_topic():

# Topics "SHOULD NOT" include these special characters
# (not MUST NOT, RFC2119). The receiver MAY close the connection.
mqtt.util.valid_topic("\u0001")
mqtt.util.valid_topic("\u001F")
mqtt.util.valid_topic("\u009F")
mqtt.util.valid_topic("\u009F")
mqtt.util.valid_topic("\uffff")
# We enforce this because mosquitto does: https://github.com/eclipse/mosquitto/commit/94fdc9cb44c829ff79c74e1daa6f7d04283dfffd
with pytest.raises(vol.Invalid):
mqtt.util.valid_topic("\u0001")
with pytest.raises(vol.Invalid):
mqtt.util.valid_topic("\u001F")
with pytest.raises(vol.Invalid):
mqtt.util.valid_topic("\u007F")
with pytest.raises(vol.Invalid):
mqtt.util.valid_topic("\u009F")
with pytest.raises(vol.Invalid):
mqtt.util.valid_topic("\ufdd0")
with pytest.raises(vol.Invalid):
mqtt.util.valid_topic("\ufdef")
with pytest.raises(vol.Invalid):
mqtt.util.valid_topic("\ufffe")
with pytest.raises(vol.Invalid):
mqtt.util.valid_topic("\ufffe")
with pytest.raises(vol.Invalid):
mqtt.util.valid_topic("\uffff")
with pytest.raises(vol.Invalid):
mqtt.util.valid_topic("\U0001fffe")
with pytest.raises(vol.Invalid):
mqtt.util.valid_topic("\U0001ffff")


def test_validate_subscribe_topic():

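Editor's note: a minimal sketch, assuming nothing about the component's actual implementation, of a topic validator that matches the behaviour the updated test expects: control characters and Unicode noncharacters raise vol.Invalid, mirroring what mosquitto enforces. The function name is hypothetical.

import voluptuous as vol


def valid_topic_sketch(topic: str) -> str:
    """Reject MQTT topics containing control characters or Unicode noncharacters (illustrative only)."""
    for char in topic:
        code = ord(char)
        if code <= 0x1F or 0x7F <= code <= 0x9F:
            # C0/C1 control characters and DEL are rejected
            raise vol.Invalid("MQTT topic must not contain control characters")
        if 0xFDD0 <= code <= 0xFDEF or (code & 0xFFFF) in (0xFFFE, 0xFFFF):
            # Unicode noncharacters such as U+FFFE, U+FFFF, U+1FFFE are rejected
            raise vol.Invalid("MQTT topic must not contain noncharacters")
    return topic
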
@@ -628,7 +628,7 @@ async def test_state_changes_during_period_query_during_migration_to_schema_25(
conn.execute(text("drop table state_attributes;"))
conn.commit()

with patch.object(instance, "migration_in_progress", True):
with patch.object(instance, "schema_version", 24):
no_attributes = True
hist = history.state_changes_during_period(
hass, start, end, entity_id, no_attributes, include_start_time_state=False
@@ -674,7 +674,7 @@ async def test_get_states_query_during_migration_to_schema_25(
conn.execute(text("drop table state_attributes;"))
conn.commit()

with patch.object(instance, "migration_in_progress", True):
with patch.object(instance, "schema_version", 24):
no_attributes = True
hist = await _async_get_states(
hass, end, [entity_id], no_attributes=no_attributes
@@ -723,7 +723,7 @@ async def test_get_states_query_during_migration_to_schema_25_multiple_entities(
conn.execute(text("drop table state_attributes;"))
conn.commit()

with patch.object(instance, "migration_in_progress", True):
with patch.object(instance, "schema_version", 24):
no_attributes = True
hist = await _async_get_states(
hass, end, entity_ids, no_attributes=no_attributes

@@ -31,6 +31,7 @@ from homeassistant.components.recorder import (
)
from homeassistant.components.recorder.const import DATA_INSTANCE
from homeassistant.components.recorder.models import (
SCHEMA_VERSION,
Events,
RecorderRuns,
StateAttributes,
@@ -438,6 +439,12 @@ def _state_empty_context(hass, entity_id):
return state


def test_setup_without_migration(hass_recorder):
"""Verify the schema version without a migration."""
hass = hass_recorder()
assert recorder.get_instance(hass).schema_version == SCHEMA_VERSION


# pylint: disable=redefined-outer-name,invalid-name
def test_saving_state_include_domains(hass_recorder):
"""Test saving and restoring a state."""

@@ -22,7 +22,11 @@ from homeassistant.bootstrap import async_setup_component
from homeassistant.components import persistent_notification as pn, recorder
from homeassistant.components.recorder import migration, models
from homeassistant.components.recorder.const import DATA_INSTANCE
from homeassistant.components.recorder.models import RecorderRuns, States
from homeassistant.components.recorder.models import (
SCHEMA_VERSION,
RecorderRuns,
States,
)
from homeassistant.components.recorder.util import session_scope
import homeassistant.util.dt as dt_util

@@ -79,6 +83,7 @@ async def test_migration_in_progress(hass):
await async_wait_recording_done(hass)

assert recorder.util.async_migration_in_progress(hass) is False
assert recorder.get_instance(hass).schema_version == SCHEMA_VERSION


async def test_database_migration_failed(hass):

@@ -22,7 +22,7 @@ from samsungtvws.remote import ChannelEmitCommand
from homeassistant.components.samsungtv.const import WEBSOCKET_SSL_PORT
import homeassistant.util.dt as dt_util

from .const import SAMPLE_DEVICE_INFO_WIFI
from .const import SAMPLE_DEVICE_INFO_UE48JU6400, SAMPLE_DEVICE_INFO_WIFI


@pytest.fixture(autouse=True)
@@ -177,7 +177,7 @@ def rest_api_fixture_non_ssl_only() -> Mock:
"""Mock rest_device_info to fail for ssl and work for non-ssl."""
if self.port == WEBSOCKET_SSL_PORT:
raise ResponseError
return SAMPLE_DEVICE_INFO_WIFI
return SAMPLE_DEVICE_INFO_UE48JU6400

with patch(
"homeassistant.components.samsungtv.bridge.SamsungTVAsyncRest",

@@ -340,16 +340,16 @@ async def test_user_encrypted_websocket(
)

assert result4["type"] == "create_entry"
assert result4["title"] == "Living Room (82GXARRS)"
assert result4["title"] == "TV-UE48JU6470 (UE48JU6400)"
assert result4["data"][CONF_HOST] == "fake_host"
assert result4["data"][CONF_NAME] == "Living Room"
assert result4["data"][CONF_NAME] == "TV-UE48JU6470"
assert result4["data"][CONF_MAC] == "aa:bb:ww:ii:ff:ii"
assert result4["data"][CONF_MANUFACTURER] == "Samsung"
assert result4["data"][CONF_MODEL] == "82GXARRS"
assert result4["data"][CONF_MODEL] == "UE48JU6400"
assert result4["data"][CONF_SSDP_RENDERING_CONTROL_LOCATION] is None
assert result4["data"][CONF_TOKEN] == "037739871315caef138547b03e348b72"
assert result4["data"][CONF_SESSION_ID] == "1"
assert result4["result"].unique_id == "be9554b9-c9fb-41f4-8920-22da015376a4"
assert result4["result"].unique_id == "223da676-497a-4e06-9507-5e27ec4f0fb3"


@pytest.mark.usefixtures("rest_api_failing")
@@ -714,19 +714,19 @@ async def test_ssdp_encrypted_websocket_success_populates_mac_address_and_ssdp_l
)

assert result4["type"] == "create_entry"
assert result4["title"] == "Living Room (82GXARRS)"
assert result4["title"] == "TV-UE48JU6470 (UE48JU6400)"
assert result4["data"][CONF_HOST] == "fake_host"
assert result4["data"][CONF_NAME] == "Living Room"
assert result4["data"][CONF_NAME] == "TV-UE48JU6470"
assert result4["data"][CONF_MAC] == "aa:bb:ww:ii:ff:ii"
assert result4["data"][CONF_MANUFACTURER] == "Samsung fake_manufacturer"
assert result4["data"][CONF_MODEL] == "82GXARRS"
assert result4["data"][CONF_MODEL] == "UE48JU6400"
assert (
result4["data"][CONF_SSDP_RENDERING_CONTROL_LOCATION]
== "https://fake_host:12345/test"
)
assert result4["data"][CONF_TOKEN] == "037739871315caef138547b03e348b72"
assert result4["data"][CONF_SESSION_ID] == "1"
assert result4["result"].unique_id == "be9554b9-c9fb-41f4-8920-22da015376a4"
assert result4["result"].unique_id == "223da676-497a-4e06-9507-5e27ec4f0fb3"


@pytest.mark.usefixtures("rest_api_non_ssl_only")
@@ -1036,13 +1036,13 @@ async def test_dhcp_wireless(hass: HomeAssistant) -> None:
result["flow_id"], user_input="whatever"
)
assert result["type"] == "create_entry"
assert result["title"] == "Living Room (82GXARRS)"
assert result["title"] == "TV-UE48JU6470 (UE48JU6400)"
assert result["data"][CONF_HOST] == "fake_host"
assert result["data"][CONF_NAME] == "Living Room"
assert result["data"][CONF_NAME] == "TV-UE48JU6470"
assert result["data"][CONF_MAC] == "aa:bb:ww:ii:ff:ii"
assert result["data"][CONF_MANUFACTURER] == "Samsung"
assert result["data"][CONF_MODEL] == "82GXARRS"
assert result["result"].unique_id == "be9554b9-c9fb-41f4-8920-22da015376a4"
assert result["data"][CONF_MODEL] == "UE48JU6400"
assert result["result"].unique_id == "223da676-497a-4e06-9507-5e27ec4f0fb3"


@pytest.mark.usefixtures("remotews", "rest_api", "remoteencws_failing")

@@ -1,6 +1,7 @@
"""The tests for the Script component."""
# pylint: disable=protected-access
import asyncio
from datetime import timedelta
from unittest.mock import Mock, patch

import pytest
@@ -33,12 +34,13 @@ from homeassistant.helpers.script import (
SCRIPT_MODE_QUEUED,
SCRIPT_MODE_RESTART,
SCRIPT_MODE_SINGLE,
_async_stop_scripts_at_shutdown,
)
from homeassistant.helpers.service import async_get_all_descriptions
from homeassistant.setup import async_setup_component
import homeassistant.util.dt as dt_util

from tests.common import async_mock_service, mock_restore_cache
from tests.common import async_fire_time_changed, async_mock_service, mock_restore_cache
from tests.components.logbook.test_init import MockLazyEventPartialState

ENTITY_ID = "script.test"
@@ -919,6 +921,91 @@ async def test_recursive_script_indirect(hass, script_mode, warning_msg, caplog)
assert warning_msg in caplog.text


@pytest.mark.parametrize(
"script_mode", [SCRIPT_MODE_PARALLEL, SCRIPT_MODE_QUEUED, SCRIPT_MODE_RESTART]
)
async def test_recursive_script_turn_on(hass: HomeAssistant, script_mode, caplog):
"""Test script turning itself on.

- Illegal recursion detection should not be triggered
- Home Assistant should not hang on shut down
- SCRIPT_MODE_SINGLE is not relevant because such a script can't turn itself on
"""
# Make sure we cover all script modes
assert SCRIPT_MODE_CHOICES == [
SCRIPT_MODE_PARALLEL,
SCRIPT_MODE_QUEUED,
SCRIPT_MODE_RESTART,
SCRIPT_MODE_SINGLE,
]
stop_scripts_at_shutdown_called = asyncio.Event()
real_stop_scripts_at_shutdown = _async_stop_scripts_at_shutdown

async def stop_scripts_at_shutdown(*args):
await real_stop_scripts_at_shutdown(*args)
stop_scripts_at_shutdown_called.set()

with patch(
"homeassistant.helpers.script._async_stop_scripts_at_shutdown",
wraps=stop_scripts_at_shutdown,
):
assert await async_setup_component(
hass,
script.DOMAIN,
{
script.DOMAIN: {
"script1": {
"mode": script_mode,
"sequence": [
{
"choose": {
"conditions": {
"condition": "template",
"value_template": "{{ request == 'step_2' }}",
},
"sequence": {"service": "test.script_done"},
},
"default": {
"service": "script.turn_on",
"data": {
"entity_id": "script.script1",
"variables": {"request": "step_2"},
},
},
},
{
"service": "script.turn_on",
"data": {"entity_id": "script.script1"},
},
],
}
}
},
)

service_called = asyncio.Event()

async def async_service_handler(service):
if service.service == "script_done":
service_called.set()

hass.services.async_register("test", "script_done", async_service_handler)

await hass.services.async_call("script", "script1")
await asyncio.wait_for(service_called.wait(), 1)

# Trigger 1st stage script shutdown
hass.state = CoreState.stopping
hass.bus.async_fire("homeassistant_stop")
await asyncio.wait_for(stop_scripts_at_shutdown_called.wait(), 1)

# Trigger 2nd stage script shutdown
async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=90))
await hass.async_block_till_done()

assert "Disallowed recursion detected" not in caplog.text


async def test_setup_with_duplicate_scripts(
hass: HomeAssistant, caplog: pytest.LogCaptureFixture
) -> None:

@@ -214,9 +214,62 @@ async def test_options_flow(hass: HomeAssistant) -> None:

assert result["type"] == RESULT_TYPE_CREATE_ENTRY
assert result["data"] == {
"name": "Get Value",
"db_url": "sqlite://",
"query": "SELECT 5 as size",
"column": "size",
"value_template": None,
"unit_of_measurement": "MiB",
}


async def test_options_flow_name_previously_removed(hass: HomeAssistant) -> None:
"""Test options config flow where the name was missing."""
entry = MockConfigEntry(
domain=DOMAIN,
data={},
options={
"db_url": "sqlite://",
"query": "SELECT 5 as value",
"column": "value",
"unit_of_measurement": "MiB",
"value_template": None,
},
title="Get Value Title",
)
entry.add_to_hass(hass)

assert await hass.config_entries.async_setup(entry.entry_id)
await hass.async_block_till_done()

result = await hass.config_entries.options.async_init(entry.entry_id)

assert result["type"] == RESULT_TYPE_FORM
assert result["step_id"] == "init"

with patch(
"homeassistant.components.sql.async_setup_entry",
return_value=True,
) as mock_setup_entry:
result = await hass.config_entries.options.async_configure(
result["flow_id"],
user_input={
"db_url": "sqlite://",
"query": "SELECT 5 as size",
"column": "size",
"unit_of_measurement": "MiB",
},
)
await hass.async_block_till_done()

assert len(mock_setup_entry.mock_calls) == 1
assert result["type"] == RESULT_TYPE_CREATE_ENTRY
assert result["data"] == {
"name": "Get Value Title",
"db_url": "sqlite://",
"query": "SELECT 5 as size",
"column": "size",
"value_template": None,
"unit_of_measurement": "MiB",
}

@@ -312,6 +365,8 @@ async def test_options_flow_fails_invalid_query(

assert result4["type"] == RESULT_TYPE_CREATE_ENTRY
assert result4["data"] == {
"name": "Get Value",
"value_template": None,
"db_url": "sqlite://",
"query": "SELECT 5 as size",
"column": "size",

@@ -653,6 +653,120 @@ async def test_this_variable(hass, start_ha):
assert hass.states.get(TEST_NAME).state == "It Works: " + TEST_NAME


@pytest.mark.parametrize("count,domain", [(1, "template")])
@pytest.mark.parametrize(
"config",
[
{
"template": {
"sensor": {
"state": "{{ this.attributes.get('test', 'no-test!') }}: {{ this.entity_id }}",
"icon": "mdi:{% if this.entity_id in states and 'friendly_name' in this.attributes %} {{this.attributes['friendly_name']}} {% else %}{{this.entity_id}}:{{this.entity_id in states}}{% endif %}",
"name": "{% if this.entity_id in states and 'friendly_name' in this.attributes %} {{this.attributes['friendly_name']}} {% else %}{{this.entity_id}}:{{this.entity_id in states}}{% endif %}",
"picture": "{% if this.entity_id in states and 'entity_picture' in this.attributes %} {{this.attributes['entity_picture']}} {% else %}{{this.entity_id}}:{{this.entity_id in states}}{% endif %}",
"attributes": {"test": "{{ this.entity_id }}"},
},
},
},
],
)
async def test_this_variable_early_hass_not_running(hass, config, count, domain):
"""Test referencing 'this' variable before the entity is in the state machine.

Hass is not yet started when the entity is added.
Icon, name and picture templates are rendered once in the constructor.
"""
entity_id = "sensor.none_false"

hass.state = CoreState.not_running

# Setup template
with assert_setup_component(count, domain):
assert await async_setup_component(
hass,
domain,
config,
)
await hass.async_block_till_done()
await hass.async_block_till_done()

# Sensor state not rendered, icon, name and picture
# templates rendered in constructor with entity_id set to None
state = hass.states.get(entity_id)
assert state.state == "unknown"
assert state.attributes == {
"entity_picture": "None:False",
"friendly_name": "None:False",
"icon": "mdi:None:False",
}

# Signal hass started
hass.bus.async_fire(EVENT_HOMEASSISTANT_START)
await hass.async_block_till_done()

# Re-render icon, name, picture + other templates now rendered
state = hass.states.get(entity_id)
assert state.state == "sensor.none_false: sensor.none_false"
assert state.attributes == {
"entity_picture": "sensor.none_false:False",
"friendly_name": "sensor.none_false:False",
"icon": "mdi:sensor.none_false:False",
"test": "sensor.none_false",
}


@pytest.mark.parametrize("count,domain", [(1, "template")])
@pytest.mark.parametrize(
"config",
[
{
"template": {
"sensor": {
"state": "{{ this.attributes.get('test', 'no-test!') }}: {{ this.entity_id }}",
"icon": "mdi:{% if this.entity_id in states and 'friendly_name' in this.attributes %} {{this.attributes['friendly_name']}} {% else %}{{this.entity_id}}:{{this.entity_id in states}}{% endif %}",
"name": "{% if this.entity_id in states and 'friendly_name' in this.attributes %} {{this.attributes['friendly_name']}} {% else %}{{this.entity_id}}:{{this.entity_id in states}}{% endif %}",
"picture": "{% if this.entity_id in states and 'entity_picture' in this.attributes %} {{this.attributes['entity_picture']}} {% else %}{{this.entity_id}}:{{this.entity_id in states}}{% endif %}",
"attributes": {"test": "{{ this.entity_id }}"},
},
},
},
],
)
async def test_this_variable_early_hass_running(hass, config, count, domain):
"""Test referencing 'this' variable before the entity is in the state machine.

Hass is already started when the entity is added.
Icon, name and picture templates are rendered in the constructor, and again
before the entity is added to hass.
"""

# Start hass
assert hass.state == CoreState.running
await hass.async_start()
await hass.async_block_till_done()

# Setup template
with assert_setup_component(count, domain):
assert await async_setup_component(
hass,
domain,
config,
)
await hass.async_block_till_done()
await hass.async_block_till_done()

entity_id = "sensor.none_false"
# All templates rendered
state = hass.states.get(entity_id)
assert state.state == "sensor.none_false: sensor.none_false"
assert state.attributes == {
"entity_picture": "sensor.none_false:False",
"friendly_name": "sensor.none_false:False",
"icon": "mdi:sensor.none_false:False",
"test": "sensor.none_false",
}


@pytest.mark.parametrize("count,domain", [(1, sensor.DOMAIN)])
@pytest.mark.parametrize(
"config",

@@ -1539,6 +1539,7 @@ async def test_trace_blueprint_automation(
"input": {
"trigger_event": "blueprint_event",
"service_to_call": "test.automation",
"a_number": 5,
},
},
}

tests/components/ukraine_alarm/__init__.py (new file, 1 line)
@@ -0,0 +1 @@
"""Tests for the Ukraine Alarm integration."""

tests/components/ukraine_alarm/test_config_flow.py (new file, 354 lines)
@@ -0,0 +1,354 @@
"""Test the Ukraine Alarm config flow."""
|
||||
import asyncio
|
||||
from collections.abc import Generator
|
||||
from unittest.mock import AsyncMock, patch
|
||||
|
||||
from aiohttp import ClientConnectionError, ClientError, ClientResponseError
|
||||
import pytest
|
||||
|
||||
from homeassistant import config_entries
|
||||
from homeassistant.components.ukraine_alarm.const import DOMAIN
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.data_entry_flow import RESULT_TYPE_CREATE_ENTRY, RESULT_TYPE_FORM
|
||||
|
||||
MOCK_API_KEY = "mock-api-key"
|
||||
|
||||
|
||||
def _region(rid, recurse=0, depth=0):
|
||||
if depth == 0:
|
||||
name_prefix = "State"
|
||||
elif depth == 1:
|
||||
name_prefix = "District"
|
||||
else:
|
||||
name_prefix = "Community"
|
||||
|
||||
name = f"{name_prefix} {rid}"
|
||||
region = {"regionId": rid, "regionName": name, "regionChildIds": []}
|
||||
|
||||
if not recurse:
|
||||
return region
|
||||
|
||||
for i in range(1, 4):
|
||||
region["regionChildIds"].append(_region(f"{rid}.{i}", recurse - 1, depth + 1))
|
||||
|
||||
return region
|
||||
|
||||
|
||||
REGIONS = {
|
||||
"states": [_region(f"{i}", i - 1) for i in range(1, 4)],
|
||||
}
|
||||
|
||||
|
||||
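Editor's note: purely for illustration, the nested structure the _region helper above produces; for example, _region("2", 1) (the second entry in REGIONS["states"]) evaluates to:

EXAMPLE_REGION = {
    "regionId": "2",
    "regionName": "State 2",
    "regionChildIds": [
        {"regionId": "2.1", "regionName": "District 2.1", "regionChildIds": []},
        {"regionId": "2.2", "regionName": "District 2.2", "regionChildIds": []},
        {"regionId": "2.3", "regionName": "District 2.3", "regionChildIds": []},
    ],
}
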
@pytest.fixture(autouse=True)
def mock_get_regions() -> Generator[None, AsyncMock, None]:
"""Mock the get_regions method."""

with patch(
"homeassistant.components.ukraine_alarm.config_flow.Client.get_regions",
return_value=REGIONS,
) as mock_get:
yield mock_get


async def test_state(hass: HomeAssistant) -> None:
"""Test we can create entry for state."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
assert result["type"] == RESULT_TYPE_FORM

result2 = await hass.config_entries.flow.async_configure(
result["flow_id"],
{
"api_key": MOCK_API_KEY,
},
)
assert result2["type"] == RESULT_TYPE_FORM

with patch(
"homeassistant.components.ukraine_alarm.async_setup_entry",
return_value=True,
) as mock_setup_entry:
result3 = await hass.config_entries.flow.async_configure(
result["flow_id"],
{
"region": "1",
},
)
await hass.async_block_till_done()

assert result3["type"] == RESULT_TYPE_CREATE_ENTRY
assert result3["title"] == "State 1"
assert result3["data"] == {
"api_key": MOCK_API_KEY,
"region": "1",
"name": result3["title"],
}
assert len(mock_setup_entry.mock_calls) == 1


async def test_state_district(hass: HomeAssistant) -> None:
"""Test we can create entry for state + district."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
assert result["type"] == RESULT_TYPE_FORM

result2 = await hass.config_entries.flow.async_configure(
result["flow_id"],
{
"api_key": MOCK_API_KEY,
},
)
assert result2["type"] == RESULT_TYPE_FORM

result3 = await hass.config_entries.flow.async_configure(
result["flow_id"],
{
"region": "2",
},
)
assert result3["type"] == RESULT_TYPE_FORM

with patch(
"homeassistant.components.ukraine_alarm.async_setup_entry",
return_value=True,
) as mock_setup_entry:
result4 = await hass.config_entries.flow.async_configure(
result["flow_id"],
{
"region": "2.2",
},
)
await hass.async_block_till_done()

assert result4["type"] == RESULT_TYPE_CREATE_ENTRY
assert result4["title"] == "District 2.2"
assert result4["data"] == {
"api_key": MOCK_API_KEY,
"region": "2.2",
"name": result4["title"],
}
assert len(mock_setup_entry.mock_calls) == 1


async def test_state_district_pick_region(hass: HomeAssistant) -> None:
"""Test we can create entry for region which has districts."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
assert result["type"] == RESULT_TYPE_FORM

result2 = await hass.config_entries.flow.async_configure(
result["flow_id"],
{
"api_key": MOCK_API_KEY,
},
)
assert result2["type"] == RESULT_TYPE_FORM

result3 = await hass.config_entries.flow.async_configure(
result["flow_id"],
{
"region": "2",
},
)
assert result3["type"] == RESULT_TYPE_FORM

with patch(
"homeassistant.components.ukraine_alarm.async_setup_entry",
return_value=True,
) as mock_setup_entry:
result4 = await hass.config_entries.flow.async_configure(
result["flow_id"],
{
"region": "2",
},
)
await hass.async_block_till_done()

assert result4["type"] == RESULT_TYPE_CREATE_ENTRY
assert result4["title"] == "State 2"
assert result4["data"] == {
"api_key": MOCK_API_KEY,
"region": "2",
"name": result4["title"],
}
assert len(mock_setup_entry.mock_calls) == 1


async def test_state_district_community(hass: HomeAssistant) -> None:
"""Test we can create entry for state + district + community."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
assert result["type"] == RESULT_TYPE_FORM

result2 = await hass.config_entries.flow.async_configure(
result["flow_id"],
{
"api_key": MOCK_API_KEY,
},
)
assert result2["type"] == RESULT_TYPE_FORM

result3 = await hass.config_entries.flow.async_configure(
result["flow_id"],
{
"region": "3",
},
)
assert result3["type"] == RESULT_TYPE_FORM

result4 = await hass.config_entries.flow.async_configure(
result["flow_id"],
{
"region": "3.2",
},
)
assert result4["type"] == RESULT_TYPE_FORM

with patch(
"homeassistant.components.ukraine_alarm.async_setup_entry",
return_value=True,
) as mock_setup_entry:
result5 = await hass.config_entries.flow.async_configure(
result["flow_id"],
{
"region": "3.2.1",
},
)
await hass.async_block_till_done()

assert result5["type"] == RESULT_TYPE_CREATE_ENTRY
assert result5["title"] == "Community 3.2.1"
assert result5["data"] == {
"api_key": MOCK_API_KEY,
"region": "3.2.1",
"name": result5["title"],
}
assert len(mock_setup_entry.mock_calls) == 1


async def test_invalid_api(hass: HomeAssistant, mock_get_regions: AsyncMock) -> None:
"""Test we can create entry for just region."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
assert result["type"] == RESULT_TYPE_FORM

mock_get_regions.side_effect = ClientResponseError(None, None, status=401)

result2 = await hass.config_entries.flow.async_configure(
result["flow_id"],
{
"api_key": MOCK_API_KEY,
},
)
assert result2["type"] == RESULT_TYPE_FORM
assert result2["step_id"] == "user"
assert result2["errors"] == {"base": "invalid_api_key"}


async def test_server_error(hass: HomeAssistant, mock_get_regions) -> None:
"""Test we can create entry for just region."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
assert result["type"] == RESULT_TYPE_FORM

mock_get_regions.side_effect = ClientResponseError(None, None, status=500)

result2 = await hass.config_entries.flow.async_configure(
result["flow_id"],
{
"api_key": MOCK_API_KEY,
},
)
assert result2["type"] == RESULT_TYPE_FORM
assert result2["step_id"] == "user"
assert result2["errors"] == {"base": "unknown"}


async def test_cannot_connect(hass: HomeAssistant, mock_get_regions: AsyncMock) -> None:
"""Test we can create entry for just region."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
assert result["type"] == RESULT_TYPE_FORM

mock_get_regions.side_effect = ClientConnectionError

result2 = await hass.config_entries.flow.async_configure(
result["flow_id"],
{
"api_key": MOCK_API_KEY,
},
)
assert result2["type"] == RESULT_TYPE_FORM
assert result2["step_id"] == "user"
assert result2["errors"] == {"base": "cannot_connect"}


async def test_unknown_client_error(
hass: HomeAssistant, mock_get_regions: AsyncMock
) -> None:
"""Test we can create entry for just region."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
assert result["type"] == RESULT_TYPE_FORM

mock_get_regions.side_effect = ClientError

result2 = await hass.config_entries.flow.async_configure(
result["flow_id"],
{
"api_key": MOCK_API_KEY,
},
)
assert result2["type"] == RESULT_TYPE_FORM
assert result2["step_id"] == "user"
assert result2["errors"] == {"base": "unknown"}


async def test_timeout_error(hass: HomeAssistant, mock_get_regions: AsyncMock) -> None:
"""Test we can create entry for just region."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
assert result["type"] == RESULT_TYPE_FORM

mock_get_regions.side_effect = asyncio.TimeoutError

result2 = await hass.config_entries.flow.async_configure(
result["flow_id"],
{
"api_key": MOCK_API_KEY,
},
)
assert result2["type"] == RESULT_TYPE_FORM
assert result2["step_id"] == "user"
assert result2["errors"] == {"base": "timeout"}


async def test_no_regions_returned(
hass: HomeAssistant, mock_get_regions: AsyncMock
) -> None:
"""Test we can create entry for just region."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
assert result["type"] == RESULT_TYPE_FORM

mock_get_regions.return_value = {}

result2 = await hass.config_entries.flow.async_configure(
result["flow_id"],
{
"api_key": MOCK_API_KEY,
},
)
assert result2["type"] == RESULT_TYPE_FORM
assert result2["step_id"] == "user"
assert result2["errors"] == {"base": "unknown"}
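
Editor's note: a minimal sketch, assumed for illustration rather than taken from the integration's actual config flow, of the error mapping the tests above exercise in the user step (401 -> invalid_api_key, other HTTP errors and empty responses -> unknown, connection failures -> cannot_connect, timeouts -> timeout). The function name is hypothetical.

import asyncio
from aiohttp import ClientConnectionError, ClientError, ClientResponseError


async def _fetch_regions_sketch(get_regions):
    """Fetch regions and map failures to the form error keys asserted above."""
    errors = {}
    regions = None
    try:
        regions = await get_regions()
    except ClientResponseError as err:
        errors["base"] = "invalid_api_key" if err.status == 401 else "unknown"
    except ClientConnectionError:
        errors["base"] = "cannot_connect"
    except asyncio.TimeoutError:
        errors["base"] = "timeout"
    except ClientError:
        errors["base"] = "unknown"
    else:
        if not regions:
            # An empty region list is treated as an unknown error
            errors["base"] = "unknown"
    return regions, errors
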
@@ -11,7 +11,7 @@ import pytest
import voluptuous as vol

import homeassistant
from homeassistant.helpers import config_validation as cv, template
from homeassistant.helpers import config_validation as cv, selector, template


def test_boolean():
@@ -720,6 +720,17 @@ def test_string_in_serializer():
}


def test_selector_in_serializer():
"""Test selector with custom_serializer."""
assert cv.custom_serializer(selector.selector({"text": {}})) == {
"selector": {
"text": {
"multiline": False,
}
}
}


def test_positive_time_period_dict_in_serializer():
"""Test positive_time_period_dict with custom_serializer."""
assert cv.custom_serializer(cv.positive_time_period_dict) == {
Some files were not shown because too many files have changed in this diff.