forked from home-assistant/core
Compare commits
59 commits: 2022.2.0b1...2022.2.0b4

| SHA1 |
|---|
| ef143b5eb2 |
| 5d7aefa0b4 |
| 6b6bd381fd |
| 252f5f6b35 |
| 8bdee9cb1c |
| 7e350b8347 |
| ac8a1248f9 |
| ffe262abce |
| 5174e68b16 |
| 6e4c281e15 |
| 8e71e2e8ee |
| 26905115c8 |
| eca3514f9e |
| 305ffc4ab6 |
| 508fd0cb2a |
| 5368fb6d54 |
| d6527953c3 |
| 14c969ef6d |
| f6f25fa4ff |
| dcf6e61d4f |
| 2041d4c118 |
| b40bcecac0 |
| 2ed20df906 |
| 1a6964448c |
| 3dde12f887 |
| cd6c182c07 |
| f8e0c41e91 |
| 5f56107116 |
| fb3c99a891 |
| ca505b79b5 |
| c74a8bf65a |
| 406801ef73 |
| 2bfedcbdc5 |
| 84f817eb25 |
| 4ead2f2f7e |
| 421f9716a7 |
| 25e6d8858c |
| 3829a81d15 |
| 9318843867 |
| 4eb787b619 |
| 8e38b7624e |
| fdb52df7b7 |
| 6c3e8b06ea |
| 6ba52b1c86 |
| 1e60958fc4 |
| 0f9e65e687 |
| d382e24e5b |
| 82acaa380c |
| 0a00177a8f |
| 34cf82b017 |
| 44403dab62 |
| 909b0fb689 |
| 3f763ddc9a |
| 837d49f67b |
| 735edd83fc |
| 7415513352 |
| 6f20a75583 |
| 05d7fef9f0 |
| 2ff8f10b9f |
@@ -560,12 +560,7 @@ omit =
homeassistant/components/knx/__init__.py
homeassistant/components/knx/climate.py
homeassistant/components/knx/cover.py
homeassistant/components/knx/diagnostics.py
homeassistant/components/knx/expose.py
homeassistant/components/knx/knx_entity.py
homeassistant/components/knx/light.py
homeassistant/components/knx/notify.py
homeassistant/components/knx/schema.py
homeassistant/components/kodi/__init__.py
homeassistant/components/kodi/browse_media.py
homeassistant/components/kodi/const.py
.github/workflows/builder.yml
@@ -76,8 +76,10 @@ jobs:
- name: Build package
shell: bash
run: |
pip install twine wheel
python setup.py sdist bdist_wheel
# Remove dist, build, and homeassistant.egg-info
# when build locally for testing!
pip install twine build
python -m build

- name: Upload package
shell: bash
@@ -107,7 +107,7 @@ repos:
pass_filenames: false
language: script
types: [text]
files: ^(homeassistant/.+/manifest\.json|setup\.py|\.pre-commit-config\.yaml|script/gen_requirements_all\.py)$
files: ^(homeassistant/.+/manifest\.json|setup\.cfg|\.pre-commit-config\.yaml|script/gen_requirements_all\.py)$
- id: hassfest
name: hassfest
entry: script/run-in-env.sh python3 -m script.hassfest
@@ -115,3 +115,10 @@ repos:
language: script
types: [text]
files: ^(homeassistant/.+/(manifest|strings)\.json|\.coveragerc|\.strict-typing|homeassistant/.+/services\.yaml|script/hassfest/.+\.py)$
- id: hassfest-metadata
name: hassfest-metadata
entry: script/run-in-env.sh python3 -m script.hassfest -p metadata
pass_filenames: false
language: script
types: [text]
files: ^(script/hassfest/.+\.py|homeassistant/const\.py$|setup\.cfg)$
@@ -1,4 +1,3 @@
include README.rst
include LICENSE.md
graft homeassistant
recursive-exclude * *.py[co]
@@ -8,7 +8,11 @@
{
"hostname": "blink*",
"macaddress": "B85F98*"
}
},
{
"hostname": "blink*",
"macaddress": "00037F*"
}
],
"config_flow": true,
"iot_class": "cloud_polling"
@@ -4,7 +4,6 @@ from __future__ import annotations
import asyncio
from contextlib import suppress
from datetime import datetime, timedelta
import functools as ft
import json
import logging
from urllib.parse import quote
@@ -461,26 +460,10 @@ class CastDevice(MediaPlayerEntity):
media_controller = self._media_controller()
media_controller.seek(position)

async def async_browse_media(self, media_content_type=None, media_content_id=None):
"""Implement the websocket media browsing helper."""
kwargs = {}
async def _async_root_payload(self, content_filter):
"""Generate root node."""
children = []

if self._chromecast.cast_type == pychromecast.const.CAST_TYPE_AUDIO:
kwargs["content_filter"] = lambda item: item.media_content_type.startswith(
"audio/"
)

if plex.is_plex_media_id(media_content_id):
return await plex.async_browse_media(
self.hass, media_content_type, media_content_id, platform=CAST_DOMAIN
)

if media_content_type == "plex":
return await plex.async_browse_media(
self.hass, None, None, platform=CAST_DOMAIN
)

# Add external sources
if "plex" in self.hass.config.components:
children.append(
BrowseMedia(
@@ -494,15 +477,17 @@ class CastDevice(MediaPlayerEntity):
)
)

# Add local media source
try:
result = await media_source.async_browse_media(
self.hass, media_content_id, **kwargs
self.hass, None, content_filter=content_filter
)
children.append(result)
except BrowseError:
if not children:
raise

# If there's only one media source, resolve it
if len(children) == 1:
return await self.async_browse_media(
children[0].media_content_type,
@@ -519,6 +504,34 @@ class CastDevice(MediaPlayerEntity):
children=children,
)

async def async_browse_media(self, media_content_type=None, media_content_id=None):
"""Implement the websocket media browsing helper."""
content_filter = None

if self._chromecast.cast_type == pychromecast.const.CAST_TYPE_AUDIO:

def audio_content_filter(item):
"""Filter non audio content."""
return item.media_content_type.startswith("audio/")

content_filter = audio_content_filter

if media_content_id is None:
return await self._async_root_payload(content_filter)

if plex.is_plex_media_id(media_content_id):
return await plex.async_browse_media(
self.hass, media_content_type, media_content_id, platform=CAST_DOMAIN
)
if media_content_type == "plex":
return await plex.async_browse_media(
self.hass, None, None, platform=CAST_DOMAIN
)

return await media_source.async_browse_media(
self.hass, media_content_id, content_filter=content_filter
)

async def async_play_media(self, media_type, media_id, **kwargs):
"""Play a piece of media."""
# Handle media_source
@@ -540,12 +553,6 @@ class CastDevice(MediaPlayerEntity):
hass_url = get_url(self.hass, prefer_external=True)
media_id = f"{hass_url}{media_id}"

await self.hass.async_add_executor_job(
ft.partial(self.play_media, media_type, media_id, **kwargs)
)

def play_media(self, media_type, media_id, **kwargs):
"""Play media from a URL."""
extra = kwargs.get(ATTR_MEDIA_EXTRA, {})
metadata = extra.get("metadata")

@@ -564,7 +571,9 @@ class CastDevice(MediaPlayerEntity):
if "app_id" in app_data:
app_id = app_data.pop("app_id")
_LOGGER.info("Starting Cast app by ID %s", app_id)
self._chromecast.start_app(app_id)
await self.hass.async_add_executor_job(
self._chromecast.start_app, app_id
)
if app_data:
_LOGGER.warning(
"Extra keys %s were ignored. Please use app_name to cast media",
@@ -574,21 +583,28 @@ class CastDevice(MediaPlayerEntity):

app_name = app_data.pop("app_name")
try:
quick_play(self._chromecast, app_name, app_data)
await self.hass.async_add_executor_job(
quick_play, self._chromecast, app_name, app_data
)
except NotImplementedError:
_LOGGER.error("App %s not supported", app_name)

# Handle plex
elif media_id and media_id.startswith(PLEX_URI_SCHEME):
media_id = media_id[len(PLEX_URI_SCHEME) :]
media = lookup_plex_media(self.hass, media_type, media_id)
media = await self.hass.async_add_executor_job(
lookup_plex_media, self.hass, media_type, media_id
)
if media is None:
return
controller = PlexController()
self._chromecast.register_handler(controller)
controller.play_media(media)
await self.hass.async_add_executor_job(controller.play_media, media)
else:
app_data = {"media_id": media_id, "media_type": media_type, **extra}
quick_play(self._chromecast, "default_media_receiver", app_data)
await self.hass.async_add_executor_job(
quick_play, self._chromecast, "default_media_receiver", app_data
)

def _media_status(self):
"""
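The cast hunks above move blocking pychromecast and Plex calls off the event loop. A minimal sketch of that pattern, assuming only the `hass.async_add_executor_job` usage visible in the diff; `play_media` here is a hypothetical stand-in for any blocking callable:

```python
import functools as ft


async def async_play_in_executor(hass, play_media, media_type, media_id, **kwargs):
    """Run a blocking call in the executor instead of the event loop."""
    # Positional-only calls can be passed directly:
    #   await hass.async_add_executor_job(play_media, media_type, media_id)
    # Keyword arguments need functools.partial, as in the play_media hunk above.
    await hass.async_add_executor_job(
        ft.partial(play_media, media_type, media_id, **kwargs)
    )
```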
@@ -46,7 +46,9 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
"""Enable asyncio debugging and start the debugger."""
get_running_loop().set_debug(True)

debugpy.listen((conf[CONF_HOST], conf[CONF_PORT]))
await hass.async_add_executor_job(
debugpy.listen, (conf[CONF_HOST], conf[CONF_PORT])
)

if conf[CONF_WAIT]:
_LOGGER.warning(
@@ -7,7 +7,7 @@ from typing import Any

from pydeconz.group import DeconzScene as PydeconzScene

from homeassistant.components.scene import Scene
from homeassistant.components.scene import DOMAIN, Scene
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.dispatcher import async_dispatcher_connect
@@ -23,6 +23,7 @@ async def async_setup_entry(
) -> None:
"""Set up scenes for deCONZ component."""
gateway = get_gateway_from_config_entry(hass, config_entry)
gateway.entities[DOMAIN] = set()

@callback
def async_add_scene(
@@ -30,7 +31,11 @@ async def async_setup_entry(
| ValuesView[PydeconzScene] = gateway.api.scenes.values(),
) -> None:
"""Add scene from deCONZ."""
entities = [DeconzScene(scene, gateway) for scene in scenes]
entities = [
DeconzScene(scene, gateway)
for scene in scenes
if scene.deconz_id not in gateway.entities[DOMAIN]
]

if entities:
async_add_entities(entities)
@@ -59,10 +64,12 @@ class DeconzScene(Scene):
async def async_added_to_hass(self) -> None:
"""Subscribe to sensors events."""
self.gateway.deconz_ids[self.entity_id] = self._scene.deconz_id
self.gateway.entities[DOMAIN].add(self._scene.deconz_id)

async def async_will_remove_from_hass(self) -> None:
"""Disconnect scene object when removed."""
del self.gateway.deconz_ids[self.entity_id]
self.gateway.entities[DOMAIN].remove(self._scene.deconz_id)
self._scene = None

async def async_activate(self, **kwargs: Any) -> None:
@@ -170,7 +170,7 @@ async def _async_get_json_file_response(
return web.Response(
body=json_data,
content_type="application/json",
headers={"Content-Disposition": f'attachment; filename="{filename}.json"'},
headers={"Content-Disposition": f'attachment; filename="{filename}.json.txt"'},
)
@@ -2,19 +2,24 @@
from __future__ import annotations

from collections.abc import Iterable, Mapping
from typing import Any
from typing import Any, TypeVar, cast

from homeassistant.core import callback

from .const import REDACTED

T = TypeVar("T")


@callback
def async_redact_data(data: Mapping, to_redact: Iterable[Any]) -> dict[str, Any]:
def async_redact_data(data: T, to_redact: Iterable[Any]) -> T:
"""Redact sensitive data in a dict."""
if not isinstance(data, (Mapping, list)):
return data

if isinstance(data, list):
return cast(T, [async_redact_data(val, to_redact) for val in data])

redacted = {**data}

for key, value in redacted.items():
@@ -25,4 +30,4 @@ def async_redact_data(data: Mapping, to_redact: Iterable[Any]) -> dict[str, Any]
elif isinstance(value, list):
redacted[key] = [async_redact_data(item, to_redact) for item in value]

return redacted
return cast(T, redacted)
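The change above makes the redaction helper generic over mappings and lists via a `TypeVar`. A self-contained sketch of the same shape, not the exact Home Assistant implementation; `REDACTED` is defined locally here:

```python
from collections.abc import Iterable, Mapping
from typing import Any, TypeVar, cast

REDACTED = "**REDACTED**"
T = TypeVar("T")


def redact(data: T, to_redact: Iterable[Any]) -> T:
    """Recursively replace sensitive keys; other input passes through unchanged."""
    if isinstance(data, list):
        # Lists keep their shape; each element is redacted recursively.
        return cast(T, [redact(item, to_redact) for item in data])
    if not isinstance(data, Mapping):
        return data
    return cast(
        T,
        {
            key: REDACTED if key in to_redact else redact(value, to_redact)
            for key, value in data.items()
        },
    )


# Example: redact({"host": "1.2.3.4", "nested": [{"host": "x"}]}, {"host"})
```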
@@ -1,7 +1,9 @@
"""The Flick Electric integration."""

from datetime import datetime as dt
import logging

import jwt
from pyflick import FlickAPI
from pyflick.authentication import AbstractFlickAuth
from pyflick.const import DEFAULT_CLIENT_ID, DEFAULT_CLIENT_SECRET
@@ -18,7 +20,9 @@ from homeassistant.const import (
from homeassistant.core import HomeAssistant
from homeassistant.helpers import aiohttp_client

from .const import CONF_TOKEN_EXPIRES_IN, CONF_TOKEN_EXPIRY, DOMAIN
from .const import CONF_TOKEN_EXPIRY, DOMAIN

_LOGGER = logging.getLogger(__name__)

CONF_ID_TOKEN = "id_token"

@@ -69,6 +73,8 @@ class HassFlickAuth(AbstractFlickAuth):
return self._entry.data[CONF_ACCESS_TOKEN]

async def _update_token(self):
_LOGGER.debug("Fetching new access token")

token = await self.get_new_token(
username=self._entry.data[CONF_USERNAME],
password=self._entry.data[CONF_PASSWORD],
@@ -78,15 +84,19 @@ class HassFlickAuth(AbstractFlickAuth):
),
)

# Reduce expiry by an hour to avoid API being called after expiry
expiry = dt.now().timestamp() + int(token[CONF_TOKEN_EXPIRES_IN] - 3600)
_LOGGER.debug("New token: %s", token)

# Flick will send the same token, but expiry is relative - so grab it from the token
token_decoded = jwt.decode(
token[CONF_ID_TOKEN], options={"verify_signature": False}
)

self._hass.config_entries.async_update_entry(
self._entry,
data={
**self._entry.data,
CONF_ACCESS_TOKEN: token,
CONF_TOKEN_EXPIRY: expiry,
CONF_TOKEN_EXPIRY: token_decoded["exp"],
},
)
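The token-expiry fix above reads the absolute `exp` claim from the ID token instead of adding a relative `expires_in`. A hedged sketch using PyJWT as imported in the hunk; `id_token` is an illustrative parameter:

```python
import jwt  # PyJWT, as imported in the hunk above


def token_expiry(id_token: str) -> int:
    """Read the absolute expiry timestamp from a JWT without verifying it."""
    # verify_signature=False skips signature and algorithm checks; only do this
    # when the token is used locally as a data carrier, as in the diff above.
    claims = jwt.decode(id_token, options={"verify_signature": False})
    return claims["exp"]
```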
@@ -2,7 +2,6 @@

DOMAIN = "flick_electric"

CONF_TOKEN_EXPIRES_IN = "expires_in"
CONF_TOKEN_EXPIRY = "expires"

ATTR_START_AT = "start_at"
@@ -15,8 +15,6 @@ from homeassistant.util.dt import utcnow
from .const import ATTR_COMPONENTS, ATTR_END_AT, ATTR_START_AT, DOMAIN

_LOGGER = logging.getLogger(__name__)
_AUTH_URL = "https://api.flick.energy/identity/oauth/token"
_RESOURCE = "https://api.flick.energy/customer/mobile_provider/price"

SCAN_INTERVAL = timedelta(minutes=5)

@@ -71,6 +69,8 @@ class FlickPricingSensor(SensorEntity):
async with async_timeout.timeout(60):
self._price = await self._api.getPricing()

_LOGGER.debug("Pricing data: %s", self._price)

self._attributes[ATTR_START_AT] = self._price.start_at
self._attributes[ATTR_END_AT] = self._price.end_at
for component in self._price.components:
@@ -14,7 +14,7 @@ from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_HOST, EVENT_HOMEASSISTANT_STARTED, Platform
from homeassistant.core import HomeAssistant, callback
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.helpers import device_registry as dr
from homeassistant.helpers import device_registry as dr, entity_registry as er
from homeassistant.helpers.dispatcher import async_dispatcher_send
from homeassistant.helpers.event import (
async_track_time_change,
@@ -88,6 +88,31 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
return True


async def _async_migrate_unique_ids(hass: HomeAssistant, entry: ConfigEntry) -> None:
"""Migrate entities when the mac address gets discovered."""
unique_id = entry.unique_id
if not unique_id:
return
entry_id = entry.entry_id

@callback
def _async_migrator(entity_entry: er.RegistryEntry) -> dict[str, Any] | None:
# Old format {entry_id}.....
# New format {unique_id}....
entity_unique_id = entity_entry.unique_id
if not entity_unique_id.startswith(entry_id):
return None
new_unique_id = f"{unique_id}{entity_unique_id[len(entry_id):]}"
_LOGGER.info(
"Migrating unique_id from [%s] to [%s]",
entity_unique_id,
new_unique_id,
)
return {"new_unique_id": new_unique_id}

await er.async_migrate_entries(hass, entry.entry_id, _async_migrator)


async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Set up Flux LED/MagicLight from a config entry."""
host = entry.data[CONF_HOST]
@@ -135,6 +160,8 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
# is either missing or we have verified it matches
async_update_entry_from_discovery(hass, entry, discovery, device.model_num)

await _async_migrate_unique_ids(hass, entry)

coordinator = FluxLedUpdateCoordinator(hass, device, entry)
hass.data[DOMAIN][entry.entry_id] = coordinator
platforms = PLATFORMS_BY_TYPE[device.device_type]
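The new `_async_migrate_unique_ids` above rewrites entity unique_ids from the old `{entry_id}` prefix to the discovered MAC. A minimal sketch of the same registry-migration call, with hypothetical `old_prefix`/`new_prefix` parameters standing in for the entry id and MAC:

```python
from homeassistant.helpers import entity_registry as er


async def async_migrate_prefix(hass, entry, old_prefix: str, new_prefix: str) -> None:
    """Rewrite unique_ids that still carry the old prefix (sketch of the hunk above)."""

    def _migrator(entity_entry: er.RegistryEntry) -> dict[str, str] | None:
        if not entity_entry.unique_id.startswith(old_prefix):
            return None  # Nothing to do for this entity.
        return {
            "new_unique_id": f"{new_prefix}{entity_entry.unique_id[len(old_prefix):]}"
        }

    # async_migrate_entries walks every registry entry that belongs to the config entry.
    await er.async_migrate_entries(hass, entry.entry_id, _migrator)
```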
@@ -64,8 +64,8 @@ class FluxButton(FluxBaseEntity, ButtonEntity):
self.entity_description = description
super().__init__(device, entry)
self._attr_name = f"{entry.data[CONF_NAME]} {description.name}"
if entry.unique_id:
self._attr_unique_id = f"{entry.unique_id}_{description.key}"
base_unique_id = entry.unique_id or entry.entry_id
self._attr_unique_id = f"{base_unique_id}_{description.key}"

async def async_press(self) -> None:
"""Send out a command."""
@@ -51,6 +51,7 @@ FLUX_LED_EXCEPTIONS: Final = (

STARTUP_SCAN_TIMEOUT: Final = 5
DISCOVER_SCAN_TIMEOUT: Final = 10
DIRECTED_DISCOVERY_TIMEOUT: Final = 15

CONF_MODEL: Final = "model"
CONF_MODEL_NUM: Final = "model_num"
@@ -38,7 +38,7 @@ from .const import (
CONF_REMOTE_ACCESS_ENABLED,
CONF_REMOTE_ACCESS_HOST,
CONF_REMOTE_ACCESS_PORT,
DISCOVER_SCAN_TIMEOUT,
DIRECTED_DISCOVERY_TIMEOUT,
DOMAIN,
FLUX_LED_DISCOVERY,
)
@@ -194,7 +194,7 @@ async def async_discover_device(
"""Direct discovery at a single ip instead of broadcast."""
# If we are missing the unique_id we should be able to fetch it
# from the device by doing a directed discovery at the host only
for device in await async_discover_devices(hass, DISCOVER_SCAN_TIMEOUT, host):
for device in await async_discover_devices(hass, DIRECTED_DISCOVERY_TIMEOUT, host):
if device[ATTR_IPADDR] == host:
return device
return None
@@ -7,19 +7,28 @@ from typing import Any
from flux_led.aiodevice import AIOWifiLedBulb

from homeassistant import config_entries
from homeassistant.const import CONF_NAME
from homeassistant.const import (
ATTR_CONNECTIONS,
ATTR_HW_VERSION,
ATTR_IDENTIFIERS,
ATTR_MANUFACTURER,
ATTR_MODEL,
ATTR_NAME,
ATTR_SW_VERSION,
CONF_NAME,
)
from homeassistant.core import callback
from homeassistant.helpers import device_registry as dr
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.entity import DeviceInfo, Entity
from homeassistant.helpers.update_coordinator import CoordinatorEntity

from .const import CONF_MINOR_VERSION, CONF_MODEL, SIGNAL_STATE_UPDATED
from .const import CONF_MINOR_VERSION, CONF_MODEL, DOMAIN, SIGNAL_STATE_UPDATED
from .coordinator import FluxLedUpdateCoordinator


def _async_device_info(
unique_id: str, device: AIOWifiLedBulb, entry: config_entries.ConfigEntry
device: AIOWifiLedBulb, entry: config_entries.ConfigEntry
) -> DeviceInfo:
version_num = device.version_num
if minor_version := entry.data.get(CONF_MINOR_VERSION):
@@ -27,14 +36,18 @@ def _async_device_info(
sw_version_str = f"{sw_version:0.2f}"
else:
sw_version_str = str(device.version_num)
return DeviceInfo(
connections={(dr.CONNECTION_NETWORK_MAC, unique_id)},
manufacturer="Zengge",
model=device.model,
name=entry.data[CONF_NAME],
sw_version=sw_version_str,
hw_version=entry.data.get(CONF_MODEL),
)
device_info: DeviceInfo = {
ATTR_IDENTIFIERS: {(DOMAIN, entry.entry_id)},
ATTR_MANUFACTURER: "Zengge",
ATTR_MODEL: device.model,
ATTR_NAME: entry.data[CONF_NAME],
ATTR_SW_VERSION: sw_version_str,
}
if hw_model := entry.data.get(CONF_MODEL):
device_info[ATTR_HW_VERSION] = hw_model
if entry.unique_id:
device_info[ATTR_CONNECTIONS] = {(dr.CONNECTION_NETWORK_MAC, entry.unique_id)}
return device_info


class FluxBaseEntity(Entity):
@@ -50,10 +63,7 @@ class FluxBaseEntity(Entity):
"""Initialize the light."""
self._device: AIOWifiLedBulb = device
self.entry = entry
if entry.unique_id:
self._attr_device_info = _async_device_info(
entry.unique_id, self._device, entry
)
self._attr_device_info = _async_device_info(self._device, entry)


class FluxEntity(CoordinatorEntity):
@@ -64,7 +74,7 @@ class FluxEntity(CoordinatorEntity):
def __init__(
self,
coordinator: FluxLedUpdateCoordinator,
unique_id: str | None,
base_unique_id: str,
name: str,
key: str | None,
) -> None:
@@ -74,13 +84,10 @@ class FluxEntity(CoordinatorEntity):
self._responding = True
self._attr_name = name
if key:
self._attr_unique_id = f"{unique_id}_{key}"
self._attr_unique_id = f"{base_unique_id}_{key}"
else:
self._attr_unique_id = unique_id
if unique_id:
self._attr_device_info = _async_device_info(
unique_id, self._device, coordinator.entry
)
self._attr_unique_id = base_unique_id
self._attr_device_info = _async_device_info(self._device, coordinator.entry)

async def _async_ensure_device_on(self) -> None:
"""Turn the device on if it needs to be turned on before a command."""
@@ -177,7 +177,7 @@ async def async_setup_entry(
[
FluxLight(
coordinator,
entry.unique_id,
entry.unique_id or entry.entry_id,
entry.data[CONF_NAME],
list(custom_effect_colors),
options.get(CONF_CUSTOM_EFFECT_SPEED_PCT, DEFAULT_EFFECT_SPEED),
@@ -195,14 +195,14 @@ class FluxLight(FluxOnOffEntity, CoordinatorEntity, LightEntity):
def __init__(
self,
coordinator: FluxLedUpdateCoordinator,
unique_id: str | None,
base_unique_id: str,
name: str,
custom_effect_colors: list[tuple[int, int, int]],
custom_effect_speed_pct: int,
custom_effect_transition: str,
) -> None:
"""Initialize the light."""
super().__init__(coordinator, unique_id, name, None)
super().__init__(coordinator, base_unique_id, name, None)
self._attr_min_mireds = color_temperature_kelvin_to_mired(self._device.max_temp)
self._attr_max_mireds = color_temperature_kelvin_to_mired(self._device.min_temp)
self._attr_supported_color_modes = _hass_color_modes(self._device)
@@ -51,26 +51,28 @@ async def async_setup_entry(
| FluxMusicSegmentsNumber
] = []
name = entry.data[CONF_NAME]
unique_id = entry.unique_id
base_unique_id = entry.unique_id or entry.entry_id

if device.pixels_per_segment is not None:
entities.append(
FluxPixelsPerSegmentNumber(
coordinator,
unique_id,
base_unique_id,
f"{name} Pixels Per Segment",
"pixels_per_segment",
)
)
if device.segments is not None:
entities.append(
FluxSegmentsNumber(coordinator, unique_id, f"{name} Segments", "segments")
FluxSegmentsNumber(
coordinator, base_unique_id, f"{name} Segments", "segments"
)
)
if device.music_pixels_per_segment is not None:
entities.append(
FluxMusicPixelsPerSegmentNumber(
coordinator,
unique_id,
base_unique_id,
f"{name} Music Pixels Per Segment",
"music_pixels_per_segment",
)
@@ -78,12 +80,12 @@ async def async_setup_entry(
if device.music_segments is not None:
entities.append(
FluxMusicSegmentsNumber(
coordinator, unique_id, f"{name} Music Segments", "music_segments"
coordinator, base_unique_id, f"{name} Music Segments", "music_segments"
)
)
if device.effect_list and device.effect_list != [EFFECT_RANDOM]:
entities.append(
FluxSpeedNumber(coordinator, unique_id, f"{name} Effect Speed", None)
FluxSpeedNumber(coordinator, base_unique_id, f"{name} Effect Speed", None)
)

if entities:
@@ -131,12 +133,12 @@ class FluxConfigNumber(FluxEntity, CoordinatorEntity, NumberEntity):
def __init__(
self,
coordinator: FluxLedUpdateCoordinator,
unique_id: str | None,
base_unique_id: str,
name: str,
key: str | None,
) -> None:
"""Initialize the flux number."""
super().__init__(coordinator, unique_id, name, key)
super().__init__(coordinator, base_unique_id, name, key)
self._debouncer: Debouncer | None = None
self._pending_value: int | None = None
@@ -54,28 +54,28 @@ async def async_setup_entry(
| FluxWhiteChannelSelect
] = []
name = entry.data[CONF_NAME]
unique_id = entry.unique_id
base_unique_id = entry.unique_id or entry.entry_id

if device.device_type == DeviceType.Switch:
entities.append(FluxPowerStateSelect(coordinator.device, entry))
if device.operating_modes:
entities.append(
FluxOperatingModesSelect(
coordinator, unique_id, f"{name} Operating Mode", "operating_mode"
coordinator, base_unique_id, f"{name} Operating Mode", "operating_mode"
)
)
if device.wirings:
entities.append(
FluxWiringsSelect(coordinator, unique_id, f"{name} Wiring", "wiring")
FluxWiringsSelect(coordinator, base_unique_id, f"{name} Wiring", "wiring")
)
if device.ic_types:
entities.append(
FluxICTypeSelect(coordinator, unique_id, f"{name} IC Type", "ic_type")
FluxICTypeSelect(coordinator, base_unique_id, f"{name} IC Type", "ic_type")
)
if device.remote_config:
entities.append(
FluxRemoteConfigSelect(
coordinator, unique_id, f"{name} Remote Config", "remote_config"
coordinator, base_unique_id, f"{name} Remote Config", "remote_config"
)
)
if FLUX_COLOR_MODE_RGBW in device.color_modes:
@@ -111,8 +111,8 @@ class FluxPowerStateSelect(FluxConfigAtStartSelect, SelectEntity):
"""Initialize the power state select."""
super().__init__(device, entry)
self._attr_name = f"{entry.data[CONF_NAME]} Power Restored"
if entry.unique_id:
self._attr_unique_id = f"{entry.unique_id}_power_restored"
base_unique_id = entry.unique_id or entry.entry_id
self._attr_unique_id = f"{base_unique_id}_power_restored"
self._async_set_current_option_from_device()

@callback
@@ -201,12 +201,12 @@ class FluxRemoteConfigSelect(FluxConfigSelect):
def __init__(
self,
coordinator: FluxLedUpdateCoordinator,
unique_id: str | None,
base_unique_id: str,
name: str,
key: str,
) -> None:
"""Initialize the remote config type select."""
super().__init__(coordinator, unique_id, name, key)
super().__init__(coordinator, base_unique_id, name, key)
assert self._device.remote_config is not None
self._name_to_state = {
_human_readable_option(option.name): option for option in RemoteConfig
@@ -238,8 +238,8 @@ class FluxWhiteChannelSelect(FluxConfigAtStartSelect):
"""Initialize the white channel select."""
super().__init__(device, entry)
self._attr_name = f"{entry.data[CONF_NAME]} White Channel"
if entry.unique_id:
self._attr_unique_id = f"{entry.unique_id}_white_channel"
base_unique_id = entry.unique_id or entry.entry_id
self._attr_unique_id = f"{base_unique_id}_white_channel"

@property
def current_option(self) -> str | None:
@@ -25,7 +25,7 @@ async def async_setup_entry(
[
FluxPairedRemotes(
coordinator,
entry.unique_id,
entry.unique_id or entry.entry_id,
f"{entry.data[CONF_NAME]} Paired Remotes",
"paired_remotes",
)
@@ -34,18 +34,18 @@ async def async_setup_entry(
"""Set up the Flux lights."""
coordinator: FluxLedUpdateCoordinator = hass.data[DOMAIN][entry.entry_id]
entities: list[FluxSwitch | FluxRemoteAccessSwitch | FluxMusicSwitch] = []
unique_id = entry.unique_id
base_unique_id = entry.unique_id or entry.entry_id
name = entry.data[CONF_NAME]

if coordinator.device.device_type == DeviceType.Switch:
entities.append(FluxSwitch(coordinator, unique_id, name, None))
entities.append(FluxSwitch(coordinator, base_unique_id, name, None))

if entry.data.get(CONF_REMOTE_ACCESS_HOST):
entities.append(FluxRemoteAccessSwitch(coordinator.device, entry))

if coordinator.device.microphone:
entities.append(
FluxMusicSwitch(coordinator, unique_id, f"{name} Music", "music")
FluxMusicSwitch(coordinator, base_unique_id, f"{name} Music", "music")
)

if entities:
@@ -74,8 +74,8 @@ class FluxRemoteAccessSwitch(FluxBaseEntity, SwitchEntity):
"""Initialize the light."""
super().__init__(device, entry)
self._attr_name = f"{entry.data[CONF_NAME]} Remote Access"
if entry.unique_id:
self._attr_unique_id = f"{entry.unique_id}_remote_access"
base_unique_id = entry.unique_id or entry.entry_id
self._attr_unique_id = f"{base_unique_id}_remote_access"

async def async_turn_on(self, **kwargs: Any) -> None:
"""Turn the remote access on."""
@@ -14,6 +14,7 @@ from fritzconnection.core.exceptions import (
FritzActionError,
FritzActionFailedError,
FritzConnectionException,
FritzInternalError,
FritzLookUpError,
FritzSecurityError,
FritzServiceError,
@@ -106,7 +107,7 @@ class Device:
ip_address: str
name: str
ssid: str | None
wan_access: bool = True
wan_access: bool | None = None


class Interface(TypedDict):
@@ -276,6 +277,14 @@ class FritzBoxTools(update_coordinator.DataUpdateCoordinator):
)
return bool(version), version

def _get_wan_access(self, ip_address: str) -> bool | None:
"""Get WAN access rule for given IP address."""
return not self.connection.call_action(
"X_AVM-DE_HostFilter:1",
"GetWANAccessByIP",
NewIPv4Address=ip_address,
).get("NewDisallow")

async def async_scan_devices(self, now: datetime | None = None) -> None:
"""Wrap up FritzboxTools class scan."""
await self.hass.async_add_executor_job(self.scan_devices, now)
@@ -314,7 +323,7 @@ class FritzBoxTools(update_coordinator.DataUpdateCoordinator):
connection_type="",
ip_address=host["ip"],
ssid=None,
wan_access=False,
wan_access=None,
)

mesh_intf = {}
@@ -342,32 +351,32 @@ class FritzBoxTools(update_coordinator.DataUpdateCoordinator):

for interf in node["node_interfaces"]:
dev_mac = interf["mac_address"]

if dev_mac not in hosts:
continue

dev_info: Device = hosts[dev_mac]

for link in interf["node_links"]:
intf = mesh_intf.get(link["node_interface_1_uid"])
if (
intf is not None
and link["state"] == "CONNECTED"
and dev_mac in hosts
):
dev_info: Device = hosts[dev_mac]
if intf["op_mode"] != "AP_GUEST":
dev_info.wan_access = not self.connection.call_action(
"X_AVM-DE_HostFilter:1",
"GetWANAccessByIP",
NewIPv4Address=dev_info.ip_address,
).get("NewDisallow")
if intf is not None:
if intf["op_mode"] != "AP_GUEST" and dev_info.ip_address:
dev_info.wan_access = self._get_wan_access(
dev_info.ip_address
)

dev_info.connected_to = intf["device"]
dev_info.connection_type = intf["type"]
dev_info.ssid = intf.get("ssid")
_LOGGER.debug("Client dev_info: %s", dev_info)

if dev_mac in self._devices:
self._devices[dev_mac].update(dev_info, consider_home)
else:
device = FritzDevice(dev_mac, dev_info.name)
device.update(dev_info, consider_home)
self._devices[dev_mac] = device
new_device = True
if dev_mac in self._devices:
self._devices[dev_mac].update(dev_info, consider_home)
else:
device = FritzDevice(dev_mac, dev_info.name)
device.update(dev_info, consider_home)
self._devices[dev_mac] = device
new_device = True

dispatcher_send(self.hass, self.signal_device_update)
if new_device:
@@ -523,6 +532,7 @@ class AvmWrapper(FritzBoxTools):
except (
FritzActionError,
FritzActionFailedError,
FritzInternalError,
FritzServiceError,
FritzLookUpError,
):
@@ -758,7 +768,7 @@ class FritzDevice:
self._mac = mac
self._name = name
self._ssid: str | None = None
self._wan_access = False
self._wan_access: bool | None = False

def update(self, dev_info: Device, consider_home: float) -> None:
"""Update device info."""
@@ -826,7 +836,7 @@ class FritzDevice:
return self._ssid

@property
def wan_access(self) -> bool:
def wan_access(self) -> bool | None:
"""Return device wan access."""
return self._wan_access
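The new `_get_wan_access` helper above wraps a single TR-064 call and inverts the `NewDisallow` flag. A standalone sketch of the same call shape, assuming a fritzconnection-style `connection` object as used in the hunk:

```python
def get_wan_access(connection, ip_address: str) -> bool | None:
    """Mirror the helper above: one TR-064 call, inverted 'NewDisallow' flag."""
    # connection.call_action returns a dict of result arguments for the service call.
    return not connection.call_action(
        "X_AVM-DE_HostFilter:1",
        "GetWANAccessByIP",
        NewIPv4Address=ip_address,
    ).get("NewDisallow")
```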
@@ -477,10 +477,17 @@ class FritzBoxProfileSwitch(FritzDeviceBase, SwitchEntity):
self._attr_entity_category = EntityCategory.CONFIG

@property
def is_on(self) -> bool:
def is_on(self) -> bool | None:
"""Switch status."""
return self._avm_wrapper.devices[self._mac].wan_access

@property
def available(self) -> bool:
"""Return availability of the switch."""
if self._avm_wrapper.devices[self._mac].wan_access is None:
return False
return super().available

@property
def device_info(self) -> DeviceInfo:
"""Return the device information."""
@@ -76,7 +76,7 @@ async def async_setup_entry(
for description in NUMBERS:
try:
current_value = await description.getter(inverter)
except InverterError:
except (InverterError, ValueError):
# Inverter model does not support this setting
_LOGGER.debug("Could not read inverter setting %s", description.key)
continue
@@ -42,7 +42,7 @@ async def async_setup_entry(
# read current operating mode from the inverter
try:
active_mode = await inverter.get_operation_mode()
except InverterError:
except (InverterError, ValueError):
# Inverter model does not support this setting
_LOGGER.debug("Could not read inverter operation mode")
else:
@@ -294,6 +294,15 @@ async def async_devices_reachable(hass, data: RequestData, payload):
}


@HANDLERS.register("action.devices.PROXY_SELECTED")
async def async_devices_proxy_selected(hass, data: RequestData, payload):
"""Handle action.devices.PROXY_SELECTED request.

When selected for local SDK.
"""
return {}


def turned_off_response(message):
"""Return a device turned off response."""
return {
@@ -190,7 +190,7 @@ class HumidifierDehumidifier(HomeAccessory):
)
self.char_current_humidity.set_value(current_humidity)
except ValueError as ex:
_LOGGER.error(
_LOGGER.debug(
"%s: Unable to update from linked humidity sensor %s: %s",
self.entity_id,
self.linked_humidity_sensor,
@@ -2,6 +2,7 @@
from __future__ import annotations

import asyncio
import logging
from typing import Any

import aiohomekit
@@ -26,6 +27,8 @@ from .connection import HKDevice, valid_serial_number
from .const import CONTROLLER, ENTITY_MAP, KNOWN_DEVICES, TRIGGERS
from .storage import EntityMapStorage

_LOGGER = logging.getLogger(__name__)


def escape_characteristic_name(char_name):
"""Escape any dash or dots in a characteristics name."""
@@ -248,4 +251,21 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
async def async_remove_entry(hass: HomeAssistant, entry: ConfigEntry) -> None:
"""Cleanup caches before removing config entry."""
hkid = entry.data["AccessoryPairingID"]

# Remove cached type data from .storage/homekit_controller-entity-map
hass.data[ENTITY_MAP].async_delete_map(hkid)

# Remove the pairing on the device, making the device discoverable again.
# Don't reuse any objects in hass.data as they are already unloaded
async_zeroconf_instance = await zeroconf.async_get_async_instance(hass)
controller = aiohomekit.Controller(async_zeroconf_instance=async_zeroconf_instance)
controller.load_pairing(hkid, dict(entry.data))
try:
await controller.remove_pairing(hkid)
except aiohomekit.AccessoryDisconnectedError:
_LOGGER.warning(
"Accessory %s was removed from HomeAssistant but was not reachable "
"to properly unpair. It may need resetting before you can use it with "
"HomeKit again",
entry.title,
)
@@ -44,21 +44,21 @@ class HomeKitSensorEntityDescription(SensorEntityDescription):
SIMPLE_SENSOR: dict[str, HomeKitSensorEntityDescription] = {
CharacteristicsTypes.Vendor.CONNECTSENSE_ENERGY_WATT: HomeKitSensorEntityDescription(
key=CharacteristicsTypes.Vendor.CONNECTSENSE_ENERGY_WATT,
name="Real Time Energy",
name="Power",
device_class=SensorDeviceClass.POWER,
state_class=SensorStateClass.MEASUREMENT,
native_unit_of_measurement=POWER_WATT,
),
CharacteristicsTypes.Vendor.CONNECTSENSE_ENERGY_AMPS: HomeKitSensorEntityDescription(
key=CharacteristicsTypes.Vendor.CONNECTSENSE_ENERGY_AMPS,
name="Real Time Current",
name="Current",
device_class=SensorDeviceClass.CURRENT,
state_class=SensorStateClass.MEASUREMENT,
native_unit_of_measurement=ELECTRIC_CURRENT_AMPERE,
),
CharacteristicsTypes.Vendor.CONNECTSENSE_ENERGY_AMPS_20: HomeKitSensorEntityDescription(
key=CharacteristicsTypes.Vendor.CONNECTSENSE_ENERGY_AMPS_20,
name="Real Time Current",
name="Current",
device_class=SensorDeviceClass.CURRENT,
state_class=SensorStateClass.MEASUREMENT,
native_unit_of_measurement=ELECTRIC_CURRENT_AMPERE,
@@ -72,7 +72,7 @@ SIMPLE_SENSOR: dict[str, HomeKitSensorEntityDescription] = {
),
CharacteristicsTypes.Vendor.EVE_ENERGY_WATT: HomeKitSensorEntityDescription(
key=CharacteristicsTypes.Vendor.EVE_ENERGY_WATT,
name="Real Time Energy",
name="Power",
device_class=SensorDeviceClass.POWER,
state_class=SensorStateClass.MEASUREMENT,
native_unit_of_measurement=POWER_WATT,
@@ -100,14 +100,14 @@ SIMPLE_SENSOR: dict[str, HomeKitSensorEntityDescription] = {
),
CharacteristicsTypes.Vendor.KOOGEEK_REALTIME_ENERGY: HomeKitSensorEntityDescription(
key=CharacteristicsTypes.Vendor.KOOGEEK_REALTIME_ENERGY,
name="Real Time Energy",
name="Power",
device_class=SensorDeviceClass.POWER,
state_class=SensorStateClass.MEASUREMENT,
native_unit_of_measurement=POWER_WATT,
),
CharacteristicsTypes.Vendor.KOOGEEK_REALTIME_ENERGY_2: HomeKitSensorEntityDescription(
key=CharacteristicsTypes.Vendor.KOOGEEK_REALTIME_ENERGY_2,
name="Real Time Energy",
name="Power",
device_class=SensorDeviceClass.POWER,
state_class=SensorStateClass.MEASUREMENT,
native_unit_of_measurement=POWER_WATT,
@@ -121,7 +121,7 @@ SIMPLE_SENSOR: dict[str, HomeKitSensorEntityDescription] = {
),
CharacteristicsTypes.Vendor.VOCOLINC_OUTLET_ENERGY: HomeKitSensorEntityDescription(
key=CharacteristicsTypes.Vendor.VOCOLINC_OUTLET_ENERGY,
name="Real Time Energy",
name="Power",
device_class=SensorDeviceClass.POWER,
state_class=SensorStateClass.MEASUREMENT,
native_unit_of_measurement=POWER_WATT,
@@ -158,6 +158,8 @@ class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
existing_entry = await self.async_set_unique_id(isy_mac)
if not existing_entry:
return
if existing_entry.source == config_entries.SOURCE_IGNORE:
raise data_entry_flow.AbortFlow("already_configured")
parsed_url = urlparse(existing_entry.data[CONF_HOST])
if parsed_url.hostname != ip_address:
new_netloc = ip_address
@@ -2,10 +2,12 @@
from __future__ import annotations

from collections.abc import Callable
import logging

from xknx import XKNX
from xknx.devices import DateTime, ExposeSensor
from xknx.dpt import DPTNumeric
from xknx.dpt import DPTNumeric, DPTString
from xknx.exceptions import ConversionError
from xknx.remote_value import RemoteValueSensor

from homeassistant.const import (
@@ -22,6 +24,8 @@ from homeassistant.helpers.typing import ConfigType, StateType
from .const import KNX_ADDRESS
from .schema import ExposeSchema

_LOGGER = logging.getLogger(__name__)


@callback
def create_knx_exposure(
@@ -101,7 +105,10 @@ class KNXExposeSensor:
"""Initialize state of the exposure."""
init_state = self.hass.states.get(self.entity_id)
state_value = self._get_expose_value(init_state)
self.device.sensor_value.value = state_value
try:
self.device.sensor_value.value = state_value
except ConversionError:
_LOGGER.exception("Error during sending of expose sensor value")

@callback
def shutdown(self) -> None:
@@ -132,6 +139,13 @@ class KNXExposeSensor:
and issubclass(self.device.sensor_value.dpt_class, DPTNumeric)
):
return float(value)
if (
value is not None
and isinstance(self.device.sensor_value, RemoteValueSensor)
and issubclass(self.device.sensor_value.dpt_class, DPTString)
):
# DPT 16.000 only allows up to 14 Bytes
return str(value)[:14]
return value

async def _async_entity_changed(self, event: Event) -> None:
@@ -148,9 +162,10 @@ class KNXExposeSensor:

async def _async_set_knx_value(self, value: StateType) -> None:
"""Set new value on xknx ExposeSensor."""
if value is None:
return
await self.device.set(value)
try:
await self.device.set(value)
except ConversionError:
_LOGGER.exception("Error during sending of expose sensor value")


class KNXExposeTime:
@@ -4,7 +4,7 @@
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/knx",
"requirements": [
"xknx==0.19.0"
"xknx==0.19.1"
],
"codeowners": [
"@Julius2342",
@@ -4,6 +4,7 @@ from __future__ import annotations

from typing import Any

from google_nest_sdm import diagnostics
from google_nest_sdm.device import Device
from google_nest_sdm.device_traits import InfoTrait
from google_nest_sdm.exceptions import ApiException
@@ -30,22 +31,14 @@ async def async_get_config_entry_diagnostics(
return {"error": str(err)}

return {
**diagnostics.get_diagnostics(),
"devices": [
get_device_data(device) for device in device_manager.devices.values()
]
],
}


def get_device_data(device: Device) -> dict[str, Any]:
"""Return diagnostic information about a device."""
# Return a simplified view of the API object, but skipping any id fields or
# traits that include unique identifiers or personally identifiable information.
# See https://developers.google.com/nest/device-access/traits for API details
return {
"type": device.type,
"traits": {
trait: data
for trait, data in device.raw_data.get("traits", {}).items()
if trait not in REDACT_DEVICE_TRAITS
},
}
# Library performs its own redaction for device data
return device.get_diagnostics()
@@ -4,7 +4,7 @@
"config_flow": true,
"dependencies": ["ffmpeg", "http", "media_source"],
"documentation": "https://www.home-assistant.io/integrations/nest",
"requirements": ["python-nest==4.1.0", "google-nest-sdm==1.5.1"],
"requirements": ["python-nest==4.1.0", "google-nest-sdm==1.6.0"],
"codeowners": ["@allenporter"],
"quality_scale": "platinum",
"dhcp": [
@@ -2,6 +2,10 @@
"domain": "oncue",
"name": "Oncue by Kohler",
"config_flow": true,
"dhcp": [{
"hostname": "kohlergen*",
"macaddress": "00146F*"
}],
"documentation": "https://www.home-assistant.io/integrations/oncue",
"requirements": ["aiooncue==0.3.2"],
"codeowners": ["@bdraco"],
homeassistant/components/onewire/diagnostics.py (new file)
@@ -0,0 +1,33 @@
"""Diagnostics support for 1-Wire."""
from __future__ import annotations

from dataclasses import asdict
from typing import Any

from homeassistant.components.diagnostics import async_redact_data
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_HOST
from homeassistant.core import HomeAssistant

from .const import DOMAIN
from .onewirehub import OneWireHub

TO_REDACT = {CONF_HOST}


async def async_get_config_entry_diagnostics(
hass: HomeAssistant, entry: ConfigEntry
) -> dict[str, Any]:
"""Return diagnostics for a config entry."""
onewirehub: OneWireHub = hass.data[DOMAIN][entry.entry_id]

return {
"entry": {
"title": entry.title,
"data": async_redact_data(entry.data, TO_REDACT),
"options": {**entry.options},
},
"devices": [asdict(device_details) for device_details in onewirehub.devices]
if onewirehub.devices
else [],
}
@@ -48,7 +48,8 @@ class Awning(OverkizGenericCover):

None is unknown, 0 is closed, 100 is fully open.
"""
if current_position := self.executor.select_state(OverkizState.CORE_DEPLOYMENT):
current_position = self.executor.select_state(OverkizState.CORE_DEPLOYMENT)
if current_position is not None:
return cast(int, current_position)

return None
@@ -51,9 +51,10 @@ class OverkizGenericCover(OverkizEntity, CoverEntity):

None is unknown, 0 is closed, 100 is fully open.
"""
if position := self.executor.select_state(
position = self.executor.select_state(
OverkizState.CORE_SLATS_ORIENTATION, OverkizState.CORE_SLATE_ORIENTATION
):
)
if position is not None:
return 100 - cast(int, position)

return None
@@ -79,8 +79,9 @@ class OverkizLight(OverkizEntity, LightEntity):
@property
def brightness(self) -> int | None:
"""Return the brightness of this light (0-255)."""
if brightness := self.executor.select_state(OverkizState.CORE_LIGHT_INTENSITY):
return round(cast(int, brightness) * 255 / 100)
value = self.executor.select_state(OverkizState.CORE_LIGHT_INTENSITY)
if value is not None:
return round(cast(int, value) * 255 / 100)

return None
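The three Overkiz hunks above replace walrus-operator truthiness checks with explicit `is not None` tests, so that a legitimate 0 (closed cover, zero brightness) is not dropped. A tiny illustration of the difference:

```python
def brightness_from_state(raw: int | None) -> int | None:
    """0 is a valid reading, so test for None explicitly instead of truthiness."""
    # Buggy variant: `if raw := get_state():` would treat a legitimate 0 as "no value".
    if raw is not None:
        return round(raw * 255 / 100)
    return None
```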
homeassistant/components/p1_monitor/diagnostics.py (new file)
@@ -0,0 +1,35 @@
"""Diagnostics support for P1 Monitor."""
from __future__ import annotations

from typing import Any

from homeassistant.components.diagnostics import async_redact_data
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_HOST
from homeassistant.core import HomeAssistant

from . import P1MonitorDataUpdateCoordinator
from .const import DOMAIN, SERVICE_PHASES, SERVICE_SETTINGS, SERVICE_SMARTMETER

TO_REDACT = {
CONF_HOST,
}


async def async_get_config_entry_diagnostics(
hass: HomeAssistant, entry: ConfigEntry
) -> dict[str, Any]:
"""Return diagnostics for a config entry."""
coordinator: P1MonitorDataUpdateCoordinator = hass.data[DOMAIN][entry.entry_id]

return {
"entry": {
"title": entry.title,
"data": async_redact_data(entry.data, TO_REDACT),
},
"data": {
"smartmeter": coordinator.data[SERVICE_SMARTMETER].__dict__,
"phases": coordinator.data[SERVICE_PHASES].__dict__,
"settings": coordinator.data[SERVICE_SETTINGS].__dict__,
},
}
@@ -5,6 +5,7 @@ import logging
import requests
from tesla_powerwall import (
AccessDeniedError,
APIError,
MissingAttributeError,
Powerwall,
PowerwallUnreachableError,
@@ -131,7 +132,28 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
power_wall = Powerwall(ip_address, http_session=http_session)
runtime_data[POWERWALL_OBJECT] = power_wall
runtime_data[POWERWALL_HTTP_SESSION] = http_session
power_wall.login("", password)
power_wall.login(password)

async def _async_login_and_retry_update_data():
"""Retry the update after a failed login."""
nonlocal login_failed_count
# If the session expired, recreate, relogin, and try again
_LOGGER.debug("Retrying login and updating data")
try:
await hass.async_add_executor_job(_recreate_powerwall_login)
data = await _async_update_powerwall_data(hass, entry, power_wall)
except AccessDeniedError as err:
login_failed_count += 1
if login_failed_count == MAX_LOGIN_FAILURES:
raise ConfigEntryAuthFailed from err
raise UpdateFailed(
f"Login attempt {login_failed_count}/{MAX_LOGIN_FAILURES} failed, will retry: {err}"
) from err
except APIError as err:
raise UpdateFailed(f"Updated failed due to {err}, will retry") from err
else:
login_failed_count = 0
return data

async def async_update_data():
"""Fetch data from API endpoint."""
@@ -147,18 +169,9 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
except AccessDeniedError as err:
if password is None:
raise ConfigEntryAuthFailed from err

# If the session expired, recreate, relogin, and try again
try:
await hass.async_add_executor_job(_recreate_powerwall_login)
return await _async_update_powerwall_data(hass, entry, power_wall)
except AccessDeniedError as ex:
login_failed_count += 1
if login_failed_count == MAX_LOGIN_FAILURES:
raise ConfigEntryAuthFailed from ex
raise UpdateFailed(
f"Login attempt {login_failed_count}/{MAX_LOGIN_FAILURES} failed, will retry"
) from ex
return await _async_login_and_retry_update_data()
except APIError as err:
raise UpdateFailed(f"Updated failed due to {err}, will retry") from err
else:
login_failed_count = 0
return data
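The Powerwall change above moves the re-login retry into its own coroutine and bounds consecutive failures. A structural sketch under stated assumptions; `PermissionError` and `RuntimeError` stand in for the tesla_powerwall and Home Assistant exception types used in the diff:

```python
async def fetch_with_relogin(fetch, relogin, state, max_failures=5):
    """Re-login once, retry the fetch, and count consecutive auth failures."""
    try:
        await relogin()
        data = await fetch()
    except PermissionError as err:  # stand-in for tesla_powerwall.AccessDeniedError
        state["failed"] += 1
        if state["failed"] == max_failures:
            raise  # the integration raises ConfigEntryAuthFailed at this point
        raise RuntimeError(
            f"Login attempt {state['failed']}/{max_failures} failed, will retry: {err}"
        ) from err
    state["failed"] = 0  # a successful update resets the counter
    return data
```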
@@ -13,7 +13,11 @@
{
"hostname": "roomba-*",
"macaddress": "80A589*"
}
},
{
"hostname": "roomba-*",
"macaddress": "DCF505*"
}
],
"iot_class": "local_push"
}
homeassistant/components/rtsp_to_webrtc/diagnostics.py (new file)
@@ -0,0 +1,17 @@
"""Diagnostics support for Nest."""

from __future__ import annotations

from typing import Any

from rtsp_to_webrtc import client

from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant


async def async_get_config_entry_diagnostics(
hass: HomeAssistant, config_entry: ConfigEntry
) -> dict[str, Any]:
"""Return diagnostics for a config entry."""
return dict(client.get_diagnostics())
@@ -5,6 +5,7 @@ from abc import ABC, abstractmethod
import contextlib
from typing import Any

from requests.exceptions import Timeout as RequestsTimeout
from samsungctl import Remote
from samsungctl.exceptions import AccessDenied, ConnectionClosed, UnhandledResponse
from samsungtvws import SamsungTVWS
@@ -321,7 +322,7 @@ class SamsungTVWSBridge(SamsungTVBridge):
def device_info(self) -> dict[str, Any] | None:
"""Try to gather infos of this TV."""
if remote := self._get_remote(avoid_open=True):
with contextlib.suppress(HttpApiError):
with contextlib.suppress(HttpApiError, RequestsTimeout):
device_info: dict[str, Any] = remote.rest_device_info()
return device_info
@@ -31,50 +31,30 @@ async def async_setup_entry(
) -> None:
"""Set up SenseME lights."""
device = hass.data[DOMAIN][entry.entry_id]
if device.has_light:
async_add_entities([HASensemeLight(device)])
if not device.has_light:
return
if device.is_light:
async_add_entities([HASensemeStandaloneLight(device)])
else:
async_add_entities([HASensemeFanLight(device)])


class HASensemeLight(SensemeEntity, LightEntity):
"""Representation of a Big Ass Fans SenseME light."""

def __init__(self, device: SensemeDevice) -> None:
def __init__(self, device: SensemeDevice, name: str) -> None:
"""Initialize the entity."""
self._device = device
if device.is_light:
name = device.name # The device itself is a light
else:
name = f"{device.name} Light" # A fan light
super().__init__(device, name)
if device.is_light:
self._attr_supported_color_modes = {COLOR_MODE_COLOR_TEMP}
self._attr_color_mode = COLOR_MODE_COLOR_TEMP
else:
self._attr_supported_color_modes = {COLOR_MODE_BRIGHTNESS}
self._attr_color_mode = COLOR_MODE_BRIGHTNESS
self._attr_unique_id = f"{self._device.uuid}-LIGHT" # for legacy compat
self._attr_min_mireds = color_temperature_kelvin_to_mired(
self._device.light_color_temp_max
)
self._attr_max_mireds = color_temperature_kelvin_to_mired(
self._device.light_color_temp_min
)
self._attr_unique_id = f"{device.uuid}-LIGHT" # for legacy compat

@callback
def _async_update_attrs(self) -> None:
"""Update attrs from device."""
self._attr_is_on = self._device.light_on
self._attr_brightness = int(min(255, self._device.light_brightness * 16))
self._attr_color_temp = color_temperature_kelvin_to_mired(
self._device.light_color_temp
)

async def async_turn_on(self, **kwargs: Any) -> None:
"""Turn on the light."""
if (color_temp := kwargs.get(ATTR_COLOR_TEMP)) is not None:
self._device.light_color_temp = color_temperature_mired_to_kelvin(
color_temp
)
if (brightness := kwargs.get(ATTR_BRIGHTNESS)) is not None:
# set the brightness, which will also turn on/off light
if brightness == 255:
@@ -86,3 +66,45 @@ class HASensemeLight(SensemeEntity, LightEntity):
async def async_turn_off(self, **kwargs: Any) -> None:
"""Turn off the light."""
self._device.light_on = False


class HASensemeFanLight(HASensemeLight):
"""Representation of a Big Ass Fans SenseME light on a fan."""

def __init__(self, device: SensemeDevice) -> None:
"""Init a fan light."""
super().__init__(device, device.name)
self._attr_supported_color_modes = {COLOR_MODE_BRIGHTNESS}
self._attr_color_mode = COLOR_MODE_BRIGHTNESS


class HASensemeStandaloneLight(HASensemeLight):
"""Representation of a Big Ass Fans SenseME light."""

def __init__(self, device: SensemeDevice) -> None:
"""Init a standalone light."""
super().__init__(device, f"{device.name} Light")
self._attr_supported_color_modes = {COLOR_MODE_COLOR_TEMP}
self._attr_color_mode = COLOR_MODE_COLOR_TEMP
self._attr_min_mireds = color_temperature_kelvin_to_mired(
device.light_color_temp_max
)
self._attr_max_mireds = color_temperature_kelvin_to_mired(
device.light_color_temp_min
)

@callback
def _async_update_attrs(self) -> None:
"""Update attrs from device."""
super()._async_update_attrs()
self._attr_color_temp = color_temperature_kelvin_to_mired(
self._device.light_color_temp
)

async def async_turn_on(self, **kwargs: Any) -> None:
"""Turn on the light."""
if (color_temp := kwargs.get(ATTR_COLOR_TEMP)) is not None:
self._device.light_color_temp = color_temperature_mired_to_kelvin(
color_temp
)
await super().async_turn_on(**kwargs)
@@ -223,7 +223,7 @@ SENSORS: Final = {
device_class=SensorDeviceClass.TEMPERATURE,
state_class=SensorStateClass.MEASUREMENT,
available=lambda block: cast(int, block.extTemp) != 999
and not block.sensorError,
and not getattr(block, "sensorError", False),
),
("sensor", "humidity"): BlockSensorDescription(
key="sensor|humidity",
@@ -233,7 +233,7 @@ SENSORS: Final = {
device_class=SensorDeviceClass.HUMIDITY,
state_class=SensorStateClass.MEASUREMENT,
available=lambda block: cast(int, block.humidity) != 999
and not block.sensorError,
and not getattr(block, "sensorError", False),
),
("sensor", "luminosity"): BlockSensorDescription(
key="sensor|luminosity",

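Both availability lambdas now use getattr with a False default, so a block that simply lacks a sensorError attribute no longer raises AttributeError. A tiny standalone illustration of the pattern (the Block class here is invented):

```python
class Block:
    """Stand-in block that exposes a temperature but no sensorError attribute."""

    extTemp = 21


block = Block()
# block.sensorError would raise AttributeError; getattr with a default does not.
available = block.extTemp != 999 and not getattr(block, "sensorError", False)
print(available)  # True
```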
@@ -125,15 +125,22 @@ def get_block_channel_name(device: BlockDevice, block: Block | None) -> str:
return f"{entity_name} channel {chr(int(block.channel)+base)}"


def is_block_momentary_input(settings: dict[str, Any], block: Block) -> bool:
def is_block_momentary_input(
settings: dict[str, Any], block: Block, include_detached: bool = False
) -> bool:
"""Return true if block input button settings is set to a momentary type."""
momentary_types = ["momentary", "momentary_on_release"]

if include_detached:
momentary_types.append("detached")

# Shelly Button type is fixed to momentary and no btn_type
if settings["device"]["type"] in SHBTN_MODELS:
return True

if settings.get("mode") == "roller":
button_type = settings["rollers"][0]["button_type"]
return button_type in ["momentary", "momentary_on_release"]
return button_type in momentary_types

button = settings.get("relays") or settings.get("lights") or settings.get("inputs")
if button is None:
@@ -148,7 +155,7 @@ def is_block_momentary_input(settings: dict[str, Any], block: Block) -> bool:
channel = min(int(block.channel or 0), len(button) - 1)
button_type = button[channel].get("btn_type")

return button_type in ["momentary", "momentary_on_release"]
return button_type in momentary_types


def get_device_uptime(uptime: float, last_uptime: datetime | None) -> datetime:
@@ -171,7 +178,7 @@ def get_block_input_triggers(
if "inputEvent" not in block.sensor_ids or "inputEventCnt" not in block.sensor_ids:
return []

if not is_block_momentary_input(device.settings, block):
if not is_block_momentary_input(device.settings, block, True):
return []

triggers = []

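The new include_detached flag defaults to False, so existing callers keep their behaviour while get_block_input_triggers opts in to also accept "detached" buttons. A self-contained sketch of that opt-in pattern (not the real helper, which also inspects the Shelly settings payload):

```python
def is_momentary(btn_type: str, include_detached: bool = False) -> bool:
    """Accept an extra button type only when the caller asks for it."""
    momentary_types = ["momentary", "momentary_on_release"]
    if include_detached:
        momentary_types.append("detached")
    return btn_type in momentary_types


print(is_momentary("detached"))        # False: existing callers are unchanged
print(is_momentary("detached", True))  # True: the trigger helper opts in
```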
40
homeassistant/components/simplisafe/diagnostics.py
Normal file
@@ -0,0 +1,40 @@
"""Diagnostics support for SimpliSafe."""
from __future__ import annotations

from typing import Any

from homeassistant.components.diagnostics import async_redact_data
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_ADDRESS
from homeassistant.core import HomeAssistant

from . import SimpliSafe
from .const import DOMAIN

CONF_SERIAL = "serial"
CONF_SYSTEM_ID = "system_id"
CONF_WIFI_SSID = "wifi_ssid"

TO_REDACT = {
CONF_ADDRESS,
CONF_SERIAL,
CONF_SYSTEM_ID,
CONF_WIFI_SSID,
}


async def async_get_config_entry_diagnostics(
hass: HomeAssistant, entry: ConfigEntry
) -> dict[str, Any]:
"""Return diagnostics for a config entry."""
simplisafe: SimpliSafe = hass.data[DOMAIN][entry.entry_id]

return async_redact_data(
{
"entry": {
"options": dict(entry.options),
},
"systems": [system.as_dict() for system in simplisafe.systems.values()],
},
TO_REDACT,
)
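async_redact_data walks nested mappings and replaces the values of any key listed in the redaction set, which is what the test added to tests/components/diagnostics/test_util.py later in this changeset exercises. A small illustration (assumes a Home Assistant checkout on the import path; the sample data is invented):

```python
from homeassistant.components.diagnostics import REDACTED, async_redact_data

data = {"address": "1 Example Street", "options": {"wifi_ssid": "home-net", "volume": 3}}

assert async_redact_data(data, {"address", "wifi_ssid"}) == {
    "address": REDACTED,
    "options": {"wifi_ssid": REDACTED, "volume": 3},
}
```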
@@ -3,7 +3,7 @@
"name": "SimpliSafe",
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/simplisafe",
"requirements": ["simplisafe-python==2021.12.2"],
"requirements": ["simplisafe-python==2022.01.0"],
"codeowners": ["@bachya"],
"iot_class": "cloud_polling",
"dhcp": [

@@ -193,6 +193,7 @@ class SonosDiscoveryManager:
|
||||
|
||||
async def _async_stop_event_listener(self, event: Event | None = None) -> None:
|
||||
for speaker in self.data.discovered.values():
|
||||
speaker.activity_stats.log_report()
|
||||
speaker.event_stats.log_report()
|
||||
await asyncio.gather(
|
||||
*(speaker.async_offline() for speaker in self.data.discovered.values())
|
||||
|
||||
@@ -130,5 +130,6 @@ async def async_generate_speaker_info(
|
||||
if s is speaker
|
||||
}
|
||||
payload["media"] = await async_generate_media_info(hass, speaker)
|
||||
payload["activity_stats"] = speaker.activity_stats.report()
|
||||
payload["event_stats"] = speaker.event_stats.report()
|
||||
return payload
|
||||
|
||||
@@ -62,7 +62,7 @@ from .const import (
|
||||
)
|
||||
from .favorites import SonosFavorites
|
||||
from .helpers import soco_error
|
||||
from .statistics import EventStatistics
|
||||
from .statistics import ActivityStatistics, EventStatistics
|
||||
|
||||
NEVER_TIME = -1200.0
|
||||
EVENT_CHARGING = {
|
||||
@@ -177,6 +177,7 @@ class SonosSpeaker:
|
||||
self._event_dispatchers: dict[str, Callable] = {}
|
||||
self._last_activity: float = NEVER_TIME
|
||||
self._last_event_cache: dict[str, Any] = {}
|
||||
self.activity_stats: ActivityStatistics = ActivityStatistics(self.zone_name)
|
||||
self.event_stats: EventStatistics = EventStatistics(self.zone_name)
|
||||
|
||||
# Scheduled callback handles
|
||||
@@ -528,6 +529,7 @@ class SonosSpeaker:
|
||||
"""Track the last activity on this speaker, set availability and resubscribe."""
|
||||
_LOGGER.debug("Activity on %s from %s", self.zone_name, source)
|
||||
self._last_activity = time.monotonic()
|
||||
self.activity_stats.activity(source, self._last_activity)
|
||||
was_available = self.available
|
||||
self.available = True
|
||||
if not was_available:
|
||||
|
||||
@@ -9,13 +9,49 @@ from soco.events_base import Event as SonosEvent, parse_event_xml
_LOGGER = logging.getLogger(__name__)


class EventStatistics:
class SonosStatistics:
"""Base class of Sonos statistics."""

def __init__(self, zone_name: str, kind: str) -> None:
"""Initialize SonosStatistics."""
self._stats = {}
self._stat_type = kind
self.zone_name = zone_name

def report(self) -> dict:
"""Generate a report for use in diagnostics."""
return self._stats.copy()

def log_report(self) -> None:
"""Log statistics for this speaker."""
_LOGGER.debug(
"%s statistics for %s: %s",
self._stat_type,
self.zone_name,
self.report(),
)


class ActivityStatistics(SonosStatistics):
"""Representation of Sonos activity statistics."""

def __init__(self, zone_name: str) -> None:
"""Initialize ActivityStatistics."""
super().__init__(zone_name, "Activity")

def activity(self, source: str, timestamp: float) -> None:
"""Track an activity occurrence."""
activity_entry = self._stats.setdefault(source, {"count": 0})
activity_entry["count"] += 1
activity_entry["last_seen"] = timestamp


class EventStatistics(SonosStatistics):
"""Representation of Sonos event statistics."""

def __init__(self, zone_name: str) -> None:
"""Initialize EventStatistics."""
self._stats = {}
self.zone_name = zone_name
super().__init__(zone_name, "Event")

def receive(self, event: SonosEvent) -> None:
"""Mark a received event by subscription type."""
@@ -38,11 +74,3 @@ class EventStatistics:
payload["soco:from_didl_string"] = from_didl_string.cache_info()
payload["soco:parse_event_xml"] = parse_event_xml.cache_info()
return payload

def log_report(self) -> None:
"""Log event statistics for this speaker."""
_LOGGER.debug(
"Event statistics for %s: %s",
self.zone_name,
self.report(),
)

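The refactor above hoists report() and log_report() into a shared SonosStatistics base class; ActivityStatistics then only has to accumulate per-source counters, which it does with dict.setdefault. A self-contained sketch of that counting pattern:

```python
import time

stats: dict[str, dict] = {}


def record_activity(source: str) -> None:
    """Count occurrences per source and remember when each source was last seen."""
    entry = stats.setdefault(source, {"count": 0})
    entry["count"] += 1
    entry["last_seen"] = time.monotonic()


record_activity("ZoneGroupTopology")
record_activity("ZoneGroupTopology")
print(stats["ZoneGroupTopology"]["count"])  # 2
```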
@@ -25,6 +25,10 @@
"hostname": "k[lp]*",
"macaddress": "403F8C*"
},
{
"hostname": "k[lp]*",
"macaddress": "C0C9E3*"
},
{
"hostname": "ep*",
"macaddress": "E848B8*"

@@ -137,7 +137,7 @@ class TuyaFanEntity(TuyaEntity, FanEntity):
[
{
"code": self._speed.dpcode,
"value": self._speed.scale_value_back(percentage),
"value": int(self._speed.remap_value_from(percentage, 0, 100)),
}
]
)

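The new call maps a 0-100 percentage onto the device's own speed scale before casting to int. A generic linear remap looks like this (the 1..6 speed range is invented for illustration and is not taken from the Tuya library):

```python
def remap_value(value: float, from_min: float, from_max: float, to_min: float, to_max: float) -> float:
    """Linearly map value from the range [from_min, from_max] onto [to_min, to_max]."""
    ratio = (value - from_min) / (from_max - from_min)
    return to_min + ratio * (to_max - to_min)


# A 50 % fan percentage on a device whose speed data point spans 1..6:
print(int(remap_value(50, 0, 100, 1, 6)))  # 3
```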
@@ -31,24 +31,42 @@ REDACT_WLANS = {"bc_filter_list", "x_passphrase"}


@callback
def async_replace_data(data: Mapping, to_replace: dict[str, str]) -> dict[str, Any]:
"""Replace sensitive data in a dict."""
if not isinstance(data, (Mapping, list, set, tuple)):
return to_replace.get(data, data)

def async_replace_dict_data(
data: Mapping, to_replace: dict[str, str]
) -> dict[str, Any]:
"""Redact sensitive data in a dict."""
redacted = {**data}

for key, value in redacted.items():
for key, value in data.items():
if isinstance(value, dict):
redacted[key] = async_replace_data(value, to_replace)
redacted[key] = async_replace_dict_data(value, to_replace)
elif isinstance(value, (list, set, tuple)):
redacted[key] = [async_replace_data(item, to_replace) for item in value]
redacted[key] = async_replace_list_data(value, to_replace)
elif isinstance(value, str):
if value in to_replace:
redacted[key] = to_replace[value]
elif value.count(":") == 5:
redacted[key] = REDACTED
return redacted


@callback
def async_replace_list_data(
data: list | set | tuple, to_replace: dict[str, str]
) -> list[Any]:
"""Redact sensitive data in a list."""
redacted = []
for item in data:
new_value = None
if isinstance(item, (list, set, tuple)):
new_value = async_replace_list_data(item, to_replace)
elif isinstance(item, Mapping):
new_value = async_replace_dict_data(item, to_replace)
elif isinstance(item, str):
if item in to_replace:
new_value = to_replace[item]
elif item.count(":") == 5:
new_value = REDACTED
redacted.append(new_value or item)
return redacted


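Both helpers above treat any string containing exactly five colons as a MAC address and replace it with REDACTED. That heuristic is cheap but also matches non-MAC strings with five colons; a stricter check could validate the octets, for example (a sketch, not part of the change):

```python
import re

MAC_RE = re.compile(r"^(?:[0-9A-Fa-f]{2}:){5}[0-9A-Fa-f]{2}$")


def looks_like_mac(value: str) -> bool:
    """True only for colon-separated, six-octet hexadecimal strings."""
    return bool(MAC_RE.match(value))


print(looks_like_mac("80:a5:89:12:34:56"))          # True
print(looks_like_mac("not:a:mac:but:five:colons"))  # False, although count(":") == 5
```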
@@ -73,26 +91,28 @@ async def async_get_config_entry_diagnostics(
|
||||
counter += 1
|
||||
|
||||
diag["config"] = async_redact_data(
|
||||
async_replace_data(config_entry.as_dict(), macs_to_redact), REDACT_CONFIG
|
||||
async_replace_dict_data(config_entry.as_dict(), macs_to_redact), REDACT_CONFIG
|
||||
)
|
||||
diag["site_role"] = controller.site_role
|
||||
diag["entities"] = async_replace_data(controller.entities, macs_to_redact)
|
||||
diag["entities"] = async_replace_dict_data(controller.entities, macs_to_redact)
|
||||
diag["clients"] = {
|
||||
macs_to_redact[k]: async_redact_data(
|
||||
async_replace_data(v.raw, macs_to_redact), REDACT_CLIENTS
|
||||
async_replace_dict_data(v.raw, macs_to_redact), REDACT_CLIENTS
|
||||
)
|
||||
for k, v in controller.api.clients.items()
|
||||
}
|
||||
diag["devices"] = {
|
||||
macs_to_redact[k]: async_redact_data(
|
||||
async_replace_data(v.raw, macs_to_redact), REDACT_DEVICES
|
||||
async_replace_dict_data(v.raw, macs_to_redact), REDACT_DEVICES
|
||||
)
|
||||
for k, v in controller.api.devices.items()
|
||||
}
|
||||
diag["dpi_apps"] = {k: v.raw for k, v in controller.api.dpi_apps.items()}
|
||||
diag["dpi_groups"] = {k: v.raw for k, v in controller.api.dpi_groups.items()}
|
||||
diag["wlans"] = {
|
||||
k: async_redact_data(async_replace_data(v.raw, macs_to_redact), REDACT_WLANS)
|
||||
k: async_redact_data(
|
||||
async_replace_dict_data(v.raw, macs_to_redact), REDACT_WLANS
|
||||
)
|
||||
for k, v in controller.api.wlans.items()
|
||||
}
|
||||
|
||||
|
||||
@@ -57,6 +57,9 @@ async def async_reconnect_client(hass, data) -> None:
|
||||
device_registry = dr.async_get(hass)
|
||||
device_entry = device_registry.async_get(data[ATTR_DEVICE_ID])
|
||||
|
||||
if device_entry is None:
|
||||
return
|
||||
|
||||
mac = ""
|
||||
for connection in device_entry.connections:
|
||||
if connection[0] == CONNECTION_NETWORK_MAC:
|
||||
|
||||
@@ -101,6 +101,15 @@ def _build_entity(name, vicare_api, circuit, device_config, heating_type):
|
||||
return ViCareClimate(name, vicare_api, device_config, circuit, heating_type)
|
||||
|
||||
|
||||
def _get_circuits(vicare_api):
|
||||
"""Return the list of circuits."""
|
||||
try:
|
||||
return vicare_api.circuits
|
||||
except PyViCareNotSupportedFeatureError:
|
||||
_LOGGER.info("No circuits found")
|
||||
return []
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
config_entry: ConfigEntry,
|
||||
@@ -108,25 +117,23 @@ async def async_setup_entry(
|
||||
) -> None:
|
||||
"""Set up the ViCare climate platform."""
|
||||
name = VICARE_NAME
|
||||
|
||||
entities = []
|
||||
api = hass.data[DOMAIN][config_entry.entry_id][VICARE_API]
|
||||
circuits = await hass.async_add_executor_job(_get_circuits, api)
|
||||
|
||||
try:
|
||||
for circuit in hass.data[DOMAIN][config_entry.entry_id][VICARE_API].circuits:
|
||||
suffix = ""
|
||||
if len(hass.data[DOMAIN][config_entry.entry_id][VICARE_API].circuits) > 1:
|
||||
suffix = f" {circuit.id}"
|
||||
entity = _build_entity(
|
||||
f"{name} Heating{suffix}",
|
||||
hass.data[DOMAIN][config_entry.entry_id][VICARE_API],
|
||||
hass.data[DOMAIN][config_entry.entry_id][VICARE_DEVICE_CONFIG],
|
||||
circuit,
|
||||
config_entry.data[CONF_HEATING_TYPE],
|
||||
)
|
||||
if entity is not None:
|
||||
entities.append(entity)
|
||||
except PyViCareNotSupportedFeatureError:
|
||||
_LOGGER.info("No circuits found")
|
||||
for circuit in circuits:
|
||||
suffix = ""
|
||||
if len(circuits) > 1:
|
||||
suffix = f" {circuit.id}"
|
||||
|
||||
entity = _build_entity(
|
||||
f"{name} Heating{suffix}",
|
||||
api,
|
||||
hass.data[DOMAIN][config_entry.entry_id][VICARE_DEVICE_CONFIG],
|
||||
circuit,
|
||||
config_entry.data[CONF_HEATING_TYPE],
|
||||
)
|
||||
entities.append(entity)
|
||||
|
||||
platform = entity_platform.async_get_current_platform()
|
||||
|
||||
|
||||
@@ -68,6 +68,15 @@ def _build_entity(name, vicare_api, circuit, device_config, heating_type):
|
||||
)
|
||||
|
||||
|
||||
def _get_circuits(vicare_api):
|
||||
"""Return the list of circuits."""
|
||||
try:
|
||||
return vicare_api.circuits
|
||||
except PyViCareNotSupportedFeatureError:
|
||||
_LOGGER.info("No circuits found")
|
||||
return []
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
config_entry: ConfigEntry,
|
||||
@@ -75,24 +84,23 @@ async def async_setup_entry(
|
||||
) -> None:
|
||||
"""Set up the ViCare climate platform."""
|
||||
name = VICARE_NAME
|
||||
|
||||
entities = []
|
||||
try:
|
||||
for circuit in hass.data[DOMAIN][config_entry.entry_id][VICARE_API].circuits:
|
||||
suffix = ""
|
||||
if len(hass.data[DOMAIN][config_entry.entry_id][VICARE_API].circuits) > 1:
|
||||
suffix = f" {circuit.id}"
|
||||
entity = _build_entity(
|
||||
f"{name} Water{suffix}",
|
||||
hass.data[DOMAIN][config_entry.entry_id][VICARE_API],
|
||||
circuit,
|
||||
hass.data[DOMAIN][config_entry.entry_id][VICARE_DEVICE_CONFIG],
|
||||
config_entry.data[CONF_HEATING_TYPE],
|
||||
)
|
||||
if entity is not None:
|
||||
entities.append(entity)
|
||||
except PyViCareNotSupportedFeatureError:
|
||||
_LOGGER.info("No circuits found")
|
||||
api = hass.data[DOMAIN][config_entry.entry_id][VICARE_API]
|
||||
circuits = await hass.async_add_executor_job(_get_circuits, api)
|
||||
|
||||
for circuit in circuits:
|
||||
suffix = ""
|
||||
if len(circuits) > 1:
|
||||
suffix = f" {circuit.id}"
|
||||
|
||||
entity = _build_entity(
|
||||
f"{name} Water{suffix}",
|
||||
api,
|
||||
circuit,
|
||||
hass.data[DOMAIN][config_entry.entry_id][VICARE_DEVICE_CONFIG],
|
||||
config_entry.data[CONF_HEATING_TYPE],
|
||||
)
|
||||
entities.append(entity)
|
||||
|
||||
async_add_entities(entities)
|
||||
|
||||
|
||||
@@ -24,6 +24,7 @@ from homeassistant.helpers.typing import ConfigType
|
||||
|
||||
from . import async_control_connect
|
||||
from .const import CONF_SOURCES, DEFAULT_NAME, DOMAIN, WEBOSTV_EXCEPTIONS
|
||||
from .helpers import async_get_sources
|
||||
|
||||
DATA_SCHEMA = vol.Schema(
|
||||
{
|
||||
@@ -178,11 +179,14 @@ class OptionsFlowHandler(config_entries.OptionsFlow):
|
||||
options_input = {CONF_SOURCES: user_input[CONF_SOURCES]}
|
||||
return self.async_create_entry(title="", data=options_input)
|
||||
# Get sources
|
||||
sources = self.options.get(CONF_SOURCES, "")
|
||||
sources_list = await async_get_sources(self.host, self.key)
|
||||
if not sources_list:
|
||||
errors["base"] = "cannot_retrieve"
|
||||
|
||||
sources = [s for s in self.options.get(CONF_SOURCES, []) if s in sources_list]
|
||||
if not sources:
|
||||
sources = sources_list
|
||||
|
||||
options_schema = vol.Schema(
|
||||
{
|
||||
vol.Optional(
|
||||
@@ -195,16 +199,3 @@ class OptionsFlowHandler(config_entries.OptionsFlow):
|
||||
return self.async_show_form(
|
||||
step_id="init", data_schema=options_schema, errors=errors
|
||||
)
|
||||
|
||||
|
||||
async def async_get_sources(host: str, key: str) -> list[str]:
|
||||
"""Construct sources list."""
|
||||
try:
|
||||
client = await async_control_connect(host, key)
|
||||
except WEBOSTV_EXCEPTIONS:
|
||||
return []
|
||||
|
||||
return [
|
||||
*(app["title"] for app in client.apps.values()),
|
||||
*(app["label"] for app in client.inputs.values()),
|
||||
]
|
||||
|
||||
@@ -6,8 +6,8 @@ from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers import device_registry as dr, entity_registry as er
|
||||
from homeassistant.helpers.device_registry import DeviceEntry
|
||||
|
||||
from . import WebOsClientWrapper
|
||||
from .const import DATA_CONFIG_ENTRY, DOMAIN
|
||||
from . import WebOsClientWrapper, async_control_connect
|
||||
from .const import DATA_CONFIG_ENTRY, DOMAIN, LIVE_TV_APP_ID, WEBOSTV_EXCEPTIONS
|
||||
|
||||
|
||||
@callback
|
||||
@@ -81,3 +81,29 @@ def async_get_client_wrapper_by_device_entry(
|
||||
)
|
||||
|
||||
return wrapper
|
||||
|
||||
|
||||
async def async_get_sources(host: str, key: str) -> list[str]:
|
||||
"""Construct sources list."""
|
||||
try:
|
||||
client = await async_control_connect(host, key)
|
||||
except WEBOSTV_EXCEPTIONS:
|
||||
return []
|
||||
|
||||
sources = []
|
||||
found_live_tv = False
|
||||
for app in client.apps.values():
|
||||
sources.append(app["title"])
|
||||
if app["id"] == LIVE_TV_APP_ID:
|
||||
found_live_tv = True
|
||||
|
||||
for source in client.inputs.values():
|
||||
sources.append(source["label"])
|
||||
if source["appId"] == LIVE_TV_APP_ID:
|
||||
found_live_tv = True
|
||||
|
||||
if not found_live_tv:
|
||||
sources.append("Live TV")
|
||||
|
||||
# Preserve order when filtering duplicates
|
||||
return list(dict.fromkeys(sources))
|
||||
|
||||
@@ -3,7 +3,7 @@
|
||||
"name": "LG webOS Smart TV",
|
||||
"config_flow": true,
|
||||
"documentation": "https://www.home-assistant.io/integrations/webostv",
|
||||
"requirements": ["aiowebostv==0.1.1", "sqlalchemy==1.4.27"],
|
||||
"requirements": ["aiowebostv==0.1.2", "sqlalchemy==1.4.27"],
|
||||
"codeowners": ["@bendavid", "@thecode"],
|
||||
"ssdp": [{"st": "urn:lge-com:service:webos-second-screen:1"}],
|
||||
"quality_scale": "platinum",
|
||||
|
||||
@@ -87,7 +87,7 @@ SENSORS: tuple[WhoisSensorEntityDescription, ...] = (
|
||||
icon="mdi:account-star",
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
entity_registry_enabled_default=False,
|
||||
value_fn=lambda domain: domain.admin if domain.admin else None,
|
||||
value_fn=lambda domain: getattr(domain, "admin", None),
|
||||
),
|
||||
WhoisSensorEntityDescription(
|
||||
key="creation_date",
|
||||
@@ -123,7 +123,7 @@ SENSORS: tuple[WhoisSensorEntityDescription, ...] = (
|
||||
icon="mdi:account",
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
entity_registry_enabled_default=False,
|
||||
value_fn=lambda domain: domain.owner if domain.owner else None,
|
||||
value_fn=lambda domain: getattr(domain, "owner", None),
|
||||
),
|
||||
WhoisSensorEntityDescription(
|
||||
key="registrant",
|
||||
@@ -131,7 +131,7 @@ SENSORS: tuple[WhoisSensorEntityDescription, ...] = (
|
||||
icon="mdi:account-edit",
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
entity_registry_enabled_default=False,
|
||||
value_fn=lambda domain: domain.registrant if domain.registrant else None,
|
||||
value_fn=lambda domain: getattr(domain, "registrant", None),
|
||||
),
|
||||
WhoisSensorEntityDescription(
|
||||
key="registrar",
|
||||
@@ -147,7 +147,7 @@ SENSORS: tuple[WhoisSensorEntityDescription, ...] = (
|
||||
icon="mdi:store",
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
entity_registry_enabled_default=False,
|
||||
value_fn=lambda domain: domain.reseller if domain.reseller else None,
|
||||
value_fn=lambda domain: getattr(domain, "reseller", None),
|
||||
),
|
||||
)
|
||||
|
||||
@@ -190,7 +190,6 @@ async def async_setup_entry(
|
||||
)
|
||||
for description in SENSORS
|
||||
],
|
||||
update_before_add=True,
|
||||
)
|
||||
|
||||
|
||||
|
||||
@@ -9,7 +9,7 @@ import asyncio
|
||||
|
||||
from aiohttp.web import Request, Response
|
||||
import voluptuous as vol
|
||||
from withings_api import WithingsAuth
|
||||
from withings_api import AbstractWithingsApi, WithingsAuth
|
||||
from withings_api.common import NotifyAppli
|
||||
|
||||
from homeassistant.components import webhook
|
||||
@@ -84,7 +84,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
conf[CONF_CLIENT_ID],
|
||||
conf[CONF_CLIENT_SECRET],
|
||||
f"{WithingsAuth.URL}/oauth2_user/authorize2",
|
||||
f"{WithingsAuth.URL}/oauth2/token",
|
||||
f"{AbstractWithingsApi.URL}/v2/oauth2",
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
@@ -1111,3 +1111,46 @@ class WithingsLocalOAuth2Implementation(LocalOAuth2Implementation):
|
||||
"""Return the redirect uri."""
|
||||
url = get_url(self.hass, allow_internal=False, prefer_cloud=True)
|
||||
return f"{url}{AUTH_CALLBACK_PATH}"
|
||||
|
||||
async def _token_request(self, data: dict) -> dict:
|
||||
"""Make a token request and adapt Withings API reply."""
|
||||
new_token = await super()._token_request(data)
|
||||
# Withings API returns habitual token data under json key "body":
|
||||
# {
|
||||
# "status": [{integer} Withings API response status],
|
||||
# "body": {
|
||||
# "access_token": [{string} Your new access_token],
|
||||
# "expires_in": [{integer} Access token expiry delay in seconds],
|
||||
# "token_type": [{string] HTTP Authorization Header format: Bearer],
|
||||
# "scope": [{string} Scopes the user accepted],
|
||||
# "refresh_token": [{string} Your new refresh_token],
|
||||
# "userid": [{string} The Withings ID of the user]
|
||||
# }
|
||||
# }
|
||||
# so we copy that to token root.
|
||||
if body := new_token.pop("body", None):
|
||||
new_token.update(body)
|
||||
return new_token
|
||||
|
||||
async def async_resolve_external_data(self, external_data: Any) -> dict:
|
||||
"""Resolve the authorization code to tokens."""
|
||||
return await self._token_request(
|
||||
{
|
||||
"action": "requesttoken",
|
||||
"grant_type": "authorization_code",
|
||||
"code": external_data["code"],
|
||||
"redirect_uri": external_data["state"]["redirect_uri"],
|
||||
}
|
||||
)
|
||||
|
||||
async def _async_refresh_token(self, token: dict) -> dict:
|
||||
"""Refresh tokens."""
|
||||
new_token = await self._token_request(
|
||||
{
|
||||
"action": "requesttoken",
|
||||
"grant_type": "refresh_token",
|
||||
"client_id": self.client_id,
|
||||
"refresh_token": token["refresh_token"],
|
||||
}
|
||||
)
|
||||
return {**token, **new_token}
|
||||
|
||||
@@ -15,7 +15,6 @@ async def async_setup_entry(
|
||||
async_add_entities: AddEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up the sensor config entry."""
|
||||
|
||||
entities = await async_create_entities(
|
||||
hass,
|
||||
entry,
|
||||
|
||||
@@ -9,6 +9,7 @@ from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
from homeassistant.helpers.event import async_call_later
|
||||
from homeassistant.helpers.restore_state import RestoreEntity
|
||||
|
||||
from . import XiaomiDevice
|
||||
from .const import DOMAIN, GATEWAYS_KEY
|
||||
@@ -181,6 +182,11 @@ class XiaomiNatgasSensor(XiaomiBinarySensor):
|
||||
attrs.update(super().extra_state_attributes)
|
||||
return attrs
|
||||
|
||||
async def async_added_to_hass(self) -> None:
|
||||
"""Handle entity which will be added."""
|
||||
await super().async_added_to_hass()
|
||||
self._state = False
|
||||
|
||||
def parse_data(self, data, raw_data):
|
||||
"""Parse data sent by gateway."""
|
||||
if DENSITY in data:
|
||||
@@ -232,6 +238,11 @@ class XiaomiMotionSensor(XiaomiBinarySensor):
|
||||
self._state = False
|
||||
self.async_write_ha_state()
|
||||
|
||||
async def async_added_to_hass(self) -> None:
|
||||
"""Handle entity which will be added."""
|
||||
await super().async_added_to_hass()
|
||||
self._state = False
|
||||
|
||||
def parse_data(self, data, raw_data):
|
||||
"""Parse data sent by gateway.
|
||||
|
||||
@@ -293,7 +304,7 @@ class XiaomiMotionSensor(XiaomiBinarySensor):
|
||||
return True
|
||||
|
||||
|
||||
class XiaomiDoorSensor(XiaomiBinarySensor):
|
||||
class XiaomiDoorSensor(XiaomiBinarySensor, RestoreEntity):
|
||||
"""Representation of a XiaomiDoorSensor."""
|
||||
|
||||
def __init__(self, device, xiaomi_hub, config_entry):
|
||||
@@ -319,6 +330,15 @@ class XiaomiDoorSensor(XiaomiBinarySensor):
|
||||
attrs.update(super().extra_state_attributes)
|
||||
return attrs
|
||||
|
||||
async def async_added_to_hass(self) -> None:
|
||||
"""Handle entity which will be added."""
|
||||
await super().async_added_to_hass()
|
||||
state = await self.async_get_last_state()
|
||||
if state is None:
|
||||
return
|
||||
|
||||
self._state = state.state == "on"
|
||||
|
||||
def parse_data(self, data, raw_data):
|
||||
"""Parse data sent by gateway."""
|
||||
self._should_poll = False
|
||||
@@ -362,6 +382,11 @@ class XiaomiWaterLeakSensor(XiaomiBinarySensor):
|
||||
config_entry,
|
||||
)
|
||||
|
||||
async def async_added_to_hass(self) -> None:
|
||||
"""Handle entity which will be added."""
|
||||
await super().async_added_to_hass()
|
||||
self._state = False
|
||||
|
||||
def parse_data(self, data, raw_data):
|
||||
"""Parse data sent by gateway."""
|
||||
self._should_poll = False
|
||||
@@ -400,6 +425,11 @@ class XiaomiSmokeSensor(XiaomiBinarySensor):
|
||||
attrs.update(super().extra_state_attributes)
|
||||
return attrs
|
||||
|
||||
async def async_added_to_hass(self) -> None:
|
||||
"""Handle entity which will be added."""
|
||||
await super().async_added_to_hass()
|
||||
self._state = False
|
||||
|
||||
def parse_data(self, data, raw_data):
|
||||
"""Parse data sent by gateway."""
|
||||
if DENSITY in data:
|
||||
|
||||
@@ -161,7 +161,10 @@ class YaleOptionsFlowHandler(OptionsFlow):
|
||||
errors = {}
|
||||
|
||||
if user_input:
|
||||
if len(user_input[CONF_CODE]) not in [0, user_input[CONF_LOCK_CODE_DIGITS]]:
|
||||
if len(user_input.get(CONF_CODE, "")) not in [
|
||||
0,
|
||||
user_input[CONF_LOCK_CODE_DIGITS],
|
||||
]:
|
||||
errors["base"] = "code_format_mismatch"
|
||||
else:
|
||||
return self.async_create_entry(title="", data=user_input)
|
||||
@@ -171,7 +174,10 @@ class YaleOptionsFlowHandler(OptionsFlow):
|
||||
data_schema=vol.Schema(
|
||||
{
|
||||
vol.Optional(
|
||||
CONF_CODE, default=self.entry.options.get(CONF_CODE)
|
||||
CONF_CODE,
|
||||
description={
|
||||
"suggested_value": self.entry.options.get(CONF_CODE)
|
||||
},
|
||||
): str,
|
||||
vol.Optional(
|
||||
CONF_LOCK_CODE_DIGITS,
|
||||
|
||||
@@ -7,7 +7,7 @@ from .backports.enum import StrEnum

MAJOR_VERSION: Final = 2022
MINOR_VERSION: Final = 2
PATCH_VERSION: Final = "0b1"
PATCH_VERSION: Final = "0b4"
__short_version__: Final = f"{MAJOR_VERSION}.{MINOR_VERSION}"
__version__: Final = f"{__short_version__}.{PATCH_VERSION}"
REQUIRED_PYTHON_VER: Final[tuple[int, int, int]] = (3, 9, 0)

@@ -46,6 +46,11 @@ DHCP = [
|
||||
"hostname": "blink*",
|
||||
"macaddress": "B85F98*"
|
||||
},
|
||||
{
|
||||
"domain": "blink",
|
||||
"hostname": "blink*",
|
||||
"macaddress": "00037F*"
|
||||
},
|
||||
{
|
||||
"domain": "broadlink",
|
||||
"macaddress": "34EA34*"
|
||||
@@ -201,6 +206,11 @@ DHCP = [
|
||||
"domain": "nuki",
|
||||
"hostname": "nuki_bridge_*"
|
||||
},
|
||||
{
|
||||
"domain": "oncue",
|
||||
"hostname": "kohlergen*",
|
||||
"macaddress": "00146F*"
|
||||
},
|
||||
{
|
||||
"domain": "overkiz",
|
||||
"hostname": "gateway*",
|
||||
@@ -250,6 +260,11 @@ DHCP = [
|
||||
"hostname": "roomba-*",
|
||||
"macaddress": "80A589*"
|
||||
},
|
||||
{
|
||||
"domain": "roomba",
|
||||
"hostname": "roomba-*",
|
||||
"macaddress": "DCF505*"
|
||||
},
|
||||
{
|
||||
"domain": "samsungtv",
|
||||
"hostname": "tizen*"
|
||||
@@ -392,6 +407,11 @@ DHCP = [
|
||||
"hostname": "k[lp]*",
|
||||
"macaddress": "403F8C*"
|
||||
},
|
||||
{
|
||||
"domain": "tplink",
|
||||
"hostname": "k[lp]*",
|
||||
"macaddress": "C0C9E3*"
|
||||
},
|
||||
{
|
||||
"domain": "tplink",
|
||||
"hostname": "ep*",
|
||||
|
||||
@@ -1,3 +1,7 @@
[build-system]
requires = ["setuptools~=60.5", "wheel~=0.37.1"]
build-backend = "setuptools.build_meta"

[tool.black]
target-version = ["py38"]
exclude = 'generated'

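Declaring a [build-system] table makes the project buildable by any PEP 517 front-end instead of invoking setup.py directly. A hedged sketch using the build package's Python API (the build front-end is assumed to be installed; the backend it resolves is the setuptools.build_meta entry declared above):

```python
# Sketch only: build a wheel through the PEP 517 front-end, not a supported HA workflow.
from build import ProjectBuilder

builder = ProjectBuilder(".")  # directory containing pyproject.toml
wheel = builder.build("wheel", "dist/")  # delegates to the declared build backend
print(wheel)  # path of the freshly built wheel
```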
@@ -278,7 +278,7 @@ aiovlc==0.1.0
|
||||
aiowatttime==0.1.1
|
||||
|
||||
# homeassistant.components.webostv
|
||||
aiowebostv==0.1.1
|
||||
aiowebostv==0.1.2
|
||||
|
||||
# homeassistant.components.yandex_transport
|
||||
aioymaps==1.2.2
|
||||
@@ -764,7 +764,7 @@ google-cloud-pubsub==2.9.0
|
||||
google-cloud-texttospeech==0.4.0
|
||||
|
||||
# homeassistant.components.nest
|
||||
google-nest-sdm==1.5.1
|
||||
google-nest-sdm==1.6.0
|
||||
|
||||
# homeassistant.components.google_travel_time
|
||||
googlemaps==2.5.1
|
||||
@@ -2190,7 +2190,7 @@ simplehound==0.3
|
||||
simplepush==1.1.4
|
||||
|
||||
# homeassistant.components.simplisafe
|
||||
simplisafe-python==2021.12.2
|
||||
simplisafe-python==2022.01.0
|
||||
|
||||
# homeassistant.components.sisyphus
|
||||
sisyphus-control==3.1.2
|
||||
@@ -2496,7 +2496,7 @@ xbox-webapi==2.0.11
|
||||
xboxapi==2.0.1
|
||||
|
||||
# homeassistant.components.knx
|
||||
xknx==0.19.0
|
||||
xknx==0.19.1
|
||||
|
||||
# homeassistant.components.bluesound
|
||||
# homeassistant.components.fritz
|
||||
|
||||
@@ -213,7 +213,7 @@ aiovlc==0.1.0
|
||||
aiowatttime==0.1.1
|
||||
|
||||
# homeassistant.components.webostv
|
||||
aiowebostv==0.1.1
|
||||
aiowebostv==0.1.2
|
||||
|
||||
# homeassistant.components.yandex_transport
|
||||
aioymaps==1.2.2
|
||||
@@ -492,7 +492,7 @@ google-api-python-client==1.6.4
|
||||
google-cloud-pubsub==2.9.0
|
||||
|
||||
# homeassistant.components.nest
|
||||
google-nest-sdm==1.5.1
|
||||
google-nest-sdm==1.6.0
|
||||
|
||||
# homeassistant.components.google_travel_time
|
||||
googlemaps==2.5.1
|
||||
@@ -1337,7 +1337,7 @@ sharkiqpy==0.1.8
|
||||
simplehound==0.3
|
||||
|
||||
# homeassistant.components.simplisafe
|
||||
simplisafe-python==2021.12.2
|
||||
simplisafe-python==2022.01.0
|
||||
|
||||
# homeassistant.components.slack
|
||||
slackclient==2.5.0
|
||||
@@ -1527,7 +1527,7 @@ wolf_smartset==0.1.11
|
||||
xbox-webapi==2.0.11
|
||||
|
||||
# homeassistant.components.knx
|
||||
xknx==0.19.0
|
||||
xknx==0.19.1
|
||||
|
||||
# homeassistant.components.bluesound
|
||||
# homeassistant.components.fritz
|
||||
|
||||
@@ -1,5 +1,6 @@
#!/usr/bin/env python3
"""Generate an updated requirements_all.txt."""
import configparser
import difflib
import importlib
import os
@@ -167,10 +168,9 @@ def explore_module(package, explore_children):

def core_requirements():
"""Gather core requirements out of setup.py."""
reqs_raw = re.search(
r"REQUIRES = \[(.*?)\]", Path("setup.py").read_text(), re.S
).group(1)
return [x[1] for x in re.findall(r"(['\"])(.*?)\1", reqs_raw)]
parser = configparser.ConfigParser()
parser.read("setup.cfg")
return parser["options"]["install_requires"].strip().split("\n")


def gather_recursive_requirements(domain, seen=None):

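core_requirements now reads install_requires from setup.cfg with configparser instead of regex-scraping setup.py. configparser returns the multi-line value as one newline-joined string with a leading blank entry, which is why strip() followed by split("\n") yields the requirement list. A minimal standalone demonstration with a shortened, invented config:

```python
import configparser

SETUP_CFG = """
[options]
install_requires =
    aiohttp==3.8.1
    astral==2.2
"""

parser = configparser.ConfigParser()
parser.read_string(SETUP_CFG)
print(parser["options"]["install_requires"].strip().split("\n"))
# ['aiohttp==3.8.1', 'astral==2.2']
```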
@@ -12,6 +12,7 @@ from . import (
dhcp,
json,
manifest,
metadata,
mqtt,
mypy_config,
requirements,
@@ -41,6 +42,7 @@ INTEGRATION_PLUGINS = [
HASS_PLUGINS = [
coverage,
mypy_config,
metadata,
]

31
script/hassfest/metadata.py
Normal file
@@ -0,0 +1,31 @@
"""Package metadata validation."""
import configparser

from homeassistant.const import REQUIRED_PYTHON_VER, __version__

from .model import Config, Integration


def validate(integrations: dict[str, Integration], config: Config) -> None:
"""Validate project metadata keys."""
metadata_path = config.root / "setup.cfg"
parser = configparser.ConfigParser()
parser.read(metadata_path)

try:
if parser["metadata"]["version"] != __version__:
config.add_error(
"metadata", f"'metadata.version' value does not match '{__version__}'"
)
except KeyError:
config.add_error("metadata", "No 'metadata.version' key found!")

required_py_version = f">={'.'.join(map(str, REQUIRED_PYTHON_VER))}"
try:
if parser["options"]["python_requires"] != required_py_version:
config.add_error(
"metadata",
f"'options.python_requires' value doesn't match '{required_py_version}",
)
except KeyError:
config.add_error("metadata", "No 'options.python_requires' key found!")

@@ -1,32 +0,0 @@
|
||||
#!/bin/sh
|
||||
# Pushes a new version to PyPi.
|
||||
|
||||
cd "$(dirname "$0")/.."
|
||||
|
||||
head -n 5 homeassistant/const.py | tail -n 1 | grep PATCH_VERSION > /dev/null
|
||||
|
||||
if [ $? -eq 1 ]
|
||||
then
|
||||
echo "Patch version not found on const.py line 5"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
head -n 5 homeassistant/const.py | tail -n 1 | grep dev > /dev/null
|
||||
|
||||
if [ $? -eq 0 ]
|
||||
then
|
||||
echo "Release version should not contain dev tag"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
CURRENT_BRANCH=`git rev-parse --abbrev-ref HEAD`
|
||||
|
||||
if [ "$CURRENT_BRANCH" != "master" ] && [ "$CURRENT_BRANCH" != "rc" ]
|
||||
then
|
||||
echo "You have to be on the master or rc branch to release."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
rm -rf dist build
|
||||
python3 setup.py sdist bdist_wheel
|
||||
python3 -m twine upload dist/* --skip-existing
|
||||
@@ -117,7 +117,18 @@ def write_version(version):
)

with open("homeassistant/const.py", "wt") as fil:
content = fil.write(content)
fil.write(content)


def write_version_metadata(version: Version) -> None:
"""Update setup.cfg file with new version."""
with open("setup.cfg") as fp:
content = fp.read()

content = re.sub(r"(version\W+=\W).+\n", f"\\g<1>{version}\n", content, count=1)

with open("setup.cfg", "w") as fp:
fp.write(content)


def main():
@@ -142,6 +153,7 @@ def main():
assert bumped > current, "BUG! New version is not newer than old version"

write_version(bumped)
write_version_metadata(bumped)

if not arguments.commit:
return

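write_version_metadata rewrites only the first version = line in setup.cfg; the \g<1> group reference keeps whatever key-and-separator text the pattern captured. A standalone illustration of that substitution:

```python
import re

content = "[metadata]\nname = homeassistant\nversion = 2022.2.0b3\n"
updated = re.sub(r"(version\W+=\W).+\n", "\\g<1>2022.2.0b4\n", content, count=1)
print(updated, end="")
# [metadata]
# name = homeassistant
# version = 2022.2.0b4
```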
53
setup.cfg
@@ -1,10 +1,21 @@
|
||||
[metadata]
|
||||
name = homeassistant
|
||||
version = 2022.2.0b4
|
||||
author = The Home Assistant Authors
|
||||
author_email = hello@home-assistant.io
|
||||
license = Apache-2.0
|
||||
license_file = LICENSE.md
|
||||
platforms = any
|
||||
description = Open-source home automation platform running on Python 3.
|
||||
long_description = file: README.rst
|
||||
long_description_content_type = text/x-rst
|
||||
keywords = home, automation
|
||||
url = https://www.home-assistant.io/
|
||||
project_urls =
|
||||
Source Code = https://github.com/home-assistant/core
|
||||
Bug Reports = https://github.com/home-assistant/core/issues
|
||||
Docs: Dev = https://developers.home-assistant.io/
|
||||
Discord = https://discordapp.com/invite/c5DvZ4e
|
||||
Forum = https://community.home-assistant.io/
|
||||
classifier =
|
||||
Development Status :: 4 - Beta
|
||||
Intended Audience :: End Users/Desktop
|
||||
@@ -14,6 +25,46 @@ classifier =
|
||||
Programming Language :: Python :: 3.9
|
||||
Topic :: Home Automation
|
||||
|
||||
[options]
|
||||
packages = find:
|
||||
zip_safe = False
|
||||
include_package_data = True
|
||||
python_requires = >=3.9.0
|
||||
install_requires =
|
||||
aiohttp==3.8.1
|
||||
astral==2.2
|
||||
async_timeout==4.0.2
|
||||
attrs==21.2.0
|
||||
atomicwrites==1.4.0
|
||||
awesomeversion==22.1.0
|
||||
bcrypt==3.1.7
|
||||
certifi>=2021.5.30
|
||||
ciso8601==2.2.0
|
||||
# When bumping httpx, please check the version pins of
|
||||
# httpcore, anyio, and h11 in gen_requirements_all
|
||||
httpx==0.21.3
|
||||
ifaddr==0.1.7
|
||||
jinja2==3.0.3
|
||||
PyJWT==2.1.0
|
||||
# PyJWT has loose dependency. We want the latest one.
|
||||
cryptography==35.0.0
|
||||
pip>=8.0.3,<20.3
|
||||
python-slugify==4.0.1
|
||||
pyyaml==6.0
|
||||
requests==2.27.1
|
||||
typing-extensions>=3.10.0.2,<5.0
|
||||
voluptuous==0.12.2
|
||||
voluptuous-serialize==2.5.0
|
||||
yarl==1.7.2
|
||||
|
||||
[options.packages.find]
|
||||
include =
|
||||
homeassistant*
|
||||
|
||||
[options.entry_points]
|
||||
console_scripts =
|
||||
hass = homeassistant.__main__:main
|
||||
|
||||
[flake8]
|
||||
exclude = .venv,.git,.tox,docs,venv,bin,lib,deps,build
|
||||
max-complexity = 25
|
||||
|
||||
84
setup.py
Executable file → Normal file
@@ -1,79 +1,7 @@
|
||||
#!/usr/bin/env python3
|
||||
"""Home Assistant setup script."""
|
||||
from datetime import datetime as dt
|
||||
"""
|
||||
Entry point for setuptools. Required for editable installs.
|
||||
TODO: Remove file after updating to pip 21.3
|
||||
"""
|
||||
from setuptools import setup
|
||||
|
||||
from setuptools import find_packages, setup
|
||||
|
||||
import homeassistant.const as hass_const
|
||||
|
||||
PROJECT_NAME = "Home Assistant"
|
||||
PROJECT_PACKAGE_NAME = "homeassistant"
|
||||
PROJECT_LICENSE = "Apache License 2.0"
|
||||
PROJECT_AUTHOR = "The Home Assistant Authors"
|
||||
PROJECT_COPYRIGHT = f" 2013-{dt.now().year}, {PROJECT_AUTHOR}"
|
||||
PROJECT_URL = "https://www.home-assistant.io/"
|
||||
PROJECT_EMAIL = "hello@home-assistant.io"
|
||||
|
||||
PROJECT_GITHUB_USERNAME = "home-assistant"
|
||||
PROJECT_GITHUB_REPOSITORY = "core"
|
||||
|
||||
PYPI_URL = f"https://pypi.python.org/pypi/{PROJECT_PACKAGE_NAME}"
|
||||
GITHUB_PATH = f"{PROJECT_GITHUB_USERNAME}/{PROJECT_GITHUB_REPOSITORY}"
|
||||
GITHUB_URL = f"https://github.com/{GITHUB_PATH}"
|
||||
|
||||
DOWNLOAD_URL = f"{GITHUB_URL}/archive/{hass_const.__version__}.zip"
|
||||
PROJECT_URLS = {
|
||||
"Bug Reports": f"{GITHUB_URL}/issues",
|
||||
"Dev Docs": "https://developers.home-assistant.io/",
|
||||
"Discord": "https://discordapp.com/invite/c5DvZ4e",
|
||||
"Forum": "https://community.home-assistant.io/",
|
||||
}
|
||||
|
||||
PACKAGES = find_packages(exclude=["tests", "tests.*"])
|
||||
|
||||
REQUIRES = [
|
||||
"aiohttp==3.8.1",
|
||||
"astral==2.2",
|
||||
"async_timeout==4.0.2",
|
||||
"attrs==21.2.0",
|
||||
"atomicwrites==1.4.0",
|
||||
"awesomeversion==22.1.0",
|
||||
"bcrypt==3.1.7",
|
||||
"certifi>=2021.5.30",
|
||||
"ciso8601==2.2.0",
|
||||
# When bumping httpx, please check the version pins of
|
||||
# httpcore, anyio, and h11 in gen_requirements_all
|
||||
"httpx==0.21.3",
|
||||
"ifaddr==0.1.7",
|
||||
"jinja2==3.0.3",
|
||||
"PyJWT==2.1.0",
|
||||
# PyJWT has loose dependency. We want the latest one.
|
||||
"cryptography==35.0.0",
|
||||
"pip>=8.0.3,<20.3",
|
||||
"python-slugify==4.0.1",
|
||||
"pyyaml==6.0",
|
||||
"requests==2.27.1",
|
||||
"typing-extensions>=3.10.0.2,<5.0",
|
||||
"voluptuous==0.12.2",
|
||||
"voluptuous-serialize==2.5.0",
|
||||
"yarl==1.7.2",
|
||||
]
|
||||
|
||||
MIN_PY_VERSION = ".".join(map(str, hass_const.REQUIRED_PYTHON_VER))
|
||||
|
||||
setup(
|
||||
name=PROJECT_PACKAGE_NAME,
|
||||
version=hass_const.__version__,
|
||||
url=PROJECT_URL,
|
||||
download_url=DOWNLOAD_URL,
|
||||
project_urls=PROJECT_URLS,
|
||||
author=PROJECT_AUTHOR,
|
||||
author_email=PROJECT_EMAIL,
|
||||
packages=PACKAGES,
|
||||
include_package_data=True,
|
||||
zip_safe=False,
|
||||
install_requires=REQUIRES,
|
||||
python_requires=f">={MIN_PY_VERSION}",
|
||||
test_suite="tests",
|
||||
entry_points={"console_scripts": ["hass = homeassistant.__main__:main"]},
|
||||
)
|
||||
setup()
|
||||
|
||||
@@ -55,6 +55,7 @@ async def test_entry_diagnostics(
|
||||
str(Platform.LIGHT): [],
|
||||
str(Platform.LOCK): [],
|
||||
str(Platform.NUMBER): [],
|
||||
str(Platform.SCENE): [],
|
||||
str(Platform.SENSOR): [],
|
||||
str(Platform.SIREN): [],
|
||||
str(Platform.SWITCH): [],
|
||||
|
||||
@@ -2,8 +2,10 @@
|
||||
|
||||
from unittest.mock import patch
|
||||
|
||||
from homeassistant.components.deconz.gateway import get_gateway_from_config_entry
|
||||
from homeassistant.components.scene import DOMAIN as SCENE_DOMAIN, SERVICE_TURN_ON
|
||||
from homeassistant.const import ATTR_ENTITY_ID
|
||||
from homeassistant.helpers.dispatcher import async_dispatcher_send
|
||||
|
||||
from .test_gateway import (
|
||||
DECONZ_WEB_REQUEST,
|
||||
@@ -58,3 +60,30 @@ async def test_scenes(hass, aioclient_mock):
|
||||
await hass.config_entries.async_unload(config_entry.entry_id)
|
||||
|
||||
assert len(hass.states.async_all()) == 0
|
||||
|
||||
|
||||
async def test_only_new_scenes_are_created(hass, aioclient_mock):
|
||||
"""Test that scenes works."""
|
||||
data = {
|
||||
"groups": {
|
||||
"1": {
|
||||
"id": "Light group id",
|
||||
"name": "Light group",
|
||||
"type": "LightGroup",
|
||||
"state": {"all_on": False, "any_on": True},
|
||||
"action": {},
|
||||
"scenes": [{"id": "1", "name": "Scene"}],
|
||||
"lights": [],
|
||||
}
|
||||
}
|
||||
}
|
||||
with patch.dict(DECONZ_WEB_REQUEST, data):
|
||||
config_entry = await setup_deconz_integration(hass, aioclient_mock)
|
||||
|
||||
assert len(hass.states.async_all()) == 1
|
||||
|
||||
gateway = get_gateway_from_config_entry(hass, config_entry)
|
||||
async_dispatcher_send(hass, gateway.signal_new_scene)
|
||||
await hass.async_block_till_done()
|
||||
|
||||
assert len(hass.states.async_all()) == 1
|
||||
|
||||
33
tests/components/diagnostics/test_util.py
Normal file
@@ -0,0 +1,33 @@
|
||||
"""Test Diagnostics utils."""
|
||||
from homeassistant.components.diagnostics import REDACTED, async_redact_data
|
||||
|
||||
|
||||
def test_redact():
|
||||
"""Test the async_redact_data helper."""
|
||||
data = {
|
||||
"key1": "value1",
|
||||
"key2": ["value2_a", "value2_b"],
|
||||
"key3": [["value_3a", "value_3b"], ["value_3c", "value_3d"]],
|
||||
"key4": {
|
||||
"key4_1": "value4_1",
|
||||
"key4_2": ["value4_2a", "value4_2b"],
|
||||
"key4_3": [["value4_3a", "value4_3b"], ["value4_3c", "value4_3d"]],
|
||||
},
|
||||
}
|
||||
|
||||
to_redact = {
|
||||
"key1",
|
||||
"key3",
|
||||
"key4_1",
|
||||
}
|
||||
|
||||
assert async_redact_data(data, to_redact) == {
|
||||
"key1": REDACTED,
|
||||
"key2": ["value2_a", "value2_b"],
|
||||
"key3": REDACTED,
|
||||
"key4": {
|
||||
"key4_1": REDACTED,
|
||||
"key4_2": ["value4_2a", "value4_2b"],
|
||||
"key4_3": [["value4_3a", "value4_3b"], ["value4_3c", "value4_3d"]],
|
||||
},
|
||||
}
|
||||
@@ -7,10 +7,16 @@ from unittest.mock import patch
|
||||
import pytest
|
||||
|
||||
from homeassistant.components import flux_led
|
||||
from homeassistant.components.flux_led.const import DOMAIN
|
||||
from homeassistant.components.flux_led.const import (
|
||||
CONF_REMOTE_ACCESS_ENABLED,
|
||||
CONF_REMOTE_ACCESS_HOST,
|
||||
CONF_REMOTE_ACCESS_PORT,
|
||||
DOMAIN,
|
||||
)
|
||||
from homeassistant.config_entries import ConfigEntryState
|
||||
from homeassistant.const import CONF_HOST, CONF_NAME, EVENT_HOMEASSISTANT_STARTED
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers import entity_registry as er
|
||||
from homeassistant.setup import async_setup_component
|
||||
from homeassistant.util.dt import utcnow
|
||||
|
||||
@@ -156,3 +162,46 @@ async def test_time_sync_startup_and_next_day(hass: HomeAssistant) -> None:
|
||||
async_fire_time_changed(hass, utcnow() + timedelta(hours=24))
|
||||
await hass.async_block_till_done()
|
||||
assert len(bulb.async_set_time.mock_calls) == 2
|
||||
|
||||
|
||||
async def test_unique_id_migrate_when_mac_discovered(hass: HomeAssistant) -> None:
|
||||
"""Test unique id migrated when mac discovered."""
|
||||
config_entry = MockConfigEntry(
|
||||
domain=DOMAIN,
|
||||
data={
|
||||
CONF_REMOTE_ACCESS_HOST: "any",
|
||||
CONF_REMOTE_ACCESS_ENABLED: True,
|
||||
CONF_REMOTE_ACCESS_PORT: 1234,
|
||||
CONF_HOST: IP_ADDRESS,
|
||||
CONF_NAME: DEFAULT_ENTRY_TITLE,
|
||||
},
|
||||
)
|
||||
config_entry.add_to_hass(hass)
|
||||
bulb = _mocked_bulb()
|
||||
with _patch_discovery(no_device=True), _patch_wifibulb(device=bulb):
|
||||
await async_setup_component(hass, flux_led.DOMAIN, {flux_led.DOMAIN: {}})
|
||||
await hass.async_block_till_done()
|
||||
|
||||
assert not config_entry.unique_id
|
||||
entity_registry = er.async_get(hass)
|
||||
assert (
|
||||
entity_registry.async_get("light.bulb_rgbcw_ddeeff").unique_id
|
||||
== config_entry.entry_id
|
||||
)
|
||||
assert (
|
||||
entity_registry.async_get("switch.bulb_rgbcw_ddeeff_remote_access").unique_id
|
||||
== f"{config_entry.entry_id}_remote_access"
|
||||
)
|
||||
|
||||
with _patch_discovery(), _patch_wifibulb(device=bulb):
|
||||
await hass.config_entries.async_reload(config_entry.entry_id)
|
||||
await hass.async_block_till_done()
|
||||
|
||||
assert (
|
||||
entity_registry.async_get("light.bulb_rgbcw_ddeeff").unique_id
|
||||
== config_entry.unique_id
|
||||
)
|
||||
assert (
|
||||
entity_registry.async_get("switch.bulb_rgbcw_ddeeff_remote_access").unique_id
|
||||
== f"{config_entry.unique_id}_remote_access"
|
||||
)
|
||||
|
||||
@@ -137,8 +137,8 @@ async def test_light_goes_unavailable_and_recovers(hass: HomeAssistant) -> None:
|
||||
assert state.state == STATE_ON
|
||||
|
||||
|
||||
async def test_light_no_unique_id(hass: HomeAssistant) -> None:
|
||||
"""Test a light without a unique id."""
|
||||
async def test_light_mac_address_not_found(hass: HomeAssistant) -> None:
|
||||
"""Test a light when we cannot discover the mac address."""
|
||||
config_entry = MockConfigEntry(
|
||||
domain=DOMAIN, data={CONF_HOST: IP_ADDRESS, CONF_NAME: DEFAULT_ENTRY_TITLE}
|
||||
)
|
||||
@@ -150,7 +150,7 @@ async def test_light_no_unique_id(hass: HomeAssistant) -> None:
|
||||
|
||||
entity_id = "light.bulb_rgbcw_ddeeff"
|
||||
entity_registry = er.async_get(hass)
|
||||
assert entity_registry.async_get(entity_id) is None
|
||||
assert entity_registry.async_get(entity_id).unique_id == config_entry.entry_id
|
||||
state = hass.states.get(entity_id)
|
||||
assert state.state == STATE_ON
|
||||
|
||||
|
||||
@@ -41,7 +41,7 @@ from . import (
|
||||
from tests.common import MockConfigEntry
|
||||
|
||||
|
||||
async def test_number_unique_id(hass: HomeAssistant) -> None:
|
||||
async def test_effects_speed_unique_id(hass: HomeAssistant) -> None:
|
||||
"""Test a number unique id."""
|
||||
config_entry = MockConfigEntry(
|
||||
domain=DOMAIN,
|
||||
@@ -59,6 +59,23 @@ async def test_number_unique_id(hass: HomeAssistant) -> None:
|
||||
assert entity_registry.async_get(entity_id).unique_id == MAC_ADDRESS
|
||||
|
||||
|
||||
async def test_effects_speed_unique_id_no_discovery(hass: HomeAssistant) -> None:
|
||||
"""Test a number unique id."""
|
||||
config_entry = MockConfigEntry(
|
||||
domain=DOMAIN,
|
||||
data={CONF_HOST: IP_ADDRESS, CONF_NAME: DEFAULT_ENTRY_TITLE},
|
||||
)
|
||||
config_entry.add_to_hass(hass)
|
||||
bulb = _mocked_bulb()
|
||||
with _patch_discovery(no_device=True), _patch_wifibulb(device=bulb):
|
||||
await async_setup_component(hass, flux_led.DOMAIN, {flux_led.DOMAIN: {}})
|
||||
await hass.async_block_till_done()
|
||||
|
||||
entity_id = "number.bulb_rgbcw_ddeeff_effect_speed"
|
||||
entity_registry = er.async_get(hass)
|
||||
assert entity_registry.async_get(entity_id).unique_id == config_entry.entry_id
|
||||
|
||||
|
||||
async def test_rgb_light_effect_speed(hass: HomeAssistant) -> None:
|
||||
"""Test an rgb light with an effect."""
|
||||
config_entry = MockConfigEntry(
|
||||
|
||||
@@ -14,6 +14,7 @@ from homeassistant.components.flux_led.const import CONF_WHITE_CHANNEL_TYPE, DOM
|
||||
from homeassistant.components.select import DOMAIN as SELECT_DOMAIN
|
||||
from homeassistant.const import ATTR_ENTITY_ID, ATTR_OPTION, CONF_HOST, CONF_NAME
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers import entity_registry as er
|
||||
from homeassistant.setup import async_setup_component
|
||||
|
||||
from . import (
|
||||
@@ -67,6 +68,47 @@ async def test_switch_power_restore_state(hass: HomeAssistant) -> None:
|
||||
)
|
||||
|
||||
|
||||
async def test_power_restored_unique_id(hass: HomeAssistant) -> None:
|
||||
"""Test a select unique id."""
|
||||
config_entry = MockConfigEntry(
|
||||
domain=DOMAIN,
|
||||
data={CONF_HOST: IP_ADDRESS, CONF_NAME: DEFAULT_ENTRY_TITLE},
|
||||
unique_id=MAC_ADDRESS,
|
||||
)
|
||||
config_entry.add_to_hass(hass)
|
||||
switch = _mocked_switch()
|
||||
with _patch_discovery(), _patch_wifibulb(device=switch):
|
||||
await async_setup_component(hass, flux_led.DOMAIN, {flux_led.DOMAIN: {}})
|
||||
await hass.async_block_till_done()
|
||||
|
||||
entity_id = "select.bulb_rgbcw_ddeeff_power_restored"
|
||||
entity_registry = er.async_get(hass)
|
||||
assert (
|
||||
entity_registry.async_get(entity_id).unique_id
|
||||
== f"{MAC_ADDRESS}_power_restored"
|
||||
)
|
||||
|
||||
|
||||
async def test_power_restored_unique_id_no_discovery(hass: HomeAssistant) -> None:
|
||||
"""Test a select unique id."""
|
||||
config_entry = MockConfigEntry(
|
||||
domain=DOMAIN,
|
||||
data={CONF_HOST: IP_ADDRESS, CONF_NAME: DEFAULT_ENTRY_TITLE},
|
||||
)
|
||||
config_entry.add_to_hass(hass)
|
||||
switch = _mocked_switch()
|
||||
with _patch_discovery(no_device=True), _patch_wifibulb(device=switch):
|
||||
await async_setup_component(hass, flux_led.DOMAIN, {flux_led.DOMAIN: {}})
|
||||
await hass.async_block_till_done()
|
||||
|
||||
entity_id = "select.bulb_rgbcw_ddeeff_power_restored"
|
||||
entity_registry = er.async_get(hass)
|
||||
assert (
|
||||
entity_registry.async_get(entity_id).unique_id
|
||||
== f"{config_entry.entry_id}_power_restored"
|
||||
)
|
||||
|
||||
|
||||
async def test_select_addressable_strip_config(hass: HomeAssistant) -> None:
|
||||
"""Test selecting addressable strip configs."""
|
||||
config_entry = MockConfigEntry(
|
||||
|
||||
@@ -2,7 +2,12 @@
from flux_led.const import MODE_MUSIC

from homeassistant.components import flux_led
from homeassistant.components.flux_led.const import CONF_REMOTE_ACCESS_ENABLED, DOMAIN
from homeassistant.components.flux_led.const import (
    CONF_REMOTE_ACCESS_ENABLED,
    CONF_REMOTE_ACCESS_HOST,
    CONF_REMOTE_ACCESS_PORT,
    DOMAIN,
)
from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN
from homeassistant.const import (
    ATTR_ENTITY_ID,
@@ -12,6 +17,7 @@ from homeassistant.const import (
    STATE_ON,
)
from homeassistant.core import HomeAssistant
from homeassistant.helpers import entity_registry as er
from homeassistant.setup import async_setup_component

from . import (
@@ -65,11 +71,69 @@ async def test_switch_on_off(hass: HomeAssistant) -> None:
    assert hass.states.get(entity_id).state == STATE_ON


async def test_remote_access_unique_id(hass: HomeAssistant) -> None:
    """Test a remote access switch unique id."""
    config_entry = MockConfigEntry(
        domain=DOMAIN,
        data={
            CONF_REMOTE_ACCESS_HOST: "any",
            CONF_REMOTE_ACCESS_ENABLED: True,
            CONF_REMOTE_ACCESS_PORT: 1234,
            CONF_HOST: IP_ADDRESS,
            CONF_NAME: DEFAULT_ENTRY_TITLE,
        },
        unique_id=MAC_ADDRESS,
    )
    config_entry.add_to_hass(hass)
    bulb = _mocked_bulb()
    with _patch_discovery(), _patch_wifibulb(device=bulb):
        await async_setup_component(hass, flux_led.DOMAIN, {flux_led.DOMAIN: {}})
        await hass.async_block_till_done()

    entity_id = "switch.bulb_rgbcw_ddeeff_remote_access"
    entity_registry = er.async_get(hass)
    assert (
        entity_registry.async_get(entity_id).unique_id == f"{MAC_ADDRESS}_remote_access"
    )


async def test_effects_speed_unique_id_no_discovery(hass: HomeAssistant) -> None:
    """Test a remote access switch unique id when discovery fails."""
    config_entry = MockConfigEntry(
        domain=DOMAIN,
        data={
            CONF_REMOTE_ACCESS_HOST: "any",
            CONF_REMOTE_ACCESS_ENABLED: True,
            CONF_REMOTE_ACCESS_PORT: 1234,
            CONF_HOST: IP_ADDRESS,
            CONF_NAME: DEFAULT_ENTRY_TITLE,
        },
    )
    config_entry.add_to_hass(hass)
    bulb = _mocked_bulb()
    with _patch_discovery(no_device=True), _patch_wifibulb(device=bulb):
        await async_setup_component(hass, flux_led.DOMAIN, {flux_led.DOMAIN: {}})
        await hass.async_block_till_done()

    entity_id = "switch.bulb_rgbcw_ddeeff_remote_access"
    entity_registry = er.async_get(hass)
    assert (
        entity_registry.async_get(entity_id).unique_id
        == f"{config_entry.entry_id}_remote_access"
    )


async def test_remote_access_on_off(hass: HomeAssistant) -> None:
    """Test enable/disable remote access."""
    config_entry = MockConfigEntry(
        domain=DOMAIN,
        data={CONF_HOST: IP_ADDRESS, CONF_NAME: DEFAULT_ENTRY_TITLE},
        data={
            CONF_REMOTE_ACCESS_HOST: "any",
            CONF_REMOTE_ACCESS_ENABLED: True,
            CONF_REMOTE_ACCESS_PORT: 1234,
            CONF_HOST: IP_ADDRESS,
            CONF_NAME: DEFAULT_ENTRY_TITLE,
        },
        unique_id=MAC_ADDRESS,
    )
    config_entry.add_to_hass(hass)

@@ -1514,3 +1514,34 @@ async def test_query_recover(hass, caplog):
            }
        },
    }


async def test_proxy_selected(hass, caplog):
    """Test that we handle proxy selected."""

    result = await sh.async_handle_message(
        hass,
        BASIC_CONFIG,
        "test-agent",
        {
            "requestId": REQ_ID,
            "inputs": [
                {
                    "intent": "action.devices.PROXY_SELECTED",
                    "payload": {
                        "device": {
                            "id": "abcdefg",
                            "customData": {},
                        },
                        "structureData": {},
                    },
                }
            ],
        },
        const.SOURCE_LOCAL,
    )

    assert result == {
        "requestId": REQ_ID,
        "payload": {},
    }

@@ -35,16 +35,16 @@ async def test_connectsense_setup(hass):
        devices=[],
        entities=[
            EntityTestInfo(
                entity_id="sensor.inwall_outlet_0394de_real_time_current",
                friendly_name="InWall Outlet-0394DE Real Time Current",
                entity_id="sensor.inwall_outlet_0394de_current",
                friendly_name="InWall Outlet-0394DE Current",
                unique_id="homekit-1020301376-aid:1-sid:13-cid:18",
                capabilities={"state_class": SensorStateClass.MEASUREMENT},
                unit_of_measurement=ELECTRIC_CURRENT_AMPERE,
                state="0.03",
            ),
            EntityTestInfo(
                entity_id="sensor.inwall_outlet_0394de_real_time_energy",
                friendly_name="InWall Outlet-0394DE Real Time Energy",
                entity_id="sensor.inwall_outlet_0394de_power",
                friendly_name="InWall Outlet-0394DE Power",
                unique_id="homekit-1020301376-aid:1-sid:13-cid:19",
                capabilities={"state_class": SensorStateClass.MEASUREMENT},
                unit_of_measurement=POWER_WATT,
@@ -65,16 +65,16 @@ async def test_connectsense_setup(hass):
                state="on",
            ),
            EntityTestInfo(
                entity_id="sensor.inwall_outlet_0394de_real_time_current_2",
                friendly_name="InWall Outlet-0394DE Real Time Current",
                entity_id="sensor.inwall_outlet_0394de_current_2",
                friendly_name="InWall Outlet-0394DE Current",
                unique_id="homekit-1020301376-aid:1-sid:25-cid:30",
                capabilities={"state_class": SensorStateClass.MEASUREMENT},
                unit_of_measurement=ELECTRIC_CURRENT_AMPERE,
                state="0.05",
            ),
            EntityTestInfo(
                entity_id="sensor.inwall_outlet_0394de_real_time_energy_2",
                friendly_name="InWall Outlet-0394DE Real Time Energy",
                entity_id="sensor.inwall_outlet_0394de_power_2",
                friendly_name="InWall Outlet-0394DE Power",
                unique_id="homekit-1020301376-aid:1-sid:25-cid:31",
                capabilities={"state_class": SensorStateClass.MEASUREMENT},
                unit_of_measurement=POWER_WATT,

@@ -59,9 +59,9 @@ async def test_eve_degree_setup(hass):
            state="0.400000005960464",
        ),
        EntityTestInfo(
            entity_id="sensor.eve_energy_50ff_real_time_energy",
            entity_id="sensor.eve_energy_50ff_power",
            unique_id="homekit-AA00A0A00000-aid:1-sid:28-cid:34",
            friendly_name="Eve Energy 50FF Real Time Energy",
            friendly_name="Eve Energy 50FF Power",
            unit_of_measurement=POWER_WATT,
            capabilities={"state_class": SensorStateClass.MEASUREMENT},
            state="0",

@@ -37,8 +37,8 @@ async def test_koogeek_p1eu_setup(hass):
            state="off",
        ),
        EntityTestInfo(
            entity_id="sensor.koogeek_p1_a00aa0_real_time_energy",
            friendly_name="Koogeek-P1-A00AA0 Real Time Energy",
            entity_id="sensor.koogeek_p1_a00aa0_power",
            friendly_name="Koogeek-P1-A00AA0 Power",
            unique_id="homekit-EUCP03190xxxxx48-aid:1-sid:21-cid:22",
            unit_of_measurement=POWER_WATT,
            capabilities={"state_class": SensorStateClass.MEASUREMENT},

@@ -43,8 +43,8 @@ async def test_koogeek_sw2_setup(hass):
            state="off",
        ),
        EntityTestInfo(
            entity_id="sensor.koogeek_sw2_187a91_real_time_energy",
            friendly_name="Koogeek-SW2-187A91 Real Time Energy",
            entity_id="sensor.koogeek_sw2_187a91_power",
            friendly_name="Koogeek-SW2-187A91 Power",
            unique_id="homekit-CNNT061751001372-aid:1-sid:14-cid:18",
            unit_of_measurement=POWER_WATT,
            capabilities={"state_class": SensorStateClass.MEASUREMENT},

@@ -37,8 +37,8 @@ async def test_vocolinc_vp3_setup(hass):
            state="on",
        ),
        EntityTestInfo(
            entity_id="sensor.vocolinc_vp3_123456_real_time_energy",
            friendly_name="VOCOlinc-VP3-123456 Real Time Energy",
            entity_id="sensor.vocolinc_vp3_123456_power",
            friendly_name="VOCOlinc-VP3-123456 Power",
            unique_id="homekit-EU0121203xxxxx07-aid:1-sid:48-cid:97",
            unit_of_measurement=POWER_WATT,
            capabilities={"state_class": SensorStateClass.MEASUREMENT},

@@ -4,8 +4,11 @@ from unittest.mock import patch

from aiohomekit.model.characteristics import CharacteristicsTypes
from aiohomekit.model.services import ServicesTypes
from aiohomekit.testing import FakeController

from homeassistant.components.homekit_controller.const import ENTITY_MAP
from homeassistant.const import EVENT_HOMEASSISTANT_STOP
from homeassistant.core import HomeAssistant

from tests.components.homekit_controller.common import setup_test_component

@@ -27,3 +30,24 @@ async def test_unload_on_stop(hass, utcnow):
        await hass.async_block_till_done()

    assert async_unlock_mock.called


async def test_async_remove_entry(hass: HomeAssistant):
    """Test unpairing a component."""
    helper = await setup_test_component(hass, create_motion_sensor_service)

    hkid = "00:00:00:00:00:00"

    with patch("aiohomekit.Controller") as controller_cls:
        # Setup a fake controller with 1 pairing
        controller = controller_cls.return_value = FakeController()
        await controller.add_paired_device([helper.accessory], hkid)
        assert len(controller.pairings) == 1

        assert hkid in hass.data[ENTITY_MAP].storage_data

        # Remove it via config entry and number of pairings should go down
        await helper.config_entry.async_remove(hass)
        assert len(controller.pairings) == 0

        assert hkid not in hass.data[ENTITY_MAP].storage_data

@@ -218,7 +218,7 @@ async def test_switch_with_sensor(hass, utcnow):
    # Helper will be for the primary entity, which is the outlet. Make a helper for the sensor.
    energy_helper = Helper(
        hass,
        "sensor.testdevice_real_time_energy",
        "sensor.testdevice_power",
        helper.pairing,
        helper.accessory,
        helper.config_entry,
@@ -248,7 +248,7 @@ async def test_sensor_unavailable(hass, utcnow):
    # Helper will be for the primary entity, which is the outlet. Make a helper for the sensor.
    energy_helper = Helper(
        hass,
        "sensor.testdevice_real_time_energy",
        "sensor.testdevice_power",
        helper.pairing,
        helper.accessory,
        helper.config_entry,

@@ -2,8 +2,6 @@
from aiohomekit.model.characteristics import CharacteristicsTypes
from aiohomekit.model.services import ServicesTypes

from homeassistant import config_entries
from homeassistant.components.homekit_controller import async_remove_entry
from homeassistant.components.homekit_controller.const import ENTITY_MAP

from tests.common import flush_store
@@ -79,26 +77,3 @@ async def test_storage_is_updated_on_add(hass, hass_storage, utcnow):
    # Is saved out to store?
    await flush_store(entity_map.store)
    assert hkid in hass_storage[ENTITY_MAP]["data"]["pairings"]


async def test_storage_is_removed_on_config_entry_removal(hass, utcnow):
    """Test entity map storage is cleaned up on config entry removal."""
    await setup_test_component(hass, create_lightbulb_service)

    hkid = "00:00:00:00:00:00"

    pairing_data = {"AccessoryPairingID": hkid}

    entry = config_entries.ConfigEntry(
        1,
        "homekit_controller",
        "TestData",
        pairing_data,
        "test",
    )

    assert hkid in hass.data[ENTITY_MAP].storage_data

    await async_remove_entry(hass, entry)

    assert hkid not in hass.data[ENTITY_MAP].storage_data

|
||||
ISY_URL_POSTFIX,
|
||||
UDN_UUID_PREFIX,
|
||||
)
|
||||
from homeassistant.config_entries import SOURCE_DHCP, SOURCE_IMPORT, SOURCE_SSDP
|
||||
from homeassistant.config_entries import (
|
||||
SOURCE_DHCP,
|
||||
SOURCE_IGNORE,
|
||||
SOURCE_IMPORT,
|
||||
SOURCE_SSDP,
|
||||
)
|
||||
from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME
|
||||
from homeassistant.core import HomeAssistant
|
||||
|
||||
@@ -595,3 +600,27 @@ async def test_form_dhcp_existing_entry_preserves_port(hass: HomeAssistant):
|
||||
assert result["reason"] == "already_configured"
|
||||
assert entry.data[CONF_HOST] == f"http://1.2.3.4:1443{ISY_URL_POSTFIX}"
|
||||
assert entry.data[CONF_USERNAME] == "bob"
|
||||
|
||||
|
||||
async def test_form_dhcp_existing_ignored_entry(hass: HomeAssistant):
|
||||
"""Test we handled an ignored entry from dhcp."""
|
||||
|
||||
entry = MockConfigEntry(
|
||||
domain=DOMAIN, data={}, unique_id=MOCK_UUID, source=SOURCE_IGNORE
|
||||
)
|
||||
entry.add_to_hass(hass)
|
||||
|
||||
with patch(PATCH_CONNECTION, return_value=MOCK_CONFIG_RESPONSE):
|
||||
result = await hass.config_entries.flow.async_init(
|
||||
DOMAIN,
|
||||
context={"source": SOURCE_DHCP},
|
||||
data=dhcp.DhcpServiceInfo(
|
||||
ip="1.2.3.4",
|
||||
hostname="isy994-ems",
|
||||
macaddress=MOCK_MAC,
|
||||
),
|
||||
)
|
||||
await hass.async_block_till_done()
|
||||
|
||||
assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
|
||||
assert result["reason"] == "already_configured"
|
||||
|
||||
Some files were not shown because too many files have changed in this diff.