forked from home-assistant/core
Compare commits
95 Commits
| SHA1 |
|---|
| 245eec7041 |
| 493309daa7 |
| af68802c17 |
| 576cece7a9 |
| 3b9859940f |
| a315fd059a |
| ba9ef004c8 |
| 22f745b17c |
| 05cf223146 |
| d4aadd8af0 |
| 4045eee2e5 |
| 83a51f7f30 |
| 29110fe157 |
| e87b7e24b4 |
| d9056c01a6 |
| a724bc21b6 |
| ef00178339 |
| b8770c3958 |
| f0c0cfcac0 |
| 4c48ad9108 |
| 92b0453749 |
| 8ab801a7b4 |
| f92c7b1aea |
| 0d9fbf864f |
| 275f9c8a28 |
| 84f3b1514f |
| 802f5613c4 |
| 8be40cbb00 |
| 46ce4e92f6 |
| 39f11bb46d |
| 3b0fe9adde |
| 707778229b |
| a474534c08 |
| 65ad99d51c |
| 4052a0db89 |
| b546fc5067 |
| 5dcc760755 |
| fb06acf39d |
| 948f191f16 |
| 2c0d9105ac |
| 10df9f3542 |
| 6cf799459b |
| 47e2d1caa5 |
| 69d8f94e3b |
| 4b7803ed03 |
| ff6015ff89 |
| fbd144de46 |
| adaebdeea8 |
| 910cb5865a |
| baf0d9b2d9 |
| c1bce68549 |
| bde4c0e46f |
| a275e7aa67 |
| d96e416d26 |
| efc3894303 |
| 06b47ee2f5 |
| 08ca43221f |
| 8641740ed8 |
| d0ada6c6e2 |
| 76bb036968 |
| d8b64be41c |
| b3e0b7b86e |
| e097e4c1c2 |
| 34f0fecef8 |
| f53a10d39a |
| 5b993129d6 |
| 865656d436 |
| fb25c6c115 |
| c963cf8743 |
| ddb28db21a |
| bfc98b444f |
| f9a0f44137 |
| 93750d71ce |
| 06e4003640 |
| 97ff5e2085 |
| 8a2c07ce19 |
| 9f7398e0df |
| 7df84dadad |
| 2a1e943b18 |
| e6e72bfa82 |
| 219868b308 |
| 67dd861d8c |
| f2765ba320 |
| aefd3df914 |
| 3658eeb8d1 |
| 080cb6b6e9 |
| 20796303da |
| dff6151ff4 |
| 6f24f4e302 |
| 175febe635 |
| aa907f4d10 |
| 3d09478aea |
| 05df9b4b8b |
| 1865a28083 |
| f78d57515a |
@@ -248,6 +248,7 @@ homeassistant/components/integration/* @dgomes
homeassistant/components/intent/* @home-assistant/core
homeassistant/components/intesishome/* @jnimmo
homeassistant/components/ios/* @robbiet480
homeassistant/components/iotawatt/* @gtdiehl
homeassistant/components/iperf3/* @rohankapoorcom
homeassistant/components/ipma/* @dgomes @abmantis
homeassistant/components/ipp/* @ctalkington

@@ -1,7 +1,11 @@
"""Sensor platform for Advantage Air integration."""
import voluptuous as vol

from homeassistant.components.sensor import STATE_CLASS_MEASUREMENT, SensorEntity
from homeassistant.components.sensor import (
    DEVICE_CLASS_TEMPERATURE,
    STATE_CLASS_MEASUREMENT,
    SensorEntity,
)
from homeassistant.const import PERCENTAGE, TEMP_CELSIUS
from homeassistant.helpers import config_validation as cv, entity_platform

@@ -138,11 +142,11 @@ class AdvantageAirZoneSignal(AdvantageAirEntity, SensorEntity):


class AdvantageAirZoneTemp(AdvantageAirEntity, SensorEntity):
    """Representation of Advantage Air Zone wireless signal sensor."""
    """Representation of Advantage Air Zone temperature sensor."""

    _attr_native_unit_of_measurement = TEMP_CELSIUS
    _attr_device_class = DEVICE_CLASS_TEMPERATURE
    _attr_state_class = STATE_CLASS_MEASUREMENT
    _attr_icon = "mdi:thermometer"
    _attr_entity_registry_enabled_default = False

    def __init__(self, instance, ac_key, zone_key):

@@ -319,6 +319,7 @@ async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> b
            config_entry.data[CONF_API_KEY],
            config_entry.data[CONF_APP_KEY],
            session=session,
            logger=LOGGER,
        ),
    )
    hass.loop.create_task(ambient.ws_connect())

@@ -3,7 +3,7 @@
  "name": "Ambient Weather Station",
  "config_flow": true,
  "documentation": "https://www.home-assistant.io/integrations/ambient_station",
  "requirements": ["aioambient==1.2.6"],
  "requirements": ["aioambient==1.3.0"],
  "codeowners": ["@bachya"],
  "iot_class": "cloud_push"
}

@@ -88,6 +88,7 @@ class ArestSwitchBase(SwitchEntity):
        self._resource = resource
        self._attr_name = f"{location.title()} {name.title()}"
        self._attr_available = True
        self._attr_is_on = False


class ArestSwitchFunction(ArestSwitchBase):

@@ -2,7 +2,7 @@
  "domain": "bmw_connected_drive",
  "name": "BMW Connected Drive",
  "documentation": "https://www.home-assistant.io/integrations/bmw_connected_drive",
  "requirements": ["bimmer_connected==0.7.19"],
  "requirements": ["bimmer_connected==0.7.20"],
  "codeowners": ["@gerard33", "@rikroe"],
  "config_flow": true,
  "iot_class": "cloud_polling"

@@ -142,9 +142,6 @@ class BroadlinkSwitch(BroadlinkEntity, SwitchEntity, RestoreEntity, ABC):
        super().__init__(device)
        self._command_on = command_on
        self._command_off = command_off

        self._attr_assumed_state = True
        self._attr_device_class = DEVICE_CLASS_SWITCH
        self._attr_name = f"{device.name} Switch"

    async def async_added_to_hass(self):

@@ -699,7 +699,7 @@ class BrSensor(SensorEntity):
    @callback
    def data_updated(self, data):
        """Update data."""
        if self._load_data(data) and self.hass:
        if self.hass and self._load_data(data):
            self.async_write_ha_state()

    @callback

@@ -165,10 +165,7 @@ async def _async_get_image(
            width=width, height=height
        )
    else:
        _LOGGER.warning(
            "The camera entity %s does not support requesting width and height, please open an issue with the integration author",
            camera.entity_id,
        )
        camera.async_warn_old_async_camera_image_signature()
        image_bytes = await camera.async_camera_image()

    if image_bytes:

@@ -381,6 +378,7 @@ class Camera(Entity):
        self.stream_options: dict[str, str] = {}
        self.content_type: str = DEFAULT_CONTENT_TYPE
        self.access_tokens: collections.deque = collections.deque([], 2)
        self._warned_old_signature = False
        self.async_update_token()

    @property

@@ -455,11 +453,20 @@ class Camera(Entity):
            return await self.hass.async_add_executor_job(
                partial(self.camera_image, width=width, height=height)
            )
        self.async_warn_old_async_camera_image_signature()
        return await self.hass.async_add_executor_job(self.camera_image)

    # Remove in 2022.1 after all custom components have had a chance to change their signature
    @callback
    def async_warn_old_async_camera_image_signature(self) -> None:
        """Warn once when calling async_camera_image with the function old signature."""
        if self._warned_old_signature:
            return
        _LOGGER.warning(
            "The camera entity %s does not support requesting width and height, please open an issue with the integration author",
            self.entity_id,
        )
        return await self.hass.async_add_executor_job(self.camera_image)
        self._warned_old_signature = True

    async def handle_async_still_stream(
        self, request: web.Request, interval: float
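The camera diff above deprecates calling `async_camera_image()` without size hints and logs a warning once per entity. A minimal sketch of a custom camera that adopts the width/height-aware signature; the entity name and the `_fetch_snapshot` helper are hypothetical, and the keyword defaults are an assumption based on the warning text in the diff:

```python
from __future__ import annotations

from homeassistant.components.camera import Camera


class ExampleCamera(Camera):
    """Hypothetical camera entity using the width/height-aware signature."""

    _attr_name = "Example Camera"

    async def async_camera_image(
        self, width: int | None = None, height: int | None = None
    ) -> bytes | None:
        # Forward the requested size to the image source; fall back to the
        # native resolution when no size hint is given.
        return await self._fetch_snapshot(width=width, height=height)

    async def _fetch_snapshot(
        self, width: int | None, height: int | None
    ) -> bytes | None:
        # Placeholder for an integration-specific fetch; returns raw image bytes.
        return b""
```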
@@ -25,7 +25,7 @@ from homeassistant.const import (
from homeassistant.core import CoreState, HomeAssistant, callback
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.typing import ConfigType, StateType
from homeassistant.helpers.typing import ConfigType, EventType, StateType
from homeassistant.util import Throttle

from .const import (

@@ -146,8 +146,15 @@ async def async_setup_entry(

    if transport:
        # Register listener to close transport on HA shutdown
        @callback
        def close_transport(_event: EventType) -> None:
            """Close the transport on HA shutdown."""
            if not transport:
                return
            transport.close()

        stop_listener = hass.bus.async_listen_once(
            EVENT_HOMEASSISTANT_STOP, transport.close
            EVENT_HOMEASSISTANT_STOP, close_transport
        )

        # Wait for reader to close

@@ -3,7 +3,7 @@ from __future__ import annotations

from typing import Any, cast

from aioesphomeapi import APIVersion, LightColorMode, LightInfo, LightState
from aioesphomeapi import APIVersion, LightColorCapability, LightInfo, LightState

from homeassistant.components.light import (
    ATTR_BRIGHTNESS,

@@ -34,12 +34,7 @@ from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddEntitiesCallback

from . import (
    EsphomeEntity,
    EsphomeEnumMapper,
    esphome_state_property,
    platform_async_setup_entry,
)
from . import EsphomeEntity, esphome_state_property, platform_async_setup_entry

FLASH_LENGTHS = {FLASH_SHORT: 2, FLASH_LONG: 10}

@@ -59,20 +54,81 @@ async def async_setup_entry(
    )


_COLOR_MODES: EsphomeEnumMapper[LightColorMode, str] = EsphomeEnumMapper(
    {
        LightColorMode.UNKNOWN: COLOR_MODE_UNKNOWN,
        LightColorMode.ON_OFF: COLOR_MODE_ONOFF,
        LightColorMode.BRIGHTNESS: COLOR_MODE_BRIGHTNESS,
        LightColorMode.WHITE: COLOR_MODE_WHITE,
        LightColorMode.COLOR_TEMPERATURE: COLOR_MODE_COLOR_TEMP,
        LightColorMode.COLD_WARM_WHITE: COLOR_MODE_COLOR_TEMP,
        LightColorMode.RGB: COLOR_MODE_RGB,
        LightColorMode.RGB_WHITE: COLOR_MODE_RGBW,
        LightColorMode.RGB_COLOR_TEMPERATURE: COLOR_MODE_RGBWW,
        LightColorMode.RGB_COLD_WARM_WHITE: COLOR_MODE_RGBWW,
    }
)
_COLOR_MODE_MAPPING = {
    COLOR_MODE_ONOFF: [
        LightColorCapability.ON_OFF,
    ],
    COLOR_MODE_BRIGHTNESS: [
        LightColorCapability.ON_OFF | LightColorCapability.BRIGHTNESS,
        # for compatibility with older clients (2021.8.x)
        LightColorCapability.BRIGHTNESS,
    ],
    COLOR_MODE_COLOR_TEMP: [
        LightColorCapability.ON_OFF
        | LightColorCapability.BRIGHTNESS
        | LightColorCapability.COLOR_TEMPERATURE,
        LightColorCapability.ON_OFF
        | LightColorCapability.BRIGHTNESS
        | LightColorCapability.COLD_WARM_WHITE,
    ],
    COLOR_MODE_RGB: [
        LightColorCapability.ON_OFF
        | LightColorCapability.BRIGHTNESS
        | LightColorCapability.RGB,
    ],
    COLOR_MODE_RGBW: [
        LightColorCapability.ON_OFF
        | LightColorCapability.BRIGHTNESS
        | LightColorCapability.RGB
        | LightColorCapability.WHITE,
    ],
    COLOR_MODE_RGBWW: [
        LightColorCapability.ON_OFF
        | LightColorCapability.BRIGHTNESS
        | LightColorCapability.RGB
        | LightColorCapability.WHITE
        | LightColorCapability.COLOR_TEMPERATURE,
        LightColorCapability.ON_OFF
        | LightColorCapability.BRIGHTNESS
        | LightColorCapability.RGB
        | LightColorCapability.COLD_WARM_WHITE,
    ],
    COLOR_MODE_WHITE: [
        LightColorCapability.ON_OFF
        | LightColorCapability.BRIGHTNESS
        | LightColorCapability.WHITE
    ],
}


def _color_mode_to_ha(mode: int) -> str:
    """Convert an esphome color mode to a HA color mode constant.

    Choses the color mode that best matches the feature-set.
    """
    candidates = []
    for ha_mode, cap_lists in _COLOR_MODE_MAPPING.items():
        for caps in cap_lists:
            if caps == mode:
                # exact match
                return ha_mode
            if (mode & caps) == caps:
                # all requirements met
                candidates.append((ha_mode, caps))

    if not candidates:
        return COLOR_MODE_UNKNOWN

    # choose the color mode with the most bits set
    candidates.sort(key=lambda key: bin(key[1]).count("1"))
    return candidates[-1][0]


def _filter_color_modes(
    supported: list[int], features: LightColorCapability
) -> list[int]:
    """Filter the given supported color modes, excluding all values that don't have the requested features."""
    return [mode for mode in supported if mode & features]


# https://github.com/PyCQA/pylint/issues/3150 for all @esphome_state_property
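The `_COLOR_MODE_MAPPING` / `_color_mode_to_ha()` helpers above pick the Home Assistant color mode whose capability bitmask is covered by the device's native mode, preferring the candidate with the most bits set. A self-contained sketch of the same popcount-based selection, using stand-in flags rather than the real `aioesphomeapi.LightColorCapability` values (the flag numbers and mode names here are illustrative only):

```python
from enum import IntFlag


class Capability(IntFlag):
    # Stand-in flags; the real values live in aioesphomeapi.LightColorCapability.
    ON_OFF = 1
    BRIGHTNESS = 2
    WHITE = 4
    COLOR_TEMPERATURE = 8
    COLD_WARM_WHITE = 16
    RGB = 32


MAPPING = {
    "onoff": [Capability.ON_OFF],
    "brightness": [Capability.ON_OFF | Capability.BRIGHTNESS],
    "color_temp": [
        Capability.ON_OFF | Capability.BRIGHTNESS | Capability.COLOR_TEMPERATURE,
        Capability.ON_OFF | Capability.BRIGHTNESS | Capability.COLD_WARM_WHITE,
    ],
    "rgb": [Capability.ON_OFF | Capability.BRIGHTNESS | Capability.RGB],
}


def color_mode_to_ha(mode: int) -> str:
    """Return the best-matching HA mode for a native capability bitmask."""
    candidates = []
    for ha_mode, cap_lists in MAPPING.items():
        for caps in cap_lists:
            if caps == mode:
                return ha_mode  # exact match wins immediately
            if (mode & caps) == caps:
                candidates.append((ha_mode, caps))  # all required bits present
    if not candidates:
        return "unknown"
    # Prefer the candidate with the most capability bits set (popcount via bin()).
    candidates.sort(key=lambda item: bin(item[1]).count("1"))
    return candidates[-1][0]


# A device advertising on/off + brightness + RGB maps to "rgb",
# not to the weaker "brightness" candidate that also matches.
assert color_mode_to_ha(Capability.ON_OFF | Capability.BRIGHTNESS | Capability.RGB) == "rgb"
```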
@@ -95,10 +151,17 @@ class EsphomeLight(EsphomeEntity[LightInfo, LightState], LightEntity):
    async def async_turn_on(self, **kwargs: Any) -> None:
        """Turn the entity on."""
        data: dict[str, Any] = {"key": self._static_info.key, "state": True}
        # The list of color modes that would fit this service call
        color_modes = self._native_supported_color_modes
        try_keep_current_mode = True

        # rgb/brightness input is in range 0-255, but esphome uses 0-1

        if (brightness_ha := kwargs.get(ATTR_BRIGHTNESS)) is not None:
            data["brightness"] = brightness_ha / 255
            color_modes = _filter_color_modes(
                color_modes, LightColorCapability.BRIGHTNESS
            )

        if (rgb_ha := kwargs.get(ATTR_RGB_COLOR)) is not None:
            rgb = tuple(x / 255 for x in rgb_ha)

@@ -106,8 +169,8 @@ class EsphomeLight(EsphomeEntity[LightInfo, LightState], LightEntity):
            # normalize rgb
            data["rgb"] = tuple(x / (color_bri or 1) for x in rgb)
            data["color_brightness"] = color_bri
            if self._supports_color_mode:
                data["color_mode"] = LightColorMode.RGB
            color_modes = _filter_color_modes(color_modes, LightColorCapability.RGB)
            try_keep_current_mode = False

        if (rgbw_ha := kwargs.get(ATTR_RGBW_COLOR)) is not None:
            # pylint: disable=invalid-name

@@ -117,8 +180,10 @@ class EsphomeLight(EsphomeEntity[LightInfo, LightState], LightEntity):
            data["rgb"] = tuple(x / (color_bri or 1) for x in rgb)
            data["white"] = w
            data["color_brightness"] = color_bri
            if self._supports_color_mode:
                data["color_mode"] = LightColorMode.RGB_WHITE
            color_modes = _filter_color_modes(
                color_modes, LightColorCapability.RGB | LightColorCapability.WHITE
            )
            try_keep_current_mode = False

        if (rgbww_ha := kwargs.get(ATTR_RGBWW_COLOR)) is not None:
            # pylint: disable=invalid-name

@@ -126,14 +191,14 @@ class EsphomeLight(EsphomeEntity[LightInfo, LightState], LightEntity):
            color_bri = max(rgb)
            # normalize rgb
            data["rgb"] = tuple(x / (color_bri or 1) for x in rgb)
            modes = self._native_supported_color_modes
            if (
                self._supports_color_mode
                and LightColorMode.RGB_COLD_WARM_WHITE in modes
            ):
            color_modes = _filter_color_modes(color_modes, LightColorCapability.RGB)
            if _filter_color_modes(color_modes, LightColorCapability.COLD_WARM_WHITE):
                # Device supports setting cwww values directly
                data["cold_white"] = cw
                data["warm_white"] = ww
                target_mode = LightColorMode.RGB_COLD_WARM_WHITE
                color_modes = _filter_color_modes(
                    color_modes, LightColorCapability.COLD_WARM_WHITE
                )
            else:
                # need to convert cw+ww part to white+color_temp
                white = data["white"] = max(cw, ww)

@@ -142,11 +207,13 @@ class EsphomeLight(EsphomeEntity[LightInfo, LightState], LightEntity):
                max_ct = self.max_mireds
                ct_ratio = ww / (cw + ww)
                data["color_temperature"] = min_ct + ct_ratio * (max_ct - min_ct)
                target_mode = LightColorMode.RGB_COLOR_TEMPERATURE
                color_modes = _filter_color_modes(
                    color_modes,
                    LightColorCapability.COLOR_TEMPERATURE | LightColorCapability.WHITE,
                )
            try_keep_current_mode = False

            data["color_brightness"] = color_bri
            if self._supports_color_mode:
                data["color_mode"] = target_mode

        if (flash := kwargs.get(ATTR_FLASH)) is not None:
            data["flash_length"] = FLASH_LENGTHS[flash]

@@ -156,12 +223,15 @@ class EsphomeLight(EsphomeEntity[LightInfo, LightState], LightEntity):

        if (color_temp := kwargs.get(ATTR_COLOR_TEMP)) is not None:
            data["color_temperature"] = color_temp
            if self._supports_color_mode:
                supported_modes = self._native_supported_color_modes
                if LightColorMode.COLOR_TEMPERATURE in supported_modes:
                    data["color_mode"] = LightColorMode.COLOR_TEMPERATURE
                elif LightColorMode.COLD_WARM_WHITE in supported_modes:
                    data["color_mode"] = LightColorMode.COLD_WARM_WHITE
            if _filter_color_modes(color_modes, LightColorCapability.COLOR_TEMPERATURE):
                color_modes = _filter_color_modes(
                    color_modes, LightColorCapability.COLOR_TEMPERATURE
                )
            else:
                color_modes = _filter_color_modes(
                    color_modes, LightColorCapability.COLD_WARM_WHITE
                )
            try_keep_current_mode = False

        if (effect := kwargs.get(ATTR_EFFECT)) is not None:
            data["effect"] = effect

@@ -171,7 +241,30 @@ class EsphomeLight(EsphomeEntity[LightInfo, LightState], LightEntity):
            # HA only sends `white` in turn_on, and reads total brightness through brightness property
            data["brightness"] = white_ha / 255
            data["white"] = 1.0
            data["color_mode"] = LightColorMode.WHITE
            color_modes = _filter_color_modes(
                color_modes,
                LightColorCapability.BRIGHTNESS | LightColorCapability.WHITE,
            )
            try_keep_current_mode = False

        if self._supports_color_mode and color_modes:
            # try the color mode with the least complexity (fewest capabilities set)
            # popcount with bin() function because it appears to be the best way: https://stackoverflow.com/a/9831671
            color_modes.sort(key=lambda mode: bin(mode).count("1"))
            data["color_mode"] = color_modes[0]
        if self._supports_color_mode and color_modes:
            if (
                try_keep_current_mode
                and self._state is not None
                and self._state.color_mode in color_modes
            ):
                # if possible, stay with the color mode that is already set
                data["color_mode"] = self._state.color_mode
            else:
                # otherwise try the color mode with the least complexity (fewest capabilities set)
                # popcount with bin() function because it appears to be the best way: https://stackoverflow.com/a/9831671
                color_modes.sort(key=lambda mode: bin(mode).count("1"))
                data["color_mode"] = color_modes[0]

        await self._client.light_command(**data)

@@ -198,7 +291,7 @@ class EsphomeLight(EsphomeEntity[LightInfo, LightState], LightEntity):
                return None
            return next(iter(supported))

        return _COLOR_MODES.from_esphome(self._state.color_mode)
        return _color_mode_to_ha(self._state.color_mode)

    @esphome_state_property
    def rgb_color(self) -> tuple[int, int, int] | None:

@@ -227,9 +320,8 @@ class EsphomeLight(EsphomeEntity[LightInfo, LightState], LightEntity):
    def rgbww_color(self) -> tuple[int, int, int, int, int] | None:
        """Return the rgbww color value [int, int, int, int, int]."""
        rgb = cast("tuple[int, int, int]", self.rgb_color)
        if (
            not self._supports_color_mode
            or self._state.color_mode != LightColorMode.RGB_COLD_WARM_WHITE
        if not _filter_color_modes(
            self._native_supported_color_modes, LightColorCapability.COLD_WARM_WHITE
        ):
            # Try to reverse white + color temp to cwww
            min_ct = self._static_info.min_mireds

@@ -262,7 +354,7 @@ class EsphomeLight(EsphomeEntity[LightInfo, LightState], LightEntity):
        return self._state.effect

    @property
    def _native_supported_color_modes(self) -> list[LightColorMode]:
    def _native_supported_color_modes(self) -> list[int]:
        return self._static_info.supported_color_modes_compat(self._api_version)

    @property

@@ -272,7 +364,7 @@ class EsphomeLight(EsphomeEntity[LightInfo, LightState], LightEntity):

        # All color modes except UNKNOWN,ON_OFF support transition
        modes = self._native_supported_color_modes
        if any(m not in (LightColorMode.UNKNOWN, LightColorMode.ON_OFF) for m in modes):
        if any(m not in (0, LightColorCapability.ON_OFF) for m in modes):
            flags |= SUPPORT_TRANSITION
        if self._static_info.effects:
            flags |= SUPPORT_EFFECT

@@ -281,7 +373,14 @@ class EsphomeLight(EsphomeEntity[LightInfo, LightState], LightEntity):
    @property
    def supported_color_modes(self) -> set[str] | None:
        """Flag supported color modes."""
        return set(map(_COLOR_MODES.from_esphome, self._native_supported_color_modes))
        supported = set(map(_color_mode_to_ha, self._native_supported_color_modes))
        if COLOR_MODE_ONOFF in supported and len(supported) > 1:
            supported.remove(COLOR_MODE_ONOFF)
        if COLOR_MODE_BRIGHTNESS in supported and len(supported) > 1:
            supported.remove(COLOR_MODE_BRIGHTNESS)
        if COLOR_MODE_WHITE in supported and len(supported) == 1:
            supported.remove(COLOR_MODE_WHITE)
        return supported

    @property
    def effect_list(self) -> list[str]:

@@ -3,7 +3,7 @@
  "name": "ESPHome",
  "config_flow": true,
  "documentation": "https://www.home-assistant.io/integrations/esphome",
  "requirements": ["aioesphomeapi==7.0.0"],
  "requirements": ["aioesphomeapi==8.0.0"],
  "zeroconf": ["_esphomelib._tcp.local."],
  "codeowners": ["@OttoWinter", "@jesserockz"],
  "after_dependencies": ["zeroconf", "tag"],

@@ -5,7 +5,12 @@ import datetime
import logging
from typing import Callable, TypedDict

from fritzconnection.core.exceptions import FritzConnectionException
from fritzconnection.core.exceptions import (
    FritzActionError,
    FritzActionFailedError,
    FritzConnectionException,
    FritzServiceError,
)
from fritzconnection.lib.fritzstatus import FritzStatus

from homeassistant.components.sensor import (

@@ -108,28 +113,28 @@ def _retrieve_link_noise_margin_sent_state(
    status: FritzStatus, last_value: str
) -> float:
    """Return upload noise margin."""
    return status.noise_margin[0]  # type: ignore[no-any-return]
    return status.noise_margin[0] / 10  # type: ignore[no-any-return]


def _retrieve_link_noise_margin_received_state(
    status: FritzStatus, last_value: str
) -> float:
    """Return download noise margin."""
    return status.noise_margin[1]  # type: ignore[no-any-return]
    return status.noise_margin[1] / 10  # type: ignore[no-any-return]


def _retrieve_link_attenuation_sent_state(
    status: FritzStatus, last_value: str
) -> float:
    """Return upload line attenuation."""
    return status.attenuation[0]  # type: ignore[no-any-return]
    return status.attenuation[0] / 10  # type: ignore[no-any-return]


def _retrieve_link_attenuation_received_state(
    status: FritzStatus, last_value: str
) -> float:
    """Return download line attenuation."""
    return status.attenuation[1]  # type: ignore[no-any-return]
    return status.attenuation[1] / 10  # type: ignore[no-any-return]


class SensorData(TypedDict, total=False):

@@ -260,12 +265,16 @@ async def async_setup_entry(
        return

    entities = []
    dslinterface = await hass.async_add_executor_job(
        fritzbox_tools.connection.call_action,
        "WANDSLInterfaceConfig:1",
        "GetInfo",
    )
    dsl: bool = dslinterface["NewEnable"]
    dsl: bool = False
    try:
        dslinterface = await hass.async_add_executor_job(
            fritzbox_tools.connection.call_action,
            "WANDSLInterfaceConfig:1",
            "GetInfo",
        )
        dsl = dslinterface["NewEnable"]
    except (FritzActionError, FritzActionFailedError, FritzServiceError):
        pass

    for sensor_type, sensor_data in SENSOR_DATA.items():
        if not dsl and sensor_data.get("connection_type") == DSL_CONNECTION:

@@ -3,7 +3,7 @@
  "name": "Home Assistant Frontend",
  "documentation": "https://www.home-assistant.io/integrations/frontend",
  "requirements": [
    "home-assistant-frontend==20210825.0"
    "home-assistant-frontend==20210830.0"
  ],
  "dependencies": [
    "api",

@@ -133,6 +133,7 @@ DOMAIN_TO_GOOGLE_TYPES = {
    media_player.DOMAIN: TYPE_SETTOP,
    scene.DOMAIN: TYPE_SCENE,
    script.DOMAIN: TYPE_SCENE,
    sensor.DOMAIN: TYPE_SENSOR,
    select.DOMAIN: TYPE_SENSOR,
    switch.DOMAIN: TYPE_SWITCH,
    vacuum.DOMAIN: TYPE_VACUUM,

@@ -108,6 +108,7 @@ TRAIT_MEDIA_STATE = f"{PREFIX_TRAITS}MediaState"
TRAIT_CHANNEL = f"{PREFIX_TRAITS}Channel"
TRAIT_LOCATOR = f"{PREFIX_TRAITS}Locator"
TRAIT_ENERGYSTORAGE = f"{PREFIX_TRAITS}EnergyStorage"
TRAIT_SENSOR_STATE = f"{PREFIX_TRAITS}SensorState"

PREFIX_COMMANDS = "action.devices.commands."
COMMAND_ONOFF = f"{PREFIX_COMMANDS}OnOff"

@@ -2286,3 +2287,61 @@ class ChannelTrait(_Trait):
            blocking=True,
            context=data.context,
        )


@register_trait
class SensorStateTrait(_Trait):
    """Trait to get sensor state.

    https://developers.google.com/actions/smarthome/traits/sensorstate
    """

    sensor_types = {
        sensor.DEVICE_CLASS_AQI: ("AirQuality", "AQI"),
        sensor.DEVICE_CLASS_CO: ("CarbonDioxideLevel", "PARTS_PER_MILLION"),
        sensor.DEVICE_CLASS_CO2: ("CarbonMonoxideLevel", "PARTS_PER_MILLION"),
        sensor.DEVICE_CLASS_PM25: ("PM2.5", "MICROGRAMS_PER_CUBIC_METER"),
        sensor.DEVICE_CLASS_PM10: ("PM10", "MICROGRAMS_PER_CUBIC_METER"),
        sensor.DEVICE_CLASS_VOLATILE_ORGANIC_COMPOUNDS: (
            "VolatileOrganicCompounds",
            "PARTS_PER_MILLION",
        ),
    }

    name = TRAIT_SENSOR_STATE
    commands = []

    @staticmethod
    def supported(domain, features, device_class, _):
        """Test if state is supported."""
        return domain == sensor.DOMAIN and device_class in (
            sensor.DEVICE_CLASS_AQI,
            sensor.DEVICE_CLASS_CO,
            sensor.DEVICE_CLASS_CO2,
            sensor.DEVICE_CLASS_PM25,
            sensor.DEVICE_CLASS_PM10,
            sensor.DEVICE_CLASS_VOLATILE_ORGANIC_COMPOUNDS,
        )

    def sync_attributes(self):
        """Return attributes for a sync request."""
        device_class = self.state.attributes.get(ATTR_DEVICE_CLASS)
        data = self.sensor_types.get(device_class)
        if data is not None:
            return {
                "sensorStatesSupported": {
                    "name": data[0],
                    "numericCapabilities": {"rawValueUnit": data[1]},
                }
            }

    def query_attributes(self):
        """Return the attributes of this trait for this entity."""
        device_class = self.state.attributes.get(ATTR_DEVICE_CLASS)
        data = self.sensor_types.get(device_class)
        if data is not None:
            return {
                "currentSensorStateData": [
                    {"name": data[0], "rawValue": self.state.state}
                ]
            }
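The new `SensorStateTrait` above maps a sensor's device class onto Google's `sensorStatesSupported` / `currentSensorStateData` payloads. A rough, dependency-free illustration of the payload shapes it produces; the device classes and the state value below are made up, and the exact JSON Google accepts is defined by the SensorState trait documentation linked in the diff:

```python
SENSOR_TYPES = {
    "pm25": ("PM2.5", "MICROGRAMS_PER_CUBIC_METER"),
    "pm10": ("PM10", "MICROGRAMS_PER_CUBIC_METER"),
}


def sync_attributes(device_class: str) -> dict | None:
    """Mirror of SensorStateTrait.sync_attributes for a bare device class."""
    if (data := SENSOR_TYPES.get(device_class)) is None:
        return None
    return {
        "sensorStatesSupported": {
            "name": data[0],
            "numericCapabilities": {"rawValueUnit": data[1]},
        }
    }


def query_attributes(device_class: str, state: str) -> dict | None:
    """Mirror of SensorStateTrait.query_attributes for a bare state value."""
    if (data := SENSOR_TYPES.get(device_class)) is None:
        return None
    return {"currentSensorStateData": [{"name": data[0], "rawValue": state}]}


# Example: a PM2.5 sensor reporting 12 µg/m³.
print(sync_attributes("pm25"))
print(query_attributes("pm25", "12"))
```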
@@ -7,7 +7,7 @@ DEFAULT_NAME = "Growatt"

SERVER_URLS = [
    "https://server.growatt.com/",
    "https://server-us.growatt.com",
    "https://server-us.growatt.com/",
    "http://server.smten.com/",
]

@@ -10,6 +10,7 @@ import aiohttp
from aiohttp import web
from aiohttp.client import ClientTimeout
from aiohttp.hdrs import (
    CACHE_CONTROL,
    CONTENT_ENCODING,
    CONTENT_LENGTH,
    CONTENT_TYPE,

@@ -51,6 +52,8 @@ NO_AUTH = re.compile(
    r"^(?:" r"|app/.*" r"|addons/[^/]+/logo" r"|addons/[^/]+/icon" r")$"
)

NO_STORE = re.compile(r"^(?:" r"|app/entrypoint.js" r")$")


class HassIOView(HomeAssistantView):
    """Hass.io view to handle base part."""

@@ -104,7 +107,7 @@ class HassIOView(HomeAssistantView):

        # Stream response
        response = web.StreamResponse(
            status=client.status, headers=_response_header(client)
            status=client.status, headers=_response_header(client, path)
        )
        response.content_type = client.content_type

@@ -139,7 +142,7 @@ def _init_header(request: web.Request) -> dict[str, str]:
    return headers


def _response_header(response: aiohttp.ClientResponse) -> dict[str, str]:
def _response_header(response: aiohttp.ClientResponse, path: str) -> dict[str, str]:
    """Create response header."""
    headers = {}

@@ -153,6 +156,9 @@ def _response_header(response: aiohttp.ClientResponse) -> dict[str, str]:
            continue
        headers[name] = value

    if NO_STORE.match(path):
        headers[CACHE_CONTROL] = "no-store, max-age=0"

    return headers
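The ingress change above adds a `NO_STORE` pattern and stamps `Cache-Control: no-store, max-age=0` onto matching response paths (currently only `app/entrypoint.js`). A small sketch of how that regex and header assignment behave in isolation; the literal header name stands in for aiohttp's `CACHE_CONTROL` constant:

```python
import re

# Same pattern as in the diff: matches the empty path or "app/entrypoint.js".
NO_STORE = re.compile(r"^(?:" r"|app/entrypoint.js" r")$")


def response_headers(path: str) -> dict[str, str]:
    """Return only the caching header this change is responsible for."""
    headers: dict[str, str] = {}
    if NO_STORE.match(path):
        headers["Cache-Control"] = "no-store, max-age=0"
    return headers


assert response_headers("app/entrypoint.js") == {"Cache-Control": "no-store, max-age=0"}
assert response_headers("app/some-chunk.js") == {}
```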
@@ -498,7 +498,10 @@ async def _async_get_supported_devices(hass):
    """Return all supported devices."""
    results = await device_automation.async_get_device_automations(hass, "trigger")
    dev_reg = device_registry.async_get(hass)
    unsorted = {device_id: dev_reg.async_get(device_id).name for device_id in results}
    unsorted = {
        device_id: dev_reg.async_get(device_id).name or device_id
        for device_id in results
    }
    return dict(sorted(unsorted.items(), key=lambda item: item[1]))

@@ -282,12 +282,14 @@ class HueLight(CoordinatorEntity, LightEntity):
            self.is_osram = False
            self.is_philips = False
            self.is_innr = False
            self.is_livarno = False
            self.gamut_typ = GAMUT_TYPE_UNAVAILABLE
            self.gamut = None
        else:
            self.is_osram = light.manufacturername == "OSRAM"
            self.is_philips = light.manufacturername == "Philips"
            self.is_innr = light.manufacturername == "innr"
            self.is_livarno = light.manufacturername.startswith("_TZ3000_")
            self.gamut_typ = self.light.colorgamuttype
            self.gamut = self.light.colorgamut
            _LOGGER.debug("Color gamut of %s: %s", self.name, str(self.gamut))

@@ -383,6 +385,8 @@ class HueLight(CoordinatorEntity, LightEntity):
        """Return the warmest color_temp that this light supports."""
        if self.is_group:
            return super().max_mireds
        if self.is_livarno:
            return 500

        max_mireds = self.light.controlcapabilities.get("ct", {}).get("max")

@@ -493,7 +497,7 @@ class HueLight(CoordinatorEntity, LightEntity):
            elif flash == FLASH_SHORT:
                command["alert"] = "select"
                del command["on"]
            elif not self.is_innr:
            elif not self.is_innr and not self.is_livarno:
                command["alert"] = "none"

        if ATTR_EFFECT in kwargs:

@@ -532,7 +536,7 @@ class HueLight(CoordinatorEntity, LightEntity):
            elif flash == FLASH_SHORT:
                command["alert"] = "select"
                del command["on"]
            elif not self.is_innr:
            elif not self.is_innr and not self.is_livarno:
                command["alert"] = "none"

        if self.is_group:

@@ -177,8 +177,6 @@ class PowerViewShade(ShadeEntity, CoverEntity):
        """Move the shade to a position."""
        current_hass_position = hd_position_to_hass(self._current_cover_position)
        steps_to_move = abs(current_hass_position - target_hass_position)
        if not steps_to_move:
            return
        self._async_schedule_update_for_transition(steps_to_move)
        self._async_update_from_command(
            await self._shade.move(

homeassistant/components/iotawatt/__init__.py (new file, 24 lines)
@@ -0,0 +1,24 @@
"""The iotawatt integration."""
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant

from .const import DOMAIN
from .coordinator import IotawattUpdater

PLATFORMS = ("sensor",)


async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    """Set up iotawatt from a config entry."""
    coordinator = IotawattUpdater(hass, entry)
    await coordinator.async_config_entry_first_refresh()
    hass.data.setdefault(DOMAIN, {})[entry.entry_id] = coordinator
    hass.config_entries.async_setup_platforms(entry, PLATFORMS)
    return True


async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    """Unload a config entry."""
    if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS):
        hass.data[DOMAIN].pop(entry.entry_id)
    return unload_ok

homeassistant/components/iotawatt/config_flow.py (new file, 107 lines)
@@ -0,0 +1,107 @@
"""Config flow for iotawatt integration."""
from __future__ import annotations

import logging

from iotawattpy.iotawatt import Iotawatt
import voluptuous as vol

from homeassistant import config_entries, core, exceptions
from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME
from homeassistant.helpers import httpx_client

from .const import CONNECTION_ERRORS, DOMAIN

_LOGGER = logging.getLogger(__name__)


async def validate_input(
    hass: core.HomeAssistant, data: dict[str, str]
) -> dict[str, str]:
    """Validate the user input allows us to connect."""
    iotawatt = Iotawatt(
        "",
        data[CONF_HOST],
        httpx_client.get_async_client(hass),
        data.get(CONF_USERNAME),
        data.get(CONF_PASSWORD),
    )
    try:
        is_connected = await iotawatt.connect()
    except CONNECTION_ERRORS:
        return {"base": "cannot_connect"}
    except Exception:  # pylint: disable=broad-except
        _LOGGER.exception("Unexpected exception")
        return {"base": "unknown"}

    if not is_connected:
        return {"base": "invalid_auth"}

    return {}


class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
    """Handle a config flow for iotawatt."""

    VERSION = 1

    def __init__(self):
        """Initialize."""
        self._data = {}

    async def async_step_user(self, user_input=None):
        """Handle the initial step."""
        if user_input is None:
            user_input = {}

        schema = vol.Schema(
            {
                vol.Required(CONF_HOST, default=user_input.get(CONF_HOST, "")): str,
            }
        )
        if not user_input:
            return self.async_show_form(step_id="user", data_schema=schema)

        if not (errors := await validate_input(self.hass, user_input)):
            return self.async_create_entry(title=user_input[CONF_HOST], data=user_input)

        if errors == {"base": "invalid_auth"}:
            self._data.update(user_input)
            return await self.async_step_auth()

        return self.async_show_form(step_id="user", data_schema=schema, errors=errors)

    async def async_step_auth(self, user_input=None):
        """Authenticate user if authentication is enabled on the IoTaWatt device."""
        if user_input is None:
            user_input = {}

        data_schema = vol.Schema(
            {
                vol.Required(
                    CONF_USERNAME, default=user_input.get(CONF_USERNAME, "")
                ): str,
                vol.Required(
                    CONF_PASSWORD, default=user_input.get(CONF_PASSWORD, "")
                ): str,
            }
        )
        if not user_input:
            return self.async_show_form(step_id="auth", data_schema=data_schema)

        data = {**self._data, **user_input}

        if errors := await validate_input(self.hass, data):
            return self.async_show_form(
                step_id="auth", data_schema=data_schema, errors=errors
            )

        return self.async_create_entry(title=data[CONF_HOST], data=data)


class CannotConnect(exceptions.HomeAssistantError):
    """Error to indicate we cannot connect."""


class InvalidAuth(exceptions.HomeAssistantError):
    """Error to indicate there is invalid auth."""
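`validate_input()` above probes the IoTaWatt device with `Iotawatt.connect()` and maps failures onto the config-flow error keys. A standalone sketch of the same probe outside Home Assistant, assuming only the `iotawattpy` calls and error tuple shown in the diff; the host below is a placeholder:

```python
from __future__ import annotations

import asyncio
import json

import httpx
from iotawattpy.iotawatt import Iotawatt

CONNECTION_ERRORS = (KeyError, json.JSONDecodeError, httpx.HTTPError)


async def probe(host: str, username: str | None = None, password: str | None = None) -> str:
    """Return the same error keys the config flow would surface."""
    async with httpx.AsyncClient() as client:
        device = Iotawatt("", host, client, username, password)
        try:
            connected = await device.connect()
        except CONNECTION_ERRORS:
            return "cannot_connect"
        except Exception:  # mirror the flow's deliberately broad catch
            return "unknown"
    return "ok" if connected else "invalid_auth"


if __name__ == "__main__":
    # Placeholder address; replace with a reachable IoTaWatt device.
    print(asyncio.run(probe("192.168.1.50")))
```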
homeassistant/components/iotawatt/const.py (new file, 12 lines)
@@ -0,0 +1,12 @@
"""Constants for the IoTaWatt integration."""
from __future__ import annotations

import json

import httpx

DOMAIN = "iotawatt"
VOLT_AMPERE_REACTIVE = "VAR"
VOLT_AMPERE_REACTIVE_HOURS = "VARh"

CONNECTION_ERRORS = (KeyError, json.JSONDecodeError, httpx.HTTPError)

homeassistant/components/iotawatt/coordinator.py (new file, 56 lines)
@@ -0,0 +1,56 @@
"""IoTaWatt DataUpdateCoordinator."""
from __future__ import annotations

from datetime import timedelta
import logging

from iotawattpy.iotawatt import Iotawatt

from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME
from homeassistant.core import HomeAssistant
from homeassistant.helpers import httpx_client
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed

from .const import CONNECTION_ERRORS

_LOGGER = logging.getLogger(__name__)


class IotawattUpdater(DataUpdateCoordinator):
    """Class to manage fetching update data from the IoTaWatt Energy Device."""

    api: Iotawatt | None = None

    def __init__(self, hass: HomeAssistant, entry: ConfigEntry) -> None:
        """Initialize IotaWattUpdater object."""
        self.entry = entry
        super().__init__(
            hass=hass,
            logger=_LOGGER,
            name=entry.title,
            update_interval=timedelta(seconds=30),
        )

    async def _async_update_data(self):
        """Fetch sensors from IoTaWatt device."""
        if self.api is None:
            api = Iotawatt(
                self.entry.title,
                self.entry.data[CONF_HOST],
                httpx_client.get_async_client(self.hass),
                self.entry.data.get(CONF_USERNAME),
                self.entry.data.get(CONF_PASSWORD),
            )
            try:
                is_authenticated = await api.connect()
            except CONNECTION_ERRORS as err:
                raise UpdateFailed("Connection failed") from err

            if not is_authenticated:
                raise UpdateFailed("Authentication error")

            self.api = api

        await self.api.update()
        return self.api.getSensors()

homeassistant/components/iotawatt/manifest.json (new file, 13 lines)
@@ -0,0 +1,13 @@
{
  "domain": "iotawatt",
  "name": "IoTaWatt",
  "config_flow": true,
  "documentation": "https://www.home-assistant.io/integrations/iotawatt",
  "requirements": [
    "iotawattpy==0.0.8"
  ],
  "codeowners": [
    "@gtdiehl"
  ],
  "iot_class": "local_polling"
}

homeassistant/components/iotawatt/sensor.py (new file, 218 lines)
@@ -0,0 +1,218 @@
"""Support for IoTaWatt Energy monitor."""
from __future__ import annotations

from dataclasses import dataclass
from typing import Callable

from iotawattpy.sensor import Sensor

from homeassistant.components.sensor import (
    STATE_CLASS_MEASUREMENT,
    SensorEntity,
    SensorEntityDescription,
)
from homeassistant.const import (
    DEVICE_CLASS_CURRENT,
    DEVICE_CLASS_ENERGY,
    DEVICE_CLASS_POWER,
    DEVICE_CLASS_POWER_FACTOR,
    DEVICE_CLASS_VOLTAGE,
    ELECTRIC_CURRENT_AMPERE,
    ELECTRIC_POTENTIAL_VOLT,
    ENERGY_WATT_HOUR,
    FREQUENCY_HERTZ,
    PERCENTAGE,
    POWER_VOLT_AMPERE,
    POWER_WATT,
)
from homeassistant.core import callback
from homeassistant.helpers import entity, entity_registry, update_coordinator
from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC

from .const import DOMAIN, VOLT_AMPERE_REACTIVE, VOLT_AMPERE_REACTIVE_HOURS
from .coordinator import IotawattUpdater


@dataclass
class IotaWattSensorEntityDescription(SensorEntityDescription):
    """Class describing IotaWatt sensor entities."""

    value: Callable | None = None


ENTITY_DESCRIPTION_KEY_MAP: dict[str, IotaWattSensorEntityDescription] = {
    "Amps": IotaWattSensorEntityDescription(
        "Amps",
        native_unit_of_measurement=ELECTRIC_CURRENT_AMPERE,
        state_class=STATE_CLASS_MEASUREMENT,
        device_class=DEVICE_CLASS_CURRENT,
        entity_registry_enabled_default=False,
    ),
    "Hz": IotaWattSensorEntityDescription(
        "Hz",
        native_unit_of_measurement=FREQUENCY_HERTZ,
        state_class=STATE_CLASS_MEASUREMENT,
        icon="mdi:flash",
        entity_registry_enabled_default=False,
    ),
    "PF": IotaWattSensorEntityDescription(
        "PF",
        native_unit_of_measurement=PERCENTAGE,
        state_class=STATE_CLASS_MEASUREMENT,
        device_class=DEVICE_CLASS_POWER_FACTOR,
        value=lambda value: value * 100,
        entity_registry_enabled_default=False,
    ),
    "Watts": IotaWattSensorEntityDescription(
        "Watts",
        native_unit_of_measurement=POWER_WATT,
        state_class=STATE_CLASS_MEASUREMENT,
        device_class=DEVICE_CLASS_POWER,
    ),
    "WattHours": IotaWattSensorEntityDescription(
        "WattHours",
        native_unit_of_measurement=ENERGY_WATT_HOUR,
        device_class=DEVICE_CLASS_ENERGY,
    ),
    "VA": IotaWattSensorEntityDescription(
        "VA",
        native_unit_of_measurement=POWER_VOLT_AMPERE,
        state_class=STATE_CLASS_MEASUREMENT,
        icon="mdi:flash",
        entity_registry_enabled_default=False,
    ),
    "VAR": IotaWattSensorEntityDescription(
        "VAR",
        native_unit_of_measurement=VOLT_AMPERE_REACTIVE,
        state_class=STATE_CLASS_MEASUREMENT,
        icon="mdi:flash",
        entity_registry_enabled_default=False,
    ),
    "VARh": IotaWattSensorEntityDescription(
        "VARh",
        native_unit_of_measurement=VOLT_AMPERE_REACTIVE_HOURS,
        state_class=STATE_CLASS_MEASUREMENT,
        icon="mdi:flash",
        entity_registry_enabled_default=False,
    ),
    "Volts": IotaWattSensorEntityDescription(
        "Volts",
        native_unit_of_measurement=ELECTRIC_POTENTIAL_VOLT,
        state_class=STATE_CLASS_MEASUREMENT,
        device_class=DEVICE_CLASS_VOLTAGE,
        entity_registry_enabled_default=False,
    ),
}


async def async_setup_entry(hass, config_entry, async_add_entities):
    """Add sensors for passed config_entry in HA."""
    coordinator: IotawattUpdater = hass.data[DOMAIN][config_entry.entry_id]
    created = set()

    @callback
    def _create_entity(key: str) -> IotaWattSensor:
        """Create a sensor entity."""
        created.add(key)
        return IotaWattSensor(
            coordinator=coordinator,
            key=key,
            mac_address=coordinator.data["sensors"][key].hub_mac_address,
            name=coordinator.data["sensors"][key].getName(),
            entity_description=ENTITY_DESCRIPTION_KEY_MAP.get(
                coordinator.data["sensors"][key].getUnit(),
                IotaWattSensorEntityDescription("base_sensor"),
            ),
        )

    async_add_entities(_create_entity(key) for key in coordinator.data["sensors"])

    @callback
    def new_data_received():
        """Check for new sensors."""
        entities = [
            _create_entity(key)
            for key in coordinator.data["sensors"]
            if key not in created
        ]
        if entities:
            async_add_entities(entities)

    coordinator.async_add_listener(new_data_received)


class IotaWattSensor(update_coordinator.CoordinatorEntity, SensorEntity):
    """Defines a IoTaWatt Energy Sensor."""

    entity_description: IotaWattSensorEntityDescription
    _attr_force_update = True

    def __init__(
        self,
        coordinator,
        key,
        mac_address,
        name,
        entity_description: IotaWattSensorEntityDescription,
    ):
        """Initialize the sensor."""
        super().__init__(coordinator=coordinator)

        self._key = key
        data = self._sensor_data
        if data.getType() == "Input":
            self._attr_unique_id = (
                f"{data.hub_mac_address}-input-{data.getChannel()}-{data.getUnit()}"
            )
        self.entity_description = entity_description

    @property
    def _sensor_data(self) -> Sensor:
        """Return sensor data."""
        return self.coordinator.data["sensors"][self._key]

    @property
    def name(self) -> str | None:
        """Return name of the entity."""
        return self._sensor_data.getName()

    @property
    def device_info(self) -> entity.DeviceInfo | None:
        """Return device info."""
        return {
            "connections": {
                (CONNECTION_NETWORK_MAC, self._sensor_data.hub_mac_address)
            },
            "manufacturer": "IoTaWatt",
            "model": "IoTaWatt",
        }

    @callback
    def _handle_coordinator_update(self) -> None:
        """Handle updated data from the coordinator."""
        if self._key not in self.coordinator.data["sensors"]:
            if self._attr_unique_id:
                entity_registry.async_get(self.hass).async_remove(self.entity_id)
            else:
                self.hass.async_create_task(self.async_remove())
            return

        super()._handle_coordinator_update()

    @property
    def extra_state_attributes(self):
        """Return the extra state attributes of the entity."""
        data = self._sensor_data
        attrs = {"type": data.getType()}
        if attrs["type"] == "Input":
            attrs["channel"] = data.getChannel()

        return attrs

    @property
    def native_value(self) -> entity.StateType:
        """Return the state of the sensor."""
        if func := self.entity_description.value:
            return func(self._sensor_data.getValue())

        return self._sensor_data.getValue()
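`ENTITY_DESCRIPTION_KEY_MAP` above keys the sensor descriptions by the unit string the device reports, falls back to `IotaWattSensorEntityDescription("base_sensor")` for unknown units, and uses an optional `value` callable to transform readings (power factor becomes a percentage). A dependency-free sketch of the same lookup-plus-transform pattern; the `Description` class below is a stand-in, not Home Assistant's `SensorEntityDescription`:

```python
from __future__ import annotations

from dataclasses import dataclass
from typing import Callable


@dataclass
class Description:
    """Stand-in for IotaWattSensorEntityDescription."""

    key: str
    unit: str | None = None
    value: Callable[[float], float] | None = None


DESCRIPTIONS = {
    "Watts": Description("Watts", unit="W"),
    "PF": Description("PF", unit="%", value=lambda v: v * 100),
}
FALLBACK = Description("base_sensor")


def native_value(unit: str, raw: float) -> float:
    """Apply the per-unit transform, defaulting to the raw reading."""
    description = DESCRIPTIONS.get(unit, FALLBACK)
    if description.value:
        return description.value(raw)
    return raw


assert native_value("PF", 0.5) == 50.0
assert native_value("Watts", 230.0) == 230.0
assert native_value("SomethingNew", 5.0) == 5.0  # falls back to base_sensor
```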
homeassistant/components/iotawatt/strings.json (new file, 23 lines)
@@ -0,0 +1,23 @@
{
  "config": {
    "step": {
      "user": {
        "data": {
          "host": "[%key:common::config_flow::data::host%]"
        }
      },
      "auth": {
        "data": {
          "username": "[%key:common::config_flow::data::username%]",
          "password": "[%key:common::config_flow::data::password%]"
        },
        "description": "The IoTawatt device requires authentication. Please enter the username and password and click the Submit button."
      }
    },
    "error": {
      "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
      "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]",
      "unknown": "[%key:common::config_flow::error::unknown%]"
    }
  }
}

homeassistant/components/iotawatt/translations/en.json (new file, 24 lines)
@@ -0,0 +1,24 @@
{
  "config": {
    "error": {
      "cannot_connect": "Failed to connect",
      "invalid_auth": "Invalid authentication",
      "unknown": "Unexpected error"
    },
    "step": {
      "auth": {
        "data": {
          "password": "Password",
          "username": "Username"
        },
        "description": "The IoTawatt device requires authentication. Please enter the username and password and click the Submit button."
      },
      "user": {
        "data": {
          "host": "Host"
        }
      }
    }
  },
  "title": "iotawatt"
}

@@ -3,7 +3,7 @@
  "name": "IQVIA",
  "config_flow": true,
  "documentation": "https://www.home-assistant.io/integrations/iqvia",
  "requirements": ["numpy==1.21.1", "pyiqvia==1.0.0"],
  "requirements": ["numpy==1.21.1", "pyiqvia==1.1.0"],
  "codeowners": ["@bachya"],
  "iot_class": "cloud_polling"
}

@@ -470,7 +470,7 @@ class LIFXLight(LightEntity):

        model = product_map.get(self.bulb.product) or self.bulb.product
        if model is not None:
            info["model"] = model
            info["model"] = str(model)

        return info

@@ -3,7 +3,7 @@
  "name": "LIFX",
  "config_flow": true,
  "documentation": "https://www.home-assistant.io/integrations/lifx",
  "requirements": ["aiolifx==0.6.9", "aiolifx_effects==0.2.2"],
  "requirements": ["aiolifx==0.6.10", "aiolifx_effects==0.2.2"],
  "homekit": {
    "models": ["LIFX"]
  },

@@ -445,7 +445,11 @@ async def async_setup(hass, config):  # noqa: C901
        )

        # If both white and brightness are specified, override white
        if ATTR_WHITE in params and COLOR_MODE_WHITE in supported_color_modes:
        if (
            supported_color_modes
            and ATTR_WHITE in params
            and COLOR_MODE_WHITE in supported_color_modes
        ):
            params[ATTR_WHITE] = params.pop(ATTR_BRIGHTNESS, params[ATTR_WHITE])

        # Remove deprecated white value if the light supports color mode

@@ -3,7 +3,7 @@
  "name": "Litter-Robot",
  "config_flow": true,
  "documentation": "https://www.home-assistant.io/integrations/litterrobot",
  "requirements": ["pylitterbot==2021.8.0"],
  "requirements": ["pylitterbot==2021.8.1"],
  "codeowners": ["@natekspencer"],
  "iot_class": "cloud_polling"
}

@@ -243,7 +243,7 @@ class ModbusHub:
            self._msg_wait = 0

    def _log_error(self, text: str, error_state=True):
        log_text = f"Pymodbus: {text}"
        log_text = f"Pymodbus: {self.name}: {text}"
        if self._in_error:
            _LOGGER.debug(log_text)
        else:

@@ -95,8 +95,7 @@ class FlowHandler(config_entries.ConfigFlow, domain=DOMAIN):

    async def async_step_hassio(self, discovery_info):
        """Receive a Hass.io discovery."""
        if self._async_current_entries():
            return self.async_abort(reason="single_instance_allowed")
        await self._async_handle_discovery_without_unique_id()

        self._hassio_discovery = discovery_info

@@ -53,10 +53,34 @@ MQTT_SENSOR_ATTRIBUTES_BLOCKED = frozenset(

DEFAULT_NAME = "MQTT Sensor"
DEFAULT_FORCE_UPDATE = False


def validate_options(conf):
    """Validate options.

    If last reset topic is present it must be same as the state topic.
    """
    if (
        CONF_LAST_RESET_TOPIC in conf
        and CONF_STATE_TOPIC in conf
        and conf[CONF_LAST_RESET_TOPIC] != conf[CONF_STATE_TOPIC]
    ):
        _LOGGER.warning(
            "'%s' must be same as '%s'", CONF_LAST_RESET_TOPIC, CONF_STATE_TOPIC
        )

    if CONF_LAST_RESET_TOPIC in conf and CONF_LAST_RESET_VALUE_TEMPLATE not in conf:
        _LOGGER.warning(
            "'%s' must be set if '%s' is set",
            CONF_LAST_RESET_VALUE_TEMPLATE,
            CONF_LAST_RESET_TOPIC,
        )

    return conf


PLATFORM_SCHEMA = vol.All(
    # Deprecated, remove in Home Assistant 2021.11
    cv.deprecated(CONF_LAST_RESET_TOPIC),
    cv.deprecated(CONF_LAST_RESET_VALUE_TEMPLATE),
    mqtt.MQTT_RO_PLATFORM_SCHEMA.extend(
        {
            vol.Optional(CONF_DEVICE_CLASS): DEVICE_CLASSES_SCHEMA,

@@ -69,6 +93,7 @@ PLATFORM_SCHEMA = vol.All(
            vol.Optional(CONF_UNIT_OF_MEASUREMENT): cv.string,
        }
    ).extend(MQTT_ENTITY_COMMON_SCHEMA.schema),
    validate_options,
)

@@ -132,10 +157,7 @@ class MqttSensor(MqttEntity, SensorEntity):
        """(Re)Subscribe to topics."""
        topics = {}

        @callback
        @log_messages(self.hass, self.entity_id)
        def message_received(msg):
            """Handle new MQTT messages."""
        def _update_state(msg):
            payload = msg.payload
            # auto-expire enabled?
            expire_after = self._config.get(CONF_EXPIRE_AFTER)

@@ -164,18 +186,8 @@ class MqttSensor(MqttEntity, SensorEntity):
                    variables=variables,
                )
            self._state = payload
            self.async_write_ha_state()

        topics["state_topic"] = {
            "topic": self._config[CONF_STATE_TOPIC],
            "msg_callback": message_received,
            "qos": self._config[CONF_QOS],
        }

        @callback
        @log_messages(self.hass, self.entity_id)
        def last_reset_message_received(msg):
            """Handle new last_reset messages."""
        def _update_last_reset(msg):
            payload = msg.payload

            template = self._config.get(CONF_LAST_RESET_VALUE_TEMPLATE)

@@ -198,9 +210,36 @@ class MqttSensor(MqttEntity, SensorEntity):
                _LOGGER.warning(
                    "Invalid last_reset message '%s' from '%s'", msg.payload, msg.topic
                )

        @callback
        @log_messages(self.hass, self.entity_id)
        def message_received(msg):
            """Handle new MQTT messages."""
            _update_state(msg)
            if CONF_LAST_RESET_VALUE_TEMPLATE in self._config and (
                CONF_LAST_RESET_TOPIC not in self._config
                or self._config[CONF_LAST_RESET_TOPIC] == self._config[CONF_STATE_TOPIC]
            ):
                _update_last_reset(msg)
            self.async_write_ha_state()

        if CONF_LAST_RESET_TOPIC in self._config:
        topics["state_topic"] = {
            "topic": self._config[CONF_STATE_TOPIC],
            "msg_callback": message_received,
            "qos": self._config[CONF_QOS],
        }

        @callback
        @log_messages(self.hass, self.entity_id)
        def last_reset_message_received(msg):
            """Handle new last_reset messages."""
            _update_last_reset(msg)
            self.async_write_ha_state()

        if (
            CONF_LAST_RESET_TOPIC in self._config
            and self._config[CONF_LAST_RESET_TOPIC] != self._config[CONF_STATE_TOPIC]
        ):
            topics["last_reset_topic"] = {
                "topic": self._config[CONF_LAST_RESET_TOPIC],
                "msg_callback": last_reset_message_received,
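The MQTT sensor change above parses `last_reset` from the state topic whenever `last_reset_value_template` is configured and no separate (or an identical) `last_reset_topic` is set; a distinct topic still gets its own subscription. A minimal sketch of that routing decision as a pure function; the plain-string keys mirror the option names in the diff, and the template value is a placeholder:

```python
def last_reset_routing(config: dict) -> str:
    """Classify how last_reset is obtained for a given sensor config."""
    has_template = "last_reset_value_template" in config
    topic = config.get("last_reset_topic")
    state_topic = config.get("state_topic")

    if has_template and (topic is None or topic == state_topic):
        return "parsed from state topic"
    if topic is not None and topic != state_topic:
        return "separate last_reset subscription"
    return "no last_reset handling"


assert last_reset_routing(
    {"state_topic": "meter/energy", "last_reset_value_template": "{{ ... }}"}
) == "parsed from state topic"
assert last_reset_routing(
    {"state_topic": "meter/energy", "last_reset_topic": "meter/reset"}
) == "separate last_reset subscription"
```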
@@ -20,6 +20,7 @@
      }
    },
    "abort": {
      "already_configured": "[%key:common::config_flow::abort::already_configured_service%]",
      "single_instance_allowed": "[%key:common::config_flow::abort::single_instance_allowed%]"
    },
    "error": {

@@ -1,6 +1,7 @@
{
  "config": {
    "abort": {
      "already_configured": "Service is already configured",
      "single_instance_allowed": "Already configured. Only a single configuration possible."
    },
    "error": {

@@ -14,7 +14,11 @@ from getmac import get_mac_address
from mac_vendor_lookup import AsyncMacLookup
from nmap import PortScanner, PortScannerError

from homeassistant.components.device_tracker.const import CONF_SCAN_INTERVAL
from homeassistant.components.device_tracker.const import (
    CONF_CONSIDER_HOME,
    CONF_SCAN_INTERVAL,
    DEFAULT_CONSIDER_HOME,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_EXCLUDE, CONF_HOSTS, EVENT_HOMEASSISTANT_STARTED
from homeassistant.core import CoreState, HomeAssistant, callback

@@ -37,7 +41,6 @@ from .const import (
# Some version of nmap will fail with 'Assertion failed: htn.toclock_running == true (Target.cc: stopTimeOutClock: 503)\n'
NMAP_TRANSIENT_FAILURE: Final = "Assertion failed: htn.toclock_running == true"
MAX_SCAN_ATTEMPTS: Final = 16
OFFLINE_SCANS_TO_MARK_UNAVAILABLE: Final = 3


def short_hostname(hostname: str) -> str:

@@ -65,7 +68,7 @@ class NmapDevice:
    manufacturer: str
    reason: str
    last_update: datetime
    offline_scans: int
    first_offline: datetime | None


class NmapTrackedDevices:

@@ -137,6 +140,7 @@ class NmapDeviceScanner:
        """Initialize the scanner."""
        self.devices = devices
        self.home_interval = None
        self.consider_home = DEFAULT_CONSIDER_HOME

        self._hass = hass
        self._entry = entry

@@ -170,6 +174,10 @@ class NmapDeviceScanner:
        self.home_interval = timedelta(
            minutes=cv.positive_int(config[CONF_HOME_INTERVAL])
        )
        if config.get(CONF_CONSIDER_HOME):
            self.consider_home = timedelta(
                seconds=cv.positive_float(config[CONF_CONSIDER_HOME])
            )
        self._scan_lock = asyncio.Lock()
        if self._hass.state == CoreState.running:
            await self._async_start_scanner()

@@ -320,16 +328,35 @@ class NmapDeviceScanner:
        return result

    @callback
    def _async_increment_device_offline(self, ipv4, reason):
    def _async_device_offline(self, ipv4: str, reason: str, now: datetime) -> None:
        """Mark an IP offline."""
        if not (formatted_mac := self.devices.ipv4_last_mac.get(ipv4)):
            return
        if not (device := self.devices.tracked.get(formatted_mac)):
            # Device was unloaded
            return
        device.offline_scans += 1
        if device.offline_scans < OFFLINE_SCANS_TO_MARK_UNAVAILABLE:
            if not device.first_offline:
                _LOGGER.debug(
                    "Setting first_offline for %s (%s) to: %s", ipv4, formatted_mac, now
                )
                device.first_offline = now
            return
        if device.first_offline + self.consider_home > now:
            _LOGGER.debug(
                "Device %s (%s) has NOT been offline (first offline at: %s) long enough to be considered not home: %s",
                ipv4,
                formatted_mac,
                device.first_offline,
                self.consider_home,
            )
            return
        _LOGGER.debug(
            "Device %s (%s) has been offline (first offline at: %s) long enough to be considered not home: %s",
            ipv4,
            formatted_mac,
            device.first_offline,
            self.consider_home,
        )
        device.reason = reason
        async_dispatcher_send(self._hass, signal_device_update(formatted_mac), False)
        del self.devices.ipv4_last_mac[ipv4]

@@ -347,7 +374,7 @@ class NmapDeviceScanner:
            status = info["status"]
            reason = status["reason"]
            if status["state"] != "up":
                self._async_increment_device_offline(ipv4, reason)
                self._async_device_offline(ipv4, reason, now)
                continue
            # Mac address only returned if nmap ran as root
            mac = info["addresses"].get(

@@ -356,19 +383,11 @@ class NmapDeviceScanner:
                    partial(get_mac_address, ip=ipv4)
                )
            if mac is None:
                self._async_increment_device_offline(ipv4, "No MAC address found")
                self._async_device_offline(ipv4, "No MAC address found", now)
                _LOGGER.info("No MAC address found for %s", ipv4)
                continue

            formatted_mac = format_mac(mac)
            new = formatted_mac not in devices.tracked
            if (
                new
                and formatted_mac not in devices.tracked
                and formatted_mac not in self._known_mac_addresses
            ):
                continue

            if (
                devices.config_entry_owner.setdefault(formatted_mac, entry_id)
                != entry_id

@@ -379,9 +398,10 @@
|
||||
vendor = info.get("vendor", {}).get(mac) or self._async_get_vendor(mac)
|
||||
name = human_readable_name(hostname, vendor, mac)
|
||||
device = NmapDevice(
|
||||
formatted_mac, hostname, name, ipv4, vendor, reason, now, 0
|
||||
formatted_mac, hostname, name, ipv4, vendor, reason, now, None
|
||||
)
|
||||
|
||||
new = formatted_mac not in devices.tracked
|
||||
devices.tracked[formatted_mac] = device
|
||||
devices.ipv4_last_mac[ipv4] = formatted_mac
|
||||
self._last_results.append(device)
|
||||
|
||||
@@ -8,7 +8,11 @@ import voluptuous as vol
|
||||
|
||||
from homeassistant import config_entries
|
||||
from homeassistant.components import network
|
||||
from homeassistant.components.device_tracker.const import CONF_SCAN_INTERVAL
|
||||
from homeassistant.components.device_tracker.const import (
|
||||
CONF_CONSIDER_HOME,
|
||||
CONF_SCAN_INTERVAL,
|
||||
DEFAULT_CONSIDER_HOME,
|
||||
)
|
||||
from homeassistant.components.network.const import MDNS_TARGET_IP
|
||||
from homeassistant.config_entries import ConfigEntry, OptionsFlow
|
||||
from homeassistant.const import CONF_EXCLUDE, CONF_HOSTS
|
||||
@@ -24,6 +28,8 @@ from .const import (
|
||||
TRACKER_SCAN_INTERVAL,
|
||||
)
|
||||
|
||||
MAX_SCAN_INTERVAL = 3600
|
||||
MAX_CONSIDER_HOME = MAX_SCAN_INTERVAL * 6
|
||||
DEFAULT_NETWORK_PREFIX = 24
|
||||
|
||||
|
||||
@@ -116,7 +122,12 @@ async def _async_build_schema_with_user_input(
|
||||
vol.Optional(
|
||||
CONF_SCAN_INTERVAL,
|
||||
default=user_input.get(CONF_SCAN_INTERVAL, TRACKER_SCAN_INTERVAL),
|
||||
): vol.All(vol.Coerce(int), vol.Range(min=10, max=3600)),
|
||||
): vol.All(vol.Coerce(int), vol.Range(min=10, max=MAX_SCAN_INTERVAL)),
|
||||
vol.Optional(
|
||||
CONF_CONSIDER_HOME,
|
||||
default=user_input.get(CONF_CONSIDER_HOME)
|
||||
or DEFAULT_CONSIDER_HOME.total_seconds(),
|
||||
): vol.All(vol.Coerce(int), vol.Range(min=1, max=MAX_CONSIDER_HOME)),
|
||||
}
|
||||
)
|
||||
return vol.Schema(schema)
|
||||
|
||||
@@ -12,7 +12,11 @@ from homeassistant.components.device_tracker import (
|
||||
SOURCE_TYPE_ROUTER,
|
||||
)
|
||||
from homeassistant.components.device_tracker.config_entry import ScannerEntity
|
||||
from homeassistant.components.device_tracker.const import CONF_SCAN_INTERVAL
|
||||
from homeassistant.components.device_tracker.const import (
|
||||
CONF_CONSIDER_HOME,
|
||||
CONF_SCAN_INTERVAL,
|
||||
DEFAULT_CONSIDER_HOME,
|
||||
)
|
||||
from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry
|
||||
from homeassistant.const import CONF_EXCLUDE, CONF_HOSTS
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
@@ -38,6 +42,9 @@ PLATFORM_SCHEMA = DEVICE_TRACKER_PLATFORM_SCHEMA.extend(
|
||||
{
|
||||
vol.Required(CONF_HOSTS): cv.ensure_list,
|
||||
vol.Required(CONF_HOME_INTERVAL, default=0): cv.positive_int,
|
||||
vol.Required(
|
||||
CONF_CONSIDER_HOME, default=DEFAULT_CONSIDER_HOME.total_seconds()
|
||||
): cv.time_period,
|
||||
vol.Optional(CONF_EXCLUDE, default=[]): vol.All(cv.ensure_list, [cv.string]),
|
||||
vol.Optional(CONF_OPTIONS, default=DEFAULT_OPTIONS): cv.string,
|
||||
}
|
||||
@@ -53,9 +60,15 @@ async def async_get_scanner(hass: HomeAssistant, config: ConfigType) -> None:
|
||||
else:
|
||||
scan_interval = TRACKER_SCAN_INTERVAL
|
||||
|
||||
if CONF_CONSIDER_HOME in validated_config:
|
||||
consider_home = validated_config[CONF_CONSIDER_HOME].total_seconds()
|
||||
else:
|
||||
consider_home = DEFAULT_CONSIDER_HOME.total_seconds()
|
||||
|
||||
import_config = {
|
||||
CONF_HOSTS: ",".join(validated_config[CONF_HOSTS]),
|
||||
CONF_HOME_INTERVAL: validated_config[CONF_HOME_INTERVAL],
|
||||
CONF_CONSIDER_HOME: consider_home,
|
||||
CONF_EXCLUDE: ",".join(validated_config[CONF_EXCLUDE]),
|
||||
CONF_OPTIONS: validated_config[CONF_OPTIONS],
|
||||
CONF_SCAN_INTERVAL: scan_interval,
|
||||
|
||||
@@ -7,6 +7,7 @@
|
||||
"data": {
|
||||
"hosts": "[%key:component::nmap_tracker::config::step::user::data::hosts%]",
|
||||
"home_interval": "[%key:component::nmap_tracker::config::step::user::data::home_interval%]",
|
||||
"consider_home": "Seconds to wait till marking a device tracker as not home after not being seen.",
|
||||
"exclude": "[%key:component::nmap_tracker::config::step::user::data::exclude%]",
|
||||
"scan_options": "[%key:component::nmap_tracker::config::step::user::data::scan_options%]",
|
||||
"interval_seconds": "Scan interval"
|
||||
|
||||
@@ -25,12 +25,12 @@
|
||||
"step": {
|
||||
"init": {
|
||||
"data": {
|
||||
"consider_home": "Seconds to wait till marking a device tracker as not home after not being seen.",
|
||||
"exclude": "Network addresses (comma seperated) to exclude from scanning",
|
||||
"home_interval": "Minimum number of minutes between scans of active devices (preserve battery)",
|
||||
"hosts": "Network addresses (comma seperated) to scan",
|
||||
"interval_seconds": "Scan interval",
|
||||
"scan_options": "Raw configurable scan options for Nmap",
|
||||
"track_new_devices": "Track new devices"
|
||||
"scan_options": "Raw configurable scan options for Nmap"
|
||||
},
|
||||
"description": "Configure hosts to be scanned by Nmap. Network address and excludes can be IP Addresses (192.168.1.1), IP Networks (192.168.0.0/24) or IP Ranges (192.168.1.0-32)."
|
||||
}
|
||||
|
||||
@@ -130,6 +130,7 @@ class ONVIFDevice:
|
||||
err,
|
||||
)
|
||||
self.available = False
|
||||
await self.device.close()
|
||||
except Fault as err:
|
||||
LOGGER.error(
|
||||
"Couldn't connect to camera '%s', please verify "
|
||||
|
||||
@@ -2,11 +2,7 @@
|
||||
"domain": "onvif",
|
||||
"name": "ONVIF",
|
||||
"documentation": "https://www.home-assistant.io/integrations/onvif",
|
||||
"requirements": [
|
||||
"onvif-zeep-async==1.0.0",
|
||||
"WSDiscovery==2.0.0",
|
||||
"zeep[async]==4.0.0"
|
||||
],
|
||||
"requirements": ["onvif-zeep-async==1.2.0", "WSDiscovery==2.0.0"],
|
||||
"dependencies": ["ffmpeg"],
|
||||
"codeowners": ["@hunterjm"],
|
||||
"config_flow": true,
|
||||
|
||||
@@ -66,6 +66,7 @@ async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> b
|
||||
config_entry.data.get(CONF_LONGITUDE, hass.config.longitude),
|
||||
altitude=config_entry.data.get(CONF_ELEVATION, hass.config.elevation),
|
||||
session=websession,
|
||||
logger=LOGGER,
|
||||
),
|
||||
)
|
||||
await openuv.async_update()
|
||||
|
||||
@@ -3,7 +3,7 @@
|
||||
"name": "OpenUV",
|
||||
"config_flow": true,
|
||||
"documentation": "https://www.home-assistant.io/integrations/openuv",
|
||||
"requirements": ["pyopenuv==2.1.0"],
|
||||
"requirements": ["pyopenuv==2.2.0"],
|
||||
"codeowners": ["@bachya"],
|
||||
"iot_class": "cloud_polling"
|
||||
}
|
||||
|
||||
@@ -3,7 +3,7 @@
|
||||
"name": "P1 Monitor",
|
||||
"config_flow": true,
|
||||
"documentation": "https://www.home-assistant.io/integrations/p1_monitor",
|
||||
"requirements": ["p1monitor==0.2.0"],
|
||||
"requirements": ["p1monitor==1.0.0"],
|
||||
"codeowners": ["@klaasnicolaas"],
|
||||
"quality_scale": "platinum",
|
||||
"iot_class": "local_polling"
|
||||
|
||||
@@ -192,33 +192,33 @@ SENSORS: dict[
|
||||
),
|
||||
SERVICE_SETTINGS: (
|
||||
SensorEntityDescription(
|
||||
key="gas_consumption_tariff",
|
||||
name="Gas Consumption - Tariff",
|
||||
key="gas_consumption_price",
|
||||
name="Gas Consumption Price",
|
||||
entity_registry_enabled_default=False,
|
||||
device_class=DEVICE_CLASS_MONETARY,
|
||||
native_unit_of_measurement=CURRENCY_EURO,
|
||||
),
|
||||
SensorEntityDescription(
|
||||
key="energy_consumption_low_tariff",
|
||||
name="Energy Consumption - Low Tariff",
|
||||
key="energy_consumption_price_low",
|
||||
name="Energy Consumption Price - Low",
|
||||
device_class=DEVICE_CLASS_MONETARY,
|
||||
native_unit_of_measurement=CURRENCY_EURO,
|
||||
),
|
||||
SensorEntityDescription(
|
||||
key="energy_consumption_high_tariff",
|
||||
name="Energy Consumption - High Tariff",
|
||||
key="energy_consumption_price_high",
|
||||
name="Energy Consumption Price - High",
|
||||
device_class=DEVICE_CLASS_MONETARY,
|
||||
native_unit_of_measurement=CURRENCY_EURO,
|
||||
),
|
||||
SensorEntityDescription(
|
||||
key="energy_production_low_tariff",
|
||||
name="Energy Production - Low Tariff",
|
||||
key="energy_production_price_low",
|
||||
name="Energy Production Price - Low",
|
||||
device_class=DEVICE_CLASS_MONETARY,
|
||||
native_unit_of_measurement=CURRENCY_EURO,
|
||||
),
|
||||
SensorEntityDescription(
|
||||
key="energy_production_high_tariff",
|
||||
name="Energy Production - High Tariff",
|
||||
key="energy_production_price_high",
|
||||
name="Energy Production Price - High",
|
||||
device_class=DEVICE_CLASS_MONETARY,
|
||||
native_unit_of_measurement=CURRENCY_EURO,
|
||||
),
|
||||
|
||||
@@ -91,7 +91,7 @@ _DRIVE_MON_COND = {
|
||||
"mdi:checkbox-marked-circle-outline",
|
||||
None,
|
||||
],
|
||||
"drive_temp": ["Temperature", TEMP_CELSIUS, None, None, DEVICE_CLASS_TEMPERATURE],
|
||||
"drive_temp": ["Temperature", TEMP_CELSIUS, None, DEVICE_CLASS_TEMPERATURE],
|
||||
}
|
||||
_VOLUME_MON_COND = {
|
||||
"volume_size_used": ["Used Space", DATA_GIBIBYTES, "mdi:chart-pie", None],
|
||||
|
||||
@@ -38,21 +38,22 @@ _LOGGER = logging.getLogger(__name__)
|
||||
SENSORS = (
|
||||
SensorEntityDescription(
|
||||
key="zigbee:InstantaneousDemand",
|
||||
name="Meter Power Demand",
|
||||
# We can drop the "Eagle-200" part of the name in HA 2021.12
|
||||
name="Eagle-200 Meter Power Demand",
|
||||
native_unit_of_measurement=POWER_KILO_WATT,
|
||||
device_class=DEVICE_CLASS_POWER,
|
||||
state_class=STATE_CLASS_MEASUREMENT,
|
||||
),
|
||||
SensorEntityDescription(
|
||||
key="zigbee:CurrentSummationDelivered",
|
||||
name="Total Meter Energy Delivered",
|
||||
name="Eagle-200 Total Meter Energy Delivered",
|
||||
native_unit_of_measurement=ENERGY_KILO_WATT_HOUR,
|
||||
device_class=DEVICE_CLASS_ENERGY,
|
||||
state_class=STATE_CLASS_TOTAL_INCREASING,
|
||||
),
|
||||
SensorEntityDescription(
|
||||
key="zigbee:CurrentSummationReceived",
|
||||
name="Total Meter Energy Received",
|
||||
name="Eagle-200 Total Meter Energy Received",
|
||||
native_unit_of_measurement=ENERGY_KILO_WATT_HOUR,
|
||||
device_class=DEVICE_CLASS_ENERGY,
|
||||
state_class=STATE_CLASS_TOTAL_INCREASING,
|
||||
|
||||
@@ -59,7 +59,7 @@ class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
|
||||
)
|
||||
|
||||
try:
|
||||
await client.async_get_next_pickup_event()
|
||||
await client.async_get_pickup_events()
|
||||
except RecollectError as err:
|
||||
LOGGER.error("Error during setup of integration: %s", err)
|
||||
return self.async_show_form(
|
||||
|
||||
@@ -3,7 +3,7 @@
|
||||
"name": "ReCollect Waste",
|
||||
"config_flow": true,
|
||||
"documentation": "https://www.home-assistant.io/integrations/recollect_waste",
|
||||
"requirements": ["aiorecollect==1.0.7"],
|
||||
"requirements": ["aiorecollect==1.0.8"],
|
||||
"codeowners": ["@bachya"],
|
||||
"iot_class": "cloud_polling"
|
||||
}
|
||||
|
||||
@@ -1,6 +1,8 @@
|
||||
"""Support for ReCollect Waste sensors."""
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import date, datetime, time
|
||||
|
||||
from aiorecollect.client import PickupType
|
||||
import voluptuous as vol
|
||||
|
||||
@@ -74,6 +76,12 @@ async def async_setup_platform(
|
||||
)
|
||||
|
||||
|
||||
@callback
|
||||
def async_get_utc_midnight(target_date: date) -> datetime:
|
||||
"""Get UTC midnight for a given date."""
|
||||
return as_utc(datetime.combine(target_date, time(0)))
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback
|
||||
) -> None:
|
||||
@@ -124,7 +132,9 @@ class ReCollectWasteSensor(CoordinatorEntity, SensorEntity):
|
||||
ATTR_NEXT_PICKUP_TYPES: async_get_pickup_type_names(
|
||||
self._entry, next_pickup_event.pickup_types
|
||||
),
|
||||
ATTR_NEXT_PICKUP_DATE: as_utc(next_pickup_event.date).isoformat(),
|
||||
ATTR_NEXT_PICKUP_DATE: async_get_utc_midnight(
|
||||
next_pickup_event.date
|
||||
).isoformat(),
|
||||
}
|
||||
)
|
||||
self._attr_native_value = as_utc(pickup_event.date).isoformat()
|
||||
self._attr_native_value = async_get_utc_midnight(pickup_event.date).isoformat()
|
||||
|
||||
@@ -70,7 +70,7 @@ DOUBLE_TYPE = (
|
||||
Float()
|
||||
.with_variant(mysql.DOUBLE(asdecimal=False), "mysql")
|
||||
.with_variant(oracle.DOUBLE_PRECISION(), "oracle")
|
||||
.with_variant(postgresql.DOUBLE_PRECISION, "postgresql")
|
||||
.with_variant(postgresql.DOUBLE_PRECISION(), "postgresql")
|
||||
)
|
||||
|
||||
|
||||
@@ -267,6 +267,7 @@ class Statistics(Base): # type: ignore
|
||||
class StatisticMetaData(TypedDict, total=False):
|
||||
"""Statistic meta data class."""
|
||||
|
||||
statistic_id: str
|
||||
unit_of_measurement: str | None
|
||||
has_mean: bool
|
||||
has_sum: bool
|
||||
|
||||
@@ -53,6 +53,13 @@ QUERY_STATISTIC_META = [
|
||||
StatisticsMeta.id,
|
||||
StatisticsMeta.statistic_id,
|
||||
StatisticsMeta.unit_of_measurement,
|
||||
StatisticsMeta.has_mean,
|
||||
StatisticsMeta.has_sum,
|
||||
]
|
||||
|
||||
QUERY_STATISTIC_META_ID = [
|
||||
StatisticsMeta.id,
|
||||
StatisticsMeta.statistic_id,
|
||||
]
|
||||
|
||||
STATISTICS_BAKERY = "recorder_statistics_bakery"
|
||||
@@ -124,33 +131,61 @@ def _get_metadata_ids(
|
||||
) -> list[str]:
|
||||
"""Resolve metadata_id for a list of statistic_ids."""
|
||||
baked_query = hass.data[STATISTICS_META_BAKERY](
|
||||
lambda session: session.query(*QUERY_STATISTIC_META)
|
||||
lambda session: session.query(*QUERY_STATISTIC_META_ID)
|
||||
)
|
||||
baked_query += lambda q: q.filter(
|
||||
StatisticsMeta.statistic_id.in_(bindparam("statistic_ids"))
|
||||
)
|
||||
result = execute(baked_query(session).params(statistic_ids=statistic_ids))
|
||||
|
||||
return [id for id, _, _ in result] if result else []
|
||||
return [id for id, _ in result] if result else []
|
||||
|
||||
|
||||
def _get_or_add_metadata_id(
|
||||
def _update_or_add_metadata(
|
||||
hass: HomeAssistant,
|
||||
session: scoped_session,
|
||||
statistic_id: str,
|
||||
metadata: StatisticMetaData,
|
||||
new_metadata: StatisticMetaData,
|
||||
) -> str:
|
||||
"""Get metadata_id for a statistic_id, add if it doesn't exist."""
|
||||
metadata_id = _get_metadata_ids(hass, session, [statistic_id])
|
||||
if not metadata_id:
|
||||
unit = metadata["unit_of_measurement"]
|
||||
has_mean = metadata["has_mean"]
|
||||
has_sum = metadata["has_sum"]
|
||||
old_metadata_dict = _get_metadata(hass, session, [statistic_id], None)
|
||||
if not old_metadata_dict:
|
||||
unit = new_metadata["unit_of_measurement"]
|
||||
has_mean = new_metadata["has_mean"]
|
||||
has_sum = new_metadata["has_sum"]
|
||||
session.add(
|
||||
StatisticsMeta.from_meta(DOMAIN, statistic_id, unit, has_mean, has_sum)
|
||||
)
|
||||
metadata_id = _get_metadata_ids(hass, session, [statistic_id])
|
||||
return metadata_id[0]
|
||||
metadata_ids = _get_metadata_ids(hass, session, [statistic_id])
|
||||
_LOGGER.debug(
|
||||
"Added new statistics metadata for %s, new_metadata: %s",
|
||||
statistic_id,
|
||||
new_metadata,
|
||||
)
|
||||
return metadata_ids[0]
|
||||
|
||||
metadata_id, old_metadata = next(iter(old_metadata_dict.items()))
|
||||
if (
|
||||
old_metadata["has_mean"] != new_metadata["has_mean"]
|
||||
or old_metadata["has_sum"] != new_metadata["has_sum"]
|
||||
or old_metadata["unit_of_measurement"] != new_metadata["unit_of_measurement"]
|
||||
):
|
||||
session.query(StatisticsMeta).filter_by(statistic_id=statistic_id).update(
|
||||
{
|
||||
StatisticsMeta.has_mean: new_metadata["has_mean"],
|
||||
StatisticsMeta.has_sum: new_metadata["has_sum"],
|
||||
StatisticsMeta.unit_of_measurement: new_metadata["unit_of_measurement"],
|
||||
},
|
||||
synchronize_session=False,
|
||||
)
|
||||
_LOGGER.debug(
|
||||
"Updated statistics metadata for %s, old_metadata: %s, new_metadata: %s",
|
||||
statistic_id,
|
||||
old_metadata,
|
||||
new_metadata,
|
||||
)
|
||||
|
||||
return metadata_id
|
||||
|
||||
|
||||
@retryable_database_job("statistics")
|
||||
@@ -177,7 +212,7 @@ def compile_statistics(instance: Recorder, start: datetime) -> bool:
|
||||
with session_scope(session=instance.get_session()) as session: # type: ignore
|
||||
for stats in platform_stats:
|
||||
for entity_id, stat in stats.items():
|
||||
metadata_id = _get_or_add_metadata_id(
|
||||
metadata_id = _update_or_add_metadata(
|
||||
instance.hass, session, entity_id, stat["meta"]
|
||||
)
|
||||
session.add(Statistics.from_stats(metadata_id, start, stat["stat"]))
|
||||
@@ -191,14 +226,19 @@ def _get_metadata(
|
||||
session: scoped_session,
|
||||
statistic_ids: list[str] | None,
|
||||
statistic_type: str | None,
|
||||
) -> dict[str, dict[str, str]]:
|
||||
) -> dict[str, StatisticMetaData]:
|
||||
"""Fetch meta data."""
|
||||
|
||||
def _meta(metas: list, wanted_metadata_id: str) -> dict[str, str] | None:
|
||||
meta = None
|
||||
for metadata_id, statistic_id, unit in metas:
|
||||
def _meta(metas: list, wanted_metadata_id: str) -> StatisticMetaData | None:
|
||||
meta: StatisticMetaData | None = None
|
||||
for metadata_id, statistic_id, unit, has_mean, has_sum in metas:
|
||||
if metadata_id == wanted_metadata_id:
|
||||
meta = {"unit_of_measurement": unit, "statistic_id": statistic_id}
|
||||
meta = {
|
||||
"statistic_id": statistic_id,
|
||||
"unit_of_measurement": unit,
|
||||
"has_mean": has_mean,
|
||||
"has_sum": has_sum,
|
||||
}
|
||||
return meta
|
||||
|
||||
baked_query = hass.data[STATISTICS_META_BAKERY](
|
||||
@@ -219,7 +259,7 @@ def _get_metadata(
|
||||
return {}
|
||||
|
||||
metadata_ids = [metadata[0] for metadata in result]
|
||||
metadata = {}
|
||||
metadata: dict[str, StatisticMetaData] = {}
|
||||
for _id in metadata_ids:
|
||||
meta = _meta(result, _id)
|
||||
if meta:
|
||||
@@ -230,7 +270,7 @@ def _get_metadata(
|
||||
def get_metadata(
|
||||
hass: HomeAssistant,
|
||||
statistic_id: str,
|
||||
) -> dict[str, str] | None:
|
||||
) -> StatisticMetaData | None:
|
||||
"""Return metadata for a statistic_id."""
|
||||
statistic_ids = [statistic_id]
|
||||
with session_scope(hass=hass) as session:
|
||||
@@ -255,7 +295,7 @@ def _configured_unit(unit: str, units: UnitSystem) -> str:
|
||||
|
||||
def list_statistic_ids(
|
||||
hass: HomeAssistant, statistic_type: str | None = None
|
||||
) -> list[dict[str, str] | None]:
|
||||
) -> list[StatisticMetaData | None]:
|
||||
"""Return statistic_ids and meta data."""
|
||||
units = hass.config.units
|
||||
statistic_ids = {}
|
||||
@@ -263,7 +303,9 @@ def list_statistic_ids(
|
||||
metadata = _get_metadata(hass, session, None, statistic_type)
|
||||
|
||||
for meta in metadata.values():
|
||||
unit = _configured_unit(meta["unit_of_measurement"], units)
|
||||
unit = meta["unit_of_measurement"]
|
||||
if unit is not None:
|
||||
unit = _configured_unit(unit, units)
|
||||
meta["unit_of_measurement"] = unit
|
||||
|
||||
statistic_ids = {
|
||||
@@ -277,7 +319,8 @@ def list_statistic_ids(
|
||||
platform_statistic_ids = platform.list_statistic_ids(hass, statistic_type)
|
||||
|
||||
for statistic_id, unit in platform_statistic_ids.items():
|
||||
unit = _configured_unit(unit, units)
|
||||
if unit is not None:
|
||||
unit = _configured_unit(unit, units)
|
||||
platform_statistic_ids[statistic_id] = unit
|
||||
|
||||
statistic_ids = {**statistic_ids, **platform_statistic_ids}
|
||||
@@ -367,7 +410,7 @@ def _sorted_statistics_to_dict(
|
||||
hass: HomeAssistant,
|
||||
stats: list,
|
||||
statistic_ids: list[str] | None,
|
||||
metadata: dict[str, dict[str, str]],
|
||||
metadata: dict[str, StatisticMetaData],
|
||||
) -> dict[str, list[dict]]:
|
||||
"""Convert SQL results into JSON friendly data structure."""
|
||||
result: dict = defaultdict(list)
|
||||
|
||||
@@ -52,6 +52,7 @@ class RingCam(RingEntityMixin, Camera):
|
||||
self._last_event = None
|
||||
self._last_video_id = None
|
||||
self._video_url = None
|
||||
self._image = None
|
||||
self._expires_at = dt_util.utcnow() - FORCE_REFRESH_INTERVAL
|
||||
|
||||
async def async_added_to_hass(self):
|
||||
@@ -80,6 +81,7 @@ class RingCam(RingEntityMixin, Camera):
|
||||
self._last_event = None
|
||||
self._last_video_id = None
|
||||
self._video_url = None
|
||||
self._image = None
|
||||
self._expires_at = dt_util.utcnow()
|
||||
self.async_write_ha_state()
|
||||
|
||||
@@ -106,12 +108,18 @@ class RingCam(RingEntityMixin, Camera):
|
||||
self, width: int | None = None, height: int | None = None
|
||||
) -> bytes | None:
|
||||
"""Return a still image response from the camera."""
|
||||
if self._video_url is None:
|
||||
return
|
||||
if self._image is None and self._video_url:
|
||||
image = await ffmpeg.async_get_image(
|
||||
self.hass,
|
||||
self._video_url,
|
||||
width=width,
|
||||
height=height,
|
||||
)
|
||||
|
||||
return await ffmpeg.async_get_image(
|
||||
self.hass, self._video_url, width=width, height=height
|
||||
)
|
||||
if image:
|
||||
self._image = image
|
||||
|
||||
return self._image
|
||||
|
||||
async def handle_async_mjpeg_stream(self, request):
|
||||
"""Generate an HTTP MJPEG stream from the camera."""
|
||||
@@ -144,6 +152,9 @@ class RingCam(RingEntityMixin, Camera):
|
||||
if self._last_video_id == self._last_event["id"] and utcnow <= self._expires_at:
|
||||
return
|
||||
|
||||
if self._last_video_id != self._last_event["id"]:
|
||||
self._image = None
|
||||
|
||||
try:
|
||||
video_url = await self.hass.async_add_executor_job(
|
||||
self._device.recording_url, self._last_event["id"]
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
"domain": "ring",
|
||||
"name": "Ring",
|
||||
"documentation": "https://www.home-assistant.io/integrations/ring",
|
||||
"requirements": ["ring_doorbell==0.6.2"],
|
||||
"requirements": ["ring_doorbell==0.7.1"],
|
||||
"dependencies": ["ffmpeg"],
|
||||
"codeowners": ["@balloob"],
|
||||
"config_flow": true,
|
||||
|
||||
@@ -23,6 +23,16 @@ CONSUMPTION_NAME = "Usage"
|
||||
CONSUMPTION_ID = "usage"
|
||||
PRODUCTION_NAME = "Production"
|
||||
PRODUCTION_ID = "production"
|
||||
PRODUCTION_PCT_NAME = "Net Production Percentage"
|
||||
PRODUCTION_PCT_ID = "production_pct"
|
||||
NET_PRODUCTION_NAME = "Net Production"
|
||||
NET_PRODUCTION_ID = "net_production"
|
||||
TO_GRID_NAME = "To Grid"
|
||||
TO_GRID_ID = "to_grid"
|
||||
FROM_GRID_NAME = "From Grid"
|
||||
FROM_GRID_ID = "from_grid"
|
||||
SOLAR_POWERED_NAME = "Solar Powered Percentage"
|
||||
SOLAR_POWERED_ID = "solar_powered"
|
||||
|
||||
ICON = "mdi:flash"
|
||||
|
||||
|
||||
@@ -10,6 +10,7 @@ from homeassistant.const import (
|
||||
DEVICE_CLASS_POWER,
|
||||
ELECTRIC_POTENTIAL_VOLT,
|
||||
ENERGY_KILO_WATT_HOUR,
|
||||
PERCENTAGE,
|
||||
POWER_WATT,
|
||||
)
|
||||
from homeassistant.core import callback
|
||||
@@ -22,15 +23,25 @@ from .const import (
|
||||
CONSUMPTION_ID,
|
||||
CONSUMPTION_NAME,
|
||||
DOMAIN,
|
||||
FROM_GRID_ID,
|
||||
FROM_GRID_NAME,
|
||||
ICON,
|
||||
MDI_ICONS,
|
||||
NET_PRODUCTION_ID,
|
||||
NET_PRODUCTION_NAME,
|
||||
PRODUCTION_ID,
|
||||
PRODUCTION_NAME,
|
||||
PRODUCTION_PCT_ID,
|
||||
PRODUCTION_PCT_NAME,
|
||||
SENSE_DATA,
|
||||
SENSE_DEVICE_UPDATE,
|
||||
SENSE_DEVICES_DATA,
|
||||
SENSE_DISCOVERED_DEVICES_DATA,
|
||||
SENSE_TRENDS_COORDINATOR,
|
||||
SOLAR_POWERED_ID,
|
||||
SOLAR_POWERED_NAME,
|
||||
TO_GRID_ID,
|
||||
TO_GRID_NAME,
|
||||
)
|
||||
|
||||
|
||||
@@ -55,7 +66,16 @@ TRENDS_SENSOR_TYPES = {
|
||||
}
|
||||
|
||||
# Production/consumption variants
|
||||
SENSOR_VARIANTS = [PRODUCTION_ID, CONSUMPTION_ID]
|
||||
SENSOR_VARIANTS = [(PRODUCTION_ID, PRODUCTION_NAME), (CONSUMPTION_ID, CONSUMPTION_NAME)]
|
||||
|
||||
# Trend production/consumption variants
|
||||
TREND_SENSOR_VARIANTS = SENSOR_VARIANTS + [
|
||||
(PRODUCTION_PCT_ID, PRODUCTION_PCT_NAME),
|
||||
(NET_PRODUCTION_ID, NET_PRODUCTION_NAME),
|
||||
(FROM_GRID_ID, FROM_GRID_NAME),
|
||||
(TO_GRID_ID, TO_GRID_NAME),
|
||||
(SOLAR_POWERED_ID, SOLAR_POWERED_NAME),
|
||||
]
|
||||
|
||||
|
||||
def sense_to_mdi(sense_icon):
|
||||
@@ -86,15 +106,20 @@ async def async_setup_entry(hass, config_entry, async_add_entities):
|
||||
if device["tags"]["DeviceListAllowed"] == "true"
|
||||
]
|
||||
|
||||
for var in SENSOR_VARIANTS:
|
||||
for variant_id, variant_name in SENSOR_VARIANTS:
|
||||
name = ACTIVE_SENSOR_TYPE.name
|
||||
sensor_type = ACTIVE_SENSOR_TYPE.sensor_type
|
||||
is_production = var == PRODUCTION_ID
|
||||
|
||||
unique_id = f"{sense_monitor_id}-active-{var}"
|
||||
unique_id = f"{sense_monitor_id}-active-{variant_id}"
|
||||
devices.append(
|
||||
SenseActiveSensor(
|
||||
data, name, sensor_type, is_production, sense_monitor_id, var, unique_id
|
||||
data,
|
||||
name,
|
||||
sensor_type,
|
||||
sense_monitor_id,
|
||||
variant_id,
|
||||
variant_name,
|
||||
unique_id,
|
||||
)
|
||||
)
|
||||
|
||||
@@ -102,18 +127,18 @@ async def async_setup_entry(hass, config_entry, async_add_entities):
|
||||
devices.append(SenseVoltageSensor(data, i, sense_monitor_id))
|
||||
|
||||
for type_id, typ in TRENDS_SENSOR_TYPES.items():
|
||||
for var in SENSOR_VARIANTS:
|
||||
for variant_id, variant_name in TREND_SENSOR_VARIANTS:
|
||||
name = typ.name
|
||||
sensor_type = typ.sensor_type
|
||||
is_production = var == PRODUCTION_ID
|
||||
|
||||
unique_id = f"{sense_monitor_id}-{type_id}-{var}"
|
||||
unique_id = f"{sense_monitor_id}-{type_id}-{variant_id}"
|
||||
devices.append(
|
||||
SenseTrendsSensor(
|
||||
data,
|
||||
name,
|
||||
sensor_type,
|
||||
is_production,
|
||||
variant_id,
|
||||
variant_name,
|
||||
trends_coordinator,
|
||||
unique_id,
|
||||
)
|
||||
@@ -137,19 +162,19 @@ class SenseActiveSensor(SensorEntity):
|
||||
data,
|
||||
name,
|
||||
sensor_type,
|
||||
is_production,
|
||||
sense_monitor_id,
|
||||
sensor_id,
|
||||
variant_id,
|
||||
variant_name,
|
||||
unique_id,
|
||||
):
|
||||
"""Initialize the Sense sensor."""
|
||||
name_type = PRODUCTION_NAME if is_production else CONSUMPTION_NAME
|
||||
self._attr_name = f"{name} {name_type}"
|
||||
self._attr_name = f"{name} {variant_name}"
|
||||
self._attr_unique_id = unique_id
|
||||
self._data = data
|
||||
self._sense_monitor_id = sense_monitor_id
|
||||
self._sensor_type = sensor_type
|
||||
self._is_production = is_production
|
||||
self._variant_id = variant_id
|
||||
self._variant_name = variant_name
|
||||
|
||||
async def async_added_to_hass(self):
|
||||
"""Register callbacks."""
|
||||
@@ -166,7 +191,7 @@ class SenseActiveSensor(SensorEntity):
|
||||
"""Update the sensor from the data. Must not do I/O."""
|
||||
new_state = round(
|
||||
self._data.active_solar_power
|
||||
if self._is_production
|
||||
if self._variant_id == PRODUCTION_ID
|
||||
else self._data.active_power
|
||||
)
|
||||
if self._attr_available and self._attr_native_value == new_state:
|
||||
@@ -235,24 +260,30 @@ class SenseTrendsSensor(SensorEntity):
|
||||
data,
|
||||
name,
|
||||
sensor_type,
|
||||
is_production,
|
||||
variant_id,
|
||||
variant_name,
|
||||
trends_coordinator,
|
||||
unique_id,
|
||||
):
|
||||
"""Initialize the Sense sensor."""
|
||||
name_type = PRODUCTION_NAME if is_production else CONSUMPTION_NAME
|
||||
self._attr_name = f"{name} {name_type}"
|
||||
self._attr_name = f"{name} {variant_name}"
|
||||
self._attr_unique_id = unique_id
|
||||
self._data = data
|
||||
self._sensor_type = sensor_type
|
||||
self._coordinator = trends_coordinator
|
||||
self._is_production = is_production
|
||||
self._variant_id = variant_id
|
||||
self._had_any_update = False
|
||||
|
||||
if variant_id in [PRODUCTION_PCT_ID, SOLAR_POWERED_ID]:
|
||||
self._attr_native_unit_of_measurement = PERCENTAGE
|
||||
self._attr_entity_registry_enabled_default = False
|
||||
self._attr_state_class = None
|
||||
self._attr_device_class = None
|
||||
|
||||
@property
|
||||
def native_value(self):
|
||||
"""Return the state of the sensor."""
|
||||
return round(self._data.get_trend(self._sensor_type, self._is_production), 1)
|
||||
return round(self._data.get_trend(self._sensor_type, self._variant_id), 1)
|
||||
|
||||
@property
|
||||
def available(self):
|
||||
|
||||
@@ -108,6 +108,7 @@ UNIT_CONVERSIONS: dict[str, dict[str, Callable]] = {
|
||||
}
|
||||
|
||||
# Keep track of entities for which a warning about decreasing value has been logged
|
||||
SEEN_DIP = "sensor_seen_total_increasing_dip"
|
||||
WARN_DIP = "sensor_warn_total_increasing_dip"
|
||||
# Keep track of entities for which a warning about unsupported unit has been logged
|
||||
WARN_UNSUPPORTED_UNIT = "sensor_warn_unsupported_unit"
|
||||
@@ -199,11 +200,18 @@ def _normalize_states(
|
||||
hass.data[WARN_UNSTABLE_UNIT] = set()
|
||||
if entity_id not in hass.data[WARN_UNSTABLE_UNIT]:
|
||||
hass.data[WARN_UNSTABLE_UNIT].add(entity_id)
|
||||
extra = ""
|
||||
if old_metadata := statistics.get_metadata(hass, entity_id):
|
||||
extra = (
|
||||
" and matches the unit of already compiled statistics "
|
||||
f"({old_metadata['unit_of_measurement']})"
|
||||
)
|
||||
_LOGGER.warning(
|
||||
"The unit of %s is changing, got %s, generation of long term "
|
||||
"statistics will be suppressed unless the unit is stable",
|
||||
"The unit of %s is changing, got multiple %s, generation of long term "
|
||||
"statistics will be suppressed unless the unit is stable%s",
|
||||
entity_id,
|
||||
all_units,
|
||||
extra,
|
||||
)
|
||||
return None, []
|
||||
unit = fstates[0][1].attributes.get(ATTR_UNIT_OF_MEASUREMENT)
|
||||
@@ -233,7 +241,17 @@ def _normalize_states(
|
||||
|
||||
|
||||
def warn_dip(hass: HomeAssistant, entity_id: str) -> None:
|
||||
"""Log a warning once if a sensor with state_class_total has a decreasing value."""
|
||||
"""Log a warning once if a sensor with state_class_total has a decreasing value.
|
||||
|
||||
The log will be suppressed until two dips have been seen to prevent warning due to
|
||||
rounding issues with databases storing the state as a single precision float, which
|
||||
was fixed in recorder DB version 20.
|
||||
"""
|
||||
if SEEN_DIP not in hass.data:
|
||||
hass.data[SEEN_DIP] = set()
|
||||
if entity_id not in hass.data[SEEN_DIP]:
|
||||
hass.data[SEEN_DIP].add(entity_id)
|
||||
return
|
||||
if WARN_DIP not in hass.data:
|
||||
hass.data[WARN_DIP] = set()
|
||||
if entity_id not in hass.data[WARN_DIP]:
|
||||
@@ -264,7 +282,22 @@ def reset_detected(
|
||||
return state < 0.9 * previous_state
|
||||
|
||||
|
||||
def compile_statistics(
|
||||
def _wanted_statistics(
|
||||
entities: list[tuple[str, str, str | None]]
|
||||
) -> dict[str, set[str]]:
|
||||
"""Prepare a dict with wanted statistics for entities."""
|
||||
wanted_statistics = {}
|
||||
for entity_id, state_class, device_class in entities:
|
||||
if device_class in DEVICE_CLASS_STATISTICS[state_class]:
|
||||
wanted_statistics[entity_id] = DEVICE_CLASS_STATISTICS[state_class][
|
||||
device_class
|
||||
]
|
||||
else:
|
||||
wanted_statistics[entity_id] = DEFAULT_STATISTICS[state_class]
|
||||
return wanted_statistics
|
||||
|
||||
|
||||
def compile_statistics( # noqa: C901
|
||||
hass: HomeAssistant, start: datetime.datetime, end: datetime.datetime
|
||||
) -> dict:
|
||||
"""Compile statistics for all entities during start-end.
|
||||
@@ -275,17 +308,32 @@ def compile_statistics(
|
||||
|
||||
entities = _get_entities(hass)
|
||||
|
||||
wanted_statistics = _wanted_statistics(entities)
|
||||
|
||||
# Get history between start and end
|
||||
history_list = history.get_significant_states( # type: ignore
|
||||
hass, start - datetime.timedelta.resolution, end, [i[0] for i in entities]
|
||||
)
|
||||
entities_full_history = [i[0] for i in entities if "sum" in wanted_statistics[i[0]]]
|
||||
history_list = {}
|
||||
if entities_full_history:
|
||||
history_list = history.get_significant_states( # type: ignore
|
||||
hass,
|
||||
start - datetime.timedelta.resolution,
|
||||
end,
|
||||
entity_ids=entities_full_history,
|
||||
significant_changes_only=False,
|
||||
)
|
||||
entities_significant_history = [
|
||||
i[0] for i in entities if "sum" not in wanted_statistics[i[0]]
|
||||
]
|
||||
if entities_significant_history:
|
||||
_history_list = history.get_significant_states( # type: ignore
|
||||
hass,
|
||||
start - datetime.timedelta.resolution,
|
||||
end,
|
||||
entity_ids=entities_significant_history,
|
||||
)
|
||||
history_list = {**history_list, **_history_list}
|
||||
|
||||
for entity_id, state_class, device_class in entities:
|
||||
if device_class in DEVICE_CLASS_STATISTICS[state_class]:
|
||||
wanted_statistics = DEVICE_CLASS_STATISTICS[state_class][device_class]
|
||||
else:
|
||||
wanted_statistics = DEFAULT_STATISTICS[state_class]
|
||||
|
||||
if entity_id not in history_list:
|
||||
continue
|
||||
|
||||
@@ -309,7 +357,7 @@ def compile_statistics(
|
||||
entity_id,
|
||||
unit,
|
||||
old_metadata["unit_of_measurement"],
|
||||
unit,
|
||||
old_metadata["unit_of_measurement"],
|
||||
)
|
||||
continue
|
||||
|
||||
@@ -318,21 +366,21 @@ def compile_statistics(
|
||||
# Set meta data
|
||||
result[entity_id]["meta"] = {
|
||||
"unit_of_measurement": unit,
|
||||
"has_mean": "mean" in wanted_statistics,
|
||||
"has_sum": "sum" in wanted_statistics,
|
||||
"has_mean": "mean" in wanted_statistics[entity_id],
|
||||
"has_sum": "sum" in wanted_statistics[entity_id],
|
||||
}
|
||||
|
||||
# Make calculations
|
||||
stat: dict = {}
|
||||
if "max" in wanted_statistics:
|
||||
if "max" in wanted_statistics[entity_id]:
|
||||
stat["max"] = max(*itertools.islice(zip(*fstates), 1))
|
||||
if "min" in wanted_statistics:
|
||||
if "min" in wanted_statistics[entity_id]:
|
||||
stat["min"] = min(*itertools.islice(zip(*fstates), 1))
|
||||
|
||||
if "mean" in wanted_statistics:
|
||||
if "mean" in wanted_statistics[entity_id]:
|
||||
stat["mean"] = _time_weighted_average(fstates, start, end)
|
||||
|
||||
if "sum" in wanted_statistics:
|
||||
if "sum" in wanted_statistics[entity_id]:
|
||||
last_reset = old_last_reset = None
|
||||
new_state = old_state = None
|
||||
_sum = 0
|
||||
@@ -341,7 +389,7 @@ def compile_statistics(
|
||||
# We have compiled history for this sensor before, use that as a starting point
|
||||
last_reset = old_last_reset = last_stats[entity_id][0]["last_reset"]
|
||||
new_state = old_state = last_stats[entity_id][0]["state"]
|
||||
_sum = last_stats[entity_id][0]["sum"]
|
||||
_sum = last_stats[entity_id][0]["sum"] or 0
|
||||
|
||||
for fstate, state in fstates:
|
||||
|
||||
@@ -358,6 +406,19 @@ def compile_statistics(
|
||||
and (last_reset := state.attributes.get("last_reset"))
|
||||
!= old_last_reset
|
||||
):
|
||||
if old_state is None:
|
||||
_LOGGER.info(
|
||||
"Compiling initial sum statistics for %s, zero point set to %s",
|
||||
entity_id,
|
||||
fstate,
|
||||
)
|
||||
else:
|
||||
_LOGGER.info(
|
||||
"Detected new cycle for %s, last_reset set to %s (old last_reset %s)",
|
||||
entity_id,
|
||||
last_reset,
|
||||
old_last_reset,
|
||||
)
|
||||
reset = True
|
||||
elif old_state is None and last_reset is None:
|
||||
reset = True
|
||||
@@ -372,7 +433,7 @@ def compile_statistics(
|
||||
):
|
||||
reset = True
|
||||
_LOGGER.info(
|
||||
"Detected new cycle for %s, zero point set to %s (old zero point %s)",
|
||||
"Detected new cycle for %s, value dropped from %s to %s",
|
||||
entity_id,
|
||||
fstate,
|
||||
new_state,
|
||||
@@ -385,11 +446,8 @@ def compile_statistics(
|
||||
# ..and update the starting point
|
||||
new_state = fstate
|
||||
old_last_reset = last_reset
|
||||
# Force a new cycle for STATE_CLASS_TOTAL_INCREASING to start at 0
|
||||
if (
|
||||
state_class == STATE_CLASS_TOTAL_INCREASING
|
||||
and old_state is not None
|
||||
):
|
||||
# Force a new cycle for an existing sensor to start at 0
|
||||
if old_state is not None:
|
||||
old_state = 0.0
|
||||
else:
|
||||
old_state = new_state
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
"""Binary sensor for Shelly."""
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Final
|
||||
from typing import Final, cast
|
||||
|
||||
from homeassistant.components.binary_sensor import (
|
||||
DEVICE_CLASS_CONNECTIVITY,
|
||||
@@ -46,7 +46,9 @@ SENSORS: Final = {
|
||||
name="Overpowering", device_class=DEVICE_CLASS_PROBLEM
|
||||
),
|
||||
("sensor", "dwIsOpened"): BlockAttributeDescription(
|
||||
name="Door", device_class=DEVICE_CLASS_OPENING
|
||||
name="Door",
|
||||
device_class=DEVICE_CLASS_OPENING,
|
||||
available=lambda block: cast(bool, block.dwIsOpened != -1),
|
||||
),
|
||||
("sensor", "flood"): BlockAttributeDescription(
|
||||
name="Flood", device_class=DEVICE_CLASS_MOISTURE
|
||||
|
||||
@@ -40,6 +40,7 @@ SENSORS: Final = {
|
||||
device_class=sensor.DEVICE_CLASS_BATTERY,
|
||||
state_class=sensor.STATE_CLASS_MEASUREMENT,
|
||||
removal_condition=lambda settings, _: settings.get("external_power") == 1,
|
||||
available=lambda block: cast(bool, block.battery != -1),
|
||||
),
|
||||
("device", "deviceTemp"): BlockAttributeDescription(
|
||||
name="Device Temperature",
|
||||
@@ -176,6 +177,7 @@ SENSORS: Final = {
|
||||
unit=LIGHT_LUX,
|
||||
device_class=sensor.DEVICE_CLASS_ILLUMINANCE,
|
||||
state_class=sensor.STATE_CLASS_MEASUREMENT,
|
||||
available=lambda block: cast(bool, block.luminosity != -1),
|
||||
),
|
||||
("sensor", "tilt"): BlockAttributeDescription(
|
||||
name="Tilt",
|
||||
|
||||
@@ -3,7 +3,7 @@
|
||||
"name": "SimpliSafe",
|
||||
"config_flow": true,
|
||||
"documentation": "https://www.home-assistant.io/integrations/simplisafe",
|
||||
"requirements": ["simplisafe-python==11.0.4"],
|
||||
"requirements": ["simplisafe-python==11.0.6"],
|
||||
"codeowners": ["@bachya"],
|
||||
"iot_class": "cloud_polling"
|
||||
}
|
||||
|
||||
@@ -561,7 +561,7 @@ class SmartThingsPowerConsumptionSensor(SmartThingsEntity, SensorEntity):
|
||||
@property
|
||||
def unique_id(self) -> str:
|
||||
"""Return a unique ID."""
|
||||
return f"{self._device.device_id}.{self.report_name}"
|
||||
return f"{self._device.device_id}.{self.report_name}_meter"
|
||||
|
||||
@property
|
||||
def native_value(self):
|
||||
|
||||
@@ -1,12 +1,28 @@
|
||||
"""Solar-Log integration."""
|
||||
from datetime import timedelta
|
||||
import logging
|
||||
from urllib.parse import ParseResult, urlparse
|
||||
|
||||
from requests.exceptions import HTTPError, Timeout
|
||||
from sunwatcher.solarlog.solarlog import SolarLog
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import CONF_HOST
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers import update_coordinator
|
||||
|
||||
from .const import DOMAIN
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
PLATFORMS = ["sensor"]
|
||||
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
"""Set up a config entry for solarlog."""
|
||||
coordinator = SolarlogData(hass, entry)
|
||||
await coordinator.async_config_entry_first_refresh()
|
||||
hass.data.setdefault(DOMAIN, {})[entry.entry_id] = coordinator
|
||||
hass.config_entries.async_setup_platforms(entry, PLATFORMS)
|
||||
return True
|
||||
|
||||
@@ -14,3 +30,73 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
async def async_unload_entry(hass, entry):
|
||||
"""Unload a config entry."""
|
||||
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
|
||||
|
||||
|
||||
class SolarlogData(update_coordinator.DataUpdateCoordinator):
|
||||
"""Get and update the latest data."""
|
||||
|
||||
def __init__(self, hass: HomeAssistant, entry: ConfigEntry) -> None:
|
||||
"""Initialize the data object."""
|
||||
super().__init__(
|
||||
hass, _LOGGER, name="SolarLog", update_interval=timedelta(seconds=60)
|
||||
)
|
||||
|
||||
host_entry = entry.data[CONF_HOST]
|
||||
|
||||
url = urlparse(host_entry, "http")
|
||||
netloc = url.netloc or url.path
|
||||
path = url.path if url.netloc else ""
|
||||
url = ParseResult("http", netloc, path, *url[3:])
|
||||
self.unique_id = entry.entry_id
|
||||
self.name = entry.title
|
||||
self.host = url.geturl()
|
||||
|
||||
async def _async_update_data(self):
|
||||
"""Update the data from the SolarLog device."""
|
||||
try:
|
||||
api = await self.hass.async_add_executor_job(SolarLog, self.host)
|
||||
except (OSError, Timeout, HTTPError) as err:
|
||||
raise update_coordinator.UpdateFailed(err)
|
||||
|
||||
if api.time.year == 1999:
|
||||
raise update_coordinator.UpdateFailed(
|
||||
"Invalid data returned (can happen after Solarlog restart)."
|
||||
)
|
||||
|
||||
self.logger.debug(
|
||||
"Connection to Solarlog successful. Retrieving latest Solarlog update of %s",
|
||||
api.time,
|
||||
)
|
||||
|
||||
data = {}
|
||||
|
||||
try:
|
||||
data["TIME"] = api.time
|
||||
data["powerAC"] = api.power_ac
|
||||
data["powerDC"] = api.power_dc
|
||||
data["voltageAC"] = api.voltage_ac
|
||||
data["voltageDC"] = api.voltage_dc
|
||||
data["yieldDAY"] = api.yield_day / 1000
|
||||
data["yieldYESTERDAY"] = api.yield_yesterday / 1000
|
||||
data["yieldMONTH"] = api.yield_month / 1000
|
||||
data["yieldYEAR"] = api.yield_year / 1000
|
||||
data["yieldTOTAL"] = api.yield_total / 1000
|
||||
data["consumptionAC"] = api.consumption_ac
|
||||
data["consumptionDAY"] = api.consumption_day / 1000
|
||||
data["consumptionYESTERDAY"] = api.consumption_yesterday / 1000
|
||||
data["consumptionMONTH"] = api.consumption_month / 1000
|
||||
data["consumptionYEAR"] = api.consumption_year / 1000
|
||||
data["consumptionTOTAL"] = api.consumption_total / 1000
|
||||
data["totalPOWER"] = api.total_power
|
||||
data["alternatorLOSS"] = api.alternator_loss
|
||||
data["CAPACITY"] = round(api.capacity * 100, 0)
|
||||
data["EFFICIENCY"] = round(api.efficiency * 100, 0)
|
||||
data["powerAVAILABLE"] = api.power_available
|
||||
data["USAGE"] = round(api.usage * 100, 0)
|
||||
except AttributeError as err:
|
||||
raise update_coordinator.UpdateFailed(
|
||||
f"Missing details data in Solarlog response: {err}"
|
||||
) from err
|
||||
|
||||
_LOGGER.debug("Updated Solarlog overview data: %s", data)
|
||||
return data
|
||||
|
||||
@@ -2,7 +2,6 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from dataclasses import dataclass
|
||||
from datetime import timedelta
|
||||
|
||||
from homeassistant.components.sensor import (
|
||||
STATE_CLASS_MEASUREMENT,
|
||||
@@ -23,13 +22,10 @@ from homeassistant.const import (
|
||||
|
||||
DOMAIN = "solarlog"
|
||||
|
||||
"""Default config for solarlog."""
|
||||
# Default config for solarlog.
|
||||
DEFAULT_HOST = "http://solar-log"
|
||||
DEFAULT_NAME = "solarlog"
|
||||
|
||||
"""Fixed constants."""
|
||||
SCAN_INTERVAL = timedelta(seconds=60)
|
||||
|
||||
|
||||
@dataclass
|
||||
class SolarlogRequiredKeysMixin:
|
||||
|
||||
@@ -1,133 +1,42 @@
|
||||
"""Platform for solarlog sensors."""
|
||||
import logging
|
||||
from urllib.parse import ParseResult, urlparse
|
||||
|
||||
from requests.exceptions import HTTPError, Timeout
|
||||
from sunwatcher.solarlog.solarlog import SolarLog
|
||||
|
||||
from homeassistant.components.sensor import SensorEntity
|
||||
from homeassistant.const import CONF_HOST
|
||||
from homeassistant.util import Throttle
|
||||
from homeassistant.helpers import update_coordinator
|
||||
from homeassistant.helpers.entity import StateType
|
||||
|
||||
from .const import DOMAIN, SCAN_INTERVAL, SENSOR_TYPES, SolarLogSensorEntityDescription
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
|
||||
"""Set up the solarlog platform."""
|
||||
_LOGGER.warning(
|
||||
"Configuration of the solarlog platform in configuration.yaml is deprecated "
|
||||
"in Home Assistant 0.119. Please remove entry from your configuration"
|
||||
)
|
||||
from . import SolarlogData
|
||||
from .const import DOMAIN, SENSOR_TYPES, SolarLogSensorEntityDescription
|
||||
|
||||
|
||||
async def async_setup_entry(hass, entry, async_add_entities):
|
||||
"""Add solarlog entry."""
|
||||
host_entry = entry.data[CONF_HOST]
|
||||
device_name = entry.title
|
||||
|
||||
url = urlparse(host_entry, "http")
|
||||
netloc = url.netloc or url.path
|
||||
path = url.path if url.netloc else ""
|
||||
url = ParseResult("http", netloc, path, *url[3:])
|
||||
host = url.geturl()
|
||||
|
||||
try:
|
||||
api = await hass.async_add_executor_job(SolarLog, host)
|
||||
_LOGGER.debug("Connected to Solar-Log device, setting up entries")
|
||||
except (OSError, HTTPError, Timeout):
|
||||
_LOGGER.error(
|
||||
"Could not connect to Solar-Log device at %s, check host ip address", host
|
||||
)
|
||||
return
|
||||
|
||||
# Create solarlog data service which will retrieve and update the data.
|
||||
data = await hass.async_add_executor_job(SolarlogData, hass, api, host)
|
||||
|
||||
# Create a new sensor for each sensor type.
|
||||
entities = [
|
||||
SolarlogSensor(entry.entry_id, device_name, data, description)
|
||||
for description in SENSOR_TYPES
|
||||
]
|
||||
async_add_entities(entities, True)
|
||||
return True
|
||||
coordinator = hass.data[DOMAIN][entry.entry_id]
|
||||
async_add_entities(
|
||||
SolarlogSensor(coordinator, description) for description in SENSOR_TYPES
|
||||
)
|
||||
|
||||
|
||||
class SolarlogData:
|
||||
"""Get and update the latest data."""
|
||||
|
||||
def __init__(self, hass, api, host):
|
||||
"""Initialize the data object."""
|
||||
self.api = api
|
||||
self.hass = hass
|
||||
self.host = host
|
||||
self.update = Throttle(SCAN_INTERVAL)(self._update)
|
||||
self.data = {}
|
||||
|
||||
def _update(self):
|
||||
"""Update the data from the SolarLog device."""
|
||||
try:
|
||||
self.api = SolarLog(self.host)
|
||||
response = self.api.time
|
||||
_LOGGER.debug(
|
||||
"Connection to Solarlog successful. Retrieving latest Solarlog update of %s",
|
||||
response,
|
||||
)
|
||||
except (OSError, Timeout, HTTPError):
|
||||
_LOGGER.error("Connection error, Could not retrieve data, skipping update")
|
||||
return
|
||||
|
||||
try:
|
||||
self.data["TIME"] = self.api.time
|
||||
self.data["powerAC"] = self.api.power_ac
|
||||
self.data["powerDC"] = self.api.power_dc
|
||||
self.data["voltageAC"] = self.api.voltage_ac
|
||||
self.data["voltageDC"] = self.api.voltage_dc
|
||||
self.data["yieldDAY"] = self.api.yield_day / 1000
|
||||
self.data["yieldYESTERDAY"] = self.api.yield_yesterday / 1000
|
||||
self.data["yieldMONTH"] = self.api.yield_month / 1000
|
||||
self.data["yieldYEAR"] = self.api.yield_year / 1000
|
||||
self.data["yieldTOTAL"] = self.api.yield_total / 1000
|
||||
self.data["consumptionAC"] = self.api.consumption_ac
|
||||
self.data["consumptionDAY"] = self.api.consumption_day / 1000
|
||||
self.data["consumptionYESTERDAY"] = self.api.consumption_yesterday / 1000
|
||||
self.data["consumptionMONTH"] = self.api.consumption_month / 1000
|
||||
self.data["consumptionYEAR"] = self.api.consumption_year / 1000
|
||||
self.data["consumptionTOTAL"] = self.api.consumption_total / 1000
|
||||
self.data["totalPOWER"] = self.api.total_power
|
||||
self.data["alternatorLOSS"] = self.api.alternator_loss
|
||||
self.data["CAPACITY"] = round(self.api.capacity * 100, 0)
|
||||
self.data["EFFICIENCY"] = round(self.api.efficiency * 100, 0)
|
||||
self.data["powerAVAILABLE"] = self.api.power_available
|
||||
self.data["USAGE"] = round(self.api.usage * 100, 0)
|
||||
_LOGGER.debug("Updated Solarlog overview data: %s", self.data)
|
||||
except AttributeError:
|
||||
_LOGGER.error("Missing details data in Solarlog response")
|
||||
|
||||
|
||||
class SolarlogSensor(SensorEntity):
|
||||
class SolarlogSensor(update_coordinator.CoordinatorEntity, SensorEntity):
|
||||
"""Representation of a Sensor."""
|
||||
|
||||
entity_description: SolarLogSensorEntityDescription
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
entry_id: str,
|
||||
device_name: str,
|
||||
data: SolarlogData,
|
||||
coordinator: SolarlogData,
|
||||
description: SolarLogSensorEntityDescription,
|
||||
) -> None:
|
||||
"""Initialize the sensor."""
|
||||
super().__init__(coordinator)
|
||||
self.entity_description = description
|
||||
self.data = data
|
||||
self._attr_name = f"{device_name} {description.name}"
|
||||
self._attr_unique_id = f"{entry_id}_{description.key}"
|
||||
self._attr_name = f"{coordinator.name} {description.name}"
|
||||
self._attr_unique_id = f"{coordinator.unique_id}_{description.key}"
|
||||
self._attr_device_info = {
|
||||
"identifiers": {(DOMAIN, entry_id)},
|
||||
"name": device_name,
|
||||
"identifiers": {(DOMAIN, coordinator.unique_id)},
|
||||
"name": coordinator.name,
|
||||
"manufacturer": "Solar-Log",
|
||||
}
|
||||
|
||||
def update(self):
|
||||
"""Get the latest data from the sensor and update the state."""
|
||||
self.data.update()
|
||||
self._attr_native_value = self.data.data[self.entity_description.json_key]
|
||||
@property
|
||||
def native_value(self) -> StateType:
|
||||
"""Return the native sensor value."""
|
||||
return self.coordinator.data[self.entity_description.json_key]
|
||||
|
||||
@@ -64,9 +64,7 @@ class SonarrConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
|
||||
def __init__(self):
|
||||
"""Initialize the flow."""
|
||||
self._reauth = False
|
||||
self._entry_id = None
|
||||
self._entry_data = {}
|
||||
self.entry = None
|
||||
|
||||
@staticmethod
|
||||
@callback
|
||||
@@ -76,10 +74,7 @@ class SonarrConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
|
||||
async def async_step_reauth(self, data: dict[str, Any] | None = None) -> FlowResult:
|
||||
"""Handle configuration by re-auth."""
|
||||
self._reauth = True
|
||||
self._entry_data = dict(data)
|
||||
entry = await self.async_set_unique_id(self.unique_id)
|
||||
self._entry_id = entry.entry_id
|
||||
self.entry = self.hass.config_entries.async_get_entry(self.context["entry_id"])
|
||||
|
||||
return await self.async_step_reauth_confirm()
|
||||
|
||||
@@ -90,7 +85,7 @@ class SonarrConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
if user_input is None:
|
||||
return self.async_show_form(
|
||||
step_id="reauth_confirm",
|
||||
description_placeholders={"host": self._entry_data[CONF_HOST]},
|
||||
description_placeholders={"host": self.entry.data[CONF_HOST]},
|
||||
data_schema=vol.Schema({}),
|
||||
errors={},
|
||||
)
|
||||
@@ -104,8 +99,8 @@ class SonarrConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
errors = {}
|
||||
|
||||
if user_input is not None:
|
||||
if self._reauth:
|
||||
user_input = {**self._entry_data, **user_input}
|
||||
if self.entry:
|
||||
user_input = {**self.entry.data, **user_input}
|
||||
|
||||
if CONF_VERIFY_SSL not in user_input:
|
||||
user_input[CONF_VERIFY_SSL] = DEFAULT_VERIFY_SSL
|
||||
@@ -120,10 +115,8 @@ class SonarrConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
_LOGGER.exception("Unexpected exception")
|
||||
return self.async_abort(reason="unknown")
|
||||
else:
|
||||
if self._reauth:
|
||||
return await self._async_reauth_update_entry(
|
||||
self._entry_id, user_input
|
||||
)
|
||||
if self.entry:
|
||||
return await self._async_reauth_update_entry(user_input)
|
||||
|
||||
return self.async_create_entry(
|
||||
title=user_input[CONF_HOST], data=user_input
|
||||
@@ -136,17 +129,16 @@ class SonarrConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
errors=errors,
|
||||
)
|
||||
|
||||
async def _async_reauth_update_entry(self, entry_id: str, data: dict) -> FlowResult:
|
||||
async def _async_reauth_update_entry(self, data: dict) -> FlowResult:
|
||||
"""Update existing config entry."""
|
||||
entry = self.hass.config_entries.async_get_entry(entry_id)
|
||||
self.hass.config_entries.async_update_entry(entry, data=data)
|
||||
await self.hass.config_entries.async_reload(entry.entry_id)
|
||||
self.hass.config_entries.async_update_entry(self.entry, data=data)
|
||||
await self.hass.config_entries.async_reload(self.entry.entry_id)
|
||||
|
||||
return self.async_abort(reason="reauth_successful")
|
||||
|
||||
def _get_user_data_schema(self) -> dict[str, Any]:
|
||||
"""Get the data schema to display user form."""
|
||||
if self._reauth:
|
||||
if self.entry:
|
||||
return {vol.Required(CONF_API_KEY): str}
|
||||
|
||||
data_schema = {
|
||||
|
||||
@@ -223,6 +223,7 @@ async def async_setup_entry(
{
vol.Required(ATTR_ALARM_ID): cv.positive_int,
vol.Optional(ATTR_TIME): cv.time,
vol.Optional(ATTR_VOLUME): cv.small_float,
vol.Optional(ATTR_ENABLED): cv.boolean,
vol.Optional(ATTR_INCLUDE_LINKED_ZONES): cv.boolean,
},

@@ -323,6 +323,18 @@ class SonosSpeaker:
async def async_subscribe(self) -> bool:
"""Initiate event subscriptions."""
_LOGGER.debug("Creating subscriptions for %s", self.zone_name)

# Create a polling task in case subscriptions fail or callback events do not arrive
if not self._poll_timer:
self._poll_timer = self.hass.helpers.event.async_track_time_interval(
partial(
async_dispatcher_send,
self.hass,
f"{SONOS_POLL_UPDATE}-{self.soco.uid}",
),
SCAN_INTERVAL,
)

try:
await self.hass.async_add_executor_job(self.set_basic_info)

@@ -337,10 +349,10 @@ class SonosSpeaker:
for service in SUBSCRIPTION_SERVICES
]
await asyncio.gather(*subscriptions)
return True
except SoCoException as ex:
_LOGGER.warning("Could not connect %s: %s", self.zone_name, ex)
return False
return True

async def _subscribe(
self, target: SubscriptionBase, sub_callback: Callable
@@ -497,15 +509,6 @@ class SonosSpeaker:
self.soco.ip_address,
)

self._poll_timer = self.hass.helpers.event.async_track_time_interval(
partial(
async_dispatcher_send,
self.hass,
f"{SONOS_POLL_UPDATE}-{self.soco.uid}",
),
SCAN_INTERVAL,
)

if self._is_ready and not self.subscriptions_failed:
done = await self.async_subscribe()
if not done:
@@ -567,15 +570,6 @@ class SonosSpeaker:
self._seen_timer = self.hass.helpers.event.async_call_later(
SEEN_EXPIRE_TIME.total_seconds(), self.async_unseen
)
if not self._poll_timer:
self._poll_timer = self.hass.helpers.event.async_track_time_interval(
partial(
async_dispatcher_send,
self.hass,
f"{SONOS_POLL_UPDATE}-{self.soco.uid}",
),
SCAN_INTERVAL,
)
self.async_write_entity_states()

#
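The SonosSpeaker hunks arm the fallback poll timer inside `async_subscribe`, so polling covers the case where UPnP subscriptions fail or callback events stop arriving. Below is a stand-alone sketch of that dispatcher-driven fallback; the signal name, interval and the `arm_fallback_polling` helper are invented for illustration.

```python
"""Sketch of a dispatcher-driven polling fallback (signal name and interval are assumed)."""
from datetime import timedelta
from functools import partial

from homeassistant.core import HomeAssistant
from homeassistant.helpers.dispatcher import async_dispatcher_send
from homeassistant.helpers.event import async_track_time_interval

SCAN_INTERVAL = timedelta(seconds=10)  # assumed poll interval


def arm_fallback_polling(hass: HomeAssistant, uid: str):
    """Send a per-device dispatcher signal on a fixed interval.

    Entities connect to the signal with async_dispatcher_connect and refresh
    themselves when it fires; the timer callback receives the tick time and
    forwards it as the dispatcher payload, mirroring the partial() usage above.
    """
    signal = f"example_poll_update-{uid}"  # hypothetical signal name
    return async_track_time_interval(
        hass, partial(async_dispatcher_send, hass, signal), SCAN_INTERVAL
    )
```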
@@ -286,6 +286,11 @@ class Scanner:
if header_st is not None:
self.seen.add((header_st, header_location))

def _async_unsee(self, header_st: str | None, header_location: str | None) -> None:
"""If we see a device in a new location, unsee the original location."""
if header_st is not None:
self.seen.remove((header_st, header_location))

async def _async_process_entry(self, headers: Mapping[str, str]) -> None:
"""Process SSDP entries."""
_LOGGER.debug("_async_process_entry: %s", headers)
@@ -293,7 +298,12 @@ class Scanner:
h_location = headers.get("location")

if h_st and (udn := _udn_from_usn(headers.get("usn"))):
self.cache[(udn, h_st)] = headers
cache_key = (udn, h_st)
if old_headers := self.cache.get(cache_key):
old_h_location = old_headers.get("location")
if h_location != old_h_location:
self._async_unsee(old_headers.get("st"), old_h_location)
self.cache[cache_key] = headers

callbacks = self._async_get_matching_callbacks(headers)
if self._async_seen(h_st, h_location) and not callbacks:
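The ssdp `Scanner` change keys its cache on `(udn, st)` and, when a device shows up at a new location, removes the stale `(st, location)` pair from the seen set so the entry is processed again. A toy version of that bookkeeping, independent of the real `Scanner` class (class and method names are made up):

```python
"""Toy version of the (udn, st) cache with location-change handling (illustrative only)."""
from __future__ import annotations


class SeenCache:
    """Track SSDP devices by (udn, st) and forget stale locations."""

    def __init__(self) -> None:
        self.cache: dict[tuple[str, str], dict[str, str]] = {}
        self.seen: set[tuple[str, str | None]] = set()

    def process(self, udn: str, st: str, headers: dict[str, str]) -> bool:
        """Store headers; return True if this (st, location) was already seen."""
        location = headers.get("location")
        cache_key = (udn, st)
        if old_headers := self.cache.get(cache_key):
            old_location = old_headers.get("location")
            if location != old_location:
                # The device moved: drop the old location so callbacks fire again.
                self.seen.discard((st, old_location))
        self.cache[cache_key] = headers
        already_seen = (st, location) in self.seen
        self.seen.add((st, location))
        return already_seen


# Example: the second advertisement with a new location is treated as unseen again.
cache = SeenCache()
cache.process("uuid:1", "upnp:rootdevice", {"location": "http://192.0.2.1/desc.xml"})
cache.process("uuid:1", "upnp:rootdevice", {"location": "http://192.0.2.2/desc.xml"})
```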
@@ -168,10 +168,7 @@ class TadoHomeSensor(TadoHomeEntity, SensorEntity):
return

if self.home_variable == "outdoor temperature":
self._state = self.hass.config.units.temperature(
self._tado_weather_data["outsideTemperature"]["celsius"],
TEMP_CELSIUS,
)
self._state = self._tado_weather_data["outsideTemperature"]["celsius"]
self._state_attributes = {
"time": self._tado_weather_data["outsideTemperature"]["timestamp"],
}
@@ -245,7 +242,7 @@ class TadoZoneSensor(TadoZoneEntity, SensorEntity):
def native_unit_of_measurement(self):
"""Return the unit of measurement."""
if self.zone_variable == "temperature":
return self.hass.config.units.temperature_unit
return TEMP_CELSIUS
if self.zone_variable == "humidity":
return PERCENTAGE
if self.zone_variable == "heating":
@@ -277,9 +274,7 @@ class TadoZoneSensor(TadoZoneEntity, SensorEntity):
return

if self.zone_variable == "temperature":
self._state = self.hass.config.units.temperature(
self._tado_zone_data.current_temp, TEMP_CELSIUS
)
self._state = self._tado_zone_data.current_temp
self._state_attributes = {
"time": self._tado_zone_data.current_temp_timestamp,
"setting": 0, # setting is used in climate device
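The Tado hunks stop converting temperatures by hand and instead report the raw Celsius reading through `native_value`, leaving display conversion to `native_unit_of_measurement`. A minimal `SensorEntity` illustrating that convention (generic class name, not the Tado code):

```python
"""Minimal native-unit sensor sketch (generic example, not the Tado implementation)."""
from homeassistant.components.sensor import (
    DEVICE_CLASS_TEMPERATURE,
    STATE_CLASS_MEASUREMENT,
    SensorEntity,
)
from homeassistant.const import TEMP_CELSIUS


class OutdoorTemperatureSensor(SensorEntity):
    """Report the value in the unit the device uses; HA converts it for display."""

    _attr_device_class = DEVICE_CLASS_TEMPERATURE
    _attr_state_class = STATE_CLASS_MEASUREMENT
    _attr_native_unit_of_measurement = TEMP_CELSIUS

    def __init__(self, reading_celsius: float) -> None:
        # Store the raw Celsius value; no hass.config.units.temperature() call.
        self._attr_native_value = reading_celsius
```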
@@ -29,7 +29,6 @@ from .entity import TractiveEntity
class TractiveSensorEntityDescription(SensorEntityDescription):
"""Class describing Tractive sensor entities."""

attributes: tuple = ()
entity_class: type[TractiveSensor] | None = None


@@ -40,6 +39,7 @@ class TractiveSensor(TractiveEntity, SensorEntity):
"""Initialize sensor entity."""
super().__init__(user_id, trackable, tracker_details)

self._attr_name = f"{trackable['details']['name']} {description.name}"
self._attr_unique_id = unique_id
self.entity_description = description

@@ -53,11 +53,6 @@ class TractiveSensor(TractiveEntity, SensorEntity):
class TractiveHardwareSensor(TractiveSensor):
"""Tractive hardware sensor."""

def __init__(self, user_id, trackable, tracker_details, unique_id, description):
"""Initialize sensor entity."""
super().__init__(user_id, trackable, tracker_details, unique_id, description)
self._attr_name = f"{self._tracker_id} {description.name}"

@callback
def handle_hardware_status_update(self, event):
"""Handle hardware status update."""
@@ -88,19 +83,10 @@
class TractiveActivitySensor(TractiveSensor):
"""Tractive active sensor."""

def __init__(self, user_id, trackable, tracker_details, unique_id, description):
"""Initialize sensor entity."""
super().__init__(user_id, trackable, tracker_details, unique_id, description)
self._attr_name = f"{trackable['details']['name']} {description.name}"

@callback
def handle_activity_status_update(self, event):
"""Handle activity status update."""
self._attr_native_value = event[self.entity_description.key]
self._attr_extra_state_attributes = {
attr: event[attr] if attr in event else None
for attr in self.entity_description.attributes
}
self._attr_available = True
self.async_write_ha_state()

@@ -137,7 +123,13 @@ SENSOR_TYPES = (
name="Minutes Active",
icon="mdi:clock-time-eight-outline",
native_unit_of_measurement=TIME_MINUTES,
attributes=(ATTR_DAILY_GOAL,),
entity_class=TractiveActivitySensor,
),
TractiveSensorEntityDescription(
key=ATTR_DAILY_GOAL,
name="Daily Goal",
icon="mdi:flag-checkered",
native_unit_of_measurement=TIME_MINUTES,
entity_class=TractiveActivitySensor,
),
)
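The Tractive hunks move naming into the base sensor and drive the entities from an extended `SensorEntityDescription`. A short sketch of that description-driven pattern with hypothetical `Example*` names and keys:

```python
"""Sketch of an extended SensorEntityDescription (hypothetical names, not the Tractive code)."""
from __future__ import annotations

from dataclasses import dataclass

from homeassistant.components.sensor import SensorEntity, SensorEntityDescription


@dataclass
class ExampleSensorEntityDescription(SensorEntityDescription):
    """Description with extra fields the entities read at runtime."""

    attributes: tuple = ()
    entity_class: type | None = None


class ExampleSensor(SensorEntity):
    """One generic entity class; the description supplies key, name and attributes."""

    def __init__(
        self, device_name: str, unique_id: str, description: ExampleSensorEntityDescription
    ) -> None:
        self.entity_description = description
        self._attr_name = f"{device_name} {description.name}"
        self._attr_unique_id = unique_id

    def handle_update(self, event: dict) -> None:
        """Copy the described key and optional attributes from a push event."""
        self._attr_native_value = event[self.entity_description.key]
        self._attr_extra_state_attributes = {
            attr: event.get(attr) for attr in self.entity_description.attributes
        }


SENSOR_TYPES = (
    ExampleSensorEntityDescription(
        key="minutes_active",
        name="Minutes Active",
        attributes=("daily_goal",),
        entity_class=ExampleSensor,
    ),
)
```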
@@ -2,6 +2,7 @@
from __future__ import annotations

import dataclasses
import fnmatch
import logging
import os
import sys
@@ -72,6 +73,13 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
return True


def _fnmatch_lower(name: str | None, pattern: str) -> bool:
"""Match a lowercase version of the name."""
if name is None:
return False
return fnmatch.fnmatch(name.lower(), pattern)


class USBDiscovery:
"""Manage USB Discovery."""

@@ -119,7 +127,13 @@ class USBDiscovery:
return

monitor = Monitor.from_netlink(context)
monitor.filter_by(subsystem="tty")
try:
monitor.filter_by(subsystem="tty")
except ValueError as ex: # this fails on WSL
_LOGGER.debug(
"Unable to setup pyudev filtering; This is expected on WSL: %s", ex
)
return
observer = MonitorObserver(
monitor, callback=self._device_discovered, name="usb-observer"
)
@@ -152,6 +166,18 @@
continue
if "pid" in matcher and device.pid != matcher["pid"]:
continue
if "serial_number" in matcher and not _fnmatch_lower(
device.serial_number, matcher["serial_number"]
):
continue
if "manufacturer" in matcher and not _fnmatch_lower(
device.manufacturer, matcher["manufacturer"]
):
continue
if "description" in matcher and not _fnmatch_lower(
device.description, matcher["description"]
):
continue
flow: USBFlow = {
"domain": matcher["domain"],
"context": {"source": config_entries.SOURCE_USB},
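The usb hunks add case-insensitive glob matching on serial number, manufacturer and description. A self-contained sketch of that matcher logic; the `matches` helper and the example matcher dict are made up:

```python
"""Sketch of case-insensitive glob matching for USB matchers (made-up example data)."""
from __future__ import annotations

import fnmatch


def _fnmatch_lower(name: str | None, pattern: str) -> bool:
    """Match a lowercase version of the name against a lowercase glob pattern."""
    if name is None:
        return False
    return fnmatch.fnmatch(name.lower(), pattern)


def matches(device: dict[str, str | None], matcher: dict[str, str]) -> bool:
    """Return True if every field present in the matcher globs the device field."""
    for field in ("serial_number", "manufacturer", "description"):
        if field in matcher and not _fnmatch_lower(device.get(field), matcher[field]):
            return False
    return True


# A "*zigbee*" description matcher accepts any casing of the word.
print(matches({"description": "HubZ ZigBee Com Port"}, {"description": "*zigbee*"}))  # True
```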
@@ -87,7 +87,7 @@ async def async_setup_platform(hass, config, async_add_entities, discovery_info=
source = HaVersionSource.CONTAINER

if (
source in (HaVersionSource.SUPERVISOR, HaVersionSource.CONTAINER)
source == HaVersionSource.CONTAINER
and image is not None
and image != DEFAULT_IMAGE
):
@@ -3,7 +3,7 @@ import asyncio
from datetime import timedelta
import logging

from pywemo import WeMoDevice
from pywemo import Insight, WeMoDevice
from pywemo.exceptions import ActionException
from pywemo.subscribe import EVENT_TYPE_LONG_PRESS

@@ -81,11 +81,26 @@ class DeviceCoordinator(DataUpdateCoordinator):
else:
self.async_set_updated_data(None)

@property
def should_poll(self) -> bool:
"""Return True if polling is needed to update the state for the device.

The alternative, when this returns False, is to rely on the subscription
"push updates" to update the device state in Home Assistant.
"""
if isinstance(self.wemo, Insight) and self.wemo.get_state() == 0:
# The WeMo Insight device does not send subscription updates for the
# insight_params values when the device is off. Polling is required in
# this case so the Sensor entities are properly populated.
return True

registry = self.hass.data[DOMAIN]["registry"]
return not (registry.is_subscribed(self.wemo) and self.last_update_success)

async def _async_update_data(self) -> None:
"""Update WeMo state."""
# No need to poll if the device will push updates.
registry = self.hass.data[DOMAIN]["registry"]
if registry.is_subscribed(self.wemo) and self.last_update_success:
if not self.should_poll:
return

# If an update is in progress, we don't do anything.
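The wemo hunk centralises the "do we need to poll?" decision in a `should_poll` property so the update method can simply return early. A condensed, self-contained sketch of that gating logic, with generic flags standing in for the registry subscription check and the Insight special case:

```python
"""Condensed sketch of the polling decision above (generic flags, not the real WeMo coordinator)."""
from dataclasses import dataclass


@dataclass
class PushAwarePoller:
    """Poll only when push updates cannot be trusted."""

    subscribed: bool = False          # registry.is_subscribed(...) in the diff
    last_update_success: bool = True
    push_gap_while_off: bool = False  # the Insight "device is off" special case

    @property
    def should_poll(self) -> bool:
        """Return True if a poll is needed to keep the state current."""
        if self.push_gap_while_off:
            return True
        return not (self.subscribed and self.last_update_success)


# With a healthy subscription there is no poll; when the push channel has a gap, poll.
poller = PushAwarePoller(subscribed=True)
assert poller.should_poll is False
poller.push_gap_while_off = True
assert poller.should_poll is True
```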
@@ -148,7 +148,7 @@ class WolfLinkState(WolfLinkSensor):
@property
def native_value(self):
"""Return the state converting with supported values."""
state = super().state
state = super().native_value
resolved_state = [
item for item in self.wolf_object.items if item.value == int(state)
]
@@ -333,7 +333,7 @@ class XiaomiGenericDevice(XiaomiCoordinatedMiioEntity, FanEntity):
}
)
self._mode = self._state_attrs.get(ATTR_MODE)
self._fan_level = self.coordinator.data.fan_level
self._fan_level = getattr(self.coordinator.data, ATTR_FAN_LEVEL, None)
self.async_write_ha_state()

#
@@ -440,7 +440,7 @@ class XiaomiAirPurifier(XiaomiGenericDevice):
{attribute: None for attribute in self._available_attributes}
)
self._mode = self._state_attrs.get(ATTR_MODE)
self._fan_level = self.coordinator.data.fan_level
self._fan_level = getattr(self.coordinator.data, ATTR_FAN_LEVEL, None)

@property
def preset_mode(self):
@@ -163,6 +163,8 @@ UPDATE_REQUEST_PROPERTIES = [
"active_mode",
]

BULB_EXCEPTIONS = (BulbException, asyncio.TimeoutError)

PLATFORMS = ["binary_sensor", "light"]


@@ -196,7 +198,6 @@ async def _async_initialize(
entry_data = hass.data[DOMAIN][DATA_CONFIG_ENTRIES][entry.entry_id] = {
DATA_PLATFORMS_LOADED: False
}
entry.async_on_unload(entry.add_update_listener(_async_update_listener))

@callback
def _async_load_platforms():
@@ -212,6 +213,15 @@ async def _async_initialize(
await device.async_setup()
entry_data[DATA_DEVICE] = device

if (
device.capabilities
and entry.options.get(CONF_MODEL) != device.capabilities["model"]
):
hass.config_entries.async_update_entry(
entry, options={**entry.options, CONF_MODEL: device.capabilities["model"]}
)

entry.async_on_unload(entry.add_update_listener(_async_update_listener))
entry.async_on_unload(
async_dispatcher_connect(
hass, DEVICE_INITIALIZED.format(host), _async_load_platforms
@@ -264,7 +274,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
if entry.data.get(CONF_HOST):
try:
device = await _async_get_device(hass, entry.data[CONF_HOST], entry)
except BulbException as ex:
except BULB_EXCEPTIONS as ex:
# If CONF_ID is not valid we cannot fallback to discovery
# so we must retry by raising ConfigEntryNotReady
if not entry.data.get(CONF_ID):
@@ -279,7 +289,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
host = urlparse(capabilities["location"]).hostname
try:
await _async_initialize(hass, entry, host)
except BulbException:
except BULB_EXCEPTIONS:
_LOGGER.exception("Failed to connect to bulb at %s", host)

# discovery
@@ -540,10 +550,11 @@ class YeelightDevice:
self._config = config
self._host = host
self._bulb_device = bulb
self._capabilities = {}
self.capabilities = {}
self._device_type = None
self._available = False
self._initialized = False
self._did_first_update = False
self._name = None

@property
@@ -574,12 +585,12 @@ class YeelightDevice:
@property
def model(self):
"""Return configured/autodetected device model."""
return self._bulb_device.model or self._capabilities.get("model")
return self._bulb_device.model or self.capabilities.get("model")

@property
def fw_version(self):
"""Return the firmware version."""
return self._capabilities.get("fw_ver")
return self.capabilities.get("fw_ver")

@property
def is_nightlight_supported(self) -> bool:
@@ -639,14 +650,14 @@ class YeelightDevice:
await self.bulb.async_turn_on(
duration=duration, light_type=light_type, power_mode=power_mode
)
except BulbException as ex:
except BULB_EXCEPTIONS as ex:
_LOGGER.error("Unable to turn the bulb on: %s", ex)

async def async_turn_off(self, duration=DEFAULT_TRANSITION, light_type=None):
"""Turn off device."""
try:
await self.bulb.async_turn_off(duration=duration, light_type=light_type)
except BulbException as ex:
except BULB_EXCEPTIONS as ex:
_LOGGER.error(
"Unable to turn the bulb off: %s, %s: %s", self._host, self.name, ex
)
@@ -662,7 +673,7 @@ class YeelightDevice:
if not self._initialized:
self._initialized = True
async_dispatcher_send(self._hass, DEVICE_INITIALIZED.format(self._host))
except BulbException as ex:
except BULB_EXCEPTIONS as ex:
if self._available: # just inform once
_LOGGER.error(
"Unable to update device %s, %s: %s", self._host, self.name, ex
@@ -674,20 +685,23 @@ class YeelightDevice:
async def async_setup(self):
"""Fetch capabilities and setup name if available."""
scanner = YeelightScanner.async_get(self._hass)
self._capabilities = await scanner.async_get_capabilities(self._host) or {}
self.capabilities = await scanner.async_get_capabilities(self._host) or {}
if self.capabilities:
self._bulb_device.set_capabilities(self.capabilities)
if name := self._config.get(CONF_NAME):
# Override default name when name is set in config
self._name = name
elif self._capabilities:
elif self.capabilities:
# Generate name from model and id when capabilities is available
self._name = _async_unique_name(self._capabilities)
self._name = _async_unique_name(self.capabilities)
else:
self._name = self._host # Default name is host

async def async_update(self):
async def async_update(self, force=False):
"""Update device properties and send data updated signal."""
if self._initialized and self._available:
# No need to poll, already connected
self._did_first_update = True
if not force and self._initialized and self._available:
# No need to poll unless force, already connected
return
await self._async_update_properties()
async_dispatcher_send(self._hass, DATA_UPDATED.format(self._host))
@@ -695,7 +709,20 @@ class YeelightDevice:
@callback
def async_update_callback(self, data):
"""Update push from device."""
was_available = self._available
self._available = data.get(KEY_CONNECTED, True)
if self._did_first_update and not was_available and self._available:
# On reconnect the properties may be out of sync
#
# We need to make sure the DEVICE_INITIALIZED dispatcher is setup
# before we can update on reconnect by checking self._did_first_update
#
# If the device drops the connection right away, we do not want to
# do a property resync via async_update since its about
# to be called when async_setup_entry reaches the end of the
# function
#
asyncio.create_task(self.async_update(True))
async_dispatcher_send(self._hass, DATA_UPDATED.format(self._host))
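The YeelightDevice hunks add a `force` flag to `async_update` plus a `_did_first_update` guard, so a reconnect triggers exactly one property resync without racing the initial setup. A reduced sketch of that bookkeeping (stand-alone class, assumed attribute names; `on_push` is expected to run inside the event loop):

```python
"""Reduced sketch of the reconnect/force-update bookkeeping (assumed names, not the real device class)."""
import asyncio


class PushConnectedDevice:
    """Skip polling while connected, but resync once after a reconnect."""

    def __init__(self) -> None:
        self.available = False
        self.initialized = True
        self._did_first_update = False
        self.properties: dict[str, str] = {}

    async def async_update(self, force: bool = False) -> None:
        """Refresh properties unless the push connection already covers us."""
        self._did_first_update = True
        if not force and self.initialized and self.available:
            return  # push updates keep us current
        self.properties = {"power": "on"}  # placeholder for the real fetch

    def on_push(self, connected: bool) -> None:
        """Push callback: detect an offline -> online transition.

        Must be called from within the running event loop, since it schedules
        the forced resync with asyncio.create_task().
        """
        was_available = self.available
        self.available = connected
        if self._did_first_update and not was_available and self.available:
            # Properties may be stale after a reconnect; force one resync.
            asyncio.create_task(self.async_update(True))
```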
@@ -96,7 +96,11 @@ class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
if user_input is not None:
return self.async_create_entry(
title=async_format_model_id(self._discovered_model, self.unique_id),
data={CONF_ID: self.unique_id, CONF_HOST: self._discovered_ip},
data={
CONF_ID: self.unique_id,
CONF_HOST: self._discovered_ip,
CONF_MODEL: self._discovered_model,
},
)

self._set_confirm_only()
@@ -129,6 +133,7 @@ class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
data={
CONF_HOST: user_input[CONF_HOST],
CONF_ID: self.unique_id,
CONF_MODEL: model,
},
)

@@ -151,7 +156,11 @@ class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
host = urlparse(capabilities["location"]).hostname
return self.async_create_entry(
title=_async_unique_name(capabilities),
data={CONF_ID: unique_id, CONF_HOST: host},
data={
CONF_ID: unique_id,
CONF_HOST: host,
CONF_MODEL: capabilities["model"],
},
)

configured_devices = {
@@ -6,7 +6,7 @@ import math

import voluptuous as vol
import yeelight
from yeelight import Bulb, BulbException, Flow, RGBTransition, SleepTransition, flows
from yeelight import Bulb, Flow, RGBTransition, SleepTransition, flows
from yeelight.enums import BulbType, LightType, PowerMode, SceneClass

from homeassistant.components.light import (
@@ -49,6 +49,7 @@ from . import (
ATTR_COUNT,
ATTR_MODE_MUSIC,
ATTR_TRANSITIONS,
BULB_EXCEPTIONS,
CONF_FLOW_PARAMS,
CONF_MODE_MUSIC,
CONF_NIGHTLIGHT_SWITCH,
@@ -241,7 +242,7 @@ def _async_cmd(func):
try:
_LOGGER.debug("Calling %s with %s %s", func, args, kwargs)
return await func(self, *args, **kwargs)
except BulbException as ex:
except BULB_EXCEPTIONS as ex:
_LOGGER.error("Error when calling %s: %s", func, ex)

return _async_wrap
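Most of the yeelight light changes swap bare `except BulbException` for the shared `BULB_EXCEPTIONS` tuple, and the `_async_cmd` decorator applies it in one place. A generic sketch of that pattern with a made-up `DeviceError` standing in for the real library exception:

```python
"""Generic sketch of a shared exception tuple plus an async logging decorator (made-up errors)."""
import asyncio
import functools
import logging

_LOGGER = logging.getLogger(__name__)


class DeviceError(Exception):
    """Stand-in for a library error such as BulbException."""


# One tuple covers the library error and command timeouts.
DEVICE_EXCEPTIONS = (DeviceError, asyncio.TimeoutError)


def async_cmd(func):
    """Log and swallow device errors for a single command coroutine."""

    @functools.wraps(func)
    async def _wrapper(*args, **kwargs):
        try:
            return await func(*args, **kwargs)
        except DEVICE_EXCEPTIONS as ex:
            _LOGGER.error("Error when calling %s: %s", func.__name__, ex)
            return None

    return _wrapper


@async_cmd
async def set_brightness(level: int) -> int:
    """Pretend command that may raise DeviceError or time out."""
    if level > 100:
        raise DeviceError("brightness out of range")
    return level


asyncio.run(set_brightness(150))  # logs the error instead of raising
```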
@@ -678,7 +679,7 @@ class YeelightGenericLight(YeelightEntity, LightEntity):
flow = Flow(count=count, transitions=transitions)
try:
await self._bulb.async_start_flow(flow, light_type=self.light_type)
except BulbException as ex:
except BULB_EXCEPTIONS as ex:
_LOGGER.error("Unable to set flash: %s", ex)

@_async_cmd
@@ -709,7 +710,7 @@ class YeelightGenericLight(YeelightEntity, LightEntity):
try:
await self._bulb.async_start_flow(flow, light_type=self.light_type)
self._effect = effect
except BulbException as ex:
except BULB_EXCEPTIONS as ex:
_LOGGER.error("Unable to set effect: %s", ex)

async def async_turn_on(self, **kwargs) -> None:
@@ -737,7 +738,7 @@ class YeelightGenericLight(YeelightEntity, LightEntity):
await self.hass.async_add_executor_job(
self.set_music_mode, self.config[CONF_MODE_MUSIC]
)
except BulbException as ex:
except BULB_EXCEPTIONS as ex:
_LOGGER.error(
"Unable to turn on music mode, consider disabling it: %s", ex
)
@@ -750,7 +751,7 @@ class YeelightGenericLight(YeelightEntity, LightEntity):
await self.async_set_brightness(brightness, duration)
await self.async_set_flash(flash)
await self.async_set_effect(effect)
except BulbException as ex:
except BULB_EXCEPTIONS as ex:
_LOGGER.error("Unable to set bulb properties: %s", ex)
return

@@ -758,10 +759,14 @@ class YeelightGenericLight(YeelightEntity, LightEntity):
if self.config[CONF_SAVE_ON_CHANGE] and (brightness or colortemp or rgb):
try:
await self.async_set_default()
except BulbException as ex:
except BULB_EXCEPTIONS as ex:
_LOGGER.error("Unable to set the defaults: %s", ex)
return

# Some devices (mainly nightlights) will not send back the on state so we need to force a refresh
if not self.is_on:
await self.device.async_update(True)

async def async_turn_off(self, **kwargs) -> None:
"""Turn off."""
if not self.is_on:
@@ -772,12 +777,15 @@ class YeelightGenericLight(YeelightEntity, LightEntity):
duration = int(kwargs.get(ATTR_TRANSITION) * 1000) # kwarg in s

await self.device.async_turn_off(duration=duration, light_type=self.light_type)
# Some devices will not send back the off state so we need to force a refresh
if self.is_on:
await self.device.async_update(True)

async def async_set_mode(self, mode: str):
"""Set a power mode."""
try:
await self._bulb.async_set_power_mode(PowerMode[mode.upper()])
except BulbException as ex:
except BULB_EXCEPTIONS as ex:
_LOGGER.error("Unable to set the power mode: %s", ex)

async def async_start_flow(self, transitions, count=0, action=ACTION_RECOVER):
@@ -788,7 +796,7 @@ class YeelightGenericLight(YeelightEntity, LightEntity):
)

await self._bulb.async_start_flow(flow, light_type=self.light_type)
except BulbException as ex:
except BULB_EXCEPTIONS as ex:
_LOGGER.error("Unable to set effect: %s", ex)

async def async_set_scene(self, scene_class, *args):
@@ -799,7 +807,7 @@ class YeelightGenericLight(YeelightEntity, LightEntity):
"""
try:
await self._bulb.async_set_scene(scene_class, *args)
except BulbException as ex:
except BULB_EXCEPTIONS as ex:
_LOGGER.error("Unable to set scene: %s", ex)


@@ -852,7 +860,12 @@ class YeelightColorLightWithoutNightlightSwitch(

@property
def _brightness_property(self):
return "current_brightness"
# If the nightlight is not active, we do not
# want to "current_brightness" since it will check
# "bg_power" and main light could still be on
if self.device.is_nightlight_enabled:
return "current_brightness"
return super()._brightness_property


class YeelightColorLightWithNightlightSwitch(
@@ -876,7 +889,12 @@ class YeelightWhiteTempWithoutNightlightSwitch(

@property
def _brightness_property(self):
return "current_brightness"
# If the nightlight is not active, we do not
# want to "current_brightness" since it will check
# "bg_power" and main light could still be on
if self.device.is_nightlight_enabled:
return "current_brightness"
return super()._brightness_property


class YeelightWithNightLight(
@@ -2,7 +2,7 @@
"domain": "yeelight",
"name": "Yeelight",
"documentation": "https://www.home-assistant.io/integrations/yeelight",
"requirements": ["yeelight==0.7.3", "async-upnp-client==0.20.0"],
"requirements": ["yeelight==0.7.4", "async-upnp-client==0.20.0"],
"codeowners": ["@rytilahti", "@zewelor", "@shenxn", "@starkillerOG"],
"config_flow": true,
"dependencies": ["network"],
@@ -37,7 +37,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
_LOGGER,
name="youless_gateway",
update_method=async_update_data,
update_interval=timedelta(seconds=2),
update_interval=timedelta(seconds=10),
)

await coordinator.async_config_entry_first_refresh()

@@ -82,14 +82,6 @@ class YoulessBaseSensor(CoordinatorEntity, SensorEntity):
"""Property to get the underlying sensor object."""
return None

@property
def native_unit_of_measurement(self) -> str | None:
"""Return the unit of measurement for the sensor."""
if self.get_sensor is None:
return None

return self.get_sensor.unit_of_measurement

@property
def native_value(self) -> StateType:
"""Determine the state value, only if a sensor is initialized."""
@@ -2,7 +2,7 @@
"domain": "zeroconf",
"name": "Zero-configuration networking (zeroconf)",
"documentation": "https://www.home-assistant.io/integrations/zeroconf",
"requirements": ["zeroconf==0.36.0"],
"requirements": ["zeroconf==0.36.2"],
"dependencies": ["network", "api"],
"codeowners": ["@bdraco"],
"quality_scale": "internal",
@@ -25,6 +25,7 @@ SUPPORTED_PORT_SETTINGS = (
CONF_BAUDRATE,
CONF_FLOWCONTROL,
)
DECONZ_DOMAIN = "deconz"


class ZhaFlowHandler(config_entries.ConfigFlow, domain=DOMAIN):
@@ -36,7 +37,6 @@ class ZhaFlowHandler(config_entries.ConfigFlow, domain=DOMAIN):
"""Initialize flow instance."""
self._device_path = None
self._radio_type = None
self._auto_detected_data = None
self._title = None

async def async_step_user(self, user_input=None):
@@ -121,18 +121,12 @@ class ZhaFlowHandler(config_entries.ConfigFlow, domain=DOMAIN):
# we ignore the usb discovery as they probably
# want to use it there instead
for flow in self.hass.config_entries.flow.async_progress():
if flow["handler"] == "deconz":
if flow["handler"] == DECONZ_DOMAIN:
return self.async_abort(reason="not_zha_device")
for entry in self.hass.config_entries.async_entries(DECONZ_DOMAIN):
if entry.source != config_entries.SOURCE_IGNORE:
return self.async_abort(reason="not_zha_device")

# The Nortek sticks are a special case since they
# have a Z-Wave and a Zigbee radio. We need to reject
# the Z-Wave radio.
if vid == "10C4" and pid == "8A2A" and "ZigBee" not in description:
return self.async_abort(reason="not_zha_device")

self._auto_detected_data = await detect_radios(dev_path)
if self._auto_detected_data is None:
return self.async_abort(reason="not_zha_device")
self._device_path = dev_path
self._title = usb.human_readable_device_name(
dev_path,
@@ -149,9 +143,15 @@ class ZhaFlowHandler(config_entries.ConfigFlow, domain=DOMAIN):
async def async_step_confirm(self, user_input=None):
"""Confirm a discovery."""
if user_input is not None:
auto_detected_data = await detect_radios(self._device_path)
if auto_detected_data is None:
# This probably will not happen since they
# have very specific usb matching, but there could
# be a problem with the device
return self.async_abort(reason="usb_probe_failed")
return self.async_create_entry(
title=self._title,
data=self._auto_detected_data,
data=auto_detected_data,
)

return self.async_show_form(
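The zha flow aborts USB discovery when another integration already owns the stick, either through an in-progress flow or a non-ignored config entry. A small sketch of that guard (the `deconz` domain is taken from the diff; the helper name is invented):

```python
"""Sketch of the "already handled elsewhere" guard for USB discovery (helper name is invented)."""
from homeassistant import config_entries
from homeassistant.core import HomeAssistant

OTHER_DOMAIN = "deconz"  # the integration that should win for these sticks


def should_abort_usb_discovery(hass: HomeAssistant) -> bool:
    """Abort if another integration has an in-progress flow or a live config entry."""
    for flow in hass.config_entries.flow.async_progress():
        if flow["handler"] == OTHER_DOMAIN:
            return True
    return any(
        entry.source != config_entries.SOURCE_IGNORE
        for entry in hass.config_entries.async_entries(OTHER_DOMAIN)
    )
```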
@@ -4,22 +4,21 @@
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/zha",
"requirements": [
"bellows==0.26.0",
"bellows==0.27.0",
"pyserial==3.5",
"pyserial-asyncio==0.5",
"zha-quirks==0.0.59",
"zha-quirks==0.0.60",
"zigpy-cc==0.5.2",
"zigpy-deconz==0.12.1",
"zigpy==0.36.1",
"zigpy-xbee==0.13.0",
"zigpy-deconz==0.13.0",
"zigpy==0.37.1",
"zigpy-xbee==0.14.0",
"zigpy-zigate==0.7.3",
"zigpy-znp==0.5.3"
"zigpy-znp==0.5.4"
],
"usb": [
{"vid":"10C4","pid":"EA60","known_devices":["slae.sh cc2652rb stick"]},
{"vid":"1CF1","pid":"0030","known_devices":["Conbee II"]},
{"vid":"1A86","pid":"7523","known_devices":["Electrolama zig-a-zig-ah"]},
{"vid":"10C4","pid":"8A2A","known_devices":["Nortek HUSBZB-1"]}
{"vid":"10C4","pid":"EA60","description":"*2652*","known_devices":["slae.sh cc2652rb stick"]},
{"vid":"1CF1","pid":"0030","description":"*conbee*","known_devices":["Conbee II"]},
{"vid":"10C4","pid":"8A2A","description":"*zigbee*","known_devices":["Nortek HUSBZB-1"]}
],
"codeowners": ["@dmulcahey", "@adminiuga"],
"zeroconf": [
@@ -30,7 +30,8 @@
},
"abort": {
"single_instance_allowed": "[%key:common::config_flow::abort::single_instance_allowed%]",
"not_zha_device": "This device is not a zha device"
"not_zha_device": "This device is not a zha device",
"usb_probe_failed": "Failed to probe the usb device"
}
},
"config_panel": {
@@ -145,7 +145,7 @@ async def async_setup_entry( # noqa: C901
value_updates_disc_info: dict[str, ZwaveDiscoveryInfo] = {}

# run discovery on all node values and create/update entities
for disc_info in async_discover_values(node):
for disc_info in async_discover_values(node, device):
platform = disc_info.platform

# This migration logic was added in 2021.3 to handle a breaking change to

@@ -4,14 +4,14 @@ from __future__ import annotations
from typing import Any, cast

from zwave_js_server.client import Client as ZwaveClient
from zwave_js_server.const import (
from zwave_js_server.const import CommandClass
from zwave_js_server.const.command_class.thermostat import (
THERMOSTAT_CURRENT_TEMP_PROPERTY,
THERMOSTAT_MODE_PROPERTY,
THERMOSTAT_MODE_SETPOINT_MAP,
THERMOSTAT_MODES,
THERMOSTAT_OPERATING_STATE_PROPERTY,
THERMOSTAT_SETPOINT_PROPERTY,
CommandClass,
ThermostatMode,
ThermostatOperatingState,
ThermostatSetpointType,

@@ -96,3 +96,5 @@ ENTITY_DESC_KEY_SIGNAL_STRENGTH = "signal_strength"
ENTITY_DESC_KEY_TEMPERATURE = "temperature"
ENTITY_DESC_KEY_TARGET_TEMPERATURE = "target_temperature"
ENTITY_DESC_KEY_TIMESTAMP = "timestamp"
ENTITY_DESC_KEY_MEASUREMENT = "measurement"
ENTITY_DESC_KEY_TOTAL_INCREASING = "total_increasing"

@@ -5,7 +5,7 @@ import logging
from typing import Any

from zwave_js_server.client import Client as ZwaveClient
from zwave_js_server.const import BarrierState
from zwave_js_server.const.command_class.barrior_operator import BarrierState
from zwave_js_server.model.value import Value as ZwaveValue

from homeassistant.components.cover import (

@@ -6,13 +6,17 @@ from dataclasses import asdict, dataclass, field
from typing import Any

from awesomeversion import AwesomeVersion
from zwave_js_server.const import THERMOSTAT_CURRENT_TEMP_PROPERTY, CommandClass
from zwave_js_server.const import CommandClass
from zwave_js_server.const.command_class.thermostat import (
THERMOSTAT_CURRENT_TEMP_PROPERTY,
)
from zwave_js_server.exceptions import UnknownValueData
from zwave_js_server.model.device_class import DeviceClassItem
from zwave_js_server.model.node import Node as ZwaveNode
from zwave_js_server.model.value import Value as ZwaveValue

from homeassistant.core import callback
from homeassistant.helpers.device_registry import DeviceEntry

from .const import LOGGER
from .discovery_data_template import (
@@ -667,7 +671,9 @@ DISCOVERY_SCHEMAS = [


@callback
def async_discover_values(node: ZwaveNode) -> Generator[ZwaveDiscoveryInfo, None, None]:
def async_discover_values(
node: ZwaveNode, device: DeviceEntry
) -> Generator[ZwaveDiscoveryInfo, None, None]:
"""Run discovery on ZWave node and return matching (primary) values."""
for value in node.values.values():
for schema in DISCOVERY_SCHEMAS:
@@ -758,7 +764,11 @@ def async_discover_values(node: ZwaveNode) -> Generator[ZwaveDiscoveryInfo, None
resolved_data = schema.data_template.resolve_data(value)
except UnknownValueData as err:
LOGGER.error(
"Discovery for value %s will be skipped: %s", value, err
"Discovery for value %s on device '%s' (%s) will be skipped: %s",
value,
device.name_by_user or device.name,
node,
err,
)
continue
additional_value_ids_to_watch = schema.data_template.value_ids_to_watch(
@@ -5,26 +5,29 @@ from collections.abc import Iterable
from dataclasses import dataclass
from typing import Any

from zwave_js_server.const import (
CO2_SENSORS,
CO_SENSORS,
from zwave_js_server.const import CommandClass
from zwave_js_server.const.command_class.meter import (
CURRENT_METER_TYPES,
CURRENT_SENSORS,
ENERGY_METER_TYPES,
ENERGY_SENSORS,
HUMIDITY_SENSORS,
ILLUMINANCE_SENSORS,
ENERGY_TOTAL_INCREASING_METER_TYPES,
POWER_FACTOR_METER_TYPES,
POWER_METER_TYPES,
VOLTAGE_METER_TYPES,
ElectricScale,
MeterScaleType,
)
from zwave_js_server.const.command_class.multilevel_sensor import (
CO2_SENSORS,
CO_SENSORS,
CURRENT_SENSORS,
ENERGY_MEASUREMENT_SENSORS,
HUMIDITY_SENSORS,
ILLUMINANCE_SENSORS,
POWER_SENSORS,
PRESSURE_SENSORS,
SIGNAL_STRENGTH_SENSORS,
TEMPERATURE_SENSORS,
TIMESTAMP_SENSORS,
VOLTAGE_METER_TYPES,
VOLTAGE_SENSORS,
CommandClass,
MeterScaleType,
MultilevelSensorType,
)
from zwave_js_server.model.node import Node as ZwaveNode
@@ -43,6 +46,7 @@ from .const import (
ENTITY_DESC_KEY_ENERGY_TOTAL_INCREASING,
ENTITY_DESC_KEY_HUMIDITY,
ENTITY_DESC_KEY_ILLUMINANCE,
ENTITY_DESC_KEY_MEASUREMENT,
ENTITY_DESC_KEY_POWER,
ENTITY_DESC_KEY_POWER_FACTOR,
ENTITY_DESC_KEY_PRESSURE,
@@ -50,13 +54,14 @@ from .const import (
ENTITY_DESC_KEY_TARGET_TEMPERATURE,
ENTITY_DESC_KEY_TEMPERATURE,
ENTITY_DESC_KEY_TIMESTAMP,
ENTITY_DESC_KEY_TOTAL_INCREASING,
ENTITY_DESC_KEY_VOLTAGE,
)

METER_DEVICE_CLASS_MAP: dict[str, set[MeterScaleType]] = {
ENTITY_DESC_KEY_CURRENT: CURRENT_METER_TYPES,
ENTITY_DESC_KEY_VOLTAGE: VOLTAGE_METER_TYPES,
ENTITY_DESC_KEY_ENERGY_TOTAL_INCREASING: ENERGY_METER_TYPES,
ENTITY_DESC_KEY_ENERGY_TOTAL_INCREASING: ENERGY_TOTAL_INCREASING_METER_TYPES,
ENTITY_DESC_KEY_POWER: POWER_METER_TYPES,
ENTITY_DESC_KEY_POWER_FACTOR: POWER_FACTOR_METER_TYPES,
}
@@ -65,7 +70,7 @@ MULTILEVEL_SENSOR_DEVICE_CLASS_MAP: dict[str, set[MultilevelSensorType]] = {
ENTITY_DESC_KEY_CO: CO_SENSORS,
ENTITY_DESC_KEY_CO2: CO2_SENSORS,
ENTITY_DESC_KEY_CURRENT: CURRENT_SENSORS,
ENTITY_DESC_KEY_ENERGY_MEASUREMENT: ENERGY_SENSORS,
ENTITY_DESC_KEY_ENERGY_MEASUREMENT: ENERGY_MEASUREMENT_SENSORS,
ENTITY_DESC_KEY_HUMIDITY: HUMIDITY_SENSORS,
ENTITY_DESC_KEY_ILLUMINANCE: ILLUMINANCE_SENSORS,
ENTITY_DESC_KEY_POWER: POWER_SENSORS,
@@ -187,6 +192,19 @@ class NumericSensorDataTemplate(BaseDiscoverySchemaDataTemplate):

if value.command_class == CommandClass.METER:
scale_type = get_meter_scale_type(value)
# We do this because even though these are energy scales, they don't meet
# the unit requirements for the energy device class.
if scale_type in (
ElectricScale.PULSE_COUNT,
ElectricScale.KILOVOLT_AMPERE_HOUR,
ElectricScale.KILOVOLT_AMPERE_REACTIVE_HOUR,
):
return ENTITY_DESC_KEY_TOTAL_INCREASING
# We do this because even though these are power scales, they don't meet
# the unit requirements for the energy power class.
if scale_type == ElectricScale.KILOVOLT_AMPERE_REACTIVE:
return ENTITY_DESC_KEY_MEASUREMENT

for key, scale_type_set in METER_DEVICE_CLASS_MAP.items():
if scale_type in scale_type_set:
return key
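The `NumericSensorDataTemplate` hunk checks a few special meter scales first, because they carry energy- or power-like values whose units do not qualify for the energy or power device classes, and only then falls back to the scale-type map. A toy lookup with made-up scale and key names showing the same precedence:

```python
"""Toy precedence lookup for meter scales (made-up scale and key names, not the zwave_js tables)."""
from __future__ import annotations

from enum import Enum, auto


class Scale(Enum):
    KWH = auto()
    PULSE_COUNT = auto()
    KVARH = auto()
    WATT = auto()
    KVAR = auto()


ENERGY_TOTAL_INCREASING = {Scale.KWH}
POWER = {Scale.WATT}

SCALE_KEY_MAP = {
    "energy_total_increasing": ENERGY_TOTAL_INCREASING,
    "power": POWER,
}


def entity_description_key(scale: Scale) -> str | None:
    """Special cases first, then the generic scale-to-key map."""
    if scale in (Scale.PULSE_COUNT, Scale.KVARH):
        # Energy-like, but the unit does not fit the energy device class.
        return "total_increasing"
    if scale is Scale.KVAR:
        # Power-like, but the unit does not fit the power device class.
        return "measurement"
    for key, scale_set in SCALE_KEY_MAP.items():
        if scale in scale_set:
            return key
    return None


assert entity_description_key(Scale.KWH) == "energy_total_increasing"
assert entity_description_key(Scale.PULSE_COUNT) == "total_increasing"
```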
@@ -5,7 +5,8 @@ import logging
from typing import Any

from zwave_js_server.client import Client as ZwaveClient
from zwave_js_server.const import ColorComponent, CommandClass
from zwave_js_server.const import CommandClass
from zwave_js_server.const.command_class.color_switch import ColorComponent

from homeassistant.components.light import (
ATTR_BRIGHTNESS,

@@ -6,12 +6,12 @@ from typing import Any

import voluptuous as vol
from zwave_js_server.client import Client as ZwaveClient
from zwave_js_server.const import (
from zwave_js_server.const import CommandClass
from zwave_js_server.const.command_class.lock import (
ATTR_CODE_SLOT,
ATTR_USERCODE,
LOCK_CMD_CLASS_TO_LOCKED_STATE_MAP,
LOCK_CMD_CLASS_TO_PROPERTY_MAP,
CommandClass,
DoorLockMode,
)
from zwave_js_server.model.value import Value as ZwaveValue

@@ -3,13 +3,13 @@
"name": "Z-Wave JS",
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/zwave_js",
"requirements": ["zwave-js-server-python==0.29.0"],
"requirements": ["zwave-js-server-python==0.29.1"],
"codeowners": ["@home-assistant/z-wave"],
"dependencies": ["usb", "http", "websocket_api"],
"iot_class": "local_push",
"usb": [
{"vid":"0658","pid":"0200","known_devices":["Aeotec Z-Stick Gen5+", "Z-WaveMe UZB"]},
{"vid":"10C4","pid":"8A2A","known_devices":["Nortek HUSBZB-1"]},
{"vid":"10C4","pid":"EA60","known_devices":["Aeotec Z-Stick 7", "Silicon Labs UZB-7", "Zooz ZST10 700"]}
{"vid":"0658","pid":"0200","known_devices":["Aeotec Z-Stick Gen5+", "Z-WaveMe UZB"]},
{"vid":"10C4","pid":"8A2A","known_devices":["Nortek HUSBZB-1"]},
{"vid":"10C4","pid":"EA60","known_devices":["Aeotec Z-Stick 7", "Silicon Labs UZB-7", "Zooz ZST10 700"]}
]
}
@@ -2,7 +2,8 @@
from __future__ import annotations

from zwave_js_server.client import Client as ZwaveClient
from zwave_js_server.const import CommandClass, ToneID
from zwave_js_server.const import CommandClass
from zwave_js_server.const.command_class.sound_switch import ToneID

from homeassistant.components.select import DOMAIN as SELECT_DOMAIN, SelectEntity
from homeassistant.config_entries import ConfigEntry
Some files were not shown because too many files have changed in this diff.