forked from home-assistant/core
Compare commits
153 Commits
2023.3.0b2...2023.3.6
| SHA1 |
|---|
| ca5a88342b |
| 117113cdfc |
| 174342860b |
| a7b5a0297e |
| 406e92511b |
| 146347e31a |
| 3747fd5dcb |
| 53d400ca96 |
| 2a18261efb |
| 1f71068740 |
| 92fb978a03 |
| 127f2289a1 |
| de6f55dcfb |
| 713d3025f2 |
| 1e03ff68a2 |
| a5aa5c0c01 |
| b6d001bfe6 |
| 7e18e15cac |
| e651ca747b |
| 9fa73fe3a9 |
| abda7b8a5b |
| 90a4afb6fa |
| 52981699cf |
| c3d7696c2e |
| f120bac17f |
| 02738fb9d4 |
| a9a6ff50cc |
| fdd9c5383f |
| d084e70aff |
| 69582b7ecb |
| 160518350f |
| daa5718a80 |
| f5562e93ac |
| d2f90236d1 |
| 65c614421a |
| 22922da607 |
| ca0304ffc4 |
| 950a1f6e9e |
| 1e7f58d859 |
| 7cb4620671 |
| 8c2569d2ce |
| 6ebd493c4d |
| 990ecbba72 |
| ddde17606d |
| 3fba181e7b |
| da79bf8534 |
| 83e2cc32b7 |
| c7fb404a17 |
| f1e114380a |
| 04e4a644cb |
| e606c2e227 |
| ebf95feff3 |
| 3dca4c2f23 |
| 3f8f38f2df |
| 0844a0b269 |
| b65180d20a |
| 7f8a9697f0 |
| 563bd4a0dd |
| 29b5ef31c1 |
| 863f8b727d |
| 83ed8cf689 |
| 52cd2f9429 |
| 74d3b2374b |
| f982af2412 |
| 0b5ddd9cbf |
| 8d1aa0132e |
| d737b97c91 |
| 0fac12866d |
| e3fe71f76e |
| eba1bfad51 |
| 1a0a385e03 |
| c9999cd08c |
| 8252aeead2 |
| c27a69ef85 |
| d4c28a1f4a |
| 322eb4bd83 |
| f0f12fd14a |
| 1836e35717 |
| 4eb55146be |
| b1ee6e304e |
| d0b195516b |
| a867f1d3c8 |
| f7eaeb7a39 |
| 3e961d3e17 |
| c28e16fa8b |
| e2e8d74aa6 |
| 8a9fbd650a |
| 243725efe3 |
| 8d59489da8 |
| c146413a1a |
| a46d63a11b |
| db4f6fb94d |
| c50c920589 |
| fe22aa0b4b |
| a0162e4986 |
| 62c5cf51f5 |
| 89aebba3ab |
| 6c73b9024b |
| 59a9ace171 |
| e751948bc8 |
| 702646427d |
| 8a605b1377 |
| 8eb8415d3f |
| 9f3f71d0c3 |
| b82da9418d |
| 38cf725075 |
| 04cedab8d4 |
| 2238a3f201 |
| f58ca17926 |
| d5e517b874 |
| f9eeb4f4d8 |
| 86d5e4aaa8 |
| a56935ed7c |
| fc56c958c3 |
| a8e1dc8962 |
| 32b138b6c6 |
| 2112c66804 |
| 72c0526d87 |
| 9ed4e01e94 |
| dcf1ecfeb5 |
| b72224ceff |
| 96ad5c9666 |
| 00b59c142a |
| b054c81e13 |
| b0cbcad440 |
| bafe552af6 |
| d399855e50 |
| d26f430766 |
| f2e4943a53 |
| 6512cd901f |
| fbe1524f6c |
| 95e337277c |
| 1503674bd6 |
| ab6bd75b70 |
| 2fff836bd4 |
| d8850758f1 |
| 0449856064 |
| e48089e0c9 |
| a7e081f70d |
| fe181425d8 |
| 8c7b29db25 |
| aaa5bb9f86 |
| 5b78e0c4ff |
| 2063dbf00d |
| 91a03ab83d |
| ed8f538890 |
| 6196607c5d |
| 833ccafb76 |
| ca539d0a09 |
| 0e3e954000 |
| 4ef96c76e4 |
| d5b0c1faa0 |
| 2405908cdd |
```diff
@@ -1100,6 +1100,7 @@ build.json @home-assistant/supervisor
 /homeassistant/components/smhi/ @gjohansson-ST
 /tests/components/smhi/ @gjohansson-ST
 /homeassistant/components/sms/ @ocalvo
+/homeassistant/components/snapcast/ @luar123
 /homeassistant/components/snooz/ @AustinBrunkhorst
 /tests/components/snooz/ @AustinBrunkhorst
 /homeassistant/components/solaredge/ @frenck
```
homeassistant/brands/heltun.json (new file)

```diff
@@ -0,0 +1,5 @@
+{
+  "domain": "heltun",
+  "name": "HELTUN",
+  "iot_standards": ["zwave"]
+}
```
```diff
@@ -68,7 +68,6 @@ SENSOR_TYPES: list[AirQEntityDescription] = [
     AirQEntityDescription(
         key="co",
         name="CO",
-        device_class=SensorDeviceClass.CO,
         native_unit_of_measurement=CONCENTRATION_MILLIGRAMS_PER_CUBIC_METER,
         state_class=SensorStateClass.MEASUREMENT,
         value=lambda data: data.get("co"),
@@ -289,7 +288,6 @@ SENSOR_TYPES: list[AirQEntityDescription] = [
     AirQEntityDescription(
         key="tvoc",
         name="VOC",
-        device_class=SensorDeviceClass.VOLATILE_ORGANIC_COMPOUNDS,
        native_unit_of_measurement=CONCENTRATION_PARTS_PER_BILLION,
         state_class=SensorStateClass.MEASUREMENT,
         value=lambda data: data.get("tvoc"),
@@ -297,7 +295,6 @@ SENSOR_TYPES: list[AirQEntityDescription] = [
     AirQEntityDescription(
         key="tvoc_ionsc",
         name="VOC (Industrial)",
-        device_class=SensorDeviceClass.VOLATILE_ORGANIC_COMPOUNDS,
        native_unit_of_measurement=CONCENTRATION_PARTS_PER_BILLION,
         state_class=SensorStateClass.MEASUREMENT,
         value=lambda data: data.get("tvoc_ionsc"),
```
```diff
@@ -1,5 +1,6 @@
 """Rest API for Home Assistant."""
 import asyncio
+from functools import lru_cache
 from http import HTTPStatus
 import logging

@@ -350,6 +351,12 @@ class APIComponentsView(HomeAssistantView):
         return self.json(request.app["hass"].config.components)


+@lru_cache
+def _cached_template(template_str: str, hass: ha.HomeAssistant) -> template.Template:
+    """Return a cached template."""
+    return template.Template(template_str, hass)
+
+
 class APITemplateView(HomeAssistantView):
     """View to handle Template requests."""

@@ -362,7 +369,7 @@ class APITemplateView(HomeAssistantView):
             raise Unauthorized()
         try:
             data = await request.json()
-            tpl = template.Template(data["template"], request.app["hass"])
+            tpl = _cached_template(data["template"], request.app["hass"])
             return tpl.async_render(variables=data.get("variables"), parse_result=False)
         except (ValueError, TemplateError) as ex:
             return self.json_message(
```
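The hunk above memoizes template compilation with `functools.lru_cache`, so repeated API calls with the same template string skip the expensive parse. A minimal standalone sketch of the same pattern (the names here are illustrative, not the Home Assistant API):

```python
from functools import lru_cache


@lru_cache(maxsize=128)
def compile_template(template_str: str) -> list[str]:
    """Stand-in for an expensive parse step; runs once per distinct string."""
    print(f"compiling {template_str!r}")
    return template_str.split()


compile_template("{{ states('sun.sun') }}")
compile_template("{{ states('sun.sun') }}")  # cache hit: no second "compiling" print
```

Note that the real hunk also passes `hass` as a positional argument, which works as a cache key because `lru_cache` hashes all positional arguments and the object is long-lived.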
```diff
@@ -180,7 +180,7 @@ class ArestData:
         self._resource = resource
         self._pin = pin
         self.data = {}
-        self._attr_available = True
+        self.available = True

     @Throttle(MIN_TIME_BETWEEN_UPDATES)
     def update(self):
@@ -201,7 +201,7 @@ class ArestData:
                 f"{self._resource}/digital/{self._pin}", timeout=10
             )
             self.data = {"value": response.json()["return_value"]}
-            self._attr_available = True
+            self.available = True
         except requests.exceptions.ConnectionError:
             _LOGGER.error("No route to device %s", self._resource)
-            self._attr_available = False
+            self.available = False
```
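The fix works because the `_attr_available` shorthand only does anything on classes deriving from Home Assistant's `Entity`, where a property reads it back; on a plain data helper like `ArestData` it is dead weight. A minimal sketch of the distinction (simplified stand-ins, not the real HA classes):

```python
class Entity:
    """HA-style base: _attr_available backs the available property."""

    _attr_available: bool = True

    @property
    def available(self) -> bool:
        return self._attr_available


class ArestData:
    """Plain helper class: no Entity machinery, so _attr_available would be ignored."""

    def __init__(self) -> None:
        self.available = True  # consumers read this attribute directly


data = ArestData()
data.available = False  # what the fixed code does on connection errors
assert data.available is False
```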
```diff
@@ -28,5 +28,5 @@
   "documentation": "https://www.home-assistant.io/integrations/august",
   "iot_class": "cloud_push",
   "loggers": ["pubnub", "yalexs"],
-  "requirements": ["yalexs==1.2.7", "yalexs_ble==2.0.3"]
+  "requirements": ["yalexs==1.2.7", "yalexs-ble==2.1.1"]
 }
```
```diff
@@ -6,5 +6,5 @@
   "documentation": "https://www.home-assistant.io/integrations/aurora",
   "iot_class": "cloud_polling",
   "loggers": ["auroranoaa"],
-  "requirements": ["auroranoaa==0.0.2"]
+  "requirements": ["auroranoaa==0.0.3"]
 }
```
```diff
@@ -60,7 +60,7 @@ from .const import (
     DEFAULT_PROBABILITY_THRESHOLD,
 )
 from .helpers import Observation
-from .repairs import raise_mirrored_entries, raise_no_prob_given_false
+from .issues import raise_mirrored_entries, raise_no_prob_given_false

 _LOGGER = logging.getLogger(__name__)
```
```diff
@@ -1,4 +1,4 @@
-"""Helpers for generating repairs."""
+"""Helpers for generating issues."""
 from __future__ import annotations

 from homeassistant.core import HomeAssistant
```
```diff
@@ -106,6 +106,8 @@ class ActiveBluetoothDataUpdateCoordinator(

     def needs_poll(self, service_info: BluetoothServiceInfoBleak) -> bool:
         """Return true if time to try and poll."""
+        if self.hass.is_stopping:
+            return False
         poll_age: float | None = None
         if self._last_poll:
             poll_age = monotonic_time_coarse() - self._last_poll
```
```diff
@@ -99,6 +99,8 @@ class ActiveBluetoothProcessorCoordinator(

     def needs_poll(self, service_info: BluetoothServiceInfoBleak) -> bool:
         """Return true if time to try and poll."""
+        if self.hass.is_stopping:
+            return False
         poll_age: float | None = None
         if self._last_poll:
             poll_age = monotonic_time_coarse() - self._last_poll
```
```diff
@@ -227,20 +227,21 @@ class BaseHaRemoteScanner(BaseHaScanner):
             self.hass, self._async_expire_devices, timedelta(seconds=30)
         )
         cancel_stop = self.hass.bus.async_listen(
-            EVENT_HOMEASSISTANT_STOP, self._save_history
+            EVENT_HOMEASSISTANT_STOP, self._async_save_history
         )
         self._async_setup_scanner_watchdog()

         @hass_callback
         def _cancel() -> None:
-            self._save_history()
+            self._async_save_history()
             self._async_stop_scanner_watchdog()
             cancel_track()
             cancel_stop()

         return _cancel

-    def _save_history(self, event: Event | None = None) -> None:
+    @hass_callback
+    def _async_save_history(self, event: Event | None = None) -> None:
         """Save the history."""
         self._storage.async_set_advertisement_history(
             self.source,
@@ -252,6 +253,7 @@ class BaseHaRemoteScanner(BaseHaScanner):
             ),
         )

+    @hass_callback
     def _async_expire_devices(self, _datetime: datetime.datetime) -> None:
         """Expire old devices."""
         now = MONOTONIC_TIME()
```
```diff
@@ -7,6 +7,6 @@
   "iot_class": "local_push",
   "loggers": ["bond_async"],
   "quality_scale": "platinum",
-  "requirements": ["bond-async==0.1.22"],
+  "requirements": ["bond-async==0.1.23"],
   "zeroconf": ["_bond._tcp.local."]
 }
```
```diff
@@ -5,5 +5,5 @@
   "documentation": "https://www.home-assistant.io/integrations/caldav",
   "iot_class": "cloud_polling",
   "loggers": ["caldav", "vobject"],
-  "requirements": ["caldav==1.1.1"]
+  "requirements": ["caldav==1.2.0"]
 }
```
```diff
@@ -257,9 +257,9 @@ class DefaultAgent(AbstractConversationAgent):
         # This is available in the response template as "state".
         state1: core.State | None = None
         if intent_response.matched_states:
-            state1 = intent_response.matched_states[0]
+            state1 = matched[0]
         elif intent_response.unmatched_states:
-            state1 = intent_response.unmatched_states[0]
+            state1 = unmatched[0]

         # Render response template
         speech = response_template.async_render(
```
```diff
@@ -7,5 +7,5 @@
   "integration_type": "system",
   "iot_class": "local_push",
   "quality_scale": "internal",
-  "requirements": ["hassil==1.0.5", "home-assistant-intents==2023.2.22"]
+  "requirements": ["hassil==1.0.6", "home-assistant-intents==2023.2.28"]
 }
```
```diff
@@ -8,7 +8,7 @@
   "iot_class": "local_push",
   "loggers": ["pydeconz"],
   "quality_scale": "platinum",
-  "requirements": ["pydeconz==108"],
+  "requirements": ["pydeconz==110"],
   "ssdp": [
     {
       "manufacturer": "Royal Philips Electronics",
```
```diff
@@ -8,11 +8,7 @@ from typing import TYPE_CHECKING

 import voluptuous as vol

-from homeassistant.components.sensor import (
-    PLATFORM_SCHEMA,
-    SensorEntity,
-    SensorStateClass,
-)
+from homeassistant.components.sensor import PLATFORM_SCHEMA, SensorEntity
 from homeassistant.config_entries import ConfigEntry
 from homeassistant.const import (
     ATTR_UNIT_OF_MEASUREMENT,
@@ -135,7 +131,6 @@ class DerivativeSensor(RestoreEntity, SensorEntity):

     _attr_icon = ICON
     _attr_should_poll = False
-    _attr_state_class = SensorStateClass.MEASUREMENT

     def __init__(
         self,
```
```diff
@@ -19,7 +19,7 @@ from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, Upda
 from .const import CONF_ASSOCIATION_DATA, DOMAIN, UPDATE_SECONDS
 from .models import DormakabaDkeyData

-PLATFORMS: list[Platform] = [Platform.LOCK, Platform.SENSOR]
+PLATFORMS: list[Platform] = [Platform.BINARY_SENSOR, Platform.LOCK, Platform.SENSOR]

 _LOGGER = logging.getLogger(__name__)
```
```diff
@@ -45,9 +45,10 @@ BINARY_SENSOR_DESCRIPTIONS = (
     ),
     DormakabaDkeyBinarySensorDescription(
         key="security_locked",
-        name="Dead bolt",
+        name="Deadbolt",
         device_class=BinarySensorDeviceClass.LOCK,
-        is_on=lambda state: state.unlock_status != UnlockStatus.SECURITY_LOCKED,
+        is_on=lambda state: state.unlock_status
+        not in (UnlockStatus.SECURITY_LOCKED, UnlockStatus.UNLOCKED_SECURITY_LOCKED),
     ),
 )
```
```diff
@@ -132,7 +132,8 @@ class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):

         try:
             association_data = await lock.associate(user_input["activation_code"])
-        except BleakError:
+        except BleakError as err:
+            _LOGGER.warning("BleakError", exc_info=err)
             return self.async_abort(reason="cannot_connect")
         except dkey_errors.InvalidActivationCode:
             errors["base"] = "invalid_code"
```
```diff
@@ -11,5 +11,5 @@
   "documentation": "https://www.home-assistant.io/integrations/dormakaba_dkey",
   "integration_type": "device",
   "iot_class": "local_polling",
-  "requirements": ["py-dormakaba-dkey==1.0.2"]
+  "requirements": ["py-dormakaba-dkey==1.0.4"]
 }
```
```diff
@@ -6,5 +6,5 @@
   "documentation": "https://www.home-assistant.io/integrations/easyenergy",
   "iot_class": "cloud_polling",
   "quality_scale": "platinum",
-  "requirements": ["easyenergy==0.1.2"]
+  "requirements": ["easyenergy==0.2.2"]
 }
```
```diff
@@ -2,6 +2,7 @@
 from __future__ import annotations

 from datetime import datetime, timedelta
+from random import randint

 from enturclient import EnturPublicTransportData
 import voluptuous as vol
@@ -22,7 +23,7 @@ from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
 from homeassistant.util import Throttle
 import homeassistant.util.dt as dt_util

-API_CLIENT_NAME = "homeassistant-homeassistant"
+API_CLIENT_NAME = "homeassistant-{}"

 CONF_STOP_IDS = "stop_ids"
 CONF_EXPAND_PLATFORMS = "expand_platforms"
@@ -105,7 +106,7 @@ async def async_setup_platform(
     quays = [s for s in stop_ids if "Quay" in s]

     data = EnturPublicTransportData(
-        API_CLIENT_NAME,
+        API_CLIENT_NAME.format(str(randint(100000, 999999))),
         stops=stops,
         quays=quays,
         line_whitelist=line_whitelist,
```
```diff
@@ -6,5 +6,5 @@
   "documentation": "https://www.home-assistant.io/integrations/environment_canada",
   "iot_class": "cloud_polling",
   "loggers": ["env_canada"],
-  "requirements": ["env_canada==0.5.28"]
+  "requirements": ["env_canada==0.5.29"]
 }
```
```diff
@@ -130,10 +130,15 @@ class RuntimeEntryData:
         )
         self.ble_connections_free = free
         self.ble_connections_limit = limit
-        if free:
-            for fut in self._ble_connection_free_futures:
-                fut.set_result(free)
-            self._ble_connection_free_futures.clear()
+        if not free:
+            return
+        for fut in self._ble_connection_free_futures:
+            # If wait_for_ble_connections_free gets cancelled, it will
+            # leave a future in the list. We need to check if it's done
+            # before setting the result.
+            if not fut.done():
+                fut.set_result(free)
+        self._ble_connection_free_futures.clear()

     async def wait_for_ble_connections_free(self) -> int:
         """Wait until there are free BLE connections."""
```
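The rewritten block wakes up a set of waiter futures while skipping any that were already cancelled, since setting a result on a done future raises `InvalidStateError`. A self-contained sketch of that wake-up pattern:

```python
import asyncio


async def main() -> None:
    loop = asyncio.get_running_loop()
    waiters: list[asyncio.Future[int]] = [loop.create_future() for _ in range(3)]
    waiters[1].cancel()  # a cancelled waiter leaves a done future in the list

    free = 2
    for fut in waiters:
        # Without the done() guard, resolving the cancelled future
        # would raise asyncio.InvalidStateError -- the bug fixed above.
        if not fut.done():
            fut.set_result(free)
    waiters.clear()


asyncio.run(main())
```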
```diff
@@ -14,6 +14,6 @@
   "integration_type": "device",
   "iot_class": "local_push",
   "loggers": ["aioesphomeapi", "noiseprotocol"],
-  "requirements": ["aioesphomeapi==13.4.1", "esphome-dashboard-api==1.2.3"],
+  "requirements": ["aioesphomeapi==13.5.1", "esphome-dashboard-api==1.2.3"],
   "zeroconf": ["_esphomelib._tcp.local."]
 }
```
```diff
@@ -94,9 +94,9 @@ class FibaroCover(FibaroDevice, CoverEntity):
         """Return if the cover is closed."""
         if self._is_open_close_only():
             state = self.fibaro_device.state
-            if not state.has_value or state.str_value.lower() == "unknown":
+            if not state.has_value or state.str_value().lower() == "unknown":
                 return None
-            return state.str_value.lower() == "closed"
+            return state.str_value().lower() == "closed"

         if self.current_cover_position is None:
             return None
```
```diff
@@ -7,5 +7,5 @@
   "integration_type": "hub",
   "iot_class": "local_push",
   "loggers": ["pyfibaro"],
-  "requirements": ["pyfibaro==0.6.8"]
+  "requirements": ["pyfibaro==0.6.9"]
 }
```
```diff
@@ -87,14 +87,23 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
         hass, STARTUP_SCAN_TIMEOUT
     )

+    @callback
+    def _async_start_background_discovery(*_: Any) -> None:
+        """Run discovery in the background."""
+        hass.async_create_background_task(_async_discovery(), "flux_led-discovery")
+
     async def _async_discovery(*_: Any) -> None:
         async_trigger_discovery(
             hass, await async_discover_devices(hass, DISCOVER_SCAN_TIMEOUT)
         )

     async_trigger_discovery(hass, domain_data[FLUX_LED_DISCOVERY])
-    hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STARTED, _async_discovery)
-    async_track_time_interval(hass, _async_discovery, DISCOVERY_INTERVAL)
+    hass.bus.async_listen_once(
+        EVENT_HOMEASSISTANT_STARTED, _async_start_background_discovery
+    )
+    async_track_time_interval(
+        hass, _async_start_background_discovery, DISCOVERY_INTERVAL
+    )
     return True
```
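The timer callback here is synchronous, so the discovery coroutine is wrapped in a task instead of being awaited inline. A generic asyncio sketch of the same wrapper shape (hypothetical names, not the Home Assistant helpers):

```python
import asyncio


async def discover() -> None:
    await asyncio.sleep(0.1)  # stand-in for a network scan
    print("discovery pass done")


def start_background_discovery() -> None:
    """Synchronous callback: schedule the scan instead of blocking on it."""
    # Keep a reference if the task must outlive this scope; HA's
    # async_create_background_task does that bookkeeping (and naming) for you.
    asyncio.ensure_future(discover())


async def main() -> None:
    start_background_discovery()
    await asyncio.sleep(0.2)  # let the background pass finish


asyncio.run(main())
```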
```diff
@@ -77,7 +77,6 @@ class FreeboxFlowHandler(config_entries.ConfigFlow, domain=DOMAIN):
             # Check permissions
             await fbx.system.get_config()
             await fbx.lan.get_hosts_list()
-            await self.hass.async_block_till_done()

             # Close connection
             await fbx.close()
```
```diff
@@ -20,5 +20,5 @@
   "documentation": "https://www.home-assistant.io/integrations/frontend",
   "integration_type": "system",
   "quality_scale": "internal",
-  "requirements": ["home-assistant-frontend==20230224.0"]
+  "requirements": ["home-assistant-frontend==20230309.1"]
 }
```
```diff
@@ -5,5 +5,5 @@
   "config_flow": true,
   "documentation": "https://www.home-assistant.io/integrations/garages_amsterdam",
   "iot_class": "cloud_polling",
-  "requirements": ["odp-amsterdam==5.0.1"]
+  "requirements": ["odp-amsterdam==5.1.0"]
 }
```
```diff
@@ -41,7 +41,7 @@ async def async_setup_platform(
         [
             GeniusClimateZone(broker, z)
             for z in broker.client.zone_objs
-            if z.data["type"] in GH_ZONES
+            if z.data.get("type") in GH_ZONES
         ]
     )

@@ -79,10 +79,10 @@ class GeniusClimateZone(GeniusHeatingZone, ClimateEntity):
     def hvac_action(self) -> str | None:
         """Return the current running hvac operation if supported."""
         if "_state" in self._zone.data:  # only for v3 API
-            if self._zone.data["output"] == 1:
-                return HVACAction.HEATING
+            if not self._zone.data["_state"].get("bIsActive"):
+                return HVACAction.OFF
+            if self._zone.data["_state"].get("bOutRequestHeat"):
+                return HVACAction.HEATING
             return HVACAction.IDLE
         return None
```
```diff
@@ -42,7 +42,7 @@ async def async_setup_platform(
         [
             GeniusSwitch(broker, z)
             for z in broker.client.zone_objs
-            if z.data["type"] == GH_ON_OFF_ZONE
+            if z.data.get("type") == GH_ON_OFF_ZONE
         ]
     )
```
```diff
@@ -48,7 +48,7 @@ async def async_setup_platform(
         [
             GeniusWaterHeater(broker, z)
             for z in broker.client.zone_objs
-            if z.data["type"] in GH_HEATERS
+            if z.data.get("type") in GH_HEATERS
         ]
     )
```
```diff
@@ -832,7 +832,7 @@ class TemperatureControlTrait(_Trait):
             "temperatureUnitForUX": _google_temp_unit(
                 self.hass.config.units.temperature_unit
             ),
-            "queryOnlyTemperatureSetting": True,
+            "queryOnlyTemperatureControl": True,
             "temperatureRange": {
                 "minThresholdCelsius": -100,
                 "maxThresholdCelsius": 100,
```
```diff
@@ -342,12 +342,14 @@ def get_next_departure(
            origin_stop_time.departure_time
     LIMIT :limit
     """
-    result = schedule.engine.execute(
+    result = schedule.engine.connect().execute(
         text(sql_query),
-        origin_station_id=start_station_id,
-        end_station_id=end_station_id,
-        today=now_date,
-        limit=limit,
+        {
+            "origin_station_id": start_station_id,
+            "end_station_id": end_station_id,
+            "today": now_date,
+            "limit": limit,
+        },
     )

     # Create lookup timetable for today and possibly tomorrow, taking into
@@ -357,7 +359,8 @@ def get_next_departure(
     yesterday_start = today_start = tomorrow_start = None
     yesterday_last = today_last = ""

-    for row in result:
+    for row_cursor in result:
+        row = row_cursor._asdict()
         if row["yesterday"] == 1 and yesterday_date >= row["start_date"]:
             extras = {"day": "yesterday", "first": None, "last": False}
             if yesterday_start is None:
@@ -800,7 +803,10 @@ class GTFSDepartureSensor(SensorEntity):
     @staticmethod
     def dict_for_table(resource: Any) -> dict:
         """Return a dictionary for the SQLAlchemy resource given."""
-        return {col: getattr(resource, col) for col in resource.__table__.columns}
+        _dict = {}
+        for column in resource.__table__.columns:
+            _dict[column.name] = str(getattr(resource, column.name))
+        return _dict

     def append_keys(self, resource: dict, prefix: str | None = None) -> None:
         """Properly format key val pairs to append to attributes."""
```
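This hunk ports a raw query to the SQLAlchemy 1.4/2.0 execution style: bind parameters travel as a dict next to `text()` rather than keyword arguments, and result rows come back as named tuples that `._asdict()` turns into plain dicts. A minimal sketch against an in-memory SQLite engine:

```python
from sqlalchemy import create_engine, text

engine = create_engine("sqlite:///:memory:")
with engine.connect() as conn:
    conn.execute(text("CREATE TABLE stop (id INTEGER, name TEXT)"))
    conn.execute(text("INSERT INTO stop VALUES (1, 'Central')"))

    # 2.0 style: bind parameters as a dict, not keyword arguments.
    result = conn.execute(text("SELECT * FROM stop WHERE id = :id"), {"id": 1})
    for row_cursor in result:
        row = row_cursor._asdict()  # Row -> plain dict, as in the diff
        print(row["name"])
```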
```diff
@@ -13,7 +13,7 @@
   "documentation": "https://www.home-assistant.io/integrations/harmony",
   "iot_class": "local_push",
   "loggers": ["aioharmony", "slixmpp"],
-  "requirements": ["aioharmony==0.2.9"],
+  "requirements": ["aioharmony==0.2.10"],
   "ssdp": [
     {
       "manufacturer": "Logitech",
```
```diff
@@ -96,7 +96,7 @@ from .handler import (  # noqa: F401
 )
 from .http import HassIOView
 from .ingress import async_setup_ingress_view
-from .repairs import SupervisorRepairs
+from .issues import SupervisorIssues
 from .websocket_api import async_load_websocket_api

 _LOGGER = logging.getLogger(__name__)
@@ -123,7 +123,7 @@ DATA_SUPERVISOR_INFO = "hassio_supervisor_info"
 DATA_ADDONS_CHANGELOGS = "hassio_addons_changelogs"
 DATA_ADDONS_INFO = "hassio_addons_info"
 DATA_ADDONS_STATS = "hassio_addons_stats"
-DATA_SUPERVISOR_REPAIRS = "supervisor_repairs"
+DATA_SUPERVISOR_ISSUES = "supervisor_issues"
 HASSIO_UPDATE_INTERVAL = timedelta(minutes=5)

 ADDONS_COORDINATOR = "hassio_addons_coordinator"
@@ -581,9 +581,9 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:  # noqa:
         hass.config_entries.flow.async_init(DOMAIN, context={"source": "system"})
     )

-    # Start listening for problems with supervisor and making repairs
-    hass.data[DATA_SUPERVISOR_REPAIRS] = repairs = SupervisorRepairs(hass, hassio)
-    await repairs.setup()
+    # Start listening for problems with supervisor and making issues
+    hass.data[DATA_SUPERVISOR_ISSUES] = issues = SupervisorIssues(hass, hassio)
+    await issues.setup()

     return True
```
```diff
@@ -36,6 +36,7 @@ X_AUTH_TOKEN = "X-Supervisor-Token"
 X_INGRESS_PATH = "X-Ingress-Path"
 X_HASS_USER_ID = "X-Hass-User-ID"
 X_HASS_IS_ADMIN = "X-Hass-Is-Admin"
+X_HASS_SOURCE = "X-Hass-Source"

 WS_TYPE = "type"
 WS_ID = "id"
```
```diff
@@ -17,7 +17,7 @@ from homeassistant.const import SERVER_PORT
 from homeassistant.core import HomeAssistant
 from homeassistant.loader import bind_hass

-from .const import ATTR_DISCOVERY, DOMAIN
+from .const import ATTR_DISCOVERY, DOMAIN, X_HASS_SOURCE

 _LOGGER = logging.getLogger(__name__)

@@ -445,6 +445,8 @@ class HassIO:
         payload=None,
         timeout=10,
         return_text=False,
+        *,
+        source="core.handler",
     ):
         """Send API command to Hass.io.

@@ -458,7 +460,8 @@ class HassIO:
             headers={
                 aiohttp.hdrs.AUTHORIZATION: (
                     f"Bearer {os.environ.get('SUPERVISOR_TOKEN', '')}"
-                )
+                ),
+                X_HASS_SOURCE: source,
             },
             timeout=aiohttp.ClientTimeout(total=timeout),
         )
```
```diff
@@ -6,6 +6,7 @@ from http import HTTPStatus
 import logging
 import os
 import re
+from urllib.parse import quote, unquote

 import aiohttp
 from aiohttp import web
@@ -19,13 +20,16 @@ from aiohttp.hdrs import (
     TRANSFER_ENCODING,
 )
 from aiohttp.web_exceptions import HTTPBadGateway
-from multidict import istr

-from homeassistant.components.http import KEY_AUTHENTICATED, HomeAssistantView
+from homeassistant.components.http import (
+    KEY_AUTHENTICATED,
+    KEY_HASS_USER,
+    HomeAssistantView,
+)
 from homeassistant.components.onboarding import async_is_onboarded
 from homeassistant.core import HomeAssistant

-from .const import X_HASS_IS_ADMIN, X_HASS_USER_ID
+from .const import X_HASS_SOURCE

 _LOGGER = logging.getLogger(__name__)

@@ -34,23 +38,53 @@ MAX_UPLOAD_SIZE = 1024 * 1024 * 1024
 # pylint: disable=implicit-str-concat
 NO_TIMEOUT = re.compile(
     r"^(?:"
     r"|homeassistant/update"
     r"|hassos/update"
     r"|hassos/update/cli"
     r"|supervisor/update"
     r"|addons/[^/]+/(?:update|install|rebuild)"
     r"|backups/.+/full"
     r"|backups/.+/partial"
     r"|backups/[^/]+/(?:upload|download)"
     r")$"
 )

-NO_AUTH_ONBOARDING = re.compile(r"^(?:" r"|supervisor/logs" r"|backups/[^/]+/.+" r")$")
+# fmt: off
+# Onboarding can upload backups and restore it
+PATHS_NOT_ONBOARDED = re.compile(
+    r"^(?:"
+    r"|backups/[a-f0-9]{8}(/info|/new/upload|/download|/restore/full|/restore/partial)?"
+    r"|backups/new/upload"
+    r")$"
+)

-NO_AUTH = re.compile(r"^(?:" r"|app/.*" r"|[store\/]*addons/[^/]+/(logo|icon)" r")$")
+# Authenticated users manage backups + download logs, changelog and documentation
+PATHS_ADMIN = re.compile(
+    r"^(?:"
+    r"|backups/[a-f0-9]{8}(/info|/download|/restore/full|/restore/partial)?"
+    r"|backups/new/upload"
+    r"|audio/logs"
+    r"|cli/logs"
+    r"|core/logs"
+    r"|dns/logs"
+    r"|host/logs"
+    r"|multicast/logs"
+    r"|observer/logs"
+    r"|supervisor/logs"
+    r"|addons/[^/]+/(changelog|documentation|logs)"
+    r")$"
+)

-NO_STORE = re.compile(r"^(?:" r"|app/entrypoint.js" r")$")
+# Unauthenticated requests come in for Supervisor panel + add-on images
+PATHS_NO_AUTH = re.compile(
+    r"^(?:"
+    r"|app/.*"
+    r"|(store/)?addons/[^/]+/(logo|icon)"
+    r")$"
+)
+
+NO_STORE = re.compile(
+    r"^(?:"
+    r"|app/entrypoint.js"
+    r")$"
+)
 # pylint: enable=implicit-str-concat
+# fmt: on


 class HassIOView(HomeAssistantView):
@@ -65,38 +99,66 @@ class HassIOView(HomeAssistantView):
         self._host = host
         self._websession = websession

-    async def _handle(
-        self, request: web.Request, path: str
-    ) -> web.Response | web.StreamResponse:
-        """Route data to Hass.io."""
-        hass = request.app["hass"]
-        if _need_auth(hass, path) and not request[KEY_AUTHENTICATED]:
-            return web.Response(status=HTTPStatus.UNAUTHORIZED)
-
-        return await self._command_proxy(path, request)
-
-    delete = _handle
-    get = _handle
-    post = _handle
-
-    async def _command_proxy(
-        self, path: str, request: web.Request
-    ) -> web.StreamResponse:
+    async def _handle(self, request: web.Request, path: str) -> web.StreamResponse:
         """Return a client request with proxy origin for Hass.io supervisor.

-        This method is a coroutine.
+        Use cases:
+        - Onboarding allows restoring backups
+        - Load Supervisor panel and add-on logo unauthenticated
+        - User upload/restore backups
         """
-        headers = _init_header(request)
-        if path == "backups/new/upload":
-            # We need to reuse the full content type that includes the boundary
-            headers[
-                CONTENT_TYPE
-            ] = request._stored_content_type  # pylint: disable=protected-access
+        # No bullshit
+        if path != unquote(path):
+            return web.Response(status=HTTPStatus.BAD_REQUEST)
+
+        hass: HomeAssistant = request.app["hass"]
+        is_admin = request[KEY_AUTHENTICATED] and request[KEY_HASS_USER].is_admin
+        authorized = is_admin
+
+        if is_admin:
+            allowed_paths = PATHS_ADMIN
+
+        elif not async_is_onboarded(hass):
+            allowed_paths = PATHS_NOT_ONBOARDED
+
+            # During onboarding we need the user to manage backups
+            authorized = True
+
+        else:
+            # Either unauthenticated or not an admin
+            allowed_paths = PATHS_NO_AUTH
+
+        no_auth_path = PATHS_NO_AUTH.match(path)
+        headers = {
+            X_HASS_SOURCE: "core.http",
+        }
+
+        if no_auth_path:
+            if request.method != "GET":
+                return web.Response(status=HTTPStatus.METHOD_NOT_ALLOWED)
+
+        else:
+            if not allowed_paths.match(path):
+                return web.Response(status=HTTPStatus.UNAUTHORIZED)
+
+            if authorized:
+                headers[
+                    AUTHORIZATION
+                ] = f"Bearer {os.environ.get('SUPERVISOR_TOKEN', '')}"
+
+        if request.method == "POST":
+            headers[CONTENT_TYPE] = request.content_type
+            # _stored_content_type is only computed once `content_type` is accessed
+            if path == "backups/new/upload":
+                # We need to reuse the full content type that includes the boundary
+                headers[
+                    CONTENT_TYPE
+                ] = request._stored_content_type  # pylint: disable=protected-access

         try:
             client = await self._websession.request(
                 method=request.method,
-                url=f"http://{self._host}/{path}",
+                url=f"http://{self._host}/{quote(path)}",
                 params=request.query,
                 data=request.content,
                 headers=headers,
@@ -123,20 +185,8 @@ class HassIOView(HomeAssistantView):

         raise HTTPBadGateway()

-
-def _init_header(request: web.Request) -> dict[istr, str]:
-    """Create initial header."""
-    headers = {
-        AUTHORIZATION: f"Bearer {os.environ.get('SUPERVISOR_TOKEN', '')}",
-        CONTENT_TYPE: request.content_type,
-    }
-
-    # Add user data
-    if request.get("hass_user") is not None:
-        headers[istr(X_HASS_USER_ID)] = request["hass_user"].id
-        headers[istr(X_HASS_IS_ADMIN)] = str(int(request["hass_user"].is_admin))
-
-    return headers
+    get = _handle
+    post = _handle


 def _response_header(response: aiohttp.ClientResponse, path: str) -> dict[str, str]:
@@ -164,12 +214,3 @@ def _get_timeout(path: str) -> ClientTimeout:
     if NO_TIMEOUT.match(path):
         return ClientTimeout(connect=10, total=None)
     return ClientTimeout(connect=10, total=300)
-
-
-def _need_auth(hass: HomeAssistant, path: str) -> bool:
-    """Return if a path need authentication."""
-    if not async_is_onboarded(hass) and NO_AUTH_ONBOARDING.match(path):
-        return False
-    if NO_AUTH.match(path):
-        return False
-    return True
```
```diff
@@ -3,20 +3,22 @@ from __future__ import annotations

 import asyncio
 from collections.abc import Iterable
+from functools import lru_cache
 from ipaddress import ip_address
 import logging
-import os
+from urllib.parse import quote

 import aiohttp
 from aiohttp import ClientTimeout, hdrs, web
-from aiohttp.web_exceptions import HTTPBadGateway
+from aiohttp.web_exceptions import HTTPBadGateway, HTTPBadRequest
 from multidict import CIMultiDict
+from yarl import URL

 from homeassistant.components.http import HomeAssistantView
 from homeassistant.core import HomeAssistant, callback
 from homeassistant.helpers.aiohttp_client import async_get_clientsession

-from .const import X_AUTH_TOKEN, X_INGRESS_PATH
+from .const import X_HASS_SOURCE, X_INGRESS_PATH

 _LOGGER = logging.getLogger(__name__)

@@ -42,9 +44,19 @@ class HassIOIngress(HomeAssistantView):
         self._host = host
         self._websession = websession

+    @lru_cache
     def _create_url(self, token: str, path: str) -> str:
         """Create URL to service."""
-        return f"http://{self._host}/ingress/{token}/{path}"
+        base_path = f"/ingress/{token}/"
+        url = f"http://{self._host}{base_path}{quote(path)}"
+
+        try:
+            if not URL(url).path.startswith(base_path):
+                raise HTTPBadRequest()
+        except ValueError as err:
+            raise HTTPBadRequest() from err
+
+        return url

     async def _handle(
         self, request: web.Request, token: str, path: str
@@ -185,10 +197,8 @@ def _init_header(request: web.Request, token: str) -> CIMultiDict | dict[str, st
             continue
         headers[name] = value

-    # Inject token / cleanup later on Supervisor
-    headers[X_AUTH_TOKEN] = os.environ.get("SUPERVISOR_TOKEN", "")
-
     # Ingress information
+    headers[X_HASS_SOURCE] = "core.ingress"
     headers[X_INGRESS_PATH] = f"/api/hassio_ingress/{token}"

     # Set X-Forwarded-For
```
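The new `_create_url` defends against path traversal: after quoting, the resulting URL's normalized path must still live under the ingress prefix. A standalone sketch of just that check with `yarl` (not the full handler):

```python
from urllib.parse import quote

from yarl import URL


def create_ingress_url(host: str, token: str, path: str) -> str:
    base_path = f"/ingress/{token}/"
    url = f"http://{host}{base_path}{quote(path)}"
    # A path like "../../supervisor/info" normalizes to a location outside
    # the ingress prefix; reject anything that escapes base_path.
    if not URL(url).path.startswith(base_path):
        raise ValueError("path traversal attempt")
    return url


print(create_ingress_url("127.0.0.1", "abc", "index.html"))  # ok
# create_ingress_url("127.0.0.1", "abc", "../../supervisor/info")  # raises
```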
```diff
@@ -70,11 +70,11 @@ UNHEALTHY_REASONS = {
 }


-class SupervisorRepairs:
-    """Create repairs from supervisor events."""
+class SupervisorIssues:
+    """Create issues from supervisor events."""

     def __init__(self, hass: HomeAssistant, client: HassIO) -> None:
-        """Initialize supervisor repairs."""
+        """Initialize supervisor issues."""
         self._hass = hass
         self._client = client
         self._unsupported_reasons: set[str] = set()
@@ -87,7 +87,7 @@ class SupervisorRepairs:

     @unhealthy_reasons.setter
     def unhealthy_reasons(self, reasons: set[str]) -> None:
-        """Set unhealthy reasons. Create or delete repairs as necessary."""
+        """Set unhealthy reasons. Create or delete issues as necessary."""
         for unhealthy in reasons - self.unhealthy_reasons:
             if unhealthy in UNHEALTHY_REASONS:
                 translation_key = f"unhealthy_{unhealthy}"
@@ -119,7 +119,7 @@ class SupervisorRepairs:

     @unsupported_reasons.setter
     def unsupported_reasons(self, reasons: set[str]) -> None:
-        """Set unsupported reasons. Create or delete repairs as necessary."""
+        """Set unsupported reasons. Create or delete issues as necessary."""
         for unsupported in reasons - UNSUPPORTED_SKIP_REPAIR - self.unsupported_reasons:
             if unsupported in UNSUPPORTED_REASONS:
                 translation_key = f"unsupported_{unsupported}"
@@ -149,18 +149,18 @@ class SupervisorRepairs:
         await self.update()

         async_dispatcher_connect(
-            self._hass, EVENT_SUPERVISOR_EVENT, self._supervisor_events_to_repairs
+            self._hass, EVENT_SUPERVISOR_EVENT, self._supervisor_events_to_issues
         )

     async def update(self) -> None:
-        """Update repairs from Supervisor resolution center."""
+        """Update issues from Supervisor resolution center."""
         data = await self._client.get_resolution_info()
         self.unhealthy_reasons = set(data[ATTR_UNHEALTHY])
         self.unsupported_reasons = set(data[ATTR_UNSUPPORTED])

     @callback
-    def _supervisor_events_to_repairs(self, event: dict[str, Any]) -> None:
-        """Create repairs from supervisor events."""
+    def _supervisor_events_to_issues(self, event: dict[str, Any]) -> None:
+        """Create issues from supervisor events."""
         if ATTR_WS_EVENT not in event:
             return
```
```diff
@@ -1,7 +1,6 @@
 {
   "domain": "hassio",
   "name": "Home Assistant Supervisor",
-  "after_dependencies": ["panel_custom"],
   "codeowners": ["@home-assistant/supervisor"],
   "dependencies": ["http"],
   "documentation": "https://www.home-assistant.io/integrations/hassio",
```
```diff
@@ -116,6 +116,7 @@ async def websocket_supervisor_api(
         method=msg[ATTR_METHOD],
         timeout=msg.get(ATTR_TIMEOUT, 10),
         payload=msg.get(ATTR_DATA, {}),
+        source="core.websocket_api",
     )

     if result.get(ATTR_RESULT) == "error":
```
```diff
@@ -421,6 +421,7 @@ class HoneywellUSThermostat(ClimateEntity):
         """Get the latest state from the service."""
         try:
             await self._device.refresh()
+            self._attr_available = True
         except (
             aiosomecomfort.SomeComfortError,
             OSError,
@@ -428,8 +429,10 @@ class HoneywellUSThermostat(ClimateEntity):
             try:
                 await self._data.client.login()

-            except aiosomecomfort.SomeComfortError:
+            except aiosomecomfort.AuthError:
                 self._attr_available = False
                 await self.hass.async_create_task(
                     self.hass.config_entries.async_reload(self._data.entry_id)
                 )
+            except aiosomecomfort.SomeComfortError:
+                self._attr_available = False
```
```diff
@@ -6,5 +6,5 @@
   "documentation": "https://www.home-assistant.io/integrations/honeywell",
   "iot_class": "cloud_polling",
   "loggers": ["somecomfort"],
-  "requirements": ["aiosomecomfort==0.0.8"]
+  "requirements": ["aiosomecomfort==0.0.11"]
 }
```
```diff
@@ -7,6 +7,13 @@
         "username": "[%key:common::config_flow::data::username%]",
         "password": "[%key:common::config_flow::data::password%]"
       }
-    }
+    },
+    "reauth_confirm": {
+      "title": "[%key:common::config_flow::title::reauth%]",
+      "description": "The Honeywell integration needs to re-authenticate your account",
+      "data": {
+        "password": "[%key:common::config_flow::data::password%]"
+      }
+    }
   },
   "error": {
```
```diff
@@ -60,9 +60,7 @@ def async_sign_path(

     url = URL(path)
     now = dt_util.utcnow()
-    params = dict(sorted(url.query.items()))
-    for param in SAFE_QUERY_PARAMS:
-        params.pop(param, None)
+    params = [itm for itm in url.query.items() if itm[0] not in SAFE_QUERY_PARAMS]
     encoded = jwt.encode(
         {
             "iss": refresh_token_id,
@@ -75,7 +73,7 @@ def async_sign_path(
         algorithm="HS256",
     )

-    params[SIGN_QUERY_PARAM] = encoded
+    params.append((SIGN_QUERY_PARAM, encoded))
     url = url.with_query(params)
     return f"{url.path}?{url.query_string}"

@@ -184,10 +182,11 @@ async def async_setup_auth(hass: HomeAssistant, app: Application) -> None:
         if claims["path"] != request.path:
             return False

-        params = dict(sorted(request.query.items()))
-        del params[SIGN_QUERY_PARAM]
-        for param in SAFE_QUERY_PARAMS:
-            params.pop(param, None)
+        params = [
+            list(itm)  # claims stores tuples as lists
+            for itm in request.query.items()
+            if itm[0] not in SAFE_QUERY_PARAMS and itm[0] != SIGN_QUERY_PARAM
+        ]
         if claims["params"] != params:
             return False
```
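Storing the query as a list of pairs instead of a dict keeps duplicate parameters and their order intact, so the signature check compares exactly what the client sent. A small illustration of what the dict version loses (illustrative values only):

```python
from yarl import URL

url = URL("/api/camera_proxy/camera.demo?tag=a&tag=b")

as_dict = dict(sorted(url.query.items()))  # old approach
as_list = list(url.query.items())          # new approach

print(as_dict)  # {'tag': 'b'} -- the duplicate 'tag=a' is silently dropped
print(as_list)  # [('tag', 'a'), ('tag', 'b')] -- both values survive
```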
```diff
@@ -5,6 +5,7 @@ from collections.abc import Awaitable, Callable
 import logging
 import re
 from typing import Final
+from urllib.parse import unquote

 from aiohttp.web import Application, HTTPBadRequest, Request, StreamResponse, middleware

@@ -39,18 +40,24 @@ FILTERS: Final = re.compile(
 def setup_security_filter(app: Application) -> None:
     """Create security filter middleware for the app."""

+    def _recursive_unquote(value: str) -> str:
+        """Handle values that are encoded multiple times."""
+        if (unquoted := unquote(value)) != value:
+            unquoted = _recursive_unquote(unquoted)
+        return unquoted
+
     @middleware
     async def security_filter_middleware(
         request: Request, handler: Callable[[Request], Awaitable[StreamResponse]]
     ) -> StreamResponse:
-        """Process request and tblock commonly known exploit attempts."""
-        if FILTERS.search(request.path):
+        """Process request and block commonly known exploit attempts."""
+        if FILTERS.search(_recursive_unquote(request.path)):
             _LOGGER.warning(
                 "Filtered a potential harmful request to: %s", request.raw_path
             )
             raise HTTPBadRequest

-        if FILTERS.search(request.query_string):
+        if FILTERS.search(_recursive_unquote(request.query_string)):
             _LOGGER.warning(
                 "Filtered a request with a potential harmful query string: %s",
                 request.raw_path,
```
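Attackers can defeat a single `unquote` by double-encoding: `%252e` decodes to `%2e`, which decodes to `.`. The helper above keeps decoding until the value stops changing; a quick demonstration:

```python
from urllib.parse import unquote


def recursive_unquote(value: str) -> str:
    """Decode until a fixed point is reached."""
    if (unquoted := unquote(value)) != value:
        unquoted = recursive_unquote(unquoted)
    return unquoted


payload = "%252e%252e%252fetc%252fpasswd"  # double-encoded "../etc/passwd"
print(unquote(payload))            # '%2e%2e%2fetc%2fpasswd' -- still masked
print(recursive_unquote(payload))  # '../etc/passwd' -- what the filter must see
```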
```diff
@@ -35,6 +35,7 @@ TRIGGER_TYPE = {
     "remote_double_button_long_press": "both {subtype} released after long press",
     "remote_double_button_short_press": "both {subtype} released",
     "initial_press": "{subtype} pressed initially",
+    "long_press": "{subtype} long press",
     "repeat": "{subtype} held down",
     "short_release": "{subtype} released after short press",
     "long_release": "{subtype} released after long press",
```
```diff
@@ -11,6 +11,6 @@
   "iot_class": "local_push",
   "loggers": ["aiohue"],
   "quality_scale": "platinum",
-  "requirements": ["aiohue==4.6.1"],
+  "requirements": ["aiohue==4.6.2"],
   "zeroconf": ["_hue._tcp.local."]
 }
```
```diff
@@ -118,13 +118,14 @@ class HueSceneEntityBase(HueBaseEntity, SceneEntity):
         """Return device (service) info."""
         # we create a virtual service/device for Hue scenes
         # so we have a parent for grouped lights and scenes
+        group_type = self.group.type.value.title()
         return DeviceInfo(
             identifiers={(DOMAIN, self.group.id)},
             entry_type=DeviceEntryType.SERVICE,
             name=self.group.metadata.name,
             manufacturer=self.bridge.api.config.bridge_device.product_data.manufacturer_name,
             model=self.group.type.value.title(),
-            suggested_area=self.group.metadata.name,
+            suggested_area=self.group.metadata.name if group_type == "Room" else None,
             via_device=(DOMAIN, self.bridge.api.config.bridge_device.id),
         )
```
```diff
@@ -46,6 +46,7 @@ DEFAULT_BUTTON_EVENT_TYPES = (
     ButtonEvent.INITIAL_PRESS,
     ButtonEvent.REPEAT,
     ButtonEvent.SHORT_RELEASE,
+    ButtonEvent.LONG_PRESS,
     ButtonEvent.LONG_RELEASE,
 )
```
```diff
@@ -55,7 +55,13 @@ class HueBaseEntity(Entity):
         self._attr_unique_id = resource.id
         # device is precreated in main handler
         # this attaches the entity to the precreated device
-        if self.device is not None:
+        if self.device is None:
+            # attach all device-less entities to the bridge itself
+            # e.g. config based sensors like entertainment area
+            self._attr_device_info = DeviceInfo(
+                identifiers={(DOMAIN, bridge.api.config.bridge.bridge_id)},
+            )
+        else:
             self._attr_device_info = DeviceInfo(
                 identifiers={(DOMAIN, self.device.id)},
             )
@@ -137,17 +143,14 @@ class HueBaseEntity(Entity):
     def _handle_event(self, event_type: EventType, resource: HueResource) -> None:
         """Handle status event for this resource (or it's parent)."""
         if event_type == EventType.RESOURCE_DELETED:
-            # remove any services created for zones/rooms
+            # handle removal of room and zone 'virtual' devices/services
+            # regular devices are removed automatically by the logic in device.py.
             if resource.type in (ResourceTypes.ROOM, ResourceTypes.ZONE):
                 dev_reg = async_get_device_registry(self.hass)
                 if device := dev_reg.async_get_device({(DOMAIN, resource.id)}):
                     dev_reg.async_remove_device(device.id)
-            if resource.type in (
-                ResourceTypes.GROUPED_LIGHT,
-                ResourceTypes.SCENE,
-                ResourceTypes.SMART_SCENE,
-            ):
-                # cleanup entities that are not strictly device-bound and have the bridge as parent
-                if self.device is None:
-                    ent_reg = async_get_entity_registry(self.hass)
-                    ent_reg.async_remove(self.entity_id)
+            # cleanup entities that are not strictly device-bound and have the bridge as parent
+            if self.device is None:
+                ent_reg = async_get_entity_registry(self.hass)
+                ent_reg.async_remove(self.entity_id)
             return
```
```diff
@@ -153,6 +153,7 @@ async def async_setup_entry(  # noqa: C901
                 system.serial,
                 svc_exception,
             )
+            await system.aqualink.close()
         else:
             cur = system.online
             if cur and not prev:
```
```diff
@@ -3,6 +3,7 @@ from __future__ import annotations

 from collections.abc import Awaitable

+import httpx
 from iaqualink.exception import AqualinkServiceException

 from homeassistant.exceptions import HomeAssistantError
@@ -12,5 +13,5 @@ async def await_or_reraise(awaitable: Awaitable) -> None:
     """Execute API call while catching service exceptions."""
     try:
         await awaitable
-    except AqualinkServiceException as svc_exception:
+    except (AqualinkServiceException, httpx.HTTPError) as svc_exception:
         raise HomeAssistantError(f"Aqualink error: {svc_exception}") from svc_exception
```
```diff
@@ -77,7 +77,9 @@ class ImapDataUpdateCoordinator(DataUpdateCoordinator[int]):
                 f"Invalid response for search '{self.config_entry.data[CONF_SEARCH]}': {result} / {lines[0]}"
             )
         if self.support_push:
-            self.hass.async_create_task(self.async_wait_server_push())
+            self.hass.async_create_background_task(
+                self.async_wait_server_push(), "Wait for IMAP data push"
+            )
         return len(lines[0].split())

     async def async_wait_server_push(self) -> None:
@@ -100,5 +102,7 @@ class ImapDataUpdateCoordinator(DataUpdateCoordinator[int]):
     async def shutdown(self, *_) -> None:
         """Close resources."""
         if self.imap_client:
+            if self.imap_client.has_pending_idle():
+                self.imap_client.idle_done()
             await self.imap_client.stop_wait_server_push()
             await self.imap_client.logout()
```
```diff
@@ -95,9 +95,25 @@ class EmailReader:
         self._folder = folder
         self._verify_ssl = verify_ssl
         self._last_id = None
+        self._last_message = None
         self._unread_ids = deque([])
         self.connection = None

+    @property
+    def last_id(self) -> int | None:
+        """Return last email uid that was processed."""
+        return self._last_id
+
+    @property
+    def last_unread_id(self) -> int | None:
+        """Return last email uid received."""
+        # We assume the last id in the list is the last unread id
+        # We cannot know if that is the newest one, because it could arrive later
+        # https://stackoverflow.com/questions/12409862/python-imap-the-order-of-uids
+        if self._unread_ids:
+            return int(self._unread_ids[-1])
+        return self._last_id
+
     def connect(self):
         """Login and setup the connection."""
         ssl_context = client_context() if self._verify_ssl else None
@@ -128,21 +144,21 @@ class EmailReader:
         try:
             self.connection.select(self._folder, readonly=True)

-            if not self._unread_ids:
-                search = f"SINCE {datetime.date.today():%d-%b-%Y}"
-                if self._last_id is not None:
-                    search = f"UID {self._last_id}:*"
+            if self._last_id is None:
+                # search for today and yesterday
+                time_from = datetime.datetime.now() - datetime.timedelta(days=1)
+                search = f"SINCE {time_from:%d-%b-%Y}"
+            else:
+                search = f"UID {self._last_id}:*"

-                _, data = self.connection.uid("search", None, search)
-                self._unread_ids = deque(data[0].split())
+            _, data = self.connection.uid("search", None, search)
+            self._unread_ids = deque(data[0].split())
             while self._unread_ids:
                 message_uid = self._unread_ids.popleft()
                 if self._last_id is None or int(message_uid) > self._last_id:
                     self._last_id = int(message_uid)
-                    return self._fetch_message(message_uid)
-
-            return self._fetch_message(str(self._last_id))
+                    self._last_message = self._fetch_message(message_uid)
+            return self._last_message

         except imaplib.IMAP4.error:
             _LOGGER.info("Connection to %s lost, attempting to reconnect", self._server)
@@ -254,22 +270,30 @@ class EmailContentSensor(SensorEntity):
     def update(self) -> None:
         """Read emails and publish state change."""
         email_message = self._email_reader.read_next()
+        while (
+            self._last_id is None or self._last_id != self._email_reader.last_unread_id
+        ):
+            if email_message is None:
+                self._message = None
+                self._state_attributes = {}
+                return

-        if email_message is None:
-            self._message = None
-            self._state_attributes = {}
-            return
+            self._last_id = self._email_reader.last_id

-        if self.sender_allowed(email_message):
-            message = EmailContentSensor.get_msg_subject(email_message)
+            if self.sender_allowed(email_message):
+                message = EmailContentSensor.get_msg_subject(email_message)

-            if self._value_template is not None:
-                message = self.render_template(email_message)
+                if self._value_template is not None:
+                    message = self.render_template(email_message)

-            self._message = message
-            self._state_attributes = {
-                ATTR_FROM: EmailContentSensor.get_msg_sender(email_message),
-                ATTR_SUBJECT: EmailContentSensor.get_msg_subject(email_message),
-                ATTR_DATE: email_message["Date"],
-                ATTR_BODY: EmailContentSensor.get_msg_text(email_message),
-            }
+                self._message = message
+                self._state_attributes = {
+                    ATTR_FROM: EmailContentSensor.get_msg_sender(email_message),
+                    ATTR_SUBJECT: EmailContentSensor.get_msg_subject(email_message),
+                    ATTR_DATE: email_message["Date"],
+                    ATTR_BODY: EmailContentSensor.get_msg_text(email_message),
+                }
+
+            if self._last_id == self._email_reader.last_unread_id:
+                break
+            email_message = self._email_reader.read_next()
```
```diff
@@ -17,8 +17,8 @@
   "iot_class": "local_push",
   "loggers": ["pyinsteon", "pypubsub"],
   "requirements": [
-    "pyinsteon==1.3.2",
-    "insteon-frontend-home-assistant==0.3.2"
+    "pyinsteon==1.3.4",
+    "insteon-frontend-home-assistant==0.3.3"
   ],
   "usb": [
     {
```
```diff
@@ -1,11 +1,13 @@
 """Utilities used by insteon component."""
 import asyncio
+from collections.abc import Callable
 import logging

 from pyinsteon import devices
 from pyinsteon.address import Address
 from pyinsteon.constants import ALDBStatus, DeviceAction
-from pyinsteon.events import OFF_EVENT, OFF_FAST_EVENT, ON_EVENT, ON_FAST_EVENT
+from pyinsteon.device_types.device_base import Device
+from pyinsteon.events import OFF_EVENT, OFF_FAST_EVENT, ON_EVENT, ON_FAST_EVENT, Event
 from pyinsteon.managers.link_manager import (
     async_enter_linking_mode,
     async_enter_unlinking_mode,
@@ -27,7 +29,7 @@ from homeassistant.const import (
     CONF_PLATFORM,
     ENTITY_MATCH_ALL,
 )
-from homeassistant.core import ServiceCall, callback
+from homeassistant.core import HomeAssistant, ServiceCall, callback
 from homeassistant.helpers import device_registry as dr
 from homeassistant.helpers.dispatcher import (
     async_dispatcher_connect,
@@ -89,49 +91,52 @@ from .schemas import (
 _LOGGER = logging.getLogger(__name__)


-def add_on_off_event_device(hass, device):
+def _register_event(event: Event, listener: Callable) -> None:
+    """Register the events raised by a device."""
+    _LOGGER.debug(
+        "Registering on/off event for %s %d %s",
+        str(event.address),
+        event.group,
+        event.name,
+    )
+    event.subscribe(listener, force_strong_ref=True)
+
+
+def add_on_off_event_device(hass: HomeAssistant, device: Device) -> None:
     """Register an Insteon device as an on/off event device."""

     @callback
-    def async_fire_group_on_off_event(name, address, group, button):
+    def async_fire_group_on_off_event(
+        name: str, address: Address, group: int, button: str
+    ):
         # Firing an event when a button is pressed.
         if button and button[-2] == "_":
             button_id = button[-1].lower()
         else:
             button_id = None

-        schema = {CONF_ADDRESS: address}
+        schema = {CONF_ADDRESS: address, "group": group}
         if button_id:
             schema[EVENT_CONF_BUTTON] = button_id
         if name == ON_EVENT:
             event = EVENT_GROUP_ON
-        if name == OFF_EVENT:
+        elif name == OFF_EVENT:
             event = EVENT_GROUP_OFF
-        if name == ON_FAST_EVENT:
+        elif name == ON_FAST_EVENT:
             event = EVENT_GROUP_ON_FAST
-        if name == OFF_FAST_EVENT:
+        elif name == OFF_FAST_EVENT:
             event = EVENT_GROUP_OFF_FAST
+        else:
+            event = f"insteon.{name}"
         _LOGGER.debug("Firing event %s with %s", event, schema)
         hass.bus.async_fire(event, schema)

-    for group in device.events:
-        if isinstance(group, int):
-            for event in device.events[group]:
-                if event in [
-                    OFF_EVENT,
-                    ON_EVENT,
-                    OFF_FAST_EVENT,
-                    ON_FAST_EVENT,
-                ]:
-                    _LOGGER.debug(
-                        "Registering on/off event for %s %d %s",
-                        str(device.address),
-                        group,
-                        event,
-                    )
-                    device.events[group][event].subscribe(
-                        async_fire_group_on_off_event, force_strong_ref=True
-                    )
+    for name_or_group, event in device.events.items():
+        if isinstance(name_or_group, int):
+            for _, event in device.events[name_or_group].items():
+                _register_event(event, async_fire_group_on_off_event)
+        else:
+            _register_event(event, async_fire_group_on_off_event)


 def register_new_device_callback(hass):
```
```diff
@@ -20,10 +20,10 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
         entry_data[CONF_CLIENT_DEVICE_ID] = entry.entry_id
         hass.config_entries.async_update_entry(entry, data=entry_data)

-    client = create_client(
-        device_id=entry.data[CONF_CLIENT_DEVICE_ID],
-        device_name=hass.config.location_name,
-    )
+    device_id = entry.data[CONF_CLIENT_DEVICE_ID]
+    device_name = ascii(hass.config.location_name)
+
+    client = create_client(device_id=device_id, device_name=device_name)

     try:
         user_id, connect_result = await validate_input(hass, dict(entry.data), client)
```
@@ -4,6 +4,7 @@ from __future__ import annotations

 from abc import ABC, abstractmethod
 from collections.abc import AsyncGenerator
 from pathlib import Path
+import shutil
 from typing import Any, Final

 import voluptuous as vol
@@ -549,9 +550,12 @@ class KNXCommonFlow(ABC, FlowHandler):
             ),
             None,
         )
+        _tunnel_identifier = selected_tunnel_ia or self.new_entry_data.get(
+            CONF_HOST
+        )
+        _tunnel_suffix = f" @ {_tunnel_identifier}" if _tunnel_identifier else ""
         self.new_title = (
-            f"{'Secure ' if _if_user_id else ''}"
-            f"Tunneling @ {selected_tunnel_ia or self.new_entry_data[CONF_HOST]}"
+            f"{'Secure ' if _if_user_id else ''}Tunneling{_tunnel_suffix}"
         )
         return self.finish_flow()

@@ -708,7 +712,8 @@ class KNXCommonFlow(ABC, FlowHandler):
             else:
                 dest_path = Path(self.hass.config.path(STORAGE_DIR, DOMAIN))
                 dest_path.mkdir(exist_ok=True)
-                file_path.rename(dest_path / DEFAULT_KNX_KEYRING_FILENAME)
+                dest_file = dest_path / DEFAULT_KNX_KEYRING_FILENAME
+                shutil.move(file_path, dest_file)
                 return keyring, errors

         keyring, errors = await self.hass.async_add_executor_job(_process_upload)
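`Path.rename()` is a thin wrapper around `os.rename()`, which raises `OSError` (`EXDEV`) when source and destination live on different filesystems, a situation that can occur when the uploaded keyring sits in a temp directory on another mount. `shutil.move()` falls back to copy-and-delete in that case. A small sketch (paths are illustrative):

```python
from pathlib import Path
import shutil

src = Path("/tmp/knx_upload/keyring.knxkeys")        # e.g. a tmpfs upload dir
dst = Path("/config/.storage/knx/keyring.knxkeys")   # e.g. persistent storage

# src.rename(dst) would fail with OSError(EXDEV) across filesystems;
# shutil.move() copies the file and removes the source instead.
shutil.move(src, dst)
```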
@@ -9,5 +9,5 @@
   "iot_class": "local_push",
   "loggers": ["xknx"],
   "quality_scale": "platinum",
-  "requirements": ["xknx==2.5.0"]
+  "requirements": ["xknx==2.6.0"]
 }
@@ -84,7 +84,7 @@ def ensure_zone(value):
     if value is None:
         raise vol.Invalid("zone value is None")

-    if str(value) not in ZONES is None:
+    if str(value) not in ZONES:
         raise vol.Invalid("zone not valid")

     return str(value)
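The removed condition looks plausible but is a comparison-chaining bug: `not in` and `is` are both comparison operators, so Python evaluates `a not in b is None` as `(a not in b) and (b is None)`. Since `ZONES` is never `None`, the expression was always false and invalid zones slipped through:

```python
ZONES = ["11", "12", "13"]   # illustrative zone ids
value = "99"

# Chained form: ("99" not in ZONES) and (ZONES is None) -> always False.
print(str(value) not in ZONES is None)  # False, so no vol.Invalid was raised
# Corrected form actually detects the invalid zone.
print(str(value) not in ZONES)          # True -> raises vol.Invalid
```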
@@ -140,7 +140,7 @@ ROBOT_SENSOR_MAP: dict[type[Robot], list[RobotSensorEntityDescription]] = {
             name="Pet weight",
             native_unit_of_measurement=UnitOfMass.POUNDS,
             device_class=SensorDeviceClass.WEIGHT,
-            state_class=SensorStateClass.TOTAL,
+            state_class=SensorStateClass.MEASUREMENT,
         ),
     ],
     FeederRobot: [
@@ -33,6 +33,7 @@ from homeassistant.helpers.config_validation import (  # noqa: F401
 )
 from homeassistant.helpers.entity import Entity, EntityDescription
 from homeassistant.helpers.entity_component import EntityComponent
+from homeassistant.helpers.service import remove_entity_service_fields
 from homeassistant.helpers.typing import ConfigType, StateType

 _LOGGER = logging.getLogger(__name__)
@@ -92,7 +93,7 @@ async def _async_lock(entity: LockEntity, service_call: ServiceCall) -> None:
         raise ValueError(
             f"Code '{code}' for locking {entity.entity_id} doesn't match pattern {entity.code_format}"
         )
-    await entity.async_lock(**service_call.data)
+    await entity.async_lock(**remove_entity_service_fields(service_call))


 async def _async_unlock(entity: LockEntity, service_call: ServiceCall) -> None:
@@ -102,7 +103,7 @@ async def _async_unlock(entity: LockEntity, service_call: ServiceCall) -> None:
         raise ValueError(
             f"Code '{code}' for unlocking {entity.entity_id} doesn't match pattern {entity.code_format}"
         )
-    await entity.async_unlock(**service_call.data)
+    await entity.async_unlock(**remove_entity_service_fields(service_call))


 async def _async_open(entity: LockEntity, service_call: ServiceCall) -> None:
@@ -112,7 +113,7 @@ async def _async_open(entity: LockEntity, service_call: ServiceCall) -> None:
         raise ValueError(
             f"Code '{code}' for opening {entity.entity_id} doesn't match pattern {entity.code_format}"
         )
-    await entity.async_open(**service_call.data)
+    await entity.async_open(**remove_entity_service_fields(service_call))


 async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
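`remove_entity_service_fields` strips the entity-targeting keys from the service call before the remaining data is forwarded as keyword arguments; previously `entity_id` and friends leaked into `async_lock(**data)`. A rough sketch of the assumed behaviour (the real helper lives in `homeassistant.helpers.service`; the field set here is an assumption):

```python
ENTITY_SERVICE_FIELDS = ("entity_id", "device_id", "area_id")  # assumed set

def remove_entity_service_fields(service_call) -> dict:
    """Drop targeting fields, keep only the actual service parameters."""
    return {
        key: value
        for key, value in service_call.data.items()
        if key not in ENTITY_SERVICE_FIELDS
    }
```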
@@ -1,6 +1,7 @@
 """Matter light."""
 from __future__ import annotations

+from enum import IntFlag
 from typing import Any

 from chip.clusters import Objects as clusters
@@ -260,12 +261,16 @@ class MatterLight(MatterEntity, LightEntity):
         color_temp = kwargs.get(ATTR_COLOR_TEMP)
         brightness = kwargs.get(ATTR_BRIGHTNESS)

-        if hs_color is not None and self.supports_color:
-            await self._set_hs_color(hs_color)
-        elif xy_color is not None:
-            await self._set_xy_color(xy_color)
-        elif color_temp is not None and self.supports_color_temperature:
-            await self._set_color_temp(color_temp)
+        if self.supported_color_modes is not None:
+            if hs_color is not None and ColorMode.HS in self.supported_color_modes:
+                await self._set_hs_color(hs_color)
+            elif xy_color is not None and ColorMode.XY in self.supported_color_modes:
+                await self._set_xy_color(xy_color)
+            elif (
+                color_temp is not None
+                and ColorMode.COLOR_TEMP in self.supported_color_modes
+            ):
+                await self._set_color_temp(color_temp)

         if brightness is not None and self.supports_brightness:
             await self._set_brightness(brightness)
@@ -284,7 +289,6 @@ class MatterLight(MatterEntity, LightEntity):
     @callback
     def _update_from_device(self) -> None:
         """Update from device."""
-
         if self._attr_supported_color_modes is None:
             # work out what (color)features are supported
             supported_color_modes: set[ColorMode] = set()
@@ -297,30 +301,19 @@ class MatterLight(MatterEntity, LightEntity):
             if self._entity_info.endpoint.has_attribute(
                 None, clusters.ColorControl.Attributes.ColorMode
             ):
                 # device has some color support, check which color modes
                 # are supported with the featuremap on the ColorControl cluster
-                color_feature_map = self.get_matter_attribute_value(
-                    clusters.ColorControl.Attributes.FeatureMap,
+                capabilities = self.get_matter_attribute_value(
+                    clusters.ColorControl.Attributes.ColorCapabilities
                 )
-                if (
-                    color_feature_map
-                    & clusters.ColorControl.Attributes.CurrentHue.attribute_id
-                ):
+
+                assert capabilities is not None
+
+                if capabilities & ColorCapabilities.kHueSaturationSupported:
                     supported_color_modes.add(ColorMode.HS)
-                if (
-                    color_feature_map
-                    & clusters.ColorControl.Attributes.CurrentX.attribute_id
-                ):
+
+                if capabilities & ColorCapabilities.kXYAttributesSupported:
                     supported_color_modes.add(ColorMode.XY)

-                # color temperature support detection using the featuremap is not reliable
-                # (temporary?) fallback to checking the value
-                if (
-                    self.get_matter_attribute_value(
-                        clusters.ColorControl.Attributes.ColorTemperatureMireds
-                    )
-                    is not None
-                ):
+                if capabilities & ColorCapabilities.kColorTemperatureSupported:
                     supported_color_modes.add(ColorMode.COLOR_TEMP)

             self._attr_supported_color_modes = supported_color_modes
@@ -351,11 +344,23 @@ class MatterLight(MatterEntity, LightEntity):
         self._attr_brightness = self._get_brightness()


+# This enum should be removed once the ColorControlCapabilities enum is added to the CHIP (Matter) library
+# clusters.ColorControl.Bitmap.ColorCapabilities
+class ColorCapabilities(IntFlag):
+    """Color control capabilities bitmap."""
+
+    kHueSaturationSupported = 0x1
+    kEnhancedHueSupported = 0x2
+    kColorLoopSupported = 0x4
+    kXYAttributesSupported = 0x8
+    kColorTemperatureSupported = 0x10
+
+
 # Discovery schema(s) to map Matter Attributes to HA entities
 DISCOVERY_SCHEMAS = [
     MatterDiscoverySchema(
         platform=Platform.LIGHT,
-        entity_description=LightEntityDescription(key="ExtendedMatterLight"),
+        entity_description=LightEntityDescription(key="MatterLight"),
         entity_class=MatterLight,
         required_attributes=(clusters.OnOff.Attributes.OnOff,),
         optional_attributes=(
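The temporary `ColorCapabilities` enum mirrors the Matter ColorControl ColorCapabilities bitmap, so support checks become simple flag tests instead of probing unrelated attribute IDs. A self-contained illustration:

```python
from enum import IntFlag

class ColorCapabilities(IntFlag):
    """Color control capabilities bitmap (values from the diff above)."""
    kHueSaturationSupported = 0x1
    kEnhancedHueSupported = 0x2
    kColorLoopSupported = 0x4
    kXYAttributesSupported = 0x8
    kColorTemperatureSupported = 0x10

# A device reporting HS + color temperature support (hypothetical value 0x11):
capabilities = ColorCapabilities(0x11)
print(bool(capabilities & ColorCapabilities.kHueSaturationSupported))     # True
print(bool(capabilities & ColorCapabilities.kXYAttributesSupported))      # False
print(bool(capabilities & ColorCapabilities.kColorTemperatureSupported))  # True
```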
@@ -7,5 +7,5 @@
   "iot_class": "cloud_polling",
   "loggers": ["pymazda"],
   "quality_scale": "platinum",
-  "requirements": ["pymazda==0.3.7"]
+  "requirements": ["pymazda==0.3.8"]
 }
@@ -4,7 +4,7 @@ from __future__ import annotations

 import asyncio
 from collections.abc import Callable, Coroutine
 from contextlib import suppress
-from functools import wraps
+from functools import lru_cache, wraps
 from http import HTTPStatus
 import logging
 import secrets
@@ -365,6 +365,12 @@ async def webhook_stream_camera(
     return webhook_response(resp, registration=config_entry.data)


+@lru_cache
+def _cached_template(template_str: str, hass: HomeAssistant) -> template.Template:
+    """Return a cached template."""
+    return template.Template(template_str, hass)
+
+
 @WEBHOOK_COMMANDS.register("render_template")
 @validate_schema(
     {
@@ -381,7 +387,7 @@ async def webhook_render_template(
     resp = {}
     for key, item in data.items():
         try:
-            tpl = template.Template(item[ATTR_TEMPLATE], hass)
+            tpl = _cached_template(item[ATTR_TEMPLATE], hass)
             resp[key] = tpl.async_render(item.get(ATTR_TEMPLATE_VARIABLES))
         except TemplateError as ex:
             resp[key] = {"error": str(ex)}
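`functools.lru_cache` memoizes on the positional arguments, so repeated `render_template` webhook calls with the same template string reuse one compiled `Template` object instead of re-parsing it on every request (both arguments must be hashable for this to work). The effect in miniature, with a stand-in for template compilation:

```python
from functools import lru_cache

@lru_cache
def compile_template(template_str: str) -> list[str]:
    print(f"compiling {template_str!r}")  # stand-in for the expensive parse
    return template_str.split()

compile_template("{{ states('sun.sun') }}")  # prints "compiling ..."
compile_template("{{ states('sun.sun') }}")  # cache hit: no print
print(compile_template.cache_info().hits)    # 1
```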
@@ -16,7 +16,7 @@ from pymodbus.client import (
 from pymodbus.constants import Defaults
 from pymodbus.exceptions import ModbusException
 from pymodbus.pdu import ModbusResponse
-from pymodbus.transaction import ModbusRtuFramer
+from pymodbus.transaction import ModbusAsciiFramer, ModbusRtuFramer, ModbusSocketFramer
 import voluptuous as vol

 from homeassistant.const import (
@@ -137,8 +137,10 @@ async def async_modbus_setup(
         for name in hubs:
             if not await hubs[name].async_setup():
                 return False
+        hub_collect = hass.data[DOMAIN]
+    else:
+        hass.data[DOMAIN] = hub_collect = {}

-    hass.data[DOMAIN] = hub_collect = {}
     for conf_hub in config[DOMAIN]:
         my_hub = ModbusHub(hass, conf_hub)
         hub_collect[conf_hub[CONF_NAME]] = my_hub
@@ -279,9 +281,12 @@ class ModbusHub:
         }
         if self._config_type == SERIAL:
             # serial configuration
+            if client_config[CONF_METHOD] == "ascii":
+                self._pb_params["framer"] = ModbusAsciiFramer
+            else:
+                self._pb_params["framer"] = ModbusRtuFramer
             self._pb_params.update(
                 {
-                    "method": client_config[CONF_METHOD],
                     "baudrate": client_config[CONF_BAUDRATE],
                     "stopbits": client_config[CONF_STOPBITS],
                     "bytesize": client_config[CONF_BYTESIZE],
@@ -293,6 +298,8 @@ class ModbusHub:
             self._pb_params["host"] = client_config[CONF_HOST]
             if self._config_type == RTUOVERTCP:
                 self._pb_params["framer"] = ModbusRtuFramer
+            else:
+                self._pb_params["framer"] = ModbusSocketFramer

         Defaults.Timeout = client_config[CONF_TIMEOUT]
         if CONF_MSG_WAIT in client_config:
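pymodbus 3.x no longer accepts a `method=` keyword, so the framer class is now chosen explicitly. The selection logic added above, condensed into one sketch (config-type strings follow the component's constants):

```python
from pymodbus.transaction import ModbusAsciiFramer, ModbusRtuFramer, ModbusSocketFramer

def pick_framer(config_type: str, method: str | None = None):
    """Mirror the framer choice made in ModbusHub's parameter setup."""
    if config_type == "serial":
        # Serial "ascii" uses the ASCII framer, any other method uses RTU.
        return ModbusAsciiFramer if method == "ascii" else ModbusRtuFramer
    # TCP/UDP family: RTU-over-TCP keeps RTU framing, otherwise socket framing.
    return ModbusRtuFramer if config_type == "rtuovertcp" else ModbusSocketFramer
```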
@@ -3,7 +3,7 @@ from __future__ import annotations

 from typing import Any

-from motionblinds import MotionDiscovery
+from motionblinds import MotionDiscovery, MotionGateway
 import voluptuous as vol

 from homeassistant import config_entries
@@ -86,6 +86,16 @@ class MotionBlindsFlowHandler(config_entries.ConfigFlow, domain=DOMAIN):
         await self.async_set_unique_id(mac_address)
         self._abort_if_unique_id_configured(updates={CONF_HOST: discovery_info.ip})

+        gateway = MotionGateway(ip=discovery_info.ip, key="abcd1234-56ef-78")
+        try:
+            # key not needed for GetDeviceList request
+            await self.hass.async_add_executor_job(gateway.GetDeviceList)
+        except Exception:  # pylint: disable=broad-except
+            return self.async_abort(reason="not_motionblinds")
+
+        if not gateway.available:
+            return self.async_abort(reason="not_motionblinds")
+
         short_mac = mac_address[-6:].upper()
         self.context["title_placeholders"] = {
             "short_mac": short_mac,
@@ -28,7 +28,8 @@
     "abort": {
       "already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
       "already_in_progress": "[%key:common::config_flow::abort::already_in_progress%]",
-      "connection_error": "[%key:common::config_flow::error::cannot_connect%]"
+      "connection_error": "[%key:common::config_flow::error::cannot_connect%]",
+      "not_motionblinds": "Discovered device is not a Motion gateway"
     }
   },
   "options": {
@@ -706,7 +706,7 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
             for component in PLATFORMS
         )
     )
-    await hass.async_block_till_done()
+    await asyncio.sleep(0)
     # Unsubscribe reload dispatchers
     while reload_dispatchers := mqtt_data.reload_dispatchers:
         reload_dispatchers.pop()()
@@ -495,8 +495,12 @@ class MqttLight(MqttEntity, LightEntity, RestoreEntity):
             self._attr_color_mode = color_mode
             if self._topic[CONF_BRIGHTNESS_STATE_TOPIC] is None:
                 rgb = convert_color(*color)
-                percent_bright = float(color_util.color_RGB_to_hsv(*rgb)[2]) / 100.0
-                self._attr_brightness = min(round(percent_bright * 255), 255)
+                brightness = max(rgb)
+                self._attr_brightness = brightness
+                # Normalize the color to 100% brightness
+                color = tuple(
+                    min(round(channel / brightness * 255), 255) for channel in color
+                )
             return color

         @callback
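Instead of deriving brightness through an RGB-to-HSV round trip, the new code takes the brightest channel as the brightness and rescales the color to full brightness, which keeps the hue while avoiding rounding drift. Worked through on a sample payload:

```python
def split_brightness(color: tuple[int, ...]) -> tuple[tuple[int, ...], int]:
    """Return (color at 100% brightness, extracted brightness).

    Assumes at least one non-zero channel, as the diff above does.
    """
    brightness = max(color)
    normalized = tuple(
        min(round(channel / brightness * 255), 255) for channel in color
    )
    return normalized, brightness

print(split_brightness((128, 64, 0)))  # ((255, 128, 0), 128)
```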
@@ -281,7 +281,7 @@ class MqttSensor(MqttEntity, RestoreSensor):
         else:
             self._attr_native_value = new_value
             return
-        if self.device_class is None:
+        if self.device_class in {None, SensorDeviceClass.ENUM}:
             self._attr_native_value = new_value
             return
         if (payload_datetime := dt_util.parse_datetime(new_value)) is None:
@@ -8,11 +8,11 @@ from datetime import timedelta
 from functools import cached_property
 from typing import Any, Generic, TypeVar

-from nibe.coil import Coil
+from nibe.coil import Coil, CoilData
 from nibe.connection import Connection
 from nibe.connection.modbus import Modbus
 from nibe.connection.nibegw import NibeGW, ProductInfo
-from nibe.exceptions import CoilNotFoundException, CoilReadException
+from nibe.exceptions import CoilNotFoundException, ReadException
 from nibe.heatpump import HeatPump, Model, Series

 from homeassistant.config_entries import ConfigEntry
@@ -62,13 +62,13 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
     """Set up Nibe Heat Pump from a config entry."""

     heatpump = HeatPump(Model[entry.data[CONF_MODEL]])
-    heatpump.word_swap = entry.data.get(CONF_WORD_SWAP, True)
     await heatpump.initialize()

     connection: Connection
     connection_type = entry.data[CONF_CONNECTION_TYPE]

     if connection_type == CONF_CONNECTION_TYPE_NIBEGW:
+        heatpump.word_swap = entry.data[CONF_WORD_SWAP]
         connection = NibeGW(
             heatpump,
             entry.data[CONF_IP_ADDRESS],
@@ -182,7 +182,7 @@ class ContextCoordinator(
         return release_update


-class Coordinator(ContextCoordinator[dict[int, Coil], int]):
+class Coordinator(ContextCoordinator[dict[int, CoilData], int]):
     """Update coordinator for nibe heat pumps."""

     config_entry: ConfigEntry
@@ -199,17 +199,18 @@ class Coordinator(ContextCoordinator[dict[int, Coil], int]):
         )

         self.data = {}
-        self.seed: dict[int, Coil] = {}
+        self.seed: dict[int, CoilData] = {}
         self.connection = connection
         self.heatpump = heatpump
         self.task: asyncio.Task | None = None

         heatpump.subscribe(heatpump.COIL_UPDATE_EVENT, self._on_coil_update)

-    def _on_coil_update(self, coil: Coil):
+    def _on_coil_update(self, data: CoilData):
         """Handle callback on coil updates."""
-        self.data[coil.address] = coil
-        self.seed[coil.address] = coil
+        coil = data.coil
+        self.data[coil.address] = data
+        self.seed[coil.address] = data
         self.async_update_context_listeners([coil.address])

     @property
@@ -246,26 +247,26 @@ class Coordinator(ContextCoordinator[dict[int, Coil], int]):

     async def async_write_coil(self, coil: Coil, value: int | float | str) -> None:
         """Write coil and update state."""
-        coil.value = value
-        coil = await self.connection.write_coil(coil)
+        data = CoilData(coil, value)
+        await self.connection.write_coil(data)

-        self.data[coil.address] = coil
+        self.data[coil.address] = data

         self.async_update_context_listeners([coil.address])

-    async def async_read_coil(self, coil: Coil) -> Coil:
+    async def async_read_coil(self, coil: Coil) -> CoilData:
         """Read coil and update state using callbacks."""
         return await self.connection.read_coil(coil)

-    async def _async_update_data(self) -> dict[int, Coil]:
+    async def _async_update_data(self) -> dict[int, CoilData]:
         self.task = asyncio.current_task()
         try:
             return await self._async_update_data_internal()
         finally:
             self.task = None

-    async def _async_update_data_internal(self) -> dict[int, Coil]:
-        result: dict[int, Coil] = {}
+    async def _async_update_data_internal(self) -> dict[int, CoilData]:
+        result: dict[int, CoilData] = {}

         def _get_coils() -> Iterable[Coil]:
             for address in sorted(self.context_callbacks.keys()):
@@ -282,10 +283,10 @@ class Coordinator(ContextCoordinator[dict[int, Coil], int]):
                 yield coil

         try:
-            async for coil in self.connection.read_coils(_get_coils()):
-                result[coil.address] = coil
-                self.seed.pop(coil.address, None)
-        except CoilReadException as exception:
+            async for data in self.connection.read_coils(_get_coils()):
+                result[data.coil.address] = data
+                self.seed.pop(data.coil.address, None)
+        except ReadException as exception:
             if not result:
                 raise UpdateFailed(f"Failed to update: {exception}") from exception
             self.logger.debug(
@@ -329,7 +330,7 @@ class CoilEntity(CoordinatorEntity[Coordinator]):
             self.coordinator.data or {}
         )

-    def _async_read_coil(self, coil: Coil):
+    def _async_read_coil(self, data: CoilData):
         """Update state of entity based on coil data."""

     async def _async_write_coil(self, value: int | float | str):
@@ -337,10 +338,9 @@ class CoilEntity(CoordinatorEntity[Coordinator]):
         await self.coordinator.async_write_coil(self._coil, value)

     def _handle_coordinator_update(self) -> None:
-        coil = self.coordinator.data.get(self._coil.address)
-        if coil is None:
+        data = self.coordinator.data.get(self._coil.address)
+        if data is None:
             return

-        self._coil = coil
-        self._async_read_coil(coil)
+        self._async_read_coil(data)
         self.async_write_ha_state()
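nibe 2.x separates a coil's definition from its value: callbacks and reads now deliver a `CoilData` pair instead of a mutated `Coil`, which is why every `_async_read_coil` override changes signature in the platform diffs below. A simplified stand-in for the new shape (the real classes live in `nibe.coil`):

```python
from dataclasses import dataclass

@dataclass
class Coil:        # simplified stand-in for nibe.coil.Coil
    address: int
    name: str

@dataclass
class CoilData:    # simplified stand-in for nibe.coil.CoilData
    coil: Coil
    value: int | float | str | None

data = CoilData(Coil(43086, "prio"), "HEAT")
print(data.coil.address, data.value)  # 43086 HEAT
```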
@@ -1,7 +1,7 @@
 """The Nibe Heat Pump binary sensors."""
 from __future__ import annotations

-from nibe.coil import Coil
+from nibe.coil import Coil, CoilData

 from homeassistant.components.binary_sensor import ENTITY_ID_FORMAT, BinarySensorEntity
 from homeassistant.config_entries import ConfigEntry
@@ -37,5 +37,5 @@ class BinarySensor(CoilEntity, BinarySensorEntity):
         """Initialize entity."""
         super().__init__(coordinator, coil, ENTITY_ID_FORMAT)

-    def _async_read_coil(self, coil: Coil) -> None:
-        self._attr_is_on = coil.value == "ON"
+    def _async_read_coil(self, data: CoilData) -> None:
+        self._attr_is_on = data.value == "ON"
@@ -31,6 +31,7 @@ from . import Coordinator
 from .const import (
     DOMAIN,
     LOGGER,
+    VALUES_COOL_WITH_ROOM_SENSOR_OFF,
     VALUES_MIXING_VALVE_CLOSED_STATE,
     VALUES_PRIORITY_COOLING,
     VALUES_PRIORITY_HEATING,
@@ -139,10 +140,13 @@ class NibeClimateEntity(CoordinatorEntity[Coordinator], ClimateEntity):

         mode = HVACMode.OFF
         if _get_value(self._coil_use_room_sensor) == "ON":
-            if _get_value(self._coil_cooling_with_room_sensor) == "ON":
-                mode = HVACMode.HEAT_COOL
-            else:
+            if (
+                _get_value(self._coil_cooling_with_room_sensor)
+                in VALUES_COOL_WITH_ROOM_SENSOR_OFF
+            ):
                 mode = HVACMode.HEAT
+            else:
+                mode = HVACMode.HEAT_COOL
         self._attr_hvac_mode = mode

         setpoint_heat = _get_float(self._coil_setpoint_heat)
@@ -8,10 +8,10 @@ from nibe.connection.nibegw import NibeGW
 from nibe.exceptions import (
     AddressInUseException,
     CoilNotFoundException,
-    CoilReadException,
-    CoilReadSendException,
-    CoilWriteException,
     CoilWriteSendException,
+    ReadException,
+    ReadSendException,
+    WriteException,
 )
 from nibe.heatpump import HeatPump, Model
 import voluptuous as vol
@@ -89,6 +89,7 @@ async def validate_nibegw_input(
     """Validate the user input allows us to connect."""

     heatpump = HeatPump(Model[data[CONF_MODEL]])
+    heatpump.word_swap = True
     await heatpump.initialize()

     connection = NibeGW(
@@ -108,13 +109,13 @@ async def validate_nibegw_input(

     try:
         await connection.verify_connectivity()
-    except (CoilReadSendException, CoilWriteSendException) as exception:
+    except (ReadSendException, CoilWriteSendException) as exception:
         raise FieldError(str(exception), CONF_IP_ADDRESS, "address") from exception
     except CoilNotFoundException as exception:
         raise FieldError("Coils not found", "base", "model") from exception
-    except CoilReadException as exception:
+    except ReadException as exception:
         raise FieldError("Timeout on read from pump", "base", "read") from exception
-    except CoilWriteException as exception:
+    except WriteException as exception:
         raise FieldError("Timeout on writing to pump", "base", "write") from exception
     finally:
         await connection.stop()
@@ -147,13 +148,13 @@ async def validate_modbus_input(

     try:
         await connection.verify_connectivity()
-    except (CoilReadSendException, CoilWriteSendException) as exception:
+    except (ReadSendException, CoilWriteSendException) as exception:
         raise FieldError(str(exception), CONF_MODBUS_URL, "address") from exception
     except CoilNotFoundException as exception:
         raise FieldError("Coils not found", "base", "model") from exception
-    except CoilReadException as exception:
+    except ReadException as exception:
         raise FieldError("Timeout on read from pump", "base", "read") from exception
-    except CoilWriteException as exception:
+    except WriteException as exception:
         raise FieldError("Timeout on writing to pump", "base", "write") from exception
     finally:
         await connection.stop()
@@ -17,3 +17,4 @@ CONF_MODBUS_UNIT = "modbus_unit"
 VALUES_MIXING_VALVE_CLOSED_STATE = (30, "CLOSED", "SHUNT CLOSED")
 VALUES_PRIORITY_HEATING = (30, "HEAT")
 VALUES_PRIORITY_COOLING = (60, "COOLING")
+VALUES_COOL_WITH_ROOM_SENSOR_OFF = (0, "OFF")
@@ -5,5 +5,5 @@
   "config_flow": true,
   "documentation": "https://www.home-assistant.io/integrations/nibe_heatpump",
   "iot_class": "local_polling",
-  "requirements": ["nibe==1.6.0"]
+  "requirements": ["nibe==2.1.4"]
 }
@@ -1,7 +1,7 @@
 """The Nibe Heat Pump numbers."""
 from __future__ import annotations

-from nibe.coil import Coil
+from nibe.coil import Coil, CoilData

 from homeassistant.components.number import ENTITY_ID_FORMAT, NumberEntity
 from homeassistant.config_entries import ConfigEntry
@@ -58,13 +58,13 @@ class Number(CoilEntity, NumberEntity):
         self._attr_native_unit_of_measurement = coil.unit
         self._attr_native_value = None

-    def _async_read_coil(self, coil: Coil) -> None:
-        if coil.value is None:
+    def _async_read_coil(self, data: CoilData) -> None:
+        if data.value is None:
             self._attr_native_value = None
             return

         try:
-            self._attr_native_value = float(coil.value)
+            self._attr_native_value = float(data.value)
         except ValueError:
             self._attr_native_value = None
@@ -1,7 +1,7 @@
 """The Nibe Heat Pump select."""
 from __future__ import annotations

-from nibe.coil import Coil
+from nibe.coil import Coil, CoilData

 from homeassistant.components.select import ENTITY_ID_FORMAT, SelectEntity
 from homeassistant.config_entries import ConfigEntry
@@ -40,12 +40,12 @@ class Select(CoilEntity, SelectEntity):
         self._attr_options = list(coil.mappings.values())
         self._attr_current_option = None

-    def _async_read_coil(self, coil: Coil) -> None:
-        if not isinstance(coil.value, str):
+    def _async_read_coil(self, data: CoilData) -> None:
+        if not isinstance(data.value, str):
             self._attr_current_option = None
             return

-        self._attr_current_option = coil.value
+        self._attr_current_option = data.value

     async def async_select_option(self, option: str) -> None:
         """Support writing value."""
@@ -1,7 +1,7 @@
 """The Nibe Heat Pump sensors."""
 from __future__ import annotations

-from nibe.coil import Coil
+from nibe.coil import Coil, CoilData

 from homeassistant.components.sensor import (
     ENTITY_ID_FORMAT,
@@ -146,5 +146,5 @@ class Sensor(CoilEntity, SensorEntity):
         self._attr_native_unit_of_measurement = coil.unit
         self._attr_entity_category = EntityCategory.DIAGNOSTIC

-    def _async_read_coil(self, coil: Coil):
-        self._attr_native_value = coil.value
+    def _async_read_coil(self, data: CoilData):
+        self._attr_native_value = data.value
@@ -3,7 +3,7 @@ from __future__ import annotations

 from typing import Any

-from nibe.coil import Coil
+from nibe.coil import Coil, CoilData

 from homeassistant.components.switch import ENTITY_ID_FORMAT, SwitchEntity
 from homeassistant.config_entries import ConfigEntry
@@ -40,8 +40,8 @@ class Switch(CoilEntity, SwitchEntity):
         super().__init__(coordinator, coil, ENTITY_ID_FORMAT)
         self._attr_is_on = None

-    def _async_read_coil(self, coil: Coil) -> None:
-        self._attr_is_on = coil.value == "ON"
+    def _async_read_coil(self, data: CoilData) -> None:
+        self._attr_is_on = data.value == "ON"

     async def async_turn_on(self, **kwargs: Any) -> None:
         """Turn the entity on."""
@@ -12,5 +12,5 @@
   "documentation": "https://www.home-assistant.io/integrations/nuheat",
   "iot_class": "cloud_polling",
   "loggers": ["nuheat"],
-  "requirements": ["nuheat==1.0.0"]
+  "requirements": ["nuheat==1.0.1"]
 }
@@ -11,5 +11,6 @@
   "dependencies": ["bluetooth_adapters"],
   "documentation": "https://www.home-assistant.io/integrations/oralb",
   "iot_class": "local_push",
-  "requirements": ["oralb-ble==0.17.5"]
+  "loggers": ["oralb-ble"],
+  "requirements": ["oralb-ble==0.17.6"]
 }
@@ -9,11 +9,14 @@ from typing import Any, Concatenate, ParamSpec, TypeVar

 import aiohttp
 import python_otbr_api
+from python_otbr_api import tlv_parser
+from python_otbr_api.pskc import compute_pskc

 from homeassistant.components.thread import async_add_dataset
 from homeassistant.config_entries import ConfigEntry
 from homeassistant.core import HomeAssistant
 from homeassistant.exceptions import ConfigEntryNotReady, HomeAssistantError
+from homeassistant.helpers import issue_registry as ir
 from homeassistant.helpers.aiohttp_client import async_get_clientsession
 from homeassistant.helpers.typing import ConfigType

@@ -23,6 +26,18 @@ from .const import DOMAIN
 _R = TypeVar("_R")
 _P = ParamSpec("_P")

+INSECURE_NETWORK_KEYS = (
+    # Thread web UI default
+    bytes.fromhex("00112233445566778899AABBCCDDEEFF"),
+)
+
+INSECURE_PASSPHRASES = (
+    # Thread web UI default
+    "j01Nme",
+    # Thread documentation default
+    "J01NME",
+)
+

 def _handle_otbr_error(
     func: Callable[Concatenate[OTBRData, _P], Coroutine[Any, Any, _R]]
@@ -46,11 +61,23 @@ class OTBRData:
     url: str
     api: python_otbr_api.OTBR

+    @_handle_otbr_error
+    async def set_enabled(self, enabled: bool) -> None:
+        """Enable or disable the router."""
+        return await self.api.set_enabled(enabled)
+
     @_handle_otbr_error
     async def get_active_dataset_tlvs(self) -> bytes | None:
         """Get current active operational dataset in TLVS format, or None."""
         return await self.api.get_active_dataset_tlvs()

+    @_handle_otbr_error
+    async def create_active_dataset(
+        self, dataset: python_otbr_api.OperationalDataSet
+    ) -> None:
+        """Create an active operational dataset."""
+        return await self.api.create_active_dataset(dataset)
+

 async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
     """Set up the Open Thread Border Router component."""
@@ -58,21 +85,65 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
     return True


+def _warn_on_default_network_settings(
+    hass: HomeAssistant, entry: ConfigEntry, dataset_tlvs: bytes
+) -> None:
+    """Warn user if insecure default network settings are used."""
+    dataset = tlv_parser.parse_tlv(dataset_tlvs.hex())
+    insecure = False
+
+    if (
+        network_key := dataset.get(tlv_parser.MeshcopTLVType.NETWORKKEY)
+    ) is not None and bytes.fromhex(network_key) in INSECURE_NETWORK_KEYS:
+        insecure = True
+    if (
+        not insecure
+        and tlv_parser.MeshcopTLVType.EXTPANID in dataset
+        and tlv_parser.MeshcopTLVType.NETWORKNAME in dataset
+        and tlv_parser.MeshcopTLVType.PSKC in dataset
+    ):
+        ext_pan_id = dataset[tlv_parser.MeshcopTLVType.EXTPANID]
+        network_name = dataset[tlv_parser.MeshcopTLVType.NETWORKNAME]
+        pskc = bytes.fromhex(dataset[tlv_parser.MeshcopTLVType.PSKC])
+        for passphrase in INSECURE_PASSPHRASES:
+            if pskc == compute_pskc(ext_pan_id, network_name, passphrase):
+                insecure = True
+                break
+
+    if insecure:
+        ir.async_create_issue(
+            hass,
+            DOMAIN,
+            f"insecure_thread_network_{entry.entry_id}",
+            is_fixable=False,
+            is_persistent=False,
+            severity=ir.IssueSeverity.WARNING,
+            translation_key="insecure_thread_network",
+        )
+    else:
+        ir.async_delete_issue(
+            hass,
+            DOMAIN,
+            f"insecure_thread_network_{entry.entry_id}",
+        )
+
+
 async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
     """Set up an Open Thread Border Router config entry."""
     api = python_otbr_api.OTBR(entry.data["url"], async_get_clientsession(hass), 10)

     otbrdata = OTBRData(entry.data["url"], api)
     try:
-        dataset = await otbrdata.get_active_dataset_tlvs()
+        dataset_tlvs = await otbrdata.get_active_dataset_tlvs()
     except (
         HomeAssistantError,
         aiohttp.ClientError,
         asyncio.TimeoutError,
     ) as err:
         raise ConfigEntryNotReady("Unable to connect") from err
-    if dataset:
-        await async_add_dataset(hass, entry.title, dataset.hex())
+    if dataset_tlvs:
+        _warn_on_default_network_settings(hass, entry, dataset_tlvs)
+        await async_add_dataset(hass, entry.title, dataset_tlvs.hex())

     hass.data[DOMAIN] = otbrdata
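The insecure-settings check compares the dataset's network key against the well-known Thread web UI default, and the PSKC against PSKCs derived from known default passphrases. The key comparison in isolation:

```python
INSECURE_NETWORK_KEYS = (
    bytes.fromhex("00112233445566778899AABBCCDDEEFF"),  # Thread web UI default
)

# Hex string as it would come out of tlv_parser.parse_tlv() (hypothetical dataset):
network_key = "00112233445566778899AABBCCDDEEFF"
print(bytes.fromhex(network_key) in INSECURE_NETWORK_KEYS)  # True -> raise an issue
```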
@@ -6,6 +6,7 @@ import logging

 import aiohttp
 import python_otbr_api
+from python_otbr_api import tlv_parser
 import voluptuous as vol

 from homeassistant.components.hassio import HassioServiceInfo
@@ -15,7 +16,7 @@ from homeassistant.const import CONF_URL
 from homeassistant.data_entry_flow import FlowResult
 from homeassistant.helpers.aiohttp_client import async_get_clientsession

-from .const import DOMAIN
+from .const import DEFAULT_CHANNEL, DOMAIN

 _LOGGER = logging.getLogger(__name__)

@@ -29,11 +30,26 @@ class OTBRConfigFlow(ConfigFlow, domain=DOMAIN):
         """Connect to the OTBR and create a dataset if it doesn't have one."""
         api = python_otbr_api.OTBR(url, async_get_clientsession(self.hass), 10)
         if await api.get_active_dataset_tlvs() is None:
-            if dataset := await async_get_preferred_dataset(self.hass):
-                await api.set_active_dataset_tlvs(bytes.fromhex(dataset))
+            # We currently have no way to know which channel zha is using, assume it's
+            # the default
+            zha_channel = DEFAULT_CHANNEL
+            thread_dataset_channel = None
+            thread_dataset_tlv = await async_get_preferred_dataset(self.hass)
+            if thread_dataset_tlv:
+                dataset = tlv_parser.parse_tlv(thread_dataset_tlv)
+                if channel_str := dataset.get(tlv_parser.MeshcopTLVType.CHANNEL):
+                    thread_dataset_channel = int(channel_str, base=16)
+
+            if thread_dataset_tlv is not None and zha_channel == thread_dataset_channel:
+                await api.set_active_dataset_tlvs(bytes.fromhex(thread_dataset_tlv))
             else:
+                _LOGGER.debug(
+                    "not importing TLV with channel %s", thread_dataset_channel
+                )
                 await api.create_active_dataset(
-                    python_otbr_api.OperationalDataSet(network_name="home-assistant")
+                    python_otbr_api.OperationalDataSet(
+                        channel=zha_channel, network_name="home-assistant"
+                    )
                 )
             await api.set_enabled(True)
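The CHANNEL TLV value arrives as a hex string from `tlv_parser.parse_tlv`, hence the `int(channel_str, base=16)` conversion before comparing against the assumed ZHA channel:

```python
DEFAULT_CHANNEL = 15  # from .const

channel_str = "000f"  # hypothetical CHANNEL TLV value for channel 15
thread_dataset_channel = int(channel_str, base=16)
print(thread_dataset_channel == DEFAULT_CHANNEL)  # True -> safe to import the TLV
```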
@@ -1,3 +1,5 @@
 """Constants for the Open Thread Border Router integration."""

 DOMAIN = "otbr"
+
+DEFAULT_CHANNEL = 15
@@ -8,5 +8,5 @@
   "documentation": "https://www.home-assistant.io/integrations/otbr",
   "integration_type": "service",
   "iot_class": "local_polling",
-  "requirements": ["python-otbr-api==1.0.4"]
+  "requirements": ["python-otbr-api==1.0.5"]
 }
@@ -12,7 +12,13 @@
       "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]"
     },
     "abort": {
-      "already_configured": "[%key:common::config_flow::abort::already_configured_service%]"
+      "single_instance_allowed": "[%key:common::config_flow::abort::single_instance_allowed%]"
     }
   },
+  "issues": {
+    "insecure_thread_network": {
+      "title": "Insecure Thread network settings detected",
+      "description": "Your Thread network is using a default network key or pass phrase.\n\nThis is a security risk, please create a new Thread network."
+    }
+  }
 }
@@ -1,6 +1,8 @@
 """Websocket API for OTBR."""
 from typing import TYPE_CHECKING

+import python_otbr_api
+
 from homeassistant.components.websocket_api import (
     ActiveConnection,
     async_register_command,
@@ -10,7 +12,7 @@ from homeassistant.components.websocket_api import (
 from homeassistant.core import HomeAssistant, callback
 from homeassistant.exceptions import HomeAssistantError

-from .const import DOMAIN
+from .const import DEFAULT_CHANNEL, DOMAIN

 if TYPE_CHECKING:
     from . import OTBRData
@@ -20,6 +22,7 @@ if TYPE_CHECKING:
 def async_setup(hass: HomeAssistant) -> None:
     """Set up the OTBR Websocket API."""
     async_register_command(hass, websocket_info)
+    async_register_command(hass, websocket_create_network)


 @websocket_command(
@@ -51,3 +54,48 @@ async def websocket_info(
             "active_dataset_tlvs": dataset.hex() if dataset else None,
         },
     )
+
+
+@websocket_command(
+    {
+        "type": "otbr/create_network",
+    }
+)
+@async_response
+async def websocket_create_network(
+    hass: HomeAssistant, connection: ActiveConnection, msg: dict
+) -> None:
+    """Create a new Thread network."""
+    if DOMAIN not in hass.data:
+        connection.send_error(msg["id"], "not_loaded", "No OTBR API loaded")
+        return
+
+    # We currently have no way to know which channel zha is using, assume it's
+    # the default
+    zha_channel = DEFAULT_CHANNEL
+
+    data: OTBRData = hass.data[DOMAIN]
+
+    try:
+        await data.set_enabled(False)
+    except HomeAssistantError as exc:
+        connection.send_error(msg["id"], "set_enabled_failed", str(exc))
+        return
+
+    try:
+        await data.create_active_dataset(
+            python_otbr_api.OperationalDataSet(
+                channel=zha_channel, network_name="home-assistant"
+            )
+        )
+    except HomeAssistantError as exc:
+        connection.send_error(msg["id"], "create_active_dataset_failed", str(exc))
+        return
+
+    try:
+        await data.set_enabled(True)
+    except HomeAssistantError as exc:
+        connection.send_error(msg["id"], "set_enabled_failed", str(exc))
+        return
+
+    connection.send_result(msg["id"])
@@ -17,6 +17,7 @@ from homeassistant.const import (
     UnitOfPower,
     UnitOfPressure,
     UnitOfTemperature,
+    UnitOfTime,
     UnitOfVolume,
 )
 from homeassistant.core import HomeAssistant
@@ -303,9 +304,9 @@ SENSORS: tuple[SensorEntityDescription, ...] = (
     SensorEntityDescription(
         key="gas_consumed_interval",
         name="Gas consumed interval",
-        native_unit_of_measurement=UnitOfVolume.CUBIC_METERS,
-        device_class=SensorDeviceClass.GAS,
-        state_class=SensorStateClass.TOTAL,
+        icon="mdi:meter-gas",
+        native_unit_of_measurement=f"{UnitOfVolume.CUBIC_METERS}/{UnitOfTime.HOURS}",
+        state_class=SensorStateClass.MEASUREMENT,
     ),
     SensorEntityDescription(
         key="gas_consumed_cumulative",
@@ -7,5 +7,5 @@
   "iot_class": "cloud_polling",
   "loggers": ["aiopvpc"],
   "quality_scale": "platinum",
-  "requirements": ["aiopvpc==4.0.1"]
+  "requirements": ["aiopvpc==4.1.0"]
 }
@@ -11,5 +11,5 @@
   "documentation": "https://www.home-assistant.io/integrations/qnap_qsw",
   "iot_class": "local_polling",
   "loggers": ["aioqsw"],
-  "requirements": ["aioqsw==0.3.1"]
+  "requirements": ["aioqsw==0.3.2"]
 }
@@ -17,7 +17,6 @@ from sqlalchemy.orm.query import Query
 from sqlalchemy.orm.session import Session
 from sqlalchemy.sql.expression import literal
 from sqlalchemy.sql.lambdas import StatementLambdaElement
-from sqlalchemy.sql.selectable import Subquery

 from homeassistant.const import COMPRESSED_STATE_LAST_UPDATED, COMPRESSED_STATE_STATE
 from homeassistant.core import HomeAssistant, State, split_entity_id
@@ -283,9 +282,11 @@ def _significant_states_stmt(
                 (States.last_changed_ts == States.last_updated_ts)
                 | States.last_changed_ts.is_(None)
             )
-        stmt += lambda q: q.filter(
-            (States.last_changed == States.last_updated) | States.last_changed.is_(None)
-        )
+        else:
+            stmt += lambda q: q.filter(
+                (States.last_changed == States.last_updated)
+                | States.last_changed.is_(None)
+            )
     elif significant_changes_only:
         if schema_version >= 31:
             stmt += lambda q: q.filter(
@@ -592,48 +593,6 @@ def get_last_state_changes(
     )


-def _generate_most_recent_states_for_entities_by_date(
-    schema_version: int,
-    run_start: datetime,
-    utc_point_in_time: datetime,
-    entity_ids: list[str],
-) -> Subquery:
-    """Generate the sub query for the most recent states for specific entities by date."""
-    if schema_version >= 31:
-        run_start_ts = process_timestamp(run_start).timestamp()
-        utc_point_in_time_ts = dt_util.utc_to_timestamp(utc_point_in_time)
-        return (
-            select(
-                States.entity_id.label("max_entity_id"),
-                # https://github.com/sqlalchemy/sqlalchemy/issues/9189
-                # pylint: disable-next=not-callable
-                func.max(States.last_updated_ts).label("max_last_updated"),
-            )
-            .filter(
-                (States.last_updated_ts >= run_start_ts)
-                & (States.last_updated_ts < utc_point_in_time_ts)
-            )
-            .filter(States.entity_id.in_(entity_ids))
-            .group_by(States.entity_id)
-            .subquery()
-        )
-    return (
-        select(
-            States.entity_id.label("max_entity_id"),
-            # https://github.com/sqlalchemy/sqlalchemy/issues/9189
-            # pylint: disable-next=not-callable
-            func.max(States.last_updated).label("max_last_updated"),
-        )
-        .filter(
-            (States.last_updated >= run_start)
-            & (States.last_updated < utc_point_in_time)
-        )
-        .filter(States.entity_id.in_(entity_ids))
-        .group_by(States.entity_id)
-        .subquery()
-    )
-
-
 def _get_states_for_entities_stmt(
     schema_version: int,
     run_start: datetime,
@@ -645,16 +604,29 @@ def _get_states_for_entities_stmt(
     stmt, join_attributes = lambda_stmt_and_join_attributes(
         schema_version, no_attributes, include_last_changed=True
     )
-    most_recent_states_for_entities_by_date = (
-        _generate_most_recent_states_for_entities_by_date(
-            schema_version, run_start, utc_point_in_time, entity_ids
-        )
-    )
     # We got an include-list of entities, accelerate the query by filtering already
     # in the inner query.
     if schema_version >= 31:
+        run_start_ts = process_timestamp(run_start).timestamp()
+        utc_point_in_time_ts = dt_util.utc_to_timestamp(utc_point_in_time)
         stmt += lambda q: q.join(
-            most_recent_states_for_entities_by_date,
+            (
+                most_recent_states_for_entities_by_date := (
+                    select(
+                        States.entity_id.label("max_entity_id"),
+                        # https://github.com/sqlalchemy/sqlalchemy/issues/9189
+                        # pylint: disable-next=not-callable
+                        func.max(States.last_updated_ts).label("max_last_updated"),
+                    )
+                    .filter(
+                        (States.last_updated_ts >= run_start_ts)
+                        & (States.last_updated_ts < utc_point_in_time_ts)
+                    )
+                    .filter(States.entity_id.in_(entity_ids))
+                    .group_by(States.entity_id)
+                    .subquery()
+                )
+            ),
             and_(
                 States.entity_id
                 == most_recent_states_for_entities_by_date.c.max_entity_id,
@@ -664,7 +636,21 @@ def _get_states_for_entities_stmt(
         )
     else:
         stmt += lambda q: q.join(
-            most_recent_states_for_entities_by_date,
+            (
+                most_recent_states_for_entities_by_date := select(
+                    States.entity_id.label("max_entity_id"),
+                    # https://github.com/sqlalchemy/sqlalchemy/issues/9189
+                    # pylint: disable-next=not-callable
+                    func.max(States.last_updated).label("max_last_updated"),
+                )
+                .filter(
+                    (States.last_updated >= run_start)
+                    & (States.last_updated < utc_point_in_time)
+                )
+                .filter(States.entity_id.in_(entity_ids))
+                .group_by(States.entity_id)
+                .subquery()
+            ),
             and_(
                 States.entity_id
                 == most_recent_states_for_entities_by_date.c.max_entity_id,
@@ -679,45 +665,6 @@ def _get_states_for_entities_stmt(
     return stmt


-def _generate_most_recent_states_by_date(
-    schema_version: int,
-    run_start: datetime,
-    utc_point_in_time: datetime,
-) -> Subquery:
-    """Generate the sub query for the most recent states by date."""
-    if schema_version >= 31:
-        run_start_ts = process_timestamp(run_start).timestamp()
-        utc_point_in_time_ts = dt_util.utc_to_timestamp(utc_point_in_time)
-        return (
-            select(
-                States.entity_id.label("max_entity_id"),
-                # https://github.com/sqlalchemy/sqlalchemy/issues/9189
-                # pylint: disable-next=not-callable
-                func.max(States.last_updated_ts).label("max_last_updated"),
-            )
-            .filter(
-                (States.last_updated_ts >= run_start_ts)
-                & (States.last_updated_ts < utc_point_in_time_ts)
-            )
-            .group_by(States.entity_id)
-            .subquery()
-        )
-    return (
-        select(
-            States.entity_id.label("max_entity_id"),
-            # https://github.com/sqlalchemy/sqlalchemy/issues/9189
-            # pylint: disable-next=not-callable
-            func.max(States.last_updated).label("max_last_updated"),
-        )
-        .filter(
-            (States.last_updated >= run_start)
-            & (States.last_updated < utc_point_in_time)
-        )
-        .group_by(States.entity_id)
-        .subquery()
-    )
-
-
 def _get_states_for_all_stmt(
     schema_version: int,
     run_start: datetime,
@@ -733,12 +680,26 @@ def _get_states_for_all_stmt(
     # query, then filter out unwanted domains as well as applying the custom filter.
     # This filtering can't be done in the inner query because the domain column is
     # not indexed and we can't control what's in the custom filter.
-    most_recent_states_by_date = _generate_most_recent_states_by_date(
-        schema_version, run_start, utc_point_in_time
-    )
     if schema_version >= 31:
+        run_start_ts = process_timestamp(run_start).timestamp()
+        utc_point_in_time_ts = dt_util.utc_to_timestamp(utc_point_in_time)
        stmt += lambda q: q.join(
-            most_recent_states_by_date,
+            (
+                most_recent_states_by_date := (
+                    select(
+                        States.entity_id.label("max_entity_id"),
+                        # https://github.com/sqlalchemy/sqlalchemy/issues/9189
+                        # pylint: disable-next=not-callable
+                        func.max(States.last_updated_ts).label("max_last_updated"),
+                    )
+                    .filter(
+                        (States.last_updated_ts >= run_start_ts)
+                        & (States.last_updated_ts < utc_point_in_time_ts)
+                    )
+                    .group_by(States.entity_id)
+                    .subquery()
+                )
+            ),
             and_(
                 States.entity_id == most_recent_states_by_date.c.max_entity_id,
                 States.last_updated_ts == most_recent_states_by_date.c.max_last_updated,
@@ -746,7 +707,22 @@ def _get_states_for_all_stmt(
         )
     else:
         stmt += lambda q: q.join(
-            most_recent_states_by_date,
+            (
+                most_recent_states_by_date := (
+                    select(
+                        States.entity_id.label("max_entity_id"),
+                        # https://github.com/sqlalchemy/sqlalchemy/issues/9189
+                        # pylint: disable-next=not-callable
+                        func.max(States.last_updated).label("max_last_updated"),
+                    )
+                    .filter(
+                        (States.last_updated >= run_start)
+                        & (States.last_updated < utc_point_in_time)
+                    )
+                    .group_by(States.entity_id)
+                    .subquery()
+                )
+            ),
             and_(
                 States.entity_id == most_recent_states_by_date.c.max_entity_id,
                 States.last_updated == most_recent_states_by_date.c.max_last_updated,
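Both subquery helpers are deleted and their bodies inlined into the cached lambdas with `:=`, so the `StatementLambdaElement` no longer closes over an externally built `Subquery` object (presumably the closure variable defeated SQLAlchemy's lambda-statement caching). The binding pattern in isolation, stripped of SQLAlchemy:

```python
# The walrus expression binds a value inside the lambda body itself, so
# nothing built outside the lambda is captured by its closure.
join = lambda q: (
    (sub := [x * x for x in q]),  # stand-in for select(...).subquery()
    [v + 1 for v in sub],         # reuse the bound name, like the and_() clause
)[1]

print(join([1, 2, 3]))  # [2, 5, 10]
```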
Some files were not shown because too many files have changed in this diff.