forked from home-assistant/core
Compare commits
65 Commits
2023.3.0b5 ... 2023.3.2
3dca4c2f23
3f8f38f2df
0844a0b269
b65180d20a
7f8a9697f0
563bd4a0dd
29b5ef31c1
863f8b727d
83ed8cf689
52cd2f9429
74d3b2374b
f982af2412
0b5ddd9cbf
8d1aa0132e
d737b97c91
0fac12866d
e3fe71f76e
eba1bfad51
1a0a385e03
c9999cd08c
8252aeead2
c27a69ef85
d4c28a1f4a
322eb4bd83
f0f12fd14a
1836e35717
4eb55146be
b1ee6e304e
d0b195516b
a867f1d3c8
f7eaeb7a39
3e961d3e17
c28e16fa8b
e2e8d74aa6
8a9fbd650a
243725efe3
8d59489da8
c146413a1a
a46d63a11b
db4f6fb94d
c50c920589
fe22aa0b4b
a0162e4986
62c5cf51f5
89aebba3ab
6c73b9024b
59a9ace171
e751948bc8
702646427d
8a605b1377
8eb8415d3f
9f3f71d0c3
b82da9418d
38cf725075
04cedab8d4
2238a3f201
f58ca17926
d5e517b874
f9eeb4f4d8
86d5e4aaa8
a56935ed7c
fc56c958c3
a8e1dc8962
32b138b6c6
2112c66804
@@ -1100,6 +1100,7 @@ build.json @home-assistant/supervisor
 /homeassistant/components/smhi/ @gjohansson-ST
 /tests/components/smhi/ @gjohansson-ST
 /homeassistant/components/sms/ @ocalvo
+/homeassistant/components/snapcast/ @luar123
 /homeassistant/components/snooz/ @AustinBrunkhorst
 /tests/components/snooz/ @AustinBrunkhorst
 /homeassistant/components/solaredge/ @frenck
homeassistant/brands/heltun.json (new file, 5 lines)
@@ -0,0 +1,5 @@
+{
+  "domain": "heltun",
+  "name": "HELTUN",
+  "iot_standards": ["zwave"]
+}
@@ -68,7 +68,6 @@ SENSOR_TYPES: list[AirQEntityDescription] = [
     AirQEntityDescription(
         key="co",
        name="CO",
-        device_class=SensorDeviceClass.CO,
         native_unit_of_measurement=CONCENTRATION_MILLIGRAMS_PER_CUBIC_METER,
         state_class=SensorStateClass.MEASUREMENT,
         value=lambda data: data.get("co"),
@@ -289,7 +288,6 @@ SENSOR_TYPES: list[AirQEntityDescription] = [
     AirQEntityDescription(
         key="tvoc",
         name="VOC",
-        device_class=SensorDeviceClass.VOLATILE_ORGANIC_COMPOUNDS,
         native_unit_of_measurement=CONCENTRATION_PARTS_PER_BILLION,
         state_class=SensorStateClass.MEASUREMENT,
         value=lambda data: data.get("tvoc"),
@@ -297,7 +295,6 @@ SENSOR_TYPES: list[AirQEntityDescription] = [
     AirQEntityDescription(
         key="tvoc_ionsc",
         name="VOC (Industrial)",
-        device_class=SensorDeviceClass.VOLATILE_ORGANIC_COMPOUNDS,
         native_unit_of_measurement=CONCENTRATION_PARTS_PER_BILLION,
         state_class=SensorStateClass.MEASUREMENT,
         value=lambda data: data.get("tvoc_ionsc"),
@@ -1,5 +1,6 @@
 """Rest API for Home Assistant."""
 import asyncio
+from functools import lru_cache
 from http import HTTPStatus
 import logging

@@ -350,6 +351,12 @@ class APIComponentsView(HomeAssistantView):
         return self.json(request.app["hass"].config.components)


+@lru_cache
+def _cached_template(template_str: str, hass: ha.HomeAssistant) -> template.Template:
+    """Return a cached template."""
+    return template.Template(template_str, hass)
+
+
 class APITemplateView(HomeAssistantView):
     """View to handle Template requests."""

@@ -362,7 +369,7 @@ class APITemplateView(HomeAssistantView):
             raise Unauthorized()
         try:
             data = await request.json()
-            tpl = template.Template(data["template"], request.app["hass"])
+            tpl = _cached_template(data["template"], request.app["hass"])
             return tpl.async_render(variables=data.get("variables"), parse_result=False)
         except (ValueError, TemplateError) as ex:
             return self.json_message(
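The change above memoizes template parsing with `functools.lru_cache`, so repeated `/api/template` calls with an identical template string reuse one parsed `Template` object. The same pattern in isolation (the `Template` class here is a trivial stand-in, not Home Assistant's):

```python
from functools import lru_cache


class Template:
    """Stand-in for an expensively parsed template object."""

    def __init__(self, source: str) -> None:
        # Pretend this parse step is costly.
        self.parts = source.split("{}")

    def render(self, value: str) -> str:
        return value.join(self.parts)


@lru_cache
def cached_template(source: str) -> Template:
    """Parse each distinct template string at most once."""
    return Template(source)


# Repeated calls with the same string return the very same object:
assert cached_template("Hello {}!") is cached_template("Hello {}!")
print(cached_template("Hello {}!").render("world"))  # Hello world!
```

Note that `lru_cache` keys on its arguments, so they must be hashable — which is why the real helper takes the hashable `hass` object rather than, say, a dict of variables.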
@@ -6,5 +6,5 @@
   "documentation": "https://www.home-assistant.io/integrations/aurora",
   "iot_class": "cloud_polling",
   "loggers": ["auroranoaa"],
-  "requirements": ["auroranoaa==0.0.2"]
+  "requirements": ["auroranoaa==0.0.3"]
 }
@@ -227,20 +227,21 @@ class BaseHaRemoteScanner(BaseHaScanner):
             self.hass, self._async_expire_devices, timedelta(seconds=30)
         )
         cancel_stop = self.hass.bus.async_listen(
-            EVENT_HOMEASSISTANT_STOP, self._save_history
+            EVENT_HOMEASSISTANT_STOP, self._async_save_history
         )
         self._async_setup_scanner_watchdog()

         @hass_callback
         def _cancel() -> None:
-            self._save_history()
+            self._async_save_history()
             self._async_stop_scanner_watchdog()
             cancel_track()
             cancel_stop()

         return _cancel

-    def _save_history(self, event: Event | None = None) -> None:
+    @hass_callback
+    def _async_save_history(self, event: Event | None = None) -> None:
         """Save the history."""
         self._storage.async_set_advertisement_history(
             self.source,
@@ -252,6 +253,7 @@ class BaseHaRemoteScanner(BaseHaScanner):
             ),
         )

+    @hass_callback
     def _async_expire_devices(self, _datetime: datetime.datetime) -> None:
         """Expire old devices."""
         now = MONOTONIC_TIME()
@@ -257,9 +257,9 @@ class DefaultAgent(AbstractConversationAgent):
         # This is available in the response template as "state".
         state1: core.State | None = None
         if intent_response.matched_states:
-            state1 = intent_response.matched_states[0]
+            state1 = matched[0]
         elif intent_response.unmatched_states:
-            state1 = intent_response.unmatched_states[0]
+            state1 = unmatched[0]

         # Render response template
         speech = response_template.async_render(
@@ -7,5 +7,5 @@
   "integration_type": "system",
   "iot_class": "local_push",
   "quality_scale": "internal",
-  "requirements": ["hassil==1.0.5", "home-assistant-intents==2023.2.22"]
+  "requirements": ["hassil==1.0.6", "home-assistant-intents==2023.2.28"]
 }
@@ -8,11 +8,7 @@ from typing import TYPE_CHECKING

 import voluptuous as vol

-from homeassistant.components.sensor import (
-    PLATFORM_SCHEMA,
-    SensorEntity,
-    SensorStateClass,
-)
+from homeassistant.components.sensor import PLATFORM_SCHEMA, SensorEntity
 from homeassistant.config_entries import ConfigEntry
 from homeassistant.const import (
     ATTR_UNIT_OF_MEASUREMENT,
@@ -135,7 +131,6 @@ class DerivativeSensor(RestoreEntity, SensorEntity):

     _attr_icon = ICON
     _attr_should_poll = False
-    _attr_state_class = SensorStateClass.MEASUREMENT

     def __init__(
         self,
@@ -19,7 +19,7 @@ from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, Upda
 from .const import CONF_ASSOCIATION_DATA, DOMAIN, UPDATE_SECONDS
 from .models import DormakabaDkeyData

-PLATFORMS: list[Platform] = [Platform.LOCK, Platform.SENSOR]
+PLATFORMS: list[Platform] = [Platform.BINARY_SENSOR, Platform.LOCK, Platform.SENSOR]

 _LOGGER = logging.getLogger(__name__)

@@ -132,7 +132,8 @@ class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):

         try:
             association_data = await lock.associate(user_input["activation_code"])
-        except BleakError:
+        except BleakError as err:
+            _LOGGER.warning("BleakError", exc_info=err)
             return self.async_abort(reason="cannot_connect")
         except dkey_errors.InvalidActivationCode:
             errors["base"] = "invalid_code"
@@ -11,5 +11,5 @@
   "documentation": "https://www.home-assistant.io/integrations/dormakaba_dkey",
   "integration_type": "device",
   "iot_class": "local_polling",
-  "requirements": ["py-dormakaba-dkey==1.0.2"]
+  "requirements": ["py-dormakaba-dkey==1.0.4"]
 }
@@ -2,6 +2,7 @@
 from __future__ import annotations

 from datetime import datetime, timedelta
+from random import randint

 from enturclient import EnturPublicTransportData
 import voluptuous as vol
@@ -22,7 +23,7 @@ from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
 from homeassistant.util import Throttle
 import homeassistant.util.dt as dt_util

-API_CLIENT_NAME = "homeassistant-homeassistant"
+API_CLIENT_NAME = "homeassistant-{}"

 CONF_STOP_IDS = "stop_ids"
 CONF_EXPAND_PLATFORMS = "expand_platforms"
@@ -105,7 +106,7 @@ async def async_setup_platform(
     quays = [s for s in stop_ids if "Quay" in s]

     data = EnturPublicTransportData(
-        API_CLIENT_NAME,
+        API_CLIENT_NAME.format(str(randint(100000, 999999))),
         stops=stops,
         quays=quays,
         line_whitelist=line_whitelist,
@@ -14,6 +14,6 @@
   "integration_type": "device",
   "iot_class": "local_push",
   "loggers": ["aioesphomeapi", "noiseprotocol"],
-  "requirements": ["aioesphomeapi==13.4.1", "esphome-dashboard-api==1.2.3"],
+  "requirements": ["aioesphomeapi==13.4.2", "esphome-dashboard-api==1.2.3"],
   "zeroconf": ["_esphomelib._tcp.local."]
 }
@@ -7,5 +7,5 @@
   "integration_type": "hub",
   "iot_class": "local_push",
   "loggers": ["pyfibaro"],
-  "requirements": ["pyfibaro==0.6.8"]
+  "requirements": ["pyfibaro==0.6.9"]
 }
@@ -20,5 +20,5 @@
   "documentation": "https://www.home-assistant.io/integrations/frontend",
   "integration_type": "system",
   "quality_scale": "internal",
-  "requirements": ["home-assistant-frontend==20230227.0"]
+  "requirements": ["home-assistant-frontend==20230306.0"]
 }
@@ -41,7 +41,7 @@ async def async_setup_platform(
         [
             GeniusClimateZone(broker, z)
             for z in broker.client.zone_objs
-            if z.data["type"] in GH_ZONES
+            if z.data.get("type") in GH_ZONES
         ]
     )
@@ -79,10 +79,10 @@ class GeniusClimateZone(GeniusHeatingZone, ClimateEntity):
     def hvac_action(self) -> str | None:
         """Return the current running hvac operation if supported."""
         if "_state" in self._zone.data:  # only for v3 API
+            if self._zone.data["output"] == 1:
+                return HVACAction.HEATING
             if not self._zone.data["_state"].get("bIsActive"):
                 return HVACAction.OFF
-            if self._zone.data["_state"].get("bOutRequestHeat"):
-                return HVACAction.HEATING
             return HVACAction.IDLE
         return None

@@ -42,7 +42,7 @@ async def async_setup_platform(
         [
             GeniusSwitch(broker, z)
             for z in broker.client.zone_objs
-            if z.data["type"] == GH_ON_OFF_ZONE
+            if z.data.get("type") == GH_ON_OFF_ZONE
         ]
     )
@@ -48,7 +48,7 @@ async def async_setup_platform(
         [
             GeniusWaterHeater(broker, z)
             for z in broker.client.zone_objs
-            if z.data["type"] in GH_HEATERS
+            if z.data.get("type") in GH_HEATERS
         ]
     )
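All three Genius Hub hunks swap `z.data["type"]` for `z.data.get("type")`, so a zone payload that lacks a `type` key is skipped instead of aborting platform setup with a `KeyError`. A minimal illustration of the difference:

```python
zones = [
    {"type": "radiator", "name": "Lounge"},
    {"name": "Unconfigured zone"},  # no "type" key at all
]
GH_ZONES = ("radiator", "wet underfloor")

# Indexing raises on the second zone, and setup would abort:
try:
    [z for z in zones if z["type"] in GH_ZONES]
except KeyError as err:
    print(f"KeyError on missing key: {err}")

# .get() returns None, which simply fails the membership test:
selected = [z for z in zones if z.get("type") in GH_ZONES]
print([z["name"] for z in selected])  # ['Lounge']
```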
@@ -832,7 +832,7 @@ class TemperatureControlTrait(_Trait):
             "temperatureUnitForUX": _google_temp_unit(
                 self.hass.config.units.temperature_unit
             ),
-            "queryOnlyTemperatureSetting": True,
+            "queryOnlyTemperatureControl": True,
             "temperatureRange": {
                 "minThresholdCelsius": -100,
                 "maxThresholdCelsius": 100,
@@ -36,6 +36,7 @@ X_AUTH_TOKEN = "X-Supervisor-Token"
 X_INGRESS_PATH = "X-Ingress-Path"
 X_HASS_USER_ID = "X-Hass-User-ID"
 X_HASS_IS_ADMIN = "X-Hass-Is-Admin"
+X_HASS_SOURCE = "X-Hass-Source"

 WS_TYPE = "type"
 WS_ID = "id"
@@ -17,7 +17,7 @@ from homeassistant.const import SERVER_PORT
 from homeassistant.core import HomeAssistant
 from homeassistant.loader import bind_hass

-from .const import ATTR_DISCOVERY, DOMAIN
+from .const import ATTR_DISCOVERY, DOMAIN, X_HASS_SOURCE

 _LOGGER = logging.getLogger(__name__)

@@ -445,6 +445,8 @@ class HassIO:
         payload=None,
         timeout=10,
         return_text=False,
+        *,
+        source="core.handler",
     ):
         """Send API command to Hass.io.

@@ -458,7 +460,8 @@ class HassIO:
                 headers={
                     aiohttp.hdrs.AUTHORIZATION: (
                         f"Bearer {os.environ.get('SUPERVISOR_TOKEN', '')}"
-                    )
+                    ),
+                    X_HASS_SOURCE: source,
                 },
                 timeout=aiohttp.ClientTimeout(total=timeout),
             )
@@ -6,6 +6,7 @@ from http import HTTPStatus
 import logging
 import os
 import re
+from urllib.parse import quote, unquote

 import aiohttp
 from aiohttp import web
@@ -19,13 +20,16 @@ from aiohttp.hdrs import (
     TRANSFER_ENCODING,
 )
 from aiohttp.web_exceptions import HTTPBadGateway
 from multidict import istr

-from homeassistant.components.http import KEY_AUTHENTICATED, HomeAssistantView
+from homeassistant.components.http import (
+    KEY_AUTHENTICATED,
+    KEY_HASS_USER,
+    HomeAssistantView,
+)
 from homeassistant.components.onboarding import async_is_onboarded
 from homeassistant.core import HomeAssistant

-from .const import X_HASS_IS_ADMIN, X_HASS_USER_ID
+from .const import X_HASS_SOURCE

 _LOGGER = logging.getLogger(__name__)

@@ -34,23 +38,53 @@ MAX_UPLOAD_SIZE = 1024 * 1024 * 1024

 # pylint: disable=implicit-str-concat
 NO_TIMEOUT = re.compile(
     r"^(?:"
     r"|homeassistant/update"
     r"|hassos/update"
     r"|hassos/update/cli"
     r"|supervisor/update"
     r"|addons/[^/]+/(?:update|install|rebuild)"
     r"|backups/.+/full"
     r"|backups/.+/partial"
     r"|backups/[^/]+/(?:upload|download)"
     r")$"
 )

-NO_AUTH_ONBOARDING = re.compile(r"^(?:" r"|supervisor/logs" r"|backups/[^/]+/.+" r")$")
+# fmt: off
+# Onboarding can upload backups and restore it
+PATHS_NOT_ONBOARDED = re.compile(
+    r"^(?:"
+    r"|backups/[a-f0-9]{8}(/info|/new/upload|/download|/restore/full|/restore/partial)?"
+    r"|backups/new/upload"
+    r")$"
+)

-NO_AUTH = re.compile(r"^(?:" r"|app/.*" r"|[store\/]*addons/[^/]+/(logo|icon)" r")$")
+# Authenticated users manage backups + download logs
+PATHS_ADMIN = re.compile(
+    r"^(?:"
+    r"|backups/[a-f0-9]{8}(/info|/download|/restore/full|/restore/partial)?"
+    r"|backups/new/upload"
+    r"|audio/logs"
+    r"|cli/logs"
+    r"|core/logs"
+    r"|dns/logs"
+    r"|host/logs"
+    r"|multicast/logs"
+    r"|observer/logs"
+    r"|supervisor/logs"
+    r"|addons/[^/]+/logs"
+    r")$"
+)

-NO_STORE = re.compile(r"^(?:" r"|app/entrypoint.js" r")$")
+# Unauthenticated requests come in for Supervisor panel + add-on images
+PATHS_NO_AUTH = re.compile(
+    r"^(?:"
+    r"|app/.*"
+    r"|(store/)?addons/[^/]+/(logo|icon)"
+    r")$"
+)
+
+NO_STORE = re.compile(
+    r"^(?:"
+    r"|app/entrypoint.js"
+    r")$"
+)
 # pylint: enable=implicit-str-concat
+# fmt: on


 class HassIOView(HomeAssistantView):
@@ -65,38 +99,66 @@ class HassIOView(HomeAssistantView):
         self._host = host
         self._websession = websession

-    async def _handle(
-        self, request: web.Request, path: str
-    ) -> web.Response | web.StreamResponse:
-        """Route data to Hass.io."""
-        hass = request.app["hass"]
-        if _need_auth(hass, path) and not request[KEY_AUTHENTICATED]:
-            return web.Response(status=HTTPStatus.UNAUTHORIZED)
-
-        return await self._command_proxy(path, request)
-
-    delete = _handle
-    get = _handle
-    post = _handle
-
-    async def _command_proxy(
-        self, path: str, request: web.Request
-    ) -> web.StreamResponse:
+    async def _handle(self, request: web.Request, path: str) -> web.StreamResponse:
         """Return a client request with proxy origin for Hass.io supervisor.

-        This method is a coroutine.
+        Use cases:
+        - Onboarding allows restoring backups
+        - Load Supervisor panel and add-on logo unauthenticated
+        - User upload/restore backups
         """
-        headers = _init_header(request)
-        if path == "backups/new/upload":
-            # We need to reuse the full content type that includes the boundary
-            headers[
-                CONTENT_TYPE
-            ] = request._stored_content_type  # pylint: disable=protected-access
+        # No bullshit
+        if path != unquote(path):
+            return web.Response(status=HTTPStatus.BAD_REQUEST)
+
+        hass: HomeAssistant = request.app["hass"]
+        is_admin = request[KEY_AUTHENTICATED] and request[KEY_HASS_USER].is_admin
+        authorized = is_admin
+
+        if is_admin:
+            allowed_paths = PATHS_ADMIN
+
+        elif not async_is_onboarded(hass):
+            allowed_paths = PATHS_NOT_ONBOARDED
+
+            # During onboarding we need the user to manage backups
+            authorized = True
+
+        else:
+            # Either unauthenticated or not an admin
+            allowed_paths = PATHS_NO_AUTH
+
+        no_auth_path = PATHS_NO_AUTH.match(path)
+        headers = {
+            X_HASS_SOURCE: "core.http",
+        }
+
+        if no_auth_path:
+            if request.method != "GET":
+                return web.Response(status=HTTPStatus.METHOD_NOT_ALLOWED)
+
+        else:
+            if not allowed_paths.match(path):
+                return web.Response(status=HTTPStatus.UNAUTHORIZED)
+
+            if authorized:
+                headers[
+                    AUTHORIZATION
+                ] = f"Bearer {os.environ.get('SUPERVISOR_TOKEN', '')}"
+
+        if request.method == "POST":
+            headers[CONTENT_TYPE] = request.content_type
+            # _stored_content_type is only computed once `content_type` is accessed
+            if path == "backups/new/upload":
+                # We need to reuse the full content type that includes the boundary
+                headers[
+                    CONTENT_TYPE
+                ] = request._stored_content_type  # pylint: disable=protected-access

         try:
             client = await self._websession.request(
                 method=request.method,
-                url=f"http://{self._host}/{path}",
+                url=f"http://{self._host}/{quote(path)}",
                 params=request.query,
                 data=request.content,
                 headers=headers,
@@ -123,20 +185,8 @@ class HassIOView(HomeAssistantView):

         raise HTTPBadGateway()

-
-def _init_header(request: web.Request) -> dict[istr, str]:
-    """Create initial header."""
-    headers = {
-        AUTHORIZATION: f"Bearer {os.environ.get('SUPERVISOR_TOKEN', '')}",
-        CONTENT_TYPE: request.content_type,
-    }
-
-    # Add user data
-    if request.get("hass_user") is not None:
-        headers[istr(X_HASS_USER_ID)] = request["hass_user"].id
-        headers[istr(X_HASS_IS_ADMIN)] = str(int(request["hass_user"].is_admin))
-
-    return headers
+    get = _handle
+    post = _handle


 def _response_header(response: aiohttp.ClientResponse, path: str) -> dict[str, str]:
@@ -164,12 +214,3 @@ def _get_timeout(path: str) -> ClientTimeout:
     if NO_TIMEOUT.match(path):
         return ClientTimeout(connect=10, total=None)
     return ClientTimeout(connect=10, total=300)
-
-
-def _need_auth(hass: HomeAssistant, path: str) -> bool:
-    """Return if a path need authentication."""
-    if not async_is_onboarded(hass) and NO_AUTH_ONBOARDING.match(path):
-        return False
-    if NO_AUTH.match(path):
-        return False
-    return True
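The rewritten view replaces the old denylist-style `_need_auth` with explicit per-role allowlists, rejects paths that change under percent-decoding, and only forwards the Supervisor token when the caller is actually authorized. A condensed, framework-free sketch of that decision logic (regexes shortened, and the `authorize` helper is hypothetical, not part of the component):

```python
import re
from urllib.parse import unquote

PATHS_ADMIN = re.compile(r"^(?:backups/new/upload|core/logs|addons/[^/]+/logs)$")
PATHS_NO_AUTH = re.compile(r"^(?:app/.*|(store/)?addons/[^/]+/(logo|icon))$")


def authorize(path: str, method: str, is_admin: bool) -> tuple[int, bool]:
    """Return (HTTP status, forward_supervisor_token) for a proxied path."""
    # Reject anything that only looks safe because it is percent-encoded.
    if path != unquote(path):
        return 400, False
    if PATHS_NO_AUTH.match(path):
        # Public assets (panel, add-on logos) are read-only.
        return (200, False) if method == "GET" else (405, False)
    if is_admin and PATHS_ADMIN.match(path):
        # Only now does the privileged token travel upstream.
        return 200, True
    return 401, False


print(authorize("app/entrypoint.js", "GET", False))  # (200, False)
print(authorize("core/logs", "GET", False))          # (401, False)
print(authorize("core/logs", "GET", True))           # (200, True)
```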
@@ -3,20 +3,22 @@ from __future__ import annotations

 import asyncio
 from collections.abc import Iterable
+from functools import lru_cache
 from ipaddress import ip_address
 import logging
-import os
+from urllib.parse import quote

 import aiohttp
 from aiohttp import ClientTimeout, hdrs, web
-from aiohttp.web_exceptions import HTTPBadGateway
+from aiohttp.web_exceptions import HTTPBadGateway, HTTPBadRequest
 from multidict import CIMultiDict
+from yarl import URL

 from homeassistant.components.http import HomeAssistantView
 from homeassistant.core import HomeAssistant, callback
 from homeassistant.helpers.aiohttp_client import async_get_clientsession

-from .const import X_AUTH_TOKEN, X_INGRESS_PATH
+from .const import X_HASS_SOURCE, X_INGRESS_PATH

 _LOGGER = logging.getLogger(__name__)

@@ -42,9 +44,19 @@ class HassIOIngress(HomeAssistantView):
         self._host = host
         self._websession = websession

+    @lru_cache
     def _create_url(self, token: str, path: str) -> str:
         """Create URL to service."""
-        return f"http://{self._host}/ingress/{token}/{path}"
+        base_path = f"/ingress/{token}/"
+        url = f"http://{self._host}{base_path}{quote(path)}"
+
+        try:
+            if not URL(url).path.startswith(base_path):
+                raise HTTPBadRequest()
+        except ValueError as err:
+            raise HTTPBadRequest() from err
+
+        return url

     async def _handle(
         self, request: web.Request, token: str, path: str
@@ -185,10 +197,8 @@ def _init_header(request: web.Request, token: str) -> CIMultiDict | dict[str, st
             continue
         headers[name] = value

-    # Inject token / cleanup later on Supervisor
-    headers[X_AUTH_TOKEN] = os.environ.get("SUPERVISOR_TOKEN", "")
-
     # Ingress information
+    headers[X_HASS_SOURCE] = "core.ingress"
     headers[X_INGRESS_PATH] = f"/api/hassio_ingress/{token}"

     # Set X-Forwarded-For
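`_create_url` now percent-encodes the client-supplied path and verifies that the normalized URL still lives under the expected `/ingress/<token>/` prefix, which blocks traversal payloads such as `../../supervisor/info`. The same check in a standalone sketch (the function name is illustrative; only the stdlib and `yarl` are used, as in the diff):

```python
from urllib.parse import quote

from yarl import URL


def create_ingress_url(host: str, token: str, path: str) -> str:
    """Build an ingress URL, refusing paths that escape the token prefix."""
    base_path = f"/ingress/{token}/"
    url = f"http://{host}{base_path}{quote(path)}"
    # yarl normalizes dot segments, so "../" sequences that climb out of
    # base_path leave a path that no longer starts with the prefix.
    if not URL(url).path.startswith(base_path):
        raise ValueError("path escapes ingress prefix")
    return url


print(create_ingress_url("supervisor", "abc123", "app/index.html"))

try:
    create_ingress_url("supervisor", "abc123", "../../supervisor/info")
except ValueError as err:
    print(f"rejected: {err}")
```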
@@ -116,6 +116,7 @@ async def websocket_supervisor_api(
             method=msg[ATTR_METHOD],
             timeout=msg.get(ATTR_TIMEOUT, 10),
             payload=msg.get(ATTR_DATA, {}),
+            source="core.websocket_api",
         )

         if result.get(ATTR_RESULT) == "error":
@@ -421,6 +421,7 @@ class HoneywellUSThermostat(ClimateEntity):
         """Get the latest state from the service."""
         try:
             await self._device.refresh()
+            self._attr_available = True
         except (
             aiosomecomfort.SomeComfortError,
             OSError,
@@ -428,8 +429,10 @@ class HoneywellUSThermostat(ClimateEntity):
             try:
                 await self._data.client.login()

-            except aiosomecomfort.SomeComfortError:
+            except aiosomecomfort.AuthError:
                 self._attr_available = False
                 await self.hass.async_create_task(
                     self.hass.config_entries.async_reload(self._data.entry_id)
                 )
+            except aiosomecomfort.SomeComfortError:
+                self._attr_available = False
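The Honeywell fix distinguishes `AuthError` (trigger a config-entry reload to re-authenticate) from any other `SomeComfortError` (just mark the entity unavailable). Because `except` clauses are tried top to bottom, the subclass must be listed before its base class or it would never be reached; a minimal illustration with stand-in exception classes:

```python
class SomeComfortError(Exception):
    """Stand-in for the vendor client's base error."""


class AuthError(SomeComfortError):
    """Stand-in for rejected credentials."""


def handle(err: Exception) -> str:
    try:
        raise err
    except AuthError:
        # Must come first: an AuthError also matches SomeComfortError.
        return "re-authenticate"
    except SomeComfortError:
        return "mark unavailable"


print(handle(AuthError()))         # re-authenticate
print(handle(SomeComfortError()))  # mark unavailable
```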
@@ -6,5 +6,5 @@
   "documentation": "https://www.home-assistant.io/integrations/honeywell",
   "iot_class": "cloud_polling",
   "loggers": ["somecomfort"],
-  "requirements": ["aiosomecomfort==0.0.10"]
+  "requirements": ["aiosomecomfort==0.0.11"]
 }
@@ -5,6 +5,7 @@ from collections.abc import Awaitable, Callable
 import logging
 import re
 from typing import Final
+from urllib.parse import unquote

 from aiohttp.web import Application, HTTPBadRequest, Request, StreamResponse, middleware

@@ -39,18 +40,24 @@ FILTERS: Final = re.compile(
 def setup_security_filter(app: Application) -> None:
     """Create security filter middleware for the app."""

+    def _recursive_unquote(value: str) -> str:
+        """Handle values that are encoded multiple times."""
+        if (unquoted := unquote(value)) != value:
+            unquoted = _recursive_unquote(unquoted)
+        return unquoted
+
     @middleware
     async def security_filter_middleware(
         request: Request, handler: Callable[[Request], Awaitable[StreamResponse]]
     ) -> StreamResponse:
-        """Process request and tblock commonly known exploit attempts."""
-        if FILTERS.search(request.path):
+        """Process request and block commonly known exploit attempts."""
+        if FILTERS.search(_recursive_unquote(request.path)):
             _LOGGER.warning(
                 "Filtered a potential harmful request to: %s", request.raw_path
             )
             raise HTTPBadRequest

-        if FILTERS.search(request.query_string):
+        if FILTERS.search(_recursive_unquote(request.query_string)):
             _LOGGER.warning(
                 "Filtered a request with a potential harmful query string: %s",
                 request.raw_path,
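Double (or deeper) percent-encoding is a classic filter-evasion trick: `%252e%252e%252f` decodes to `%2e%2e%2f`, and only a second pass yields `../`. The new `_recursive_unquote` decodes until the value stops changing so the exploit filter sees the real payload; the same idea in isolation (iterative rather than recursive, which is equivalent):

```python
from urllib.parse import unquote


def recursive_unquote(value: str) -> str:
    """Percent-decode until a fixed point is reached."""
    while (unquoted := unquote(value)) != value:
        value = unquoted
    return value


payload = "%252e%252e%252fetc%252fpasswd"  # "../etc/passwd", encoded twice
print(unquote(payload))            # %2e%2e%2fetc%2fpasswd -- still masked
print(recursive_unquote(payload))  # ../etc/passwd -- what the filter must see
```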
@@ -35,6 +35,7 @@ TRIGGER_TYPE = {
     "remote_double_button_long_press": "both {subtype} released after long press",
     "remote_double_button_short_press": "both {subtype} released",
     "initial_press": "{subtype} pressed initially",
+    "long_press": "{subtype} long press",
     "repeat": "{subtype} held down",
     "short_release": "{subtype} released after short press",
     "long_release": "{subtype} released after long press",
@@ -11,6 +11,6 @@
   "iot_class": "local_push",
   "loggers": ["aiohue"],
   "quality_scale": "platinum",
-  "requirements": ["aiohue==4.6.1"],
+  "requirements": ["aiohue==4.6.2"],
   "zeroconf": ["_hue._tcp.local."]
 }
@@ -118,13 +118,14 @@ class HueSceneEntityBase(HueBaseEntity, SceneEntity):
         """Return device (service) info."""
         # we create a virtual service/device for Hue scenes
         # so we have a parent for grouped lights and scenes
+        group_type = self.group.type.value.title()
         return DeviceInfo(
             identifiers={(DOMAIN, self.group.id)},
             entry_type=DeviceEntryType.SERVICE,
             name=self.group.metadata.name,
             manufacturer=self.bridge.api.config.bridge_device.product_data.manufacturer_name,
             model=self.group.type.value.title(),
-            suggested_area=self.group.metadata.name,
+            suggested_area=self.group.metadata.name if group_type == "Room" else None,
             via_device=(DOMAIN, self.bridge.api.config.bridge_device.id),
         )
@@ -46,6 +46,7 @@ DEFAULT_BUTTON_EVENT_TYPES = (
     ButtonEvent.INITIAL_PRESS,
     ButtonEvent.REPEAT,
     ButtonEvent.SHORT_RELEASE,
+    ButtonEvent.LONG_PRESS,
     ButtonEvent.LONG_RELEASE,
 )
@@ -55,7 +55,13 @@ class HueBaseEntity(Entity):
         self._attr_unique_id = resource.id
         # device is precreated in main handler
         # this attaches the entity to the precreated device
-        if self.device is not None:
+        if self.device is None:
+            # attach all device-less entities to the bridge itself
+            # e.g. config based sensors like entertainment area
+            self._attr_device_info = DeviceInfo(
+                identifiers={(DOMAIN, bridge.api.config.bridge.bridge_id)},
+            )
+        else:
             self._attr_device_info = DeviceInfo(
                 identifiers={(DOMAIN, self.device.id)},
             )
@@ -137,17 +143,14 @@ class HueBaseEntity(Entity):
     def _handle_event(self, event_type: EventType, resource: HueResource) -> None:
         """Handle status event for this resource (or it's parent)."""
         if event_type == EventType.RESOURCE_DELETED:
-            # remove any services created for zones/rooms
+            # handle removal of room and zone 'virtual' devices/services
+            # regular devices are removed automatically by the logic in device.py.
             if resource.type in (ResourceTypes.ROOM, ResourceTypes.ZONE):
                 dev_reg = async_get_device_registry(self.hass)
                 if device := dev_reg.async_get_device({(DOMAIN, resource.id)}):
                     dev_reg.async_remove_device(device.id)
-            if resource.type in (
-                ResourceTypes.GROUPED_LIGHT,
-                ResourceTypes.SCENE,
-                ResourceTypes.SMART_SCENE,
-            ):
+            # cleanup entities that are not strictly device-bound and have the bridge as parent
+            if self.device is None:
                 ent_reg = async_get_entity_registry(self.hass)
                 ent_reg.async_remove(self.entity_id)
             return
@@ -153,6 +153,7 @@ async def async_setup_entry(  # noqa: C901
                 system.serial,
                 svc_exception,
             )
+            await system.aqualink.close()
         else:
             cur = system.online
             if cur and not prev:
@@ -3,6 +3,7 @@ from __future__ import annotations

 from collections.abc import Awaitable

+import httpx
 from iaqualink.exception import AqualinkServiceException

 from homeassistant.exceptions import HomeAssistantError
@@ -12,5 +13,5 @@ async def await_or_reraise(awaitable: Awaitable) -> None:
     """Execute API call while catching service exceptions."""
     try:
         await awaitable
-    except AqualinkServiceException as svc_exception:
+    except (AqualinkServiceException, httpx.HTTPError) as svc_exception:
         raise HomeAssistantError(f"Aqualink error: {svc_exception}") from svc_exception
@@ -17,8 +17,8 @@
   "iot_class": "local_push",
   "loggers": ["pyinsteon", "pypubsub"],
   "requirements": [
-    "pyinsteon==1.3.2",
-    "insteon-frontend-home-assistant==0.3.2"
+    "pyinsteon==1.3.4",
+    "insteon-frontend-home-assistant==0.3.3"
   ],
   "usb": [
     {
@@ -1,11 +1,13 @@
 """Utilities used by insteon component."""
 import asyncio
+from collections.abc import Callable
 import logging

 from pyinsteon import devices
 from pyinsteon.address import Address
 from pyinsteon.constants import ALDBStatus, DeviceAction
-from pyinsteon.events import OFF_EVENT, OFF_FAST_EVENT, ON_EVENT, ON_FAST_EVENT
+from pyinsteon.device_types.device_base import Device
+from pyinsteon.events import OFF_EVENT, OFF_FAST_EVENT, ON_EVENT, ON_FAST_EVENT, Event
 from pyinsteon.managers.link_manager import (
     async_enter_linking_mode,
     async_enter_unlinking_mode,
@@ -27,7 +29,7 @@ from homeassistant.const import (
     CONF_PLATFORM,
     ENTITY_MATCH_ALL,
 )
-from homeassistant.core import ServiceCall, callback
+from homeassistant.core import HomeAssistant, ServiceCall, callback
 from homeassistant.helpers import device_registry as dr
 from homeassistant.helpers.dispatcher import (
     async_dispatcher_connect,
@@ -89,49 +91,52 @@ from .schemas import (
 _LOGGER = logging.getLogger(__name__)


-def add_on_off_event_device(hass, device):
+def _register_event(event: Event, listener: Callable) -> None:
+    """Register the events raised by a device."""
+    _LOGGER.debug(
+        "Registering on/off event for %s %d %s",
+        str(event.address),
+        event.group,
+        event.name,
+    )
+    event.subscribe(listener, force_strong_ref=True)
+
+
+def add_on_off_event_device(hass: HomeAssistant, device: Device) -> None:
     """Register an Insteon device as an on/off event device."""

     @callback
-    def async_fire_group_on_off_event(name, address, group, button):
+    def async_fire_group_on_off_event(
+        name: str, address: Address, group: int, button: str
+    ):
         # Firing an event when a button is pressed.
         if button and button[-2] == "_":
             button_id = button[-1].lower()
         else:
             button_id = None

-        schema = {CONF_ADDRESS: address}
+        schema = {CONF_ADDRESS: address, "group": group}
         if button_id:
             schema[EVENT_CONF_BUTTON] = button_id
         if name == ON_EVENT:
             event = EVENT_GROUP_ON
-        if name == OFF_EVENT:
+        elif name == OFF_EVENT:
             event = EVENT_GROUP_OFF
-        if name == ON_FAST_EVENT:
+        elif name == ON_FAST_EVENT:
             event = EVENT_GROUP_ON_FAST
-        if name == OFF_FAST_EVENT:
+        elif name == OFF_FAST_EVENT:
             event = EVENT_GROUP_OFF_FAST
+        else:
+            event = f"insteon.{name}"
         _LOGGER.debug("Firing event %s with %s", event, schema)
         hass.bus.async_fire(event, schema)

-    for group in device.events:
-        if isinstance(group, int):
-            for event in device.events[group]:
-                if event in [
-                    OFF_EVENT,
-                    ON_EVENT,
-                    OFF_FAST_EVENT,
-                    ON_FAST_EVENT,
-                ]:
-                    _LOGGER.debug(
-                        "Registering on/off event for %s %d %s",
-                        str(device.address),
-                        group,
-                        event,
-                    )
-                    device.events[group][event].subscribe(
-                        async_fire_group_on_off_event, force_strong_ref=True
-                    )
+    for name_or_group, event in device.events.items():
+        if isinstance(name_or_group, int):
+            for _, event in device.events[name_or_group].items():
+                _register_event(event, async_fire_group_on_off_event)
+        else:
+            _register_event(event, async_fire_group_on_off_event)


 def register_new_device_callback(hass):
@@ -4,6 +4,7 @@ from __future__ import annotations

 from abc import ABC, abstractmethod
 from collections.abc import AsyncGenerator
 from pathlib import Path
+import shutil
 from typing import Any, Final

 import voluptuous as vol
@@ -549,9 +550,12 @@ class KNXCommonFlow(ABC, FlowHandler):
                 ),
                 None,
             )
+            _tunnel_identifier = selected_tunnel_ia or self.new_entry_data.get(
+                CONF_HOST
+            )
+            _tunnel_suffix = f" @ {_tunnel_identifier}" if _tunnel_identifier else ""
             self.new_title = (
-                f"{'Secure ' if _if_user_id else ''}"
-                f"Tunneling @ {selected_tunnel_ia or self.new_entry_data[CONF_HOST]}"
+                f"{'Secure ' if _if_user_id else ''}Tunneling{_tunnel_suffix}"
             )
             return self.finish_flow()

@@ -708,7 +712,8 @@ class KNXCommonFlow(ABC, FlowHandler):
             else:
                 dest_path = Path(self.hass.config.path(STORAGE_DIR, DOMAIN))
                 dest_path.mkdir(exist_ok=True)
-                file_path.rename(dest_path / DEFAULT_KNX_KEYRING_FILENAME)
+                dest_file = dest_path / DEFAULT_KNX_KEYRING_FILENAME
+                shutil.move(file_path, dest_file)
             return keyring, errors

         keyring, errors = await self.hass.async_add_executor_job(_process_upload)
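The KNX keyring fix swaps `Path.rename` for `shutil.move`: `rename` maps to `os.rename`, which fails with `EXDEV` ("invalid cross-device link") when source and destination sit on different filesystems — for instance an upload spooled to a tmpfs `/tmp` being moved into the config directory. `shutil.move` falls back to copy-and-delete in that case. A small sketch of the difference (paths illustrative):

```python
from pathlib import Path
import shutil
import tempfile

upload = Path(tempfile.mkdtemp()) / "keyring.knxkeys"
upload.write_text("secret")
dest_dir = Path("./storage")
dest_dir.mkdir(exist_ok=True)

# upload.rename(dest_dir / upload.name) raises OSError (EXDEV) whenever
# /tmp is a separate filesystem; shutil.move copies then removes instead.
shutil.move(upload, dest_dir / upload.name)
print((dest_dir / upload.name).read_text())  # secret
```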
@@ -84,7 +84,7 @@ def ensure_zone(value):
     if value is None:
         raise vol.Invalid("zone value is None")

-    if str(value) not in ZONES is None:
+    if str(value) not in ZONES:
         raise vol.Invalid("zone not valid")

     return str(value)
@@ -140,7 +140,7 @@ ROBOT_SENSOR_MAP: dict[type[Robot], list[RobotSensorEntityDescription]] = {
             name="Pet weight",
             native_unit_of_measurement=UnitOfMass.POUNDS,
             device_class=SensorDeviceClass.WEIGHT,
-            state_class=SensorStateClass.TOTAL,
+            state_class=SensorStateClass.MEASUREMENT,
         ),
     ],
     FeederRobot: [
@@ -4,7 +4,7 @@ from __future__ import annotations

 import asyncio
 from collections.abc import Callable, Coroutine
 from contextlib import suppress
-from functools import wraps
+from functools import lru_cache, wraps
 from http import HTTPStatus
 import logging
 import secrets
@@ -365,6 +365,12 @@ async def webhook_stream_camera(
     return webhook_response(resp, registration=config_entry.data)


+@lru_cache
+def _cached_template(template_str: str, hass: HomeAssistant) -> template.Template:
+    """Return a cached template."""
+    return template.Template(template_str, hass)
+
+
 @WEBHOOK_COMMANDS.register("render_template")
 @validate_schema(
     {
@@ -381,7 +387,7 @@ async def webhook_render_template(
     resp = {}
     for key, item in data.items():
         try:
-            tpl = template.Template(item[ATTR_TEMPLATE], hass)
+            tpl = _cached_template(item[ATTR_TEMPLATE], hass)
             resp[key] = tpl.async_render(item.get(ATTR_TEMPLATE_VARIABLES))
         except TemplateError as ex:
             resp[key] = {"error": str(ex)}
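This is the same `_cached_template` pattern applied earlier to the REST API. One property worth knowing: a bare `@lru_cache` is unbounded, and `cache_info()` lets you confirm that repeated payloads actually hit the cache:

```python
from functools import lru_cache


@lru_cache  # unbounded; fine when the set of distinct templates is small
def parse(source: str) -> list[str]:
    return source.split()


parse("turn on the lights")
parse("turn on the lights")
print(parse.cache_info())
# CacheInfo(hits=1, misses=1, maxsize=None, currsize=1)
```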
@@ -3,7 +3,7 @@ from __future__ import annotations

 from typing import Any

-from motionblinds import MotionDiscovery
+from motionblinds import MotionDiscovery, MotionGateway
 import voluptuous as vol

 from homeassistant import config_entries
@@ -86,6 +86,16 @@ class MotionBlindsFlowHandler(config_entries.ConfigFlow, domain=DOMAIN):
         await self.async_set_unique_id(mac_address)
         self._abort_if_unique_id_configured(updates={CONF_HOST: discovery_info.ip})

+        gateway = MotionGateway(ip=discovery_info.ip, key="abcd1234-56ef-78")
+        try:
+            # key not needed for GetDeviceList request
+            await self.hass.async_add_executor_job(gateway.GetDeviceList)
+        except Exception:  # pylint: disable=broad-except
+            return self.async_abort(reason="not_motionblinds")
+
+        if not gateway.available:
+            return self.async_abort(reason="not_motionblinds")
+
         short_mac = mac_address[-6:].upper()
         self.context["title_placeholders"] = {
             "short_mac": short_mac,
@@ -28,7 +28,8 @@
     "abort": {
       "already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
       "already_in_progress": "[%key:common::config_flow::abort::already_in_progress%]",
-      "connection_error": "[%key:common::config_flow::error::cannot_connect%]"
+      "connection_error": "[%key:common::config_flow::error::cannot_connect%]",
+      "not_motionblinds": "Discovered device is not a Motion gateway"
     }
   },
   "options": {
@@ -12,5 +12,5 @@
   "documentation": "https://www.home-assistant.io/integrations/nuheat",
   "iot_class": "cloud_polling",
   "loggers": ["nuheat"],
-  "requirements": ["nuheat==1.0.0"]
+  "requirements": ["nuheat==1.0.1"]
 }
@@ -9,11 +9,14 @@ from typing import Any, Concatenate, ParamSpec, TypeVar

 import aiohttp
 import python_otbr_api
+from python_otbr_api import tlv_parser
+from python_otbr_api.pskc import compute_pskc

 from homeassistant.components.thread import async_add_dataset
 from homeassistant.config_entries import ConfigEntry
 from homeassistant.core import HomeAssistant
 from homeassistant.exceptions import ConfigEntryNotReady, HomeAssistantError
+from homeassistant.helpers import issue_registry as ir
 from homeassistant.helpers.aiohttp_client import async_get_clientsession
 from homeassistant.helpers.typing import ConfigType

@@ -23,6 +26,18 @@ from .const import DOMAIN
 _R = TypeVar("_R")
 _P = ParamSpec("_P")

+INSECURE_NETWORK_KEYS = (
+    # Thread web UI default
+    bytes.fromhex("00112233445566778899AABBCCDDEEFF"),
+)
+
+INSECURE_PASSPHRASES = (
+    # Thread web UI default
+    "j01Nme",
+    # Thread documentation default
+    "J01NME",
+)
+

 def _handle_otbr_error(
     func: Callable[Concatenate[OTBRData, _P], Coroutine[Any, Any, _R]]
@@ -46,11 +61,23 @@ class OTBRData:
     url: str
     api: python_otbr_api.OTBR

+    @_handle_otbr_error
+    async def set_enabled(self, enabled: bool) -> None:
+        """Enable or disable the router."""
+        return await self.api.set_enabled(enabled)
+
     @_handle_otbr_error
     async def get_active_dataset_tlvs(self) -> bytes | None:
         """Get current active operational dataset in TLVS format, or None."""
         return await self.api.get_active_dataset_tlvs()

+    @_handle_otbr_error
+    async def create_active_dataset(
+        self, dataset: python_otbr_api.OperationalDataSet
+    ) -> None:
+        """Create an active operational dataset."""
+        return await self.api.create_active_dataset(dataset)
+

 async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
     """Set up the Open Thread Border Router component."""
@@ -58,21 +85,65 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
     return True


+def _warn_on_default_network_settings(
+    hass: HomeAssistant, entry: ConfigEntry, dataset_tlvs: bytes
+) -> None:
+    """Warn user if insecure default network settings are used."""
+    dataset = tlv_parser.parse_tlv(dataset_tlvs.hex())
+    insecure = False
+
+    if (
+        network_key := dataset.get(tlv_parser.MeshcopTLVType.NETWORKKEY)
+    ) is not None and bytes.fromhex(network_key) in INSECURE_NETWORK_KEYS:
+        insecure = True
+    if (
+        not insecure
+        and tlv_parser.MeshcopTLVType.EXTPANID in dataset
+        and tlv_parser.MeshcopTLVType.NETWORKNAME in dataset
+        and tlv_parser.MeshcopTLVType.PSKC in dataset
+    ):
+        ext_pan_id = dataset[tlv_parser.MeshcopTLVType.EXTPANID]
+        network_name = dataset[tlv_parser.MeshcopTLVType.NETWORKNAME]
+        pskc = bytes.fromhex(dataset[tlv_parser.MeshcopTLVType.PSKC])
+        for passphrase in INSECURE_PASSPHRASES:
+            if pskc == compute_pskc(ext_pan_id, network_name, passphrase):
+                insecure = True
+                break
+
+    if insecure:
+        ir.async_create_issue(
+            hass,
+            DOMAIN,
+            f"insecure_thread_network_{entry.entry_id}",
+            is_fixable=False,
+            is_persistent=False,
+            severity=ir.IssueSeverity.WARNING,
+            translation_key="insecure_thread_network",
+        )
+    else:
+        ir.async_delete_issue(
+            hass,
+            DOMAIN,
+            f"insecure_thread_network_{entry.entry_id}",
+        )
+
+
 async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
     """Set up an Open Thread Border Router config entry."""
     api = python_otbr_api.OTBR(entry.data["url"], async_get_clientsession(hass), 10)

     otbrdata = OTBRData(entry.data["url"], api)
     try:
-        dataset = await otbrdata.get_active_dataset_tlvs()
+        dataset_tlvs = await otbrdata.get_active_dataset_tlvs()
     except (
         HomeAssistantError,
         aiohttp.ClientError,
         asyncio.TimeoutError,
     ) as err:
         raise ConfigEntryNotReady("Unable to connect") from err
-    if dataset:
-        await async_add_dataset(hass, entry.title, dataset.hex())
+    if dataset_tlvs:
+        _warn_on_default_network_settings(hass, entry, dataset_tlvs)
+        await async_add_dataset(hass, entry.title, dataset_tlvs.hex())

     hass.data[DOMAIN] = otbrdata
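The new `_warn_on_default_network_settings` flags datasets whose network key is a known default or whose PSKc can be recomputed from a well-known commissioning passphrase. The core of that check isolated into one function (assumes `python-otbr-api` is installed; `parse_tlv` and `compute_pskc` are the same calls the hunk uses, while the function name here is illustrative):

```python
from python_otbr_api import tlv_parser
from python_otbr_api.pskc import compute_pskc

INSECURE_PASSPHRASES = ("j01Nme", "J01NME")


def uses_default_passphrase(dataset_tlvs: bytes) -> bool:
    """Return True if the dataset's PSKc derives from a known default."""
    dataset = tlv_parser.parse_tlv(dataset_tlvs.hex())
    try:
        ext_pan_id = dataset[tlv_parser.MeshcopTLVType.EXTPANID]
        network_name = dataset[tlv_parser.MeshcopTLVType.NETWORKNAME]
        pskc = bytes.fromhex(dataset[tlv_parser.MeshcopTLVType.PSKC])
    except KeyError:
        return False  # not enough fields to recompute a PSKc
    return any(
        pskc == compute_pskc(ext_pan_id, network_name, passphrase)
        for passphrase in INSECURE_PASSPHRASES
    )
```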
@@ -6,6 +6,7 @@ import logging

 import aiohttp
 import python_otbr_api
+from python_otbr_api import tlv_parser
 import voluptuous as vol

 from homeassistant.components.hassio import HassioServiceInfo
@@ -15,7 +16,7 @@ from homeassistant.const import CONF_URL
 from homeassistant.data_entry_flow import FlowResult
 from homeassistant.helpers.aiohttp_client import async_get_clientsession

-from .const import DOMAIN
+from .const import DEFAULT_CHANNEL, DOMAIN

 _LOGGER = logging.getLogger(__name__)

@@ -29,11 +30,26 @@ class OTBRConfigFlow(ConfigFlow, domain=DOMAIN):
         """Connect to the OTBR and create a dataset if it doesn't have one."""
         api = python_otbr_api.OTBR(url, async_get_clientsession(self.hass), 10)
         if await api.get_active_dataset_tlvs() is None:
-            if dataset := await async_get_preferred_dataset(self.hass):
-                await api.set_active_dataset_tlvs(bytes.fromhex(dataset))
+            # We currently have no way to know which channel zha is using, assume it's
+            # the default
+            zha_channel = DEFAULT_CHANNEL
+            thread_dataset_channel = None
+            thread_dataset_tlv = await async_get_preferred_dataset(self.hass)
+            if thread_dataset_tlv:
+                dataset = tlv_parser.parse_tlv(thread_dataset_tlv)
+                if channel_str := dataset.get(tlv_parser.MeshcopTLVType.CHANNEL):
+                    thread_dataset_channel = int(channel_str, base=16)
+
+            if thread_dataset_tlv is not None and zha_channel == thread_dataset_channel:
+                await api.set_active_dataset_tlvs(bytes.fromhex(thread_dataset_tlv))
             else:
+                _LOGGER.debug(
+                    "not importing TLV with channel %s", thread_dataset_channel
+                )
                 await api.create_active_dataset(
-                    python_otbr_api.OperationalDataSet(network_name="home-assistant")
+                    python_otbr_api.OperationalDataSet(
+                        channel=zha_channel, network_name="home-assistant"
+                    )
                 )
             await api.set_enabled(True)
@@ -1,3 +1,5 @@
 """Constants for the Open Thread Border Router integration."""

 DOMAIN = "otbr"
+
+DEFAULT_CHANNEL = 15
@@ -8,5 +8,5 @@
   "documentation": "https://www.home-assistant.io/integrations/otbr",
   "integration_type": "service",
   "iot_class": "local_polling",
-  "requirements": ["python-otbr-api==1.0.4"]
+  "requirements": ["python-otbr-api==1.0.5"]
 }
@@ -12,7 +12,13 @@
       "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]"
     },
     "abort": {
-      "already_configured": "[%key:common::config_flow::abort::already_configured_service%]"
+      "single_instance_allowed": "[%key:common::config_flow::abort::single_instance_allowed%]"
     }
   },
+  "issues": {
+    "insecure_thread_network": {
+      "title": "Insecure Thread network settings detected",
+      "description": "Your Thread network is using a default network key or pass phrase.\n\nThis is a security risk, please create a new Thread network."
+    }
+  }
 }
@@ -1,6 +1,8 @@
 """Websocket API for OTBR."""
 from typing import TYPE_CHECKING

+import python_otbr_api
+
 from homeassistant.components.websocket_api import (
     ActiveConnection,
     async_register_command,
@@ -10,7 +12,7 @@ from homeassistant.components.websocket_api import (
 from homeassistant.core import HomeAssistant, callback
 from homeassistant.exceptions import HomeAssistantError

-from .const import DOMAIN
+from .const import DEFAULT_CHANNEL, DOMAIN

 if TYPE_CHECKING:
     from . import OTBRData
@@ -20,6 +22,7 @@ if TYPE_CHECKING:
 def async_setup(hass: HomeAssistant) -> None:
     """Set up the OTBR Websocket API."""
     async_register_command(hass, websocket_info)
+    async_register_command(hass, websocket_create_network)


 @websocket_command(
@@ -51,3 +54,48 @@ async def websocket_info(
             "active_dataset_tlvs": dataset.hex() if dataset else None,
         },
     )
+
+
+@websocket_command(
+    {
+        "type": "otbr/create_network",
+    }
+)
+@async_response
+async def websocket_create_network(
+    hass: HomeAssistant, connection: ActiveConnection, msg: dict
+) -> None:
+    """Create a new Thread network."""
+    if DOMAIN not in hass.data:
+        connection.send_error(msg["id"], "not_loaded", "No OTBR API loaded")
+        return
+
+    # We currently have no way to know which channel zha is using, assume it's
+    # the default
+    zha_channel = DEFAULT_CHANNEL
+
+    data: OTBRData = hass.data[DOMAIN]
+
+    try:
+        await data.set_enabled(False)
+    except HomeAssistantError as exc:
+        connection.send_error(msg["id"], "set_enabled_failed", str(exc))
+        return
+
+    try:
+        await data.create_active_dataset(
+            python_otbr_api.OperationalDataSet(
+                channel=zha_channel, network_name="home-assistant"
+            )
+        )
+    except HomeAssistantError as exc:
+        connection.send_error(msg["id"], "create_active_dataset_failed", str(exc))
+        return
+
+    try:
+        await data.set_enabled(True)
+    except HomeAssistantError as exc:
+        connection.send_error(msg["id"], "set_enabled_failed", str(exc))
+        return
+
+    connection.send_result(msg["id"])
@@ -17,7 +17,6 @@ from sqlalchemy.orm.query import Query
 from sqlalchemy.orm.session import Session
 from sqlalchemy.sql.expression import literal
 from sqlalchemy.sql.lambdas import StatementLambdaElement
-from sqlalchemy.sql.selectable import Subquery

 from homeassistant.const import COMPRESSED_STATE_LAST_UPDATED, COMPRESSED_STATE_STATE
 from homeassistant.core import HomeAssistant, State, split_entity_id
@@ -592,48 +591,6 @@ def get_last_state_changes(
     )


-def _generate_most_recent_states_for_entities_by_date(
-    schema_version: int,
-    run_start: datetime,
-    utc_point_in_time: datetime,
-    entity_ids: list[str],
-) -> Subquery:
-    """Generate the sub query for the most recent states for specific entities by date."""
-    if schema_version >= 31:
-        run_start_ts = process_timestamp(run_start).timestamp()
-        utc_point_in_time_ts = dt_util.utc_to_timestamp(utc_point_in_time)
-        return (
-            select(
-                States.entity_id.label("max_entity_id"),
-                # https://github.com/sqlalchemy/sqlalchemy/issues/9189
-                # pylint: disable-next=not-callable
-                func.max(States.last_updated_ts).label("max_last_updated"),
-            )
-            .filter(
-                (States.last_updated_ts >= run_start_ts)
-                & (States.last_updated_ts < utc_point_in_time_ts)
-            )
-            .filter(States.entity_id.in_(entity_ids))
-            .group_by(States.entity_id)
-            .subquery()
-        )
-    return (
-        select(
-            States.entity_id.label("max_entity_id"),
-            # https://github.com/sqlalchemy/sqlalchemy/issues/9189
-            # pylint: disable-next=not-callable
-            func.max(States.last_updated).label("max_last_updated"),
-        )
-        .filter(
-            (States.last_updated >= run_start)
-            & (States.last_updated < utc_point_in_time)
-        )
-        .filter(States.entity_id.in_(entity_ids))
-        .group_by(States.entity_id)
-        .subquery()
-    )
-
-
 def _get_states_for_entities_stmt(
     schema_version: int,
     run_start: datetime,
@@ -645,16 +602,29 @@ def _get_states_for_entities_stmt(
     stmt, join_attributes = lambda_stmt_and_join_attributes(
         schema_version, no_attributes, include_last_changed=True
     )
-    most_recent_states_for_entities_by_date = (
-        _generate_most_recent_states_for_entities_by_date(
-            schema_version, run_start, utc_point_in_time, entity_ids
-        )
-    )
     # We got an include-list of entities, accelerate the query by filtering already
     # in the inner query.
     if schema_version >= 31:
         run_start_ts = process_timestamp(run_start).timestamp()
         utc_point_in_time_ts = dt_util.utc_to_timestamp(utc_point_in_time)
         stmt += lambda q: q.join(
-            most_recent_states_for_entities_by_date,
+            (
+                most_recent_states_for_entities_by_date := (
+                    select(
+                        States.entity_id.label("max_entity_id"),
+                        # https://github.com/sqlalchemy/sqlalchemy/issues/9189
+                        # pylint: disable-next=not-callable
+                        func.max(States.last_updated_ts).label("max_last_updated"),
+                    )
+                    .filter(
+                        (States.last_updated_ts >= run_start_ts)
+                        & (States.last_updated_ts < utc_point_in_time_ts)
+                    )
+                    .filter(States.entity_id.in_(entity_ids))
+                    .group_by(States.entity_id)
+                    .subquery()
+                )
+            ),
             and_(
                 States.entity_id
                 == most_recent_states_for_entities_by_date.c.max_entity_id,
@@ -664,7 +634,21 @@ def _get_states_for_entities_stmt(
         )
     else:
         stmt += lambda q: q.join(
-            most_recent_states_for_entities_by_date,
+            (
+                most_recent_states_for_entities_by_date := select(
+                    States.entity_id.label("max_entity_id"),
+                    # https://github.com/sqlalchemy/sqlalchemy/issues/9189
+                    # pylint: disable-next=not-callable
+                    func.max(States.last_updated).label("max_last_updated"),
+                )
+                .filter(
+                    (States.last_updated >= run_start)
+                    & (States.last_updated < utc_point_in_time)
+                )
+                .filter(States.entity_id.in_(entity_ids))
+                .group_by(States.entity_id)
+                .subquery()
+            ),
             and_(
                 States.entity_id
                 == most_recent_states_for_entities_by_date.c.max_entity_id,
@@ -679,45 +663,6 @@ def _get_states_for_entities_stmt(
     return stmt


-def _generate_most_recent_states_by_date(
-    schema_version: int,
-    run_start: datetime,
-    utc_point_in_time: datetime,
-) -> Subquery:
-    """Generate the sub query for the most recent states by date."""
-    if schema_version >= 31:
-        run_start_ts = process_timestamp(run_start).timestamp()
-        utc_point_in_time_ts = dt_util.utc_to_timestamp(utc_point_in_time)
-        return (
-            select(
-                States.entity_id.label("max_entity_id"),
-                # https://github.com/sqlalchemy/sqlalchemy/issues/9189
-                # pylint: disable-next=not-callable
-                func.max(States.last_updated_ts).label("max_last_updated"),
-            )
-            .filter(
-                (States.last_updated_ts >= run_start_ts)
-                & (States.last_updated_ts < utc_point_in_time_ts)
-            )
-            .group_by(States.entity_id)
-            .subquery()
-        )
-    return (
-        select(
-            States.entity_id.label("max_entity_id"),
-            # https://github.com/sqlalchemy/sqlalchemy/issues/9189
-            # pylint: disable-next=not-callable
-            func.max(States.last_updated).label("max_last_updated"),
-        )
-        .filter(
-            (States.last_updated >= run_start)
-            & (States.last_updated < utc_point_in_time)
-        )
-        .group_by(States.entity_id)
-        .subquery()
-    )
-
-
 def _get_states_for_all_stmt(
     schema_version: int,
     run_start: datetime,
@@ -733,12 +678,26 @@ def _get_states_for_all_stmt(
     # query, then filter out unwanted domains as well as applying the custom filter.
     # This filtering can't be done in the inner query because the domain column is
     # not indexed and we can't control what's in the custom filter.
-    most_recent_states_by_date = _generate_most_recent_states_by_date(
-        schema_version, run_start, utc_point_in_time
-    )
     if schema_version >= 31:
         run_start_ts = process_timestamp(run_start).timestamp()
         utc_point_in_time_ts = dt_util.utc_to_timestamp(utc_point_in_time)
         stmt += lambda q: q.join(
-            most_recent_states_by_date,
+            (
+                most_recent_states_by_date := (
+                    select(
+                        States.entity_id.label("max_entity_id"),
+                        # https://github.com/sqlalchemy/sqlalchemy/issues/9189
+                        # pylint: disable-next=not-callable
+                        func.max(States.last_updated_ts).label("max_last_updated"),
+                    )
+                    .filter(
+                        (States.last_updated_ts >= run_start_ts)
+                        & (States.last_updated_ts < utc_point_in_time_ts)
+                    )
+                    .group_by(States.entity_id)
+                    .subquery()
+                )
+            ),
             and_(
                 States.entity_id == most_recent_states_by_date.c.max_entity_id,
                 States.last_updated_ts == most_recent_states_by_date.c.max_last_updated,
@@ -746,7 +705,22 @@ def _get_states_for_all_stmt(
         )
     else:
         stmt += lambda q: q.join(
-            most_recent_states_by_date,
+            (
+                most_recent_states_by_date := (
+                    select(
+                        States.entity_id.label("max_entity_id"),
+                        # https://github.com/sqlalchemy/sqlalchemy/issues/9189
+                        # pylint: disable-next=not-callable
+                        func.max(States.last_updated).label("max_last_updated"),
+                    )
+                    .filter(
+                        (States.last_updated >= run_start)
+                        & (States.last_updated < utc_point_in_time)
+                    )
+                    .group_by(States.entity_id)
+                    .subquery()
+                )
+            ),
             and_(
                 States.entity_id == most_recent_states_by_date.c.max_entity_id,
                 States.last_updated == most_recent_states_by_date.c.max_last_updated,
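Both recorder query builders now construct their aggregation subquery inside the joined `lambda q:` via the walrus operator instead of closing over a prebuilt subquery — SQLAlchemy's cached `lambda_stmt` elements analyze and restrict closure variables, so binding the subquery inside the lambda expression keeps the cached statement self-contained. The binding behavior itself is plain Python:

```python
# A name bound with := inside a lambda body is evaluated on every call,
# not captured once from the enclosing scope.
make_pair = lambda x: ((y := x * 2), x + y)
print(make_pair(3))   # (6, 9)
print(make_pair(10))  # (20, 30)
```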
@@ -6,5 +6,5 @@
   "integration_type": "system",
   "iot_class": "local_push",
   "quality_scale": "internal",
-  "requirements": ["sqlalchemy==2.0.4", "fnvhash==0.1.0"]
+  "requirements": ["sqlalchemy==2.0.5.post1", "fnvhash==0.1.0"]
 }
@@ -50,7 +50,7 @@ from .tasks import (
    PostSchemaMigrationTask,
    StatisticsTimestampMigrationCleanupTask,
)
from .util import session_scope
from .util import database_job_retry_wrapper, session_scope

if TYPE_CHECKING:
    from . import Recorder
@@ -158,7 +158,9 @@ def migrate_schema(
        hass.add_job(instance.async_set_db_ready)
        new_version = version + 1
        _LOGGER.info("Upgrading recorder db schema to version %s", new_version)
        _apply_update(hass, engine, session_maker, new_version, current_version)
        _apply_update(
            instance, hass, engine, session_maker, new_version, current_version
        )
        with session_scope(session=session_maker()) as session:
            session.add(SchemaChanges(schema_version=new_version))

@@ -508,7 +510,9 @@ def _drop_foreign_key_constraints(
    )


@database_job_retry_wrapper("Apply migration update", 10)
def _apply_update(  # noqa: C901
    instance: Recorder,
    hass: HomeAssistant,
    engine: Engine,
    session_maker: Callable[[], Session],
@@ -922,7 +926,7 @@ def _apply_update( # noqa: C901
            # There may be duplicated statistics entries, delete duplicates
            # and try again
            with session_scope(session=session_maker()) as session:
                delete_statistics_duplicates(hass, session)
                delete_statistics_duplicates(instance, hass, session)
            _migrate_statistics_columns_to_timestamp(session_maker, engine)
            # Log at error level to ensure the user sees this message in the log
            # since we logged the error above.
@@ -965,7 +969,7 @@ def post_schema_migration(
        # since they are no longer used and take up a significant amount of space.
        assert instance.event_session is not None
        assert instance.engine is not None
        _wipe_old_string_time_columns(instance.engine, instance.event_session)
        _wipe_old_string_time_columns(instance, instance.engine, instance.event_session)
    if old_version < 35 <= new_version:
        # In version 34 we migrated all the created, start, and last_reset
        # columns to be timestamps. In version 34 we need to wipe the old columns
@@ -978,7 +982,10 @@ def _wipe_old_string_statistics_columns(instance: Recorder) -> None:
    instance.queue_task(StatisticsTimestampMigrationCleanupTask())


def _wipe_old_string_time_columns(engine: Engine, session: Session) -> None:
@database_job_retry_wrapper("Wipe old string time columns", 3)
def _wipe_old_string_time_columns(
    instance: Recorder, engine: Engine, session: Session
) -> None:
    """Wipe old string time columns to save space."""
    # Wipe Events.time_fired since it's been replaced by Events.time_fired_ts
    # Wipe States.last_updated since it's been replaced by States.last_updated_ts
@@ -1162,7 +1169,7 @@ def _migrate_statistics_columns_to_timestamp(
                    "last_reset_ts="
                    "UNIX_TIMESTAMP(last_reset) "
                    "where start_ts is NULL "
                    "LIMIT 250000;"
                    "LIMIT 100000;"
                )
            )
    elif engine.dialect.name == SupportedDialect.POSTGRESQL:
@@ -1180,7 +1187,7 @@ def _migrate_statistics_columns_to_timestamp(
                    "created_ts=EXTRACT(EPOCH FROM created), "
                    "last_reset_ts=EXTRACT(EPOCH FROM last_reset) "
                    "where id IN ( "
                    f"SELECT id FROM {table} where start_ts is NULL LIMIT 250000 "
                    f"SELECT id FROM {table} where start_ts is NULL LIMIT 100000 "
                    " );"
                )
            )
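The `LIMIT 250000` to `LIMIT 100000` changes shrink each timestamp-migration batch so a single UPDATE holds row locks for less time, and the new `@database_job_retry_wrapper` simply re-runs a batch that hits a deadlock. A hedged sketch of the batching loop, assuming SQLite syntax and an illustrative table name:

# Update rows in fixed-size chunks until nothing is left, so each
# transaction stays short and lock pressure stays low.
import sqlite3

BATCH_SIZE = 100_000  # the diff lowers this from 250000


def migrate_in_batches(conn: sqlite3.Connection) -> None:
    """Backfill statistics.start_ts from the old string column in chunks."""
    while True:
        with conn:  # one short transaction per batch
            cur = conn.execute(
                "UPDATE statistics "
                "SET start_ts = CAST(strftime('%s', start) AS REAL) "
                "WHERE rowid IN (SELECT rowid FROM statistics "
                "WHERE start_ts IS NULL LIMIT ?)",
                (BATCH_SIZE,),
            )
        if cur.rowcount == 0:  # nothing left to migrate
            break


conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE statistics (start TEXT, start_ts REAL)")
conn.execute("INSERT INTO statistics (start) VALUES ('2023-03-01 00:00:00')")
migrate_in_batches(conn)
print(conn.execute("SELECT start_ts FROM statistics").fetchone())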
@@ -16,14 +16,13 @@ import re
from statistics import mean
from typing import TYPE_CHECKING, Any, Literal, cast

from sqlalchemy import and_, bindparam, func, lambda_stmt, select, text
from sqlalchemy import Select, and_, bindparam, func, lambda_stmt, select, text
from sqlalchemy.engine import Engine
from sqlalchemy.engine.row import Row
from sqlalchemy.exc import OperationalError, SQLAlchemyError, StatementError
from sqlalchemy.orm.session import Session
from sqlalchemy.sql.expression import literal_column, true
from sqlalchemy.sql.lambdas import StatementLambdaElement
from sqlalchemy.sql.selectable import Subquery
import voluptuous as vol

from homeassistant.const import ATTR_UNIT_OF_MEASUREMENT
@@ -75,6 +74,7 @@ from .models import (
    datetime_to_timestamp_or_none,
)
from .util import (
    database_job_retry_wrapper,
    execute,
    execute_stmt_lambda_element,
    get_instance,
@@ -515,7 +515,10 @@ def _delete_duplicates_from_table(
    return (total_deleted_rows, all_non_identical_duplicates)


def delete_statistics_duplicates(hass: HomeAssistant, session: Session) -> None:
@database_job_retry_wrapper("delete statistics duplicates", 3)
def delete_statistics_duplicates(
    instance: Recorder, hass: HomeAssistant, session: Session
) -> None:
    """Identify and delete duplicated statistics.

    A backup will be made of duplicated statistics before it is deleted.
@@ -646,27 +649,19 @@ def _compile_hourly_statistics_summary_mean_stmt(
    )


def _compile_hourly_statistics_last_sum_stmt_subquery(
    start_time_ts: float, end_time_ts: float
) -> Subquery:
    """Generate the summary mean statement for hourly statistics."""
    return (
        select(*QUERY_STATISTICS_SUMMARY_SUM)
        .filter(StatisticsShortTerm.start_ts >= start_time_ts)
        .filter(StatisticsShortTerm.start_ts < end_time_ts)
        .subquery()
    )


def _compile_hourly_statistics_last_sum_stmt(
    start_time_ts: float, end_time_ts: float
) -> StatementLambdaElement:
    """Generate the summary mean statement for hourly statistics."""
    subquery = _compile_hourly_statistics_last_sum_stmt_subquery(
        start_time_ts, end_time_ts
    )
    return lambda_stmt(
        lambda: select(subquery)
        lambda: select(
            subquery := (
                select(*QUERY_STATISTICS_SUMMARY_SUM)
                .filter(StatisticsShortTerm.start_ts >= start_time_ts)
                .filter(StatisticsShortTerm.start_ts < end_time_ts)
                .subquery()
            )
        )
        .filter(subquery.c.rownum == 1)
        .order_by(subquery.c.metadata_id)
    )
@@ -1263,7 +1258,8 @@ def _reduce_statistics_per_month(
    )


def _statistics_during_period_stmt(
def _generate_statistics_during_period_stmt(
    columns: Select,
    start_time: datetime,
    end_time: datetime | None,
    metadata_ids: list[int] | None,
@@ -1275,21 +1271,6 @@ def _statistics_during_period_stmt(
    This prepares a lambda_stmt query, so we don't insert the parameters yet.
    """
    start_time_ts = start_time.timestamp()

    columns = select(table.metadata_id, table.start_ts)
    if "last_reset" in types:
        columns = columns.add_columns(table.last_reset_ts)
    if "max" in types:
        columns = columns.add_columns(table.max)
    if "mean" in types:
        columns = columns.add_columns(table.mean)
    if "min" in types:
        columns = columns.add_columns(table.min)
    if "state" in types:
        columns = columns.add_columns(table.state)
    if "sum" in types:
        columns = columns.add_columns(table.sum)

    stmt = lambda_stmt(lambda: columns.filter(table.start_ts >= start_time_ts))
    if end_time is not None:
        end_time_ts = end_time.timestamp()
@@ -1303,6 +1284,23 @@
    return stmt


def _generate_max_mean_min_statistic_in_sub_period_stmt(
    columns: Select,
    start_time: datetime | None,
    end_time: datetime | None,
    table: type[StatisticsBase],
    metadata_id: int,
) -> StatementLambdaElement:
    stmt = lambda_stmt(lambda: columns.filter(table.metadata_id == metadata_id))
    if start_time is not None:
        start_time_ts = start_time.timestamp()
        stmt += lambda q: q.filter(table.start_ts >= start_time_ts)
    if end_time is not None:
        end_time_ts = end_time.timestamp()
        stmt += lambda q: q.filter(table.start_ts < end_time_ts)
    return stmt


def _get_max_mean_min_statistic_in_sub_period(
    session: Session,
    result: dict[str, float],
@@ -1328,13 +1326,9 @@ def _get_max_mean_min_statistic_in_sub_period(
        # https://github.com/sqlalchemy/sqlalchemy/issues/9189
        # pylint: disable-next=not-callable
        columns = columns.add_columns(func.min(table.min))
    stmt = lambda_stmt(lambda: columns.filter(table.metadata_id == metadata_id))
    if start_time is not None:
        start_time_ts = start_time.timestamp()
        stmt += lambda q: q.filter(table.start_ts >= start_time_ts)
    if end_time is not None:
        end_time_ts = end_time.timestamp()
        stmt += lambda q: q.filter(table.start_ts < end_time_ts)
    stmt = _generate_max_mean_min_statistic_in_sub_period_stmt(
        columns, start_time, end_time, table, metadata_id
    )
    stats = cast(Sequence[Row[Any]], execute_stmt_lambda_element(session, stmt))
    if not stats:
        return
@@ -1749,8 +1743,21 @@ def _statistics_during_period_with_session(
    table: type[Statistics | StatisticsShortTerm] = (
        Statistics if period != "5minute" else StatisticsShortTerm
    )
    stmt = _statistics_during_period_stmt(
        start_time, end_time, metadata_ids, table, types
    columns = select(table.metadata_id, table.start_ts)  # type: ignore[call-overload]
    if "last_reset" in types:
        columns = columns.add_columns(table.last_reset_ts)
    if "max" in types:
        columns = columns.add_columns(table.max)
    if "mean" in types:
        columns = columns.add_columns(table.mean)
    if "min" in types:
        columns = columns.add_columns(table.min)
    if "state" in types:
        columns = columns.add_columns(table.state)
    if "sum" in types:
        columns = columns.add_columns(table.sum)
    stmt = _generate_statistics_during_period_stmt(
        columns, start_time, end_time, metadata_ids, table, types
    )
    stats = cast(Sequence[Row], execute_stmt_lambda_element(session, stmt))

@@ -1915,28 +1922,24 @@ def get_last_short_term_statistics(
    )


def _generate_most_recent_statistic_row(metadata_ids: list[int]) -> Subquery:
    """Generate the subquery to find the most recent statistic row."""
    return (
        select(
            StatisticsShortTerm.metadata_id,
            # https://github.com/sqlalchemy/sqlalchemy/issues/9189
            # pylint: disable-next=not-callable
            func.max(StatisticsShortTerm.start_ts).label("start_max"),
        )
        .where(StatisticsShortTerm.metadata_id.in_(metadata_ids))
        .group_by(StatisticsShortTerm.metadata_id)
    ).subquery()


def _latest_short_term_statistics_stmt(
    metadata_ids: list[int],
) -> StatementLambdaElement:
    """Create the statement for finding the latest short term stat rows."""
    stmt = lambda_stmt(lambda: select(*QUERY_STATISTICS_SHORT_TERM))
    most_recent_statistic_row = _generate_most_recent_statistic_row(metadata_ids)
    stmt += lambda s: s.join(
        most_recent_statistic_row,
        (
            most_recent_statistic_row := (
                select(
                    StatisticsShortTerm.metadata_id,
                    # https://github.com/sqlalchemy/sqlalchemy/issues/9189
                    # pylint: disable-next=not-callable
                    func.max(StatisticsShortTerm.start_ts).label("start_max"),
                )
                .where(StatisticsShortTerm.metadata_id.in_(metadata_ids))
                .group_by(StatisticsShortTerm.metadata_id)
            ).subquery()
        ),
        (
            StatisticsShortTerm.metadata_id  # pylint: disable=comparison-with-callable
            == most_recent_statistic_row.c.metadata_id
@@ -1984,21 +1987,34 @@ def get_latest_short_term_statistics(
    )


def _get_most_recent_statistics_subquery(
    metadata_ids: set[int], table: type[StatisticsBase], start_time_ts: float
) -> Subquery:
    """Generate the subquery to find the most recent statistic row."""
    return (
        select(
            # https://github.com/sqlalchemy/sqlalchemy/issues/9189
            # pylint: disable-next=not-callable
            func.max(table.start_ts).label("max_start_ts"),
            table.metadata_id.label("max_metadata_id"),
def _generate_statistics_at_time_stmt(
    columns: Select,
    table: type[StatisticsBase],
    metadata_ids: set[int],
    start_time_ts: float,
) -> StatementLambdaElement:
    """Create the statement for finding the statistics for a given time."""
    return lambda_stmt(
        lambda: columns.join(
            (
                most_recent_statistic_ids := (
                    select(
                        # https://github.com/sqlalchemy/sqlalchemy/issues/9189
                        # pylint: disable-next=not-callable
                        func.max(table.start_ts).label("max_start_ts"),
                        table.metadata_id.label("max_metadata_id"),
                    )
                    .filter(table.start_ts < start_time_ts)
                    .filter(table.metadata_id.in_(metadata_ids))
                    .group_by(table.metadata_id)
                    .subquery()
                )
            ),
            and_(
                table.start_ts == most_recent_statistic_ids.c.max_start_ts,
                table.metadata_id == most_recent_statistic_ids.c.max_metadata_id,
            ),
        )
        .filter(table.start_ts < start_time_ts)
        .filter(table.metadata_id.in_(metadata_ids))
        .group_by(table.metadata_id)
        .subquery()
    )
@@ -2023,19 +2039,10 @@ def _statistics_at_time(
        columns = columns.add_columns(table.state)
    if "sum" in types:
        columns = columns.add_columns(table.sum)

    start_time_ts = start_time.timestamp()
    most_recent_statistic_ids = _get_most_recent_statistics_subquery(
        metadata_ids, table, start_time_ts
    stmt = _generate_statistics_at_time_stmt(
        columns, table, metadata_ids, start_time_ts
    )
    stmt = lambda_stmt(lambda: columns).join(
        most_recent_statistic_ids,
        and_(
            table.start_ts == most_recent_statistic_ids.c.max_start_ts,
            table.metadata_id == most_recent_statistic_ids.c.max_metadata_id,
        ),
    )

    return cast(Sequence[Row], execute_stmt_lambda_element(session, stmt))
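A second refactor pattern repeats through the statistics hunks: the conditional `add_columns` chains move out of the statement helpers into the callers, and the finished `Select` is handed to the lambda factory as a `columns` argument. A self-contained sketch of that builder step, with an illustrative table:

# Start from the always-needed columns and conditionally add the requested
# ones, mirroring _statistics_during_period_with_session above.
from sqlalchemy import Column, Float, Integer, MetaData, Table, select

metadata = MetaData()
stats = Table(
    "statistics",
    metadata,
    Column("metadata_id", Integer),
    Column("start_ts", Float),
    Column("mean", Float),
    Column("min", Float),
    Column("max", Float),
)


def build_columns(types: set[str]):
    """Return a Select carrying only the columns the caller asked for."""
    columns = select(stats.c.metadata_id, stats.c.start_ts)
    for name in ("max", "mean", "min"):
        if name in types:
            columns = columns.add_columns(stats.c[name])
    return columns


print(build_columns({"mean"}))  # SELECT ... metadata_id, start_ts, mean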
@@ -568,6 +568,17 @@ def end_incomplete_runs(session: Session, start_time: datetime) -> None:
        session.add(run)


def _is_retryable_error(instance: Recorder, err: OperationalError) -> bool:
    """Return True if the error is retryable."""
    assert instance.engine is not None
    return bool(
        instance.engine.dialect.name == SupportedDialect.MYSQL
        and isinstance(err.orig, BaseException)
        and err.orig.args
        and err.orig.args[0] in RETRYABLE_MYSQL_ERRORS
    )


_FuncType = Callable[Concatenate[_RecorderT, _P], bool]


@@ -585,12 +596,8 @@ def retryable_database_job(
        try:
            return job(instance, *args, **kwargs)
        except OperationalError as err:
            assert instance.engine is not None
            if (
                instance.engine.dialect.name == SupportedDialect.MYSQL
                and err.orig
                and err.orig.args[0] in RETRYABLE_MYSQL_ERRORS
            ):
            if _is_retryable_error(instance, err):
                assert isinstance(err.orig, BaseException)
                _LOGGER.info(
                    "%s; %s not completed, retrying", err.orig.args[1], description
                )
@@ -608,6 +615,46 @@
    return decorator


_WrappedFuncType = Callable[Concatenate[_RecorderT, _P], None]


def database_job_retry_wrapper(
    description: str, attempts: int = 5
) -> Callable[[_WrappedFuncType[_RecorderT, _P]], _WrappedFuncType[_RecorderT, _P]]:
    """Try to execute a database job multiple times.

    This wrapper handles InnoDB deadlocks and lock timeouts.

    This is different from retryable_database_job in that it will retry the job
    attempts number of times instead of returning False if the job fails.
    """

    def decorator(
        job: _WrappedFuncType[_RecorderT, _P]
    ) -> _WrappedFuncType[_RecorderT, _P]:
        @functools.wraps(job)
        def wrapper(instance: _RecorderT, *args: _P.args, **kwargs: _P.kwargs) -> None:
            for attempt in range(attempts):
                try:
                    job(instance, *args, **kwargs)
                    return
                except OperationalError as err:
                    if attempt == attempts - 1 or not _is_retryable_error(
                        instance, err
                    ):
                        raise
                    assert isinstance(err.orig, BaseException)
                    _LOGGER.info(
                        "%s; %s failed, retrying", err.orig.args[1], description
                    )
                    time.sleep(instance.db_retry_wait)
                    # Failed with retryable error

        return wrapper

    return decorator


def periodic_db_cleanups(instance: Recorder) -> None:
    """Run any database cleanups that need to happen periodically.
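A self-contained toy version of the retry pattern that `database_job_retry_wrapper` introduces above: re-run the job a fixed number of times on errors classified as retryable, re-raising on the final attempt. The real wrapper checks MySQL error codes via `_is_retryable_error` and sleeps `instance.db_retry_wait` between attempts; `OSError` stands in here so the sketch runs anywhere:

import functools
import time
from collections.abc import Callable


def job_retry_wrapper(description: str, attempts: int = 5) -> Callable:
    """Decorator factory: retry the wrapped job on retryable errors."""

    def decorator(job: Callable) -> Callable:
        @functools.wraps(job)
        def wrapper(*args, **kwargs) -> None:
            for attempt in range(attempts):
                try:
                    job(*args, **kwargs)
                    return
                except OSError:  # stand-in for a retryable OperationalError
                    if attempt == attempts - 1:
                        raise  # out of attempts: surface the failure
                    print(f"{description} failed, retrying")
                    time.sleep(0.1)  # stand-in for instance.db_retry_wait

        return wrapper

    return decorator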
@@ -64,7 +64,7 @@ NUMBER_ENTITIES = (
        get_max_value=lambda api, ch: api.zoom_range(ch)["focus"]["pos"]["max"],
        supported=lambda api, ch: api.zoom_supported(ch),
        value=lambda api, ch: api.get_focus(ch),
        method=lambda api, ch, value: api.set_zoom(ch, int(value)),
        method=lambda api, ch, value: api.set_focus(ch, int(value)),
    ),
)
@@ -271,15 +271,20 @@ class SensorEntity(Entity):
    @property
    def _numeric_state_expected(self) -> bool:
        """Return true if the sensor must be numeric."""
        # Note: the order of the checks needs to be kept aligned
        # with the checks in `state` property.
        device_class = try_parse_enum(SensorDeviceClass, self.device_class)
        if device_class in NON_NUMERIC_DEVICE_CLASSES:
            return False
        if (
            self.state_class is not None
            or self.native_unit_of_measurement is not None
            or self.suggested_display_precision is not None
        ):
            return True
        # Sensors with custom device classes are not considered numeric
        device_class = try_parse_enum(SensorDeviceClass, self.device_class)
        return device_class not in {None, *NON_NUMERIC_DEVICE_CLASSES}
        # Sensors with custom device classes will have the device class
        # converted to None and are not considered numeric
        return device_class is not None

    @property
    def options(self) -> list[str] | None:
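The reordering above matters because the device class is now parsed once up front, and `try_parse_enum` maps a custom (non-enum) device class to `None`, taking it out of the numeric path before the state-class and unit checks run. A simplified, runnable equivalent of that parse step:

from enum import Enum


class SensorDeviceClass(Enum):  # trimmed stand-in for the real enum
    TEMPERATURE = "temperature"
    DATE = "date"


def try_parse_enum(cls, value):
    """Simplified equivalent of homeassistant.util.enum.try_parse_enum."""
    try:
        return cls(value)
    except ValueError:
        return None


assert try_parse_enum(SensorDeviceClass, "temperature") is SensorDeviceClass.TEMPERATURE
assert try_parse_enum(SensorDeviceClass, "my_custom_class") is None  # not numeric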
@@ -1,13 +1,11 @@
"""SFR Box."""
from __future__ import annotations

import asyncio

from sfrbox_api.bridge import SFRBox
from sfrbox_api.exceptions import SFRBoxAuthenticationError, SFRBoxError

from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME
from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME, Platform
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
from homeassistant.helpers import device_registry as dr
@@ -40,15 +38,17 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
            hass, box, "system", lambda b: b.system_get_info()
        ),
    )
    tasks = [
        data.dsl.async_config_entry_first_refresh(),
        data.system.async_config_entry_first_refresh(),
    ]
    await asyncio.gather(*tasks)
    await data.system.async_config_entry_first_refresh()
    system_info = data.system.data

    if system_info.net_infra == "adsl":
        await data.dsl.async_config_entry_first_refresh()
    else:
        platforms = list(platforms)
        platforms.remove(Platform.BINARY_SENSOR)

    hass.data.setdefault(DOMAIN, {})[entry.entry_id] = data

    system_info = data.system.data
    device_registry = dr.async_get(hass)
    device_registry.async_get_or_create(
        config_entry_id=entry.entry_id,
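The shape of the fix: the system coordinator is refreshed first, and the DSL coordinator (and the binary_sensor platform that depends on it) is only set up when the box reports an ADSL uplink. A hedged sketch of deciding the platform list from runtime data; the `Platform` values here are trimmed stand-ins:

from enum import Enum


class Platform(Enum):  # trimmed stand-in for homeassistant.const.Platform
    BINARY_SENSOR = "binary_sensor"
    SENSOR = "sensor"


PLATFORMS = [Platform.BINARY_SENSOR, Platform.SENSOR]


def platforms_for(net_infra: str) -> list[Platform]:
    """Return the platforms to forward for this box."""
    platforms = list(PLATFORMS)  # copy so the module constant stays intact
    if net_infra != "adsl":
        platforms.remove(Platform.BINARY_SENSOR)  # DSL-only entities
    return platforms


assert platforms_for("ftth") == [Platform.SENSOR]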
@@ -1,7 +1,6 @@
"""SFR Box sensor platform."""
from collections.abc import Callable, Iterable
from collections.abc import Callable
from dataclasses import dataclass
from itertools import chain
from typing import Generic, TypeVar

from sfrbox_api.models import DslInfo, SystemInfo
@@ -204,16 +203,15 @@ async def async_setup_entry(
    """Set up the sensors."""
    data: DomainData = hass.data[DOMAIN][entry.entry_id]

    entities: Iterable[SFRBoxSensor] = chain(
        (
    entities: list[SFRBoxSensor] = [
        SFRBoxSensor(data.system, description, data.system.data)
        for description in SYSTEM_SENSOR_TYPES
    ]
    if data.system.data.net_infra == "adsl":
        entities.extend(
            SFRBoxSensor(data.dsl, description, data.system.data)
            for description in DSL_SENSOR_TYPES
        ),
        (
            SFRBoxSensor(data.system, description, data.system.data)
            for description in SYSTEM_SENSOR_TYPES
        ),
    )
        )

    async_add_entities(entities)
@@ -1,9 +1,9 @@
{
  "domain": "snapcast",
  "name": "Snapcast",
  "codeowners": [],
  "codeowners": ["@luar123"],
  "documentation": "https://www.home-assistant.io/integrations/snapcast",
  "iot_class": "local_polling",
  "loggers": ["construct", "snapcast"],
  "requirements": ["snapcast==2.3.0"]
  "requirements": ["snapcast==2.3.2"]
}
@@ -5,5 +5,5 @@
  "config_flow": true,
  "documentation": "https://www.home-assistant.io/integrations/sql",
  "iot_class": "local_polling",
  "requirements": ["sqlalchemy==2.0.4"]
  "requirements": ["sqlalchemy==2.0.5.post1"]
}
@@ -21,7 +21,6 @@ set_climate_timer:
      description: Choose this or Overlay. Set the time period for the change if you want to be specific. Alternatively use Overlay
      required: false
      example: "01:30:00"
      default: "01:00:00"
      selector:
        text:
    requested_overlay:
@@ -29,7 +28,6 @@ set_climate_timer:
      description: Choose this or Time Period. Allows you to choose an overlay. MANUAL:=Overlay until user removes; NEXT_TIME_BLOCK:=Overlay until next timeblock; TADO_DEFAULT:=Overlay based on tado app setting
      required: false
      example: "MANUAL"
      default: "TADO_DEFAULT"
      selector:
        select:
          options:
@@ -233,6 +233,9 @@ class CoverTemplate(TemplateEntity, CoverEntity):
        if not self._position_template:
            self._position = None

        self._is_opening = False
        self._is_closing = False

    @callback
    def _update_position(self, result):
        try:
@@ -1,7 +1,9 @@
"""Config flow for the Thread integration."""
from __future__ import annotations

from homeassistant.components import zeroconf
from typing import Any

from homeassistant.components import onboarding, zeroconf
from homeassistant.config_entries import ConfigFlow
from homeassistant.data_entry_flow import FlowResult

@@ -32,4 +34,12 @@ class ThreadConfigFlow(ConfigFlow, domain=DOMAIN):
    ) -> FlowResult:
        """Set up because the user has border routers."""
        await self._async_handle_discovery_without_unique_id()
        return self.async_create_entry(title="Thread", data={})
        return await self.async_step_confirm()

    async def async_step_confirm(
        self, user_input: dict[str, Any] | None = None
    ) -> FlowResult:
        """Confirm the setup."""
        if user_input is not None or not onboarding.async_is_onboarded(self.hass):
            return self.async_create_entry(title="Thread", data={})
        return self.async_show_form(step_id="confirm")
@@ -17,9 +17,8 @@ some of their thread accessories can't be pinged, but it's still a thread problem

from __future__ import annotations

from typing import Any, TypedDict
from typing import TYPE_CHECKING, Any, TypedDict

from pyroute2 import NDB  # pylint: disable=no-name-in-module
from python_otbr_api.tlv_parser import MeshcopTLVType

from homeassistant.components import zeroconf
@@ -29,6 +28,9 @@ from homeassistant.core import HomeAssistant
from .dataset_store import async_get_store
from .discovery import async_read_zeroconf_cache

if TYPE_CHECKING:
    from pyroute2 import NDB  # pylint: disable=no-name-in-module


class Neighbour(TypedDict):
    """A neighbour cache entry (ip neigh)."""
@@ -67,58 +69,69 @@ class Network(TypedDict):
    unexpected_routers: set[str]


def _get_possible_thread_routes() -> (
    tuple[dict[str, dict[str, Route]], dict[str, set[str]]]
):
def _get_possible_thread_routes(
    ndb: NDB,
) -> tuple[dict[str, dict[str, Route]], dict[str, set[str]]]:
    # Build a list of possible thread routes
    # Right now, this is ipv6 /64's that have a gateway
    # We cross reference with zeroconf data to confirm which via's are known border routers
    routes: dict[str, dict[str, Route]] = {}
    reverse_routes: dict[str, set[str]] = {}

    with NDB() as ndb:
        for record in ndb.routes:
            # Limit to IPV6 routes
            if record.family != 10:
                continue
            # Limit to /64 prefixes
            if record.dst_len != 64:
                continue
            # Limit to routes with a via
            if not record.gateway and not record.nh_gateway:
                continue
            gateway = record.gateway or record.nh_gateway
            route = routes.setdefault(gateway, {})
            route[record.dst] = {
                "metrics": record.metrics,
                "priority": record.priority,
                # NM creates "nexthop" routes - a single route with many via's
                # Kernel creates many routes with a single via
                "is_nexthop": record.nh_gateway is not None,
            }
            reverse_routes.setdefault(record.dst, set()).add(gateway)
    for record in ndb.routes:
        # Limit to IPV6 routes
        if record.family != 10:
            continue
        # Limit to /64 prefixes
        if record.dst_len != 64:
            continue
        # Limit to routes with a via
        if not record.gateway and not record.nh_gateway:
            continue
        gateway = record.gateway or record.nh_gateway
        route = routes.setdefault(gateway, {})
        route[record.dst] = {
            "metrics": record.metrics,
            "priority": record.priority,
            # NM creates "nexthop" routes - a single route with many via's
            # Kernel creates many routes with a single via
            "is_nexthop": record.nh_gateway is not None,
        }
        reverse_routes.setdefault(record.dst, set()).add(gateway)
    return routes, reverse_routes


def _get_neighbours() -> dict[str, Neighbour]:
    neighbours: dict[str, Neighbour] = {}

    with NDB() as ndb:
        for record in ndb.neighbours:
            neighbours[record.dst] = {
                "lladdr": record.lladdr,
                "state": record.state,
                "probes": record.probes,
            }

def _get_neighbours(ndb: NDB) -> dict[str, Neighbour]:
    # Build a list of neighbours
    neighbours: dict[str, Neighbour] = {
        record.dst: {
            "lladdr": record.lladdr,
            "state": record.state,
            "probes": record.probes,
        }
        for record in ndb.neighbours
    }
    return neighbours


def _get_routes_and_neighbors():
    """Get the routes and neighbours from pyroute2."""
    # Import in the executor since import NDB can take a while
    from pyroute2 import (  # pylint: disable=no-name-in-module, import-outside-toplevel
        NDB,
    )

    with NDB() as ndb:  # pylint: disable=not-callable
        routes, reverse_routes = _get_possible_thread_routes(ndb)
        neighbours = _get_neighbours(ndb)

    return routes, reverse_routes, neighbours


async def async_get_config_entry_diagnostics(
    hass: HomeAssistant, entry: ConfigEntry
) -> dict[str, Any]:
    """Return diagnostics for all known thread networks."""

    networks: dict[str, Network] = {}

    # Start with all networks that HA knows about
@@ -140,13 +153,12 @@ async def async_get_config_entry_diagnostics(

    # Find all routes currently active that might be thread related, so we can match them to
    # border routers as we process the zeroconf data.
    routes, reverse_routes = await hass.async_add_executor_job(
        _get_possible_thread_routes
    #
    # Also find all neighbours
    routes, reverse_routes, neighbours = await hass.async_add_executor_job(
        _get_routes_and_neighbors
    )

    # Find all neighbours
    neighbours = await hass.async_add_executor_job(_get_neighbours)

    aiozc = await zeroconf.async_get_async_instance(hass)
    for data in async_read_zeroconf_cache(aiozc):
        if not data.extended_pan_id:
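Two things change in the diagnostics module: a single `NDB` instance is now opened once and shared by both helpers, and the `pyroute2` import itself moves inside the executor job because importing it can block the event loop. A self-contained sketch of that deferred-import-in-executor pattern, with `json` standing in for the slow import:

import asyncio


def _blocking_work():
    # Stand-in for "from pyroute2 import NDB": both the import and the
    # blocking work happen off the event loop, inside the executor job.
    import json as slow_module

    return slow_module.dumps({"routes": [], "neighbours": []})


async def main() -> None:
    loop = asyncio.get_running_loop()
    # Equivalent of hass.async_add_executor_job(_blocking_work)
    payload = await loop.run_in_executor(None, _blocking_work)
    print(payload)


asyncio.run(main())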
@@ -18,6 +18,7 @@ KNOWN_BRANDS: dict[str | None, str] = {
    "Apple Inc.": "apple",
    "Google Inc.": "google",
    "HomeAssistant": "homeassistant",
    "Home Assistant": "homeassistant",
}
THREAD_TYPE = "_meshcop._udp.local."
CLASS_IN = 1
@@ -7,6 +7,6 @@
  "documentation": "https://www.home-assistant.io/integrations/thread",
  "integration_type": "service",
  "iot_class": "local_polling",
  "requirements": ["python-otbr-api==1.0.4", "pyroute2==0.7.5"],
  "requirements": ["python-otbr-api==1.0.5", "pyroute2==0.7.5"],
  "zeroconf": ["_meshcop._udp.local."]
}
homeassistant/components/thread/strings.json (new file)
@@ -0,0 +1,9 @@
{
  "config": {
    "step": {
      "confirm": {
        "description": "[%key:common::config_flow::description::confirm_setup%]"
      }
    }
  }
}
@@ -53,17 +53,18 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:

    try:
        await tibber_connection.update_info()
        if not tibber_connection.name:
            raise ConfigEntryNotReady("Could not fetch Tibber data.")

    except asyncio.TimeoutError as err:
        raise ConfigEntryNotReady from err
    except aiohttp.ClientError as err:
        _LOGGER.error("Error connecting to Tibber: %s ", err)
        return False
    except (
        asyncio.TimeoutError,
        aiohttp.ClientError,
        tibber.RetryableHttpException,
    ) as err:
        raise ConfigEntryNotReady("Unable to connect") from err
    except tibber.InvalidLogin as exp:
        _LOGGER.error("Failed to login. %s", exp)
        return False
    except tibber.FatalHttpException:
        return False

    await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
@@ -44,10 +44,14 @@ class TibberConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
                await tibber_connection.update_info()
            except asyncio.TimeoutError:
                errors[CONF_ACCESS_TOKEN] = "timeout"
            except aiohttp.ClientError:
                errors[CONF_ACCESS_TOKEN] = "cannot_connect"
            except tibber.InvalidLogin:
                errors[CONF_ACCESS_TOKEN] = "invalid_access_token"
            except (
                aiohttp.ClientError,
                tibber.RetryableHttpException,
                tibber.FatalHttpException,
            ):
                errors[CONF_ACCESS_TOKEN] = "cannot_connect"

        if errors:
            return self.async_show_form(
@@ -8,5 +8,5 @@
  "iot_class": "cloud_polling",
  "loggers": ["tibber"],
  "quality_scale": "silver",
  "requirements": ["pyTibber==0.26.13"]
  "requirements": ["pyTibber==0.27.0"]
}
@@ -44,6 +44,7 @@ from homeassistant.helpers.entity_registry import async_get as async_get_entity_
from homeassistant.helpers.update_coordinator import (
    CoordinatorEntity,
    DataUpdateCoordinator,
    UpdateFailed,
)
from homeassistant.util import Throttle, dt as dt_util

@@ -559,6 +560,8 @@ class TibberRtDataCoordinator(DataUpdateCoordinator):
class TibberDataCoordinator(DataUpdateCoordinator[None]):
    """Handle Tibber data and insert statistics."""

    config_entry: ConfigEntry

    def __init__(self, hass: HomeAssistant, tibber_connection: tibber.Tibber) -> None:
        """Initialize the data handler."""
        super().__init__(
@@ -571,9 +574,17 @@ class TibberDataCoordinator(DataUpdateCoordinator[None]):

    async def _async_update_data(self) -> None:
        """Update data via API."""
        await self._tibber_connection.fetch_consumption_data_active_homes()
        await self._tibber_connection.fetch_production_data_active_homes()
        await self._insert_statistics()
        try:
            await self._tibber_connection.fetch_consumption_data_active_homes()
            await self._tibber_connection.fetch_production_data_active_homes()
            await self._insert_statistics()
        except tibber.RetryableHttpException as err:
            raise UpdateFailed(f"Error communicating with API ({err.status})") from err
        except tibber.FatalHttpException:
            # Fatal error. Reload config entry to show correct error.
            self.hass.async_create_task(
                self.hass.config_entries.async_reload(self.config_entry.entry_id)
            )

    async def _insert_statistics(self) -> None:
        """Insert Tibber statistics."""
@@ -94,7 +94,7 @@ PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
        ),
        vol.Optional(
            CONF_PROJECT_LABEL_WHITELIST, default=[]
        ): vol.All(cv.ensure_list, [vol.All(cv.string, vol.Lower)]),
        ): vol.All(cv.ensure_list, [vol.All(cv.string)]),
    }
)
]
@@ -458,9 +458,8 @@ class TodoistProjectData:

        # All task Labels (optional parameter).
        task[LABELS] = [
            label.name.lower() for label in self._labels if label.id in data.labels
            label.name for label in self._labels if label.name in data.labels
        ]

        if self._label_whitelist and (
            not any(label in task[LABELS] for label in self._label_whitelist)
        ):
@@ -3,9 +3,12 @@ from __future__ import annotations

from collections.abc import Mapping
import logging
import re
from types import MappingProxyType
from typing import Any, NamedTuple
from urllib.parse import urlsplit

from aiohttp import CookieJar
from tplink_omada_client.exceptions import (
    ConnectionFailed,
    LoginFailed,
@@ -20,7 +23,10 @@ from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME, CONF_VE
from homeassistant.core import HomeAssistant
from homeassistant.data_entry_flow import FlowResult
from homeassistant.helpers import selector
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.aiohttp_client import (
    async_create_clientsession,
    async_get_clientsession,
)

from .const import DOMAIN

@@ -42,11 +48,26 @@ async def create_omada_client(
    hass: HomeAssistant, data: MappingProxyType[str, Any]
) -> OmadaClient:
    """Create a TP-Link Omada client API for the given config entry."""
    host = data[CONF_HOST]

    host: str = data[CONF_HOST]
    verify_ssl = bool(data[CONF_VERIFY_SSL])

    if not host.lower().startswith(("http://", "https://")):
        host = "https://" + host
    host_parts = urlsplit(host)
    if (
        host_parts.hostname
        and re.fullmatch(r"\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}", host_parts.hostname)
        is not None
    ):
        # TP-Link API uses cookies for login session, so an unsafe cookie jar is required for IP addresses
        websession = async_create_clientsession(hass, cookie_jar=CookieJar(unsafe=True))
    else:
        websession = async_get_clientsession(hass, verify_ssl=verify_ssl)

    username = data[CONF_USERNAME]
    password = data[CONF_PASSWORD]
    websession = async_get_clientsession(hass, verify_ssl=verify_ssl)

    return OmadaClient(host, username, password, websession=websession)
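Background for the `CookieJar(unsafe=True)` branch: aiohttp's default cookie jar refuses cookies set by bare-IP hosts, and the Omada controller's login session is cookie based, so a dedicated unsafe-jar session is created when the configured host is an IP address. A sketch of the host check using the stdlib `ipaddress` module instead of the diff's IPv4 regex (purely for illustration; it also accepts IPv6, which a `\d{1,3}` pattern does not):

from ipaddress import ip_address
from urllib.parse import urlsplit


def needs_unsafe_cookie_jar(host: str) -> bool:
    """Return True when the host is a bare IP and needs CookieJar(unsafe=True)."""
    if not host.lower().startswith(("http://", "https://")):
        host = "https://" + host
    hostname = urlsplit(host).hostname
    try:
        ip_address(hostname)
    except (TypeError, ValueError):
        return False  # a DNS name: the default cookie jar is fine
    return True  # bare IP: cookies would otherwise be rejected


assert needs_unsafe_cookie_jar("192.168.0.10")
assert not needs_unsafe_cookie_jar("https://omada.example.com")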
@@ -1,7 +1,7 @@
"""Support for the Tuya lights."""
from __future__ import annotations

from dataclasses import dataclass
from dataclasses import dataclass, field
import json
from typing import Any, cast

@@ -59,7 +59,9 @@ class TuyaLightEntityDescription(LightEntityDescription):
    color_data: DPCode | tuple[DPCode, ...] | None = None
    color_mode: DPCode | None = None
    color_temp: DPCode | tuple[DPCode, ...] | None = None
    default_color_type: ColorTypeData = DEFAULT_COLOR_TYPE_DATA
    default_color_type: ColorTypeData = field(
        default_factory=lambda: DEFAULT_COLOR_TYPE_DATA
    )


LIGHTS: dict[str, tuple[TuyaLightEntityDescription, ...]] = {
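Why the Tuya field changes: `DEFAULT_COLOR_TYPE_DATA` is itself a mutable (unhashable) dataclass instance, and Python 3.11 rejects such values as plain dataclass defaults at class-creation time, so the field switches to `default_factory`. A minimal reproduction with a trimmed stand-in class:

from dataclasses import dataclass, field


@dataclass
class ColorTypeData:  # trimmed stand-in for the Tuya color-type dataclass
    min_value: int = 0


DEFAULT_COLOR_TYPE_DATA = ColorTypeData()


@dataclass
class Description:
    # default_color_type: ColorTypeData = DEFAULT_COLOR_TYPE_DATA
    #   -> ValueError on Python 3.11+: mutable default, use default_factory
    default_color_type: ColorTypeData = field(
        default_factory=lambda: DEFAULT_COLOR_TYPE_DATA
    )


print(Description().default_color_type)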
@@ -4,6 +4,7 @@ from __future__ import annotations
from collections.abc import Callable
from contextlib import suppress
import datetime as dt
from functools import lru_cache
import json
from typing import Any, cast

@@ -424,6 +425,12 @@ def handle_ping(
    connection.send_message(pong_message(msg["id"]))


@lru_cache
def _cached_template(template_str: str, hass: HomeAssistant) -> template.Template:
    """Return a cached template."""
    return template.Template(template_str, hass)


@decorators.websocket_command(
    {
        vol.Required("type"): "render_template",
@@ -440,7 +447,7 @@ async def handle_render_template(
) -> None:
    """Handle render_template command."""
    template_str = msg["template"]
    template_obj = template.Template(template_str, hass)
    template_obj = _cached_template(template_str, hass)
    variables = msg.get("variables")
    timeout = msg.get("timeout")
    info = None
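`_cached_template` keys the `lru_cache` on the `(template_str, hass)` argument pair, so repeated `render_template` calls with the same source string reuse one `Template` object instead of re-parsing it, while templates never leak across `hass` instances in tests. A toy model of the idea, with Jinja2 standing in for the Home Assistant template engine:

from functools import lru_cache

import jinja2


@lru_cache
def cached_template(template_str: str) -> jinja2.Template:
    print("compiling")  # happens once per distinct template string
    return jinja2.Environment().from_string(template_str)


t1 = cached_template("{{ temperature }}")
t2 = cached_template("{{ temperature }}")
assert t1 is t2  # second call is a cache hit; "compiling" printed once
print(t1.render(temperature=21))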
@@ -1,5 +1,6 @@
"""Support for Zigbee Home Automation devices."""
import asyncio
import copy
import logging
import os

@@ -90,6 +91,15 @@ async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> b
    Will automatically load components to support devices found on the network.
    """

    # Strip whitespace around `socket://` URIs, this is no longer accepted by zigpy
    # This will be removed in 2023.7.0
    path = config_entry.data[CONF_DEVICE][CONF_DEVICE_PATH]
    data = copy.deepcopy(dict(config_entry.data))

    if path.startswith("socket://") and path != path.strip():
        data[CONF_DEVICE][CONF_DEVICE_PATH] = path.strip()
        hass.config_entries.async_update_entry(config_entry, data=data)

    zha_data = hass.data.setdefault(DATA_ZHA, {})
    config = zha_data.get(DATA_ZHA_CONFIG, {})
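The ZHA hunk migrates stored config-entry data in place: entry data is immutable, so it is deep-copied, the `socket://` path is stripped, and the result is written back with `async_update_entry`; only entries that actually carry stray whitespace are rewritten. A small runnable sketch of just the normalisation predicate:

def normalized_device_path(path: str) -> str | None:
    """Return the cleaned path, or None when no update is needed."""
    if path.startswith("socket://") and path != path.strip():
        return path.strip()
    return None


assert normalized_device_path("socket://1.2.3.4:6638 ") == "socket://1.2.3.4:6638"
assert normalized_device_path("socket://1.2.3.4:6638") is None  # already clean
assert normalized_device_path("/dev/ttyUSB0") is None  # serial paths untouched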
@@ -8,7 +8,7 @@ from .backports.enum import StrEnum
APPLICATION_NAME: Final = "HomeAssistant"
MAJOR_VERSION: Final = 2023
MINOR_VERSION: Final = 3
PATCH_VERSION: Final = "0b5"
PATCH_VERSION: Final = "2"
__short_version__: Final = f"{MAJOR_VERSION}.{MINOR_VERSION}"
__version__: Final = f"{__short_version__}.{PATCH_VERSION}"
REQUIRED_PYTHON_VER: Final[tuple[int, int, int]] = (3, 10, 0)
@@ -2202,6 +2202,12 @@
    "integration_type": "virtual",
    "supported_by": "gree"
  },
  "heltun": {
    "name": "HELTUN",
    "iot_standards": [
      "zwave"
    ]
  },
  "here_travel_time": {
    "name": "HERE Travel Time",
    "integration_type": "hub",
@@ -21,16 +21,16 @@ cryptography==39.0.1
dbus-fast==1.84.1
fnvhash==0.1.0
hass-nabucasa==0.61.0
hassil==1.0.5
hassil==1.0.6
home-assistant-bluetooth==1.9.3
home-assistant-frontend==20230227.0
home-assistant-intents==2023.2.22
home-assistant-frontend==20230306.0
home-assistant-intents==2023.2.28
httpx==0.23.3
ifaddr==0.1.7
janus==1.0.0
jinja2==3.1.2
lru-dict==1.1.8
orjson==3.8.6
orjson==3.8.7
paho-mqtt==1.6.1
pillow==9.4.0
pip>=21.0,<23.1
@@ -40,9 +40,9 @@ pyserial==3.5
python-slugify==4.0.1
pyudev==0.23.2
pyyaml==6.0
requests==2.28.1
requests==2.28.2
scapy==2.5.0
sqlalchemy==2.0.4
sqlalchemy==2.0.5.post1
typing-extensions>=4.5.0,<5.0
voluptuous-serialize==2.6.0
voluptuous==0.13.1
@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"

[project]
name = "homeassistant"
version = "2023.3.0b5"
version = "2023.3.2"
license = {text = "Apache-2.0"}
description = "Open-source home automation platform running on Python 3."
readme = "README.rst"
@@ -44,11 +44,11 @@ dependencies = [
    "cryptography==39.0.1",
    # pyOpenSSL 23.0.0 is required to work with cryptography 39+
    "pyOpenSSL==23.0.0",
    "orjson==3.8.6",
    "orjson==3.8.7",
    "pip>=21.0,<23.1",
    "python-slugify==4.0.1",
    "pyyaml==6.0",
    "requests==2.28.1",
    "requests==2.28.2",
    "typing-extensions>=4.5.0,<5.0",
    "voluptuous==0.13.1",
    "voluptuous-serialize==2.6.0",
@@ -18,11 +18,11 @@ lru-dict==1.1.8
PyJWT==2.5.0
cryptography==39.0.1
pyOpenSSL==23.0.0
orjson==3.8.6
orjson==3.8.7
pip>=21.0,<23.1
python-slugify==4.0.1
pyyaml==6.0
requests==2.28.1
requests==2.28.2
typing-extensions>=4.5.0,<5.0
voluptuous==0.13.1
voluptuous-serialize==2.6.0
@@ -156,7 +156,7 @@ aioecowitt==2023.01.0
aioemonitor==1.0.5

# homeassistant.components.esphome
aioesphomeapi==13.4.1
aioesphomeapi==13.4.2

# homeassistant.components.flo
aioflo==2021.11.0
@@ -181,7 +181,7 @@ aiohomekit==2.6.1
aiohttp_cors==0.7.0

# homeassistant.components.hue
aiohue==4.6.1
aiohue==4.6.2

# homeassistant.components.imap
aioimaplib==1.0.1
@@ -276,7 +276,7 @@ aioskybell==22.7.0
aioslimproto==2.1.1

# homeassistant.components.honeywell
aiosomecomfort==0.0.10
aiosomecomfort==0.0.11

# homeassistant.components.steamist
aiosteamist==0.3.2
@@ -383,7 +383,7 @@ asyncsleepiq==1.2.3
atenpdu==0.3.2

# homeassistant.components.aurora
auroranoaa==0.0.2
auroranoaa==0.0.3

# homeassistant.components.aurora_abb_powerone
aurorapy==0.2.7
@@ -874,7 +874,7 @@ hass-nabucasa==0.61.0
hass_splunk==0.1.1

# homeassistant.components.conversation
hassil==1.0.5
hassil==1.0.6

# homeassistant.components.tasmota
hatasmota==0.6.4
@@ -907,10 +907,10 @@ hole==0.8.0
holidays==0.18.0

# homeassistant.components.frontend
home-assistant-frontend==20230227.0
home-assistant-frontend==20230306.0

# homeassistant.components.conversation
home-assistant-intents==2023.2.22
home-assistant-intents==2023.2.28

# homeassistant.components.home_connect
homeconnect==0.7.2
@@ -979,7 +979,7 @@ influxdb==5.3.1
inkbird-ble==0.5.6

# homeassistant.components.insteon
insteon-frontend-home-assistant==0.3.2
insteon-frontend-home-assistant==0.3.3

# homeassistant.components.intellifire
intellifire4py==2.2.2
@@ -1225,7 +1225,7 @@ nsapi==3.0.5
nsw-fuel-api-client==1.1.0

# homeassistant.components.nuheat
nuheat==1.0.0
nuheat==1.0.1

# homeassistant.components.numato
numato-gpio==0.10.0
@@ -1430,7 +1430,7 @@ py-canary==0.5.3
py-cpuinfo==8.0.0

# homeassistant.components.dormakaba_dkey
py-dormakaba-dkey==1.0.2
py-dormakaba-dkey==1.0.4

# homeassistant.components.melissa
py-melissa-climate==2.1.4
@@ -1473,7 +1473,7 @@ pyRFXtrx==0.30.1
pySwitchmate==0.5.1

# homeassistant.components.tibber
pyTibber==0.26.13
pyTibber==0.27.0

# homeassistant.components.dlink
pyW215==0.7.0
@@ -1621,7 +1621,7 @@ pyevilgenius==2.0.0
pyezviz==0.2.0.9

# homeassistant.components.fibaro
pyfibaro==0.6.8
pyfibaro==0.6.9

# homeassistant.components.fido
pyfido==2.1.2
@@ -1687,7 +1687,7 @@ pyialarm==2.2.0
pyicloud==1.0.0

# homeassistant.components.insteon
pyinsteon==1.3.2
pyinsteon==1.3.4

# homeassistant.components.intesishome
pyintesishome==1.8.0
@@ -2097,7 +2097,7 @@ python-nest==4.2.0

# homeassistant.components.otbr
# homeassistant.components.thread
python-otbr-api==1.0.4
python-otbr-api==1.0.5

# homeassistant.components.picnic
python-picnic-api==1.1.0
@@ -2367,7 +2367,7 @@ smart-meter-texas==0.4.7
smhi-pkg==1.0.16

# homeassistant.components.snapcast
snapcast==2.3.0
snapcast==2.3.2

# homeassistant.components.sonos
soco==0.29.1
@@ -2398,7 +2398,7 @@ spotipy==2.22.1

# homeassistant.components.recorder
# homeassistant.components.sql
sqlalchemy==2.0.4
sqlalchemy==2.0.5.post1

# homeassistant.components.srp_energy
srpenergy==1.3.6
@@ -143,7 +143,7 @@ aioecowitt==2023.01.0
aioemonitor==1.0.5

# homeassistant.components.esphome
aioesphomeapi==13.4.1
aioesphomeapi==13.4.2

# homeassistant.components.flo
aioflo==2021.11.0
@@ -165,7 +165,7 @@ aiohomekit==2.6.1
aiohttp_cors==0.7.0

# homeassistant.components.hue
aiohue==4.6.1
aiohue==4.6.2

# homeassistant.components.imap
aioimaplib==1.0.1
@@ -254,7 +254,7 @@ aioskybell==22.7.0
aioslimproto==2.1.1

# homeassistant.components.honeywell
aiosomecomfort==0.0.10
aiosomecomfort==0.0.11

# homeassistant.components.steamist
aiosteamist==0.3.2
@@ -334,7 +334,7 @@ async-upnp-client==0.33.1
asyncsleepiq==1.2.3

# homeassistant.components.aurora
auroranoaa==0.0.2
auroranoaa==0.0.3

# homeassistant.components.aurora_abb_powerone
aurorapy==0.2.7
@@ -666,7 +666,7 @@ habitipy==0.2.0
hass-nabucasa==0.61.0

# homeassistant.components.conversation
hassil==1.0.5
hassil==1.0.6

# homeassistant.components.tasmota
hatasmota==0.6.4
@@ -690,10 +690,10 @@ hole==0.8.0
holidays==0.18.0

# homeassistant.components.frontend
home-assistant-frontend==20230227.0
home-assistant-frontend==20230306.0

# homeassistant.components.conversation
home-assistant-intents==2023.2.22
home-assistant-intents==2023.2.28

# homeassistant.components.home_connect
homeconnect==0.7.2
@@ -738,7 +738,7 @@ influxdb==5.3.1
inkbird-ble==0.5.6

# homeassistant.components.insteon
insteon-frontend-home-assistant==0.3.2
insteon-frontend-home-assistant==0.3.3

# homeassistant.components.intellifire
intellifire4py==2.2.2
@@ -903,7 +903,7 @@ notify-events==1.0.4
nsw-fuel-api-client==1.1.0

# homeassistant.components.nuheat
nuheat==1.0.0
nuheat==1.0.1

# homeassistant.components.numato
numato-gpio==0.10.0
@@ -1045,7 +1045,7 @@ py-canary==0.5.3
py-cpuinfo==8.0.0

# homeassistant.components.dormakaba_dkey
py-dormakaba-dkey==1.0.2
py-dormakaba-dkey==1.0.4

# homeassistant.components.melissa
py-melissa-climate==2.1.4
@@ -1076,7 +1076,7 @@ pyMetno==0.9.0
pyRFXtrx==0.30.1

# homeassistant.components.tibber
pyTibber==0.26.13
pyTibber==0.27.0

# homeassistant.components.dlink
pyW215==0.7.0
@@ -1161,7 +1161,7 @@ pyevilgenius==2.0.0
pyezviz==0.2.0.9

# homeassistant.components.fibaro
pyfibaro==0.6.8
pyfibaro==0.6.9

# homeassistant.components.fido
pyfido==2.1.2
@@ -1212,7 +1212,7 @@ pyialarm==2.2.0
pyicloud==1.0.0

# homeassistant.components.insteon
pyinsteon==1.3.2
pyinsteon==1.3.4

# homeassistant.components.ipma
pyipma==3.0.6
@@ -1490,7 +1490,7 @@ python-nest==4.2.0

# homeassistant.components.otbr
# homeassistant.components.thread
python-otbr-api==1.0.4
python-otbr-api==1.0.5

# homeassistant.components.picnic
python-picnic-api==1.1.0
@@ -1698,7 +1698,7 @@ spotipy==2.22.1

# homeassistant.components.recorder
# homeassistant.components.sql
sqlalchemy==2.0.4
sqlalchemy==2.0.5.post1

# homeassistant.components.srp_energy
srpenergy==1.3.6
@@ -349,6 +349,52 @@ async def test_api_template(hass: HomeAssistant, mock_api_client: TestClient) ->

    assert body == "10"

    hass.states.async_set("sensor.temperature", 20)
    resp = await mock_api_client.post(
        const.URL_API_TEMPLATE,
        json={"template": "{{ states.sensor.temperature.state }}"},
    )

    body = await resp.text()

    assert body == "20"

    hass.states.async_remove("sensor.temperature")
    resp = await mock_api_client.post(
        const.URL_API_TEMPLATE,
        json={"template": "{{ states.sensor.temperature.state }}"},
    )

    body = await resp.text()

    assert body == ""


async def test_api_template_cached(
    hass: HomeAssistant, mock_api_client: TestClient
) -> None:
    """Test the template API uses the cache."""
    hass.states.async_set("sensor.temperature", 30)

    resp = await mock_api_client.post(
        const.URL_API_TEMPLATE,
        json={"template": "{{ states.sensor.temperature.state }}"},
    )

    body = await resp.text()

    assert body == "30"

    hass.states.async_set("sensor.temperature", 40)
    resp = await mock_api_client.post(
        const.URL_API_TEMPLATE,
        json={"template": "{{ states.sensor.temperature.state }}"},
    )

    body = await resp.text()

    assert body == "40"


async def test_api_template_error(
    hass: HomeAssistant, mock_api_client: TestClient
@@ -4,7 +4,6 @@ from math import sin
import random
from unittest.mock import patch

from homeassistant.components.sensor import ATTR_STATE_CLASS, SensorStateClass
from homeassistant.const import UnitOfPower, UnitOfTime
from homeassistant.core import HomeAssistant
from homeassistant.setup import async_setup_component
@@ -79,7 +78,6 @@ async def setup_tests(hass, config, times, values, expected_state):
    assert state is not None

    assert round(float(state.state), config["sensor"]["round"]) == expected_state
    assert state.attributes.get(ATTR_STATE_CLASS) is SensorStateClass.MEASUREMENT

    return state
@@ -1101,7 +1101,7 @@ async def test_temperature_control(hass: HomeAssistant) -> None:
        BASIC_CONFIG,
    )
    assert trt.sync_attributes() == {
        "queryOnlyTemperatureSetting": True,
        "queryOnlyTemperatureControl": True,
        "temperatureUnitForUX": "C",
        "temperatureRange": {"maxThresholdCelsius": 100, "minThresholdCelsius": -100},
    }
@@ -2941,7 +2941,7 @@ async def test_temperature_control_sensor_data(
    )

    assert trt.sync_attributes() == {
        "queryOnlyTemperatureSetting": True,
        "queryOnlyTemperatureControl": True,
        "temperatureUnitForUX": unit_out,
        "temperatureRange": {"maxThresholdCelsius": 100, "minThresholdCelsius": -100},
    }
@@ -1,5 +1,6 @@
"""Fixtures for Hass.io."""
import os
import re
from unittest.mock import Mock, patch

import pytest
@@ -12,6 +13,16 @@ from homeassistant.setup import async_setup_component
from . import SUPERVISOR_TOKEN


@pytest.fixture(autouse=True)
def disable_security_filter():
    """Disable the security filter to ensure the integration is secure."""
    with patch(
        "homeassistant.components.http.security_filter.FILTERS",
        re.compile("not-matching-anything"),
    ):
        yield


@pytest.fixture
def hassio_env():
    """Fixture to inject hassio env."""
@@ -37,6 +48,13 @@ def hassio_stubs(hassio_env, hass, hass_client, aioclient_mock):
    ), patch(
        "homeassistant.components.hassio.HassIO.get_info",
        side_effect=HassioAPIError(),
    ), patch(
        "homeassistant.components.hassio.HassIO.get_ingress_panels",
        return_value={"panels": []},
    ), patch(
        "homeassistant.components.hassio.repairs.SupervisorRepairs.setup"
    ), patch(
        "homeassistant.components.hassio.HassIO.refresh_updates"
    ):
        hass.state = CoreState.starting
        hass.loop.run_until_complete(async_setup_component(hass, "hassio", {}))
@@ -67,13 +85,7 @@ async def hassio_client_supervisor(hass, aiohttp_client, hassio_stubs):


@pytest.fixture
def hassio_handler(hass, aioclient_mock):
async def hassio_handler(hass, aioclient_mock):
    """Create mock hassio handler."""

    async def get_client_session():
        return async_get_clientsession(hass)

    websession = hass.loop.run_until_complete(get_client_session())

    with patch.dict(os.environ, {"SUPERVISOR_TOKEN": SUPERVISOR_TOKEN}):
        yield HassIO(hass.loop, websession, "127.0.0.1")
        yield HassIO(hass.loop, async_get_clientsession(hass), "127.0.0.1")
@@ -1,13 +1,21 @@
"""The tests for the hassio component."""
from __future__ import annotations

from typing import Any, Literal

import aiohttp
from aiohttp import hdrs, web
import pytest

from homeassistant.components.hassio.handler import HassioAPIError
from homeassistant.components.hassio.handler import HassIO, HassioAPIError
from homeassistant.helpers.aiohttp_client import async_get_clientsession

from tests.test_util.aiohttp import AiohttpClientMocker


async def test_api_ping(hassio_handler, aioclient_mock: AiohttpClientMocker) -> None:
async def test_api_ping(
    hassio_handler: HassIO, aioclient_mock: AiohttpClientMocker
) -> None:
    """Test setup with API ping."""
    aioclient_mock.get("http://127.0.0.1/supervisor/ping", json={"result": "ok"})

@@ -16,7 +24,7 @@ async def test_api_ping(hassio_handler, aioclient_mock: AiohttpClientMocker) ->


async def test_api_ping_error(
    hassio_handler, aioclient_mock: AiohttpClientMocker
    hassio_handler: HassIO, aioclient_mock: AiohttpClientMocker
) -> None:
    """Test setup with API ping error."""
    aioclient_mock.get("http://127.0.0.1/supervisor/ping", json={"result": "error"})

@@ -26,7 +34,7 @@


async def test_api_ping_exeption(
    hassio_handler, aioclient_mock: AiohttpClientMocker
    hassio_handler: HassIO, aioclient_mock: AiohttpClientMocker
) -> None:
    """Test setup with API ping exception."""
    aioclient_mock.get("http://127.0.0.1/supervisor/ping", exc=aiohttp.ClientError())

@@ -35,7 +43,9 @@ async def test_api_ping_exeption(
    assert aioclient_mock.call_count == 1


async def test_api_info(hassio_handler, aioclient_mock: AiohttpClientMocker) -> None:
async def test_api_info(
    hassio_handler: HassIO, aioclient_mock: AiohttpClientMocker
) -> None:
    """Test setup with API generic info."""
    aioclient_mock.get(
        "http://127.0.0.1/info",

@@ -53,7 +63,7 @@ async def test_api_info(hassio_handler, aioclient_mock: AiohttpClientMocker) ->


async def test_api_info_error(
    hassio_handler, aioclient_mock: AiohttpClientMocker
    hassio_handler: HassIO, aioclient_mock: AiohttpClientMocker
) -> None:
    """Test setup with API Home Assistant info error."""
    aioclient_mock.get(

@@ -67,7 +77,7 @@ async def test_api_info_error(


async def test_api_host_info(
    hassio_handler, aioclient_mock: AiohttpClientMocker
    hassio_handler: HassIO, aioclient_mock: AiohttpClientMocker
) -> None:
    """Test setup with API Host info."""
    aioclient_mock.get(

@@ -90,7 +100,7 @@ async def test_api_host_info(


async def test_api_supervisor_info(
    hassio_handler, aioclient_mock: AiohttpClientMocker
    hassio_handler: HassIO, aioclient_mock: AiohttpClientMocker
) -> None:
    """Test setup with API Supervisor info."""
    aioclient_mock.get(

@@ -108,7 +118,9 @@ async def test_api_supervisor_info(
    assert data["channel"] == "stable"


async def test_api_os_info(hassio_handler, aioclient_mock: AiohttpClientMocker) -> None:
async def test_api_os_info(
    hassio_handler: HassIO, aioclient_mock: AiohttpClientMocker
) -> None:
    """Test setup with API OS info."""
    aioclient_mock.get(
        "http://127.0.0.1/os/info",

@@ -125,7 +137,7 @@ async def test_api_os_info(hassio_handler, aioclient_mock: AiohttpClientMocker)


async def test_api_host_info_error(
    hassio_handler, aioclient_mock: AiohttpClientMocker
    hassio_handler: HassIO, aioclient_mock: AiohttpClientMocker
) -> None:
    """Test setup with API Home Assistant info error."""
    aioclient_mock.get(

@@ -139,7 +151,7 @@ async def test_api_host_info_error(


async def test_api_core_info(
    hassio_handler, aioclient_mock: AiohttpClientMocker
    hassio_handler: HassIO, aioclient_mock: AiohttpClientMocker
) -> None:
    """Test setup with API Home Assistant Core info."""
    aioclient_mock.get(

@@ -153,7 +165,7 @@ async def test_api_core_info(


async def test_api_core_info_error(
    hassio_handler, aioclient_mock: AiohttpClientMocker
    hassio_handler: HassIO, aioclient_mock: AiohttpClientMocker
) -> None:
    """Test setup with API Home Assistant Core info error."""
    aioclient_mock.get(

@@ -167,7 +179,7 @@ async def test_api_core_info_error(


async def test_api_homeassistant_stop(
    hassio_handler, aioclient_mock: AiohttpClientMocker
    hassio_handler: HassIO, aioclient_mock: AiohttpClientMocker
) -> None:
    """Test setup with API Home Assistant stop."""
    aioclient_mock.post("http://127.0.0.1/homeassistant/stop", json={"result": "ok"})

@@ -177,7 +189,7 @@ async def test_api_homeassistant_stop(


async def test_api_homeassistant_restart(
    hassio_handler, aioclient_mock: AiohttpClientMocker
    hassio_handler: HassIO, aioclient_mock: AiohttpClientMocker
) -> None:
    """Test setup with API Home Assistant restart."""
    aioclient_mock.post("http://127.0.0.1/homeassistant/restart", json={"result": "ok"})

@@ -187,7 +199,7 @@ async def test_api_homeassistant_restart(


async def test_api_addon_info(
    hassio_handler, aioclient_mock: AiohttpClientMocker
    hassio_handler: HassIO, aioclient_mock: AiohttpClientMocker
) -> None:
    """Test setup with API Add-on info."""
    aioclient_mock.get(

@@ -201,7 +213,7 @@ async def test_api_addon_info(


async def test_api_addon_stats(
    hassio_handler, aioclient_mock: AiohttpClientMocker
    hassio_handler: HassIO, aioclient_mock: AiohttpClientMocker
) -> None:
    """Test setup with API Add-on stats."""
    aioclient_mock.get(

@@ -215,7 +227,7 @@ async def test_api_addon_stats(


async def test_api_discovery_message(
    hassio_handler, aioclient_mock: AiohttpClientMocker
    hassio_handler: HassIO, aioclient_mock: AiohttpClientMocker
) -> None:
    """Test setup with API discovery message."""
    aioclient_mock.get(

@@ -229,7 +241,7 @@ async def test_api_discovery_message(


async def test_api_retrieve_discovery(
    hassio_handler, aioclient_mock: AiohttpClientMocker
    hassio_handler: HassIO, aioclient_mock: AiohttpClientMocker
) -> None:
    """Test setup with API discovery message."""
    aioclient_mock.get(

@@ -243,7 +255,7 @@ async def test_api_retrieve_discovery(


async def test_api_ingress_panels(
    hassio_handler, aioclient_mock: AiohttpClientMocker
    hassio_handler: HassIO, aioclient_mock: AiohttpClientMocker
) -> None:
    """Test setup with API Ingress panels."""
    aioclient_mock.get(

@@ -267,3 +279,56 @@ async def test_api_ingress_panels(
    assert aioclient_mock.call_count == 1
    assert data["panels"]
    assert "slug" in data["panels"]

@pytest.mark.parametrize(
    ("api_call", "method", "payload"),
    [
        ["retrieve_discovery_messages", "GET", None],
        ["refresh_updates", "POST", None],
        ["update_diagnostics", "POST", True],
    ],
)
async def test_api_headers(
    hass,
    aiohttp_raw_server,
    socket_enabled,
    api_call: str,
    method: Literal["GET", "POST"],
    payload: Any,
) -> None:
    """Test headers are forwarded correctly."""
    received_request = None

    async def mock_handler(request):
        """Return OK."""
        nonlocal received_request
        received_request = request
        return web.json_response({"result": "ok", "data": None})

    server = await aiohttp_raw_server(mock_handler)
    hassio_handler = HassIO(
        hass.loop,
        async_get_clientsession(hass),
        f"{server.host}:{server.port}",
    )

    api_func = getattr(hassio_handler, api_call)
    if payload:
        await api_func(payload)
    else:
        await api_func()
    assert received_request is not None

    assert received_request.method == method
    assert received_request.headers.get("X-Hass-Source") == "core.handler"

    if method == "GET":
        assert hdrs.CONTENT_TYPE not in received_request.headers
        return

    assert hdrs.CONTENT_TYPE in received_request.headers
    if payload:
        assert received_request.headers[hdrs.CONTENT_TYPE] == "application/json"
    else:
        assert received_request.headers[hdrs.CONTENT_TYPE] == "application/octet-stream"

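The new test above pins down the header contract for Supervisor calls: every request carries X-Hass-Source: core.handler, and only POSTs get a Content-Type (JSON when a payload is sent, octet-stream otherwise). A minimal sketch of a client honoring that contract — the function name and URL handling here are illustrative assumptions, not code from the diff:

import json
import aiohttp


async def supervisor_request(session: aiohttp.ClientSession, method: str, url: str, payload=None):
    """Sketch of the header contract asserted above (illustrative only)."""
    headers = {"X-Hass-Source": "core.handler"}  # every call identifies its origin
    data = None
    if method == "POST":
        # POSTs always carry a Content-Type; JSON only when there is a payload.
        if payload is not None:
            headers["Content-Type"] = "application/json"
            data = json.dumps(payload).encode()
        else:
            headers["Content-Type"] = "application/octet-stream"
    async with session.request(method, url, data=data, headers=headers) as resp:
        return await resp.json()
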
@@ -1,63 +1,45 @@
"""The tests for the hassio component."""
import asyncio
from http import HTTPStatus
from unittest.mock import patch

from aiohttp import StreamReader
import pytest

from homeassistant.components.hassio.http import _need_auth
from homeassistant.core import HomeAssistant

from tests.common import MockUser
from tests.test_util.aiohttp import AiohttpClientMocker


async def test_forward_request(
    hassio_client, aioclient_mock: AiohttpClientMocker
) -> None:
    """Test fetching normal path."""
    aioclient_mock.post("http://127.0.0.1/beer", text="response")
@pytest.fixture
def mock_not_onboarded():
    """Mock that we're not onboarded."""
    with patch(
        "homeassistant.components.hassio.http.async_is_onboarded", return_value=False
    ):
        yield

    resp = await hassio_client.post("/api/hassio/beer")

    # Check we got right response
    assert resp.status == HTTPStatus.OK
    body = await resp.text()
    assert body == "response"

    # Check we forwarded command
    assert len(aioclient_mock.mock_calls) == 1
@pytest.fixture
def hassio_user_client(hassio_client, hass_admin_user):
    """Return a Hass.io HTTP client tied to a non-admin user."""
    hass_admin_user.groups = []
    return hassio_client


@pytest.mark.parametrize(
    "build_type", ["supervisor/info", "homeassistant/update", "host/info"]
)
async def test_auth_required_forward_request(hassio_noauth_client, build_type) -> None:
    """Test auth required for normal request."""
    resp = await hassio_noauth_client.post(f"/api/hassio/{build_type}")

    # Check we got right response
    assert resp.status == HTTPStatus.UNAUTHORIZED


@pytest.mark.parametrize(
    "build_type",
    "path",
    [
        "app/index.html",
        "app/hassio-app.html",
        "app/index.html",
        "app/hassio-app.html",
        "app/some-chunk.js",
        "app/app.js",
        "app/entrypoint.js",
        "addons/bl_b392/logo",
        "addons/bl_b392/icon",
    ],
)
async def test_forward_request_no_auth_for_panel(
    hassio_client, build_type, aioclient_mock: AiohttpClientMocker
async def test_forward_request_onboarded_user_get(
    hassio_user_client, aioclient_mock: AiohttpClientMocker, path: str
) -> None:
    """Test no auth needed for ."""
    aioclient_mock.get(f"http://127.0.0.1/{build_type}", text="response")
    """Test fetching normal path."""
    aioclient_mock.get(f"http://127.0.0.1/{path}", text="response")

    resp = await hassio_client.get(f"/api/hassio/{build_type}")
    resp = await hassio_user_client.get(f"/api/hassio/{path}")

    # Check we got right response
    assert resp.status == HTTPStatus.OK

@@ -66,15 +48,68 @@ async def test_forward_request_no_auth_for_panel(

    # Check we forwarded command
    assert len(aioclient_mock.mock_calls) == 1
    # We only expect a single header.
    assert aioclient_mock.mock_calls[0][3] == {"X-Hass-Source": "core.http"}


async def test_forward_request_no_auth_for_logo(
    hassio_client, aioclient_mock: AiohttpClientMocker
@pytest.mark.parametrize("method", ["POST", "PUT", "DELETE", "RANDOM"])
async def test_forward_request_onboarded_user_unallowed_methods(
    hassio_user_client, aioclient_mock: AiohttpClientMocker, method: str
) -> None:
    """Test no auth needed for logo."""
    aioclient_mock.get("http://127.0.0.1/addons/bl_b392/logo", text="response")
    """Test fetching normal path."""
    resp = await hassio_user_client.post("/api/hassio/app/entrypoint.js")

    resp = await hassio_client.get("/api/hassio/addons/bl_b392/logo")
    # Check we got right response
    assert resp.status == HTTPStatus.METHOD_NOT_ALLOWED

    # Check we did not forward command
    assert len(aioclient_mock.mock_calls) == 0


@pytest.mark.parametrize(
    ("bad_path", "expected_status"),
    [
        # Caught by bullshit filter
        ("app/%252E./entrypoint.js", HTTPStatus.BAD_REQUEST),
        # The .. is processed, making it an unauthenticated path
        ("app/../entrypoint.js", HTTPStatus.UNAUTHORIZED),
        ("app/%2E%2E/entrypoint.js", HTTPStatus.UNAUTHORIZED),
        # Unauthenticated path
        ("supervisor/info", HTTPStatus.UNAUTHORIZED),
        ("supervisor/logs", HTTPStatus.UNAUTHORIZED),
        ("addons/bl_b392/logs", HTTPStatus.UNAUTHORIZED),
    ],
)
async def test_forward_request_onboarded_user_unallowed_paths(
    hassio_user_client,
    aioclient_mock: AiohttpClientMocker,
    bad_path: str,
    expected_status: int,
) -> None:
    """Test fetching normal path."""
    resp = await hassio_user_client.get(f"/api/hassio/{bad_path}")

    # Check we got right response
    assert resp.status == expected_status
    # Check we didn't forward command
    assert len(aioclient_mock.mock_calls) == 0

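A quick aside on the bad_path cases above (an illustration, not part of the diff): one pass of percent-decoding leaves a doubly-encoded dot still encoded, which is what the request filter rejects, while a singly-encoded dot-dot decodes straight into a traversal segment:

from urllib.parse import unquote

# One decoding pass: %25 -> %, so the doubly-encoded dot stays encoded.
assert unquote("app/%252E./entrypoint.js") == "app/%2E./entrypoint.js"
# A singly-encoded dot-dot decodes to a real traversal segment.
assert unquote("app/%2E%2E/entrypoint.js") == "app/../entrypoint.js"
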
@pytest.mark.parametrize(
    "path",
    [
        "app/entrypoint.js",
        "addons/bl_b392/logo",
        "addons/bl_b392/icon",
    ],
)
async def test_forward_request_onboarded_noauth_get(
    hassio_noauth_client, aioclient_mock: AiohttpClientMocker, path: str
) -> None:
    """Test fetching normal path."""
    aioclient_mock.get(f"http://127.0.0.1/{path}", text="response")

    resp = await hassio_noauth_client.get(f"/api/hassio/{path}")

    # Check we got right response
    assert resp.status == HTTPStatus.OK

@@ -83,15 +118,73 @@ async def test_forward_request_no_auth_for_logo(

    # Check we forwarded command
    assert len(aioclient_mock.mock_calls) == 1
    # We only expect a single header.
    assert aioclient_mock.mock_calls[0][3] == {"X-Hass-Source": "core.http"}


async def test_forward_request_no_auth_for_icon(
    hassio_client, aioclient_mock: AiohttpClientMocker
@pytest.mark.parametrize("method", ["POST", "PUT", "DELETE", "RANDOM"])
async def test_forward_request_onboarded_noauth_unallowed_methods(
    hassio_noauth_client, aioclient_mock: AiohttpClientMocker, method: str
) -> None:
    """Test no auth needed for icon."""
    aioclient_mock.get("http://127.0.0.1/addons/bl_b392/icon", text="response")
    """Test fetching normal path."""
    resp = await hassio_noauth_client.post("/api/hassio/app/entrypoint.js")

    resp = await hassio_client.get("/api/hassio/addons/bl_b392/icon")
    # Check we got right response
    assert resp.status == HTTPStatus.METHOD_NOT_ALLOWED

    # Check we did not forward command
    assert len(aioclient_mock.mock_calls) == 0


@pytest.mark.parametrize(
    ("bad_path", "expected_status"),
    [
        # Caught by bullshit filter
        ("app/%252E./entrypoint.js", HTTPStatus.BAD_REQUEST),
        # The .. is processed, making it an unauthenticated path
        ("app/../entrypoint.js", HTTPStatus.UNAUTHORIZED),
        ("app/%2E%2E/entrypoint.js", HTTPStatus.UNAUTHORIZED),
        # Unauthenticated path
        ("supervisor/info", HTTPStatus.UNAUTHORIZED),
        ("supervisor/logs", HTTPStatus.UNAUTHORIZED),
        ("addons/bl_b392/logs", HTTPStatus.UNAUTHORIZED),
    ],
)
async def test_forward_request_onboarded_noauth_unallowed_paths(
    hassio_noauth_client,
    aioclient_mock: AiohttpClientMocker,
    bad_path: str,
    expected_status: int,
) -> None:
    """Test fetching normal path."""
    resp = await hassio_noauth_client.get(f"/api/hassio/{bad_path}")

    # Check we got right response
    assert resp.status == expected_status
    # Check we didn't forward command
    assert len(aioclient_mock.mock_calls) == 0

@pytest.mark.parametrize(
    ("path", "authenticated"),
    [
        ("app/entrypoint.js", False),
        ("addons/bl_b392/logo", False),
        ("addons/bl_b392/icon", False),
        ("backups/1234abcd/info", True),
    ],
)
async def test_forward_request_not_onboarded_get(
    hassio_noauth_client,
    aioclient_mock: AiohttpClientMocker,
    path: str,
    authenticated: bool,
    mock_not_onboarded,
) -> None:
    """Test fetching normal path."""
    aioclient_mock.get(f"http://127.0.0.1/{path}", text="response")

    resp = await hassio_noauth_client.get(f"/api/hassio/{path}")

    # Check we got right response
    assert resp.status == HTTPStatus.OK

@@ -100,61 +193,224 @@ async def test_forward_request_no_auth_for_icon(

    # Check we forwarded command
    assert len(aioclient_mock.mock_calls) == 1
    expected_headers = {
        "X-Hass-Source": "core.http",
    }
    if authenticated:
        expected_headers["Authorization"] = "Bearer 123456"

    assert aioclient_mock.mock_calls[0][3] == expected_headers


async def test_forward_log_request(
    hassio_client, aioclient_mock: AiohttpClientMocker
@pytest.mark.parametrize(
    "path",
    [
        "backups/new/upload",
        "backups/1234abcd/restore/full",
        "backups/1234abcd/restore/partial",
    ],
)
async def test_forward_request_not_onboarded_post(
    hassio_noauth_client,
    aioclient_mock: AiohttpClientMocker,
    path: str,
    mock_not_onboarded,
) -> None:
    """Test fetching normal log path doesn't remove ANSI color escape codes."""
    aioclient_mock.get("http://127.0.0.1/beer/logs", text="\033[32mresponse\033[0m")
    """Test fetching normal path."""
    aioclient_mock.get(f"http://127.0.0.1/{path}", text="response")

    resp = await hassio_client.get("/api/hassio/beer/logs")
    resp = await hassio_noauth_client.get(f"/api/hassio/{path}")

    # Check we got right response
    assert resp.status == HTTPStatus.OK
    body = await resp.text()
    assert body == "\033[32mresponse\033[0m"
    assert body == "response"

    # Check we forwarded command
    assert len(aioclient_mock.mock_calls) == 1
    # We only expect a single header.
    assert aioclient_mock.mock_calls[0][3] == {
        "X-Hass-Source": "core.http",
        "Authorization": "Bearer 123456",
    }

@pytest.mark.parametrize("method", ["POST", "PUT", "DELETE", "RANDOM"])
|
||||
async def test_forward_request_not_onboarded_unallowed_methods(
|
||||
hassio_noauth_client, aioclient_mock: AiohttpClientMocker, method: str
|
||||
) -> None:
|
||||
"""Test fetching normal path."""
|
||||
resp = await hassio_noauth_client.post("/api/hassio/app/entrypoint.js")
|
||||
|
||||
# Check we got right response
|
||||
assert resp.status == HTTPStatus.METHOD_NOT_ALLOWED
|
||||
|
||||
# Check we did not forward command
|
||||
assert len(aioclient_mock.mock_calls) == 0
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
("bad_path", "expected_status"),
|
||||
[
|
||||
# Caught by bullshit filter
|
||||
("app/%252E./entrypoint.js", HTTPStatus.BAD_REQUEST),
|
||||
# The .. is processed, making it an unauthenticated path
|
||||
("app/../entrypoint.js", HTTPStatus.UNAUTHORIZED),
|
||||
("app/%2E%2E/entrypoint.js", HTTPStatus.UNAUTHORIZED),
|
||||
# Unauthenticated path
|
||||
("supervisor/info", HTTPStatus.UNAUTHORIZED),
|
||||
("supervisor/logs", HTTPStatus.UNAUTHORIZED),
|
||||
("addons/bl_b392/logs", HTTPStatus.UNAUTHORIZED),
|
||||
],
|
||||
)
|
||||
async def test_forward_request_not_onboarded_unallowed_paths(
|
||||
hassio_noauth_client,
|
||||
aioclient_mock: AiohttpClientMocker,
|
||||
bad_path: str,
|
||||
expected_status: int,
|
||||
mock_not_onboarded,
|
||||
) -> None:
|
||||
"""Test fetching normal path."""
|
||||
resp = await hassio_noauth_client.get(f"/api/hassio/{bad_path}")
|
||||
|
||||
# Check we got right response
|
||||
assert resp.status == expected_status
|
||||
# Check we didn't forward command
|
||||
assert len(aioclient_mock.mock_calls) == 0
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
("path", "authenticated"),
|
||||
[
|
||||
("app/entrypoint.js", False),
|
||||
("addons/bl_b392/logo", False),
|
||||
("addons/bl_b392/icon", False),
|
||||
("backups/1234abcd/info", True),
|
||||
("supervisor/logs", True),
|
||||
("addons/bl_b392/logs", True),
|
||||
],
|
||||
)
|
||||
async def test_forward_request_admin_get(
|
||||
hassio_client,
|
||||
aioclient_mock: AiohttpClientMocker,
|
||||
path: str,
|
||||
authenticated: bool,
|
||||
) -> None:
|
||||
"""Test fetching normal path."""
|
||||
aioclient_mock.get(f"http://127.0.0.1/{path}", text="response")
|
||||
|
||||
resp = await hassio_client.get(f"/api/hassio/{path}")
|
||||
|
||||
# Check we got right response
|
||||
assert resp.status == HTTPStatus.OK
|
||||
body = await resp.text()
|
||||
assert body == "response"
|
||||
|
||||
# Check we forwarded command
|
||||
assert len(aioclient_mock.mock_calls) == 1
|
||||
expected_headers = {
|
||||
"X-Hass-Source": "core.http",
|
||||
}
|
||||
if authenticated:
|
||||
expected_headers["Authorization"] = "Bearer 123456"
|
||||
|
||||
assert aioclient_mock.mock_calls[0][3] == expected_headers
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"path",
|
||||
[
|
||||
"backups/new/upload",
|
||||
"backups/1234abcd/restore/full",
|
||||
"backups/1234abcd/restore/partial",
|
||||
],
|
||||
)
|
||||
async def test_forward_request_admin_post(
|
||||
hassio_client,
|
||||
aioclient_mock: AiohttpClientMocker,
|
||||
path: str,
|
||||
) -> None:
|
||||
"""Test fetching normal path."""
|
||||
aioclient_mock.get(f"http://127.0.0.1/{path}", text="response")
|
||||
|
||||
resp = await hassio_client.get(f"/api/hassio/{path}")
|
||||
|
||||
# Check we got right response
|
||||
assert resp.status == HTTPStatus.OK
|
||||
body = await resp.text()
|
||||
assert body == "response"
|
||||
|
||||
# Check we forwarded command
|
||||
assert len(aioclient_mock.mock_calls) == 1
|
||||
# We only expect a single header.
|
||||
assert aioclient_mock.mock_calls[0][3] == {
|
||||
"X-Hass-Source": "core.http",
|
||||
"Authorization": "Bearer 123456",
|
||||
}
|
||||
|
||||
|
||||
@pytest.mark.parametrize("method", ["POST", "PUT", "DELETE", "RANDOM"])
|
||||
async def test_forward_request_admin_unallowed_methods(
|
||||
hassio_client, aioclient_mock: AiohttpClientMocker, method: str
|
||||
) -> None:
|
||||
"""Test fetching normal path."""
|
||||
resp = await hassio_client.post("/api/hassio/app/entrypoint.js")
|
||||
|
||||
# Check we got right response
|
||||
assert resp.status == HTTPStatus.METHOD_NOT_ALLOWED
|
||||
|
||||
# Check we did not forward command
|
||||
assert len(aioclient_mock.mock_calls) == 0
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
("bad_path", "expected_status"),
|
||||
[
|
||||
# Caught by bullshit filter
|
||||
("app/%252E./entrypoint.js", HTTPStatus.BAD_REQUEST),
|
||||
# The .. is processed, making it an unauthenticated path
|
||||
("app/../entrypoint.js", HTTPStatus.UNAUTHORIZED),
|
||||
("app/%2E%2E/entrypoint.js", HTTPStatus.UNAUTHORIZED),
|
||||
# Unauthenticated path
|
||||
("supervisor/info", HTTPStatus.UNAUTHORIZED),
|
||||
],
|
||||
)
|
||||
async def test_forward_request_admin_unallowed_paths(
|
||||
hassio_client,
|
||||
aioclient_mock: AiohttpClientMocker,
|
||||
bad_path: str,
|
||||
expected_status: int,
|
||||
) -> None:
|
||||
"""Test fetching normal path."""
|
||||
resp = await hassio_client.get(f"/api/hassio/{bad_path}")
|
||||
|
||||
# Check we got right response
|
||||
assert resp.status == expected_status
|
||||
# Check we didn't forward command
|
||||
assert len(aioclient_mock.mock_calls) == 0
|
||||
|
||||
|
||||
async def test_bad_gateway_when_cannot_find_supervisor(
    hassio_client, aioclient_mock: AiohttpClientMocker
) -> None:
    """Test we get a bad gateway error if we can't find supervisor."""
    aioclient_mock.get("http://127.0.0.1/addons/test/info", exc=asyncio.TimeoutError)
    aioclient_mock.get("http://127.0.0.1/app/entrypoint.js", exc=asyncio.TimeoutError)

    resp = await hassio_client.get("/api/hassio/addons/test/info")
    resp = await hassio_client.get("/api/hassio/app/entrypoint.js")
    assert resp.status == HTTPStatus.BAD_GATEWAY


async def test_forwarding_user_info(
    hassio_client, hass_admin_user: MockUser, aioclient_mock: AiohttpClientMocker
) -> None:
    """Test that we forward user info correctly."""
    aioclient_mock.get("http://127.0.0.1/hello")

    resp = await hassio_client.get("/api/hassio/hello")

    # Check we got right response
    assert resp.status == HTTPStatus.OK

    assert len(aioclient_mock.mock_calls) == 1

    req_headers = aioclient_mock.mock_calls[0][-1]
    assert req_headers["X-Hass-User-ID"] == hass_admin_user.id
    assert req_headers["X-Hass-Is-Admin"] == "1"

async def test_backup_upload_headers(
    hassio_client, aioclient_mock: AiohttpClientMocker, caplog: pytest.LogCaptureFixture
    hassio_client,
    aioclient_mock: AiohttpClientMocker,
    caplog: pytest.LogCaptureFixture,
    mock_not_onboarded,
) -> None:
    """Test that we forward the full header for backup upload."""
    content_type = "multipart/form-data; boundary='--webkit'"
    aioclient_mock.get("http://127.0.0.1/backups/new/upload")
    aioclient_mock.post("http://127.0.0.1/backups/new/upload")

    resp = await hassio_client.get(
    resp = await hassio_client.post(
        "/api/hassio/backups/new/upload", headers={"Content-Type": content_type}
    )

@@ -168,19 +424,19 @@ async def test_backup_upload_headers(


async def test_backup_download_headers(
    hassio_client, aioclient_mock: AiohttpClientMocker
    hassio_client, aioclient_mock: AiohttpClientMocker, mock_not_onboarded
) -> None:
    """Test that we forward the full header for backup download."""
    content_disposition = "attachment; filename=test.tar"
    aioclient_mock.get(
        "http://127.0.0.1/backups/slug/download",
        "http://127.0.0.1/backups/1234abcd/download",
        headers={
            "Content-Length": "50000000",
            "Content-Disposition": content_disposition,
        },
    )

    resp = await hassio_client.get("/api/hassio/backups/slug/download")
    resp = await hassio_client.get("/api/hassio/backups/1234abcd/download")

    # Check we got right response
    assert resp.status == HTTPStatus.OK

@@ -190,21 +446,10 @@ async def test_backup_download_headers(
    assert resp.headers["Content-Disposition"] == content_disposition

def test_need_auth(hass: HomeAssistant) -> None:
    """Test if the requested path needs authentication."""
    assert not _need_auth(hass, "addons/test/logo")
    assert _need_auth(hass, "backups/new/upload")
    assert _need_auth(hass, "supervisor/logs")

    hass.data["onboarding"] = False
    assert not _need_auth(hass, "backups/new/upload")
    assert not _need_auth(hass, "supervisor/logs")

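The removed test above documents the auth rule that the new HTTP tests now cover end to end. A rough sketch of that rule, distilled from the assertions — this is an assumption for illustration, not the shipped _need_auth implementation:

def need_auth(onboarded: bool, path: str) -> bool:
    """Illustrative predicate matching the assertions above."""
    if path.startswith("app/") or path.endswith(("/logo", "/icon")):
        return False  # panel assets are always public
    if not onboarded and (path.startswith("backups/") or path.endswith("/logs")):
        return False  # backup upload/restore and logs stay open during onboarding
    return True
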
async def test_stream(hassio_client, aioclient_mock: AiohttpClientMocker) -> None:
    """Verify that the request is a stream."""
    aioclient_mock.get("http://127.0.0.1/test")
    await hassio_client.get("/api/hassio/test", data="test")
    aioclient_mock.get("http://127.0.0.1/app/entrypoint.js")
    await hassio_client.get("/api/hassio/app/entrypoint.js", data="test")
    assert isinstance(aioclient_mock.mock_calls[-1][2], StreamReader)

@@ -21,7 +21,7 @@ from tests.test_util.aiohttp import AiohttpClientMocker
    ],
)
async def test_ingress_request_get(
    hassio_client, build_type, aioclient_mock: AiohttpClientMocker
    hassio_noauth_client, build_type, aioclient_mock: AiohttpClientMocker
) -> None:
    """Test no auth needed for ."""
    aioclient_mock.get(

@@ -29,7 +29,7 @@ async def test_ingress_request_get(
        text="test",
    )

    resp = await hassio_client.get(
    resp = await hassio_noauth_client.get(
        f"/api/hassio_ingress/{build_type[0]}/{build_type[1]}",
        headers={"X-Test-Header": "beer"},
    )

@@ -41,7 +41,8 @@ async def test_ingress_request_get(

    # Check we forwarded command
    assert len(aioclient_mock.mock_calls) == 1
    assert aioclient_mock.mock_calls[-1][3][X_AUTH_TOKEN] == "123456"
    assert X_AUTH_TOKEN not in aioclient_mock.mock_calls[-1][3]
    assert aioclient_mock.mock_calls[-1][3]["X-Hass-Source"] == "core.ingress"
    assert (
        aioclient_mock.mock_calls[-1][3]["X-Ingress-Path"]
        == f"/api/hassio_ingress/{build_type[0]}"

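The assertion flip above is the security-relevant change in this hunk, and it repeats for every HTTP verb below: the Supervisor auth token (X_AUTH_TOKEN) is no longer forwarded to ingress targets; requests are instead tagged with their origin. A condensed view of the outbound headers the tests now expect — the token placeholder here is the test fixture's value, not a real secret:

# Before: the add-on received the Supervisor token with every ingress call.
# After: the token stays in core; only origin and ingress path are forwarded.
expected_ingress_headers = {
    "X-Hass-Source": "core.ingress",
    "X-Ingress-Path": "/api/hassio_ingress/<token>",  # <token> is per add-on
}
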
@@ -63,7 +64,7 @@ async def test_ingress_request_get(
    ],
)
async def test_ingress_request_post(
    hassio_client, build_type, aioclient_mock: AiohttpClientMocker
    hassio_noauth_client, build_type, aioclient_mock: AiohttpClientMocker
) -> None:
    """Test no auth needed for ."""
    aioclient_mock.post(

@@ -71,7 +72,7 @@ async def test_ingress_request_post(
        text="test",
    )

    resp = await hassio_client.post(
    resp = await hassio_noauth_client.post(
        f"/api/hassio_ingress/{build_type[0]}/{build_type[1]}",
        headers={"X-Test-Header": "beer"},
    )

@@ -83,7 +84,8 @@ async def test_ingress_request_post(

    # Check we forwarded command
    assert len(aioclient_mock.mock_calls) == 1
    assert aioclient_mock.mock_calls[-1][3][X_AUTH_TOKEN] == "123456"
    assert X_AUTH_TOKEN not in aioclient_mock.mock_calls[-1][3]
    assert aioclient_mock.mock_calls[-1][3]["X-Hass-Source"] == "core.ingress"
    assert (
        aioclient_mock.mock_calls[-1][3]["X-Ingress-Path"]
        == f"/api/hassio_ingress/{build_type[0]}"

@@ -105,7 +107,7 @@ async def test_ingress_request_post(
    ],
)
async def test_ingress_request_put(
    hassio_client, build_type, aioclient_mock: AiohttpClientMocker
    hassio_noauth_client, build_type, aioclient_mock: AiohttpClientMocker
) -> None:
    """Test no auth needed for ."""
    aioclient_mock.put(

@@ -113,7 +115,7 @@ async def test_ingress_request_put(
        text="test",
    )

    resp = await hassio_client.put(
    resp = await hassio_noauth_client.put(
        f"/api/hassio_ingress/{build_type[0]}/{build_type[1]}",
        headers={"X-Test-Header": "beer"},
    )

@@ -125,7 +127,8 @@ async def test_ingress_request_put(

    # Check we forwarded command
    assert len(aioclient_mock.mock_calls) == 1
    assert aioclient_mock.mock_calls[-1][3][X_AUTH_TOKEN] == "123456"
    assert X_AUTH_TOKEN not in aioclient_mock.mock_calls[-1][3]
    assert aioclient_mock.mock_calls[-1][3]["X-Hass-Source"] == "core.ingress"
    assert (
        aioclient_mock.mock_calls[-1][3]["X-Ingress-Path"]
        == f"/api/hassio_ingress/{build_type[0]}"

@@ -147,7 +150,7 @@ async def test_ingress_request_put(
    ],
)
async def test_ingress_request_delete(
    hassio_client, build_type, aioclient_mock: AiohttpClientMocker
    hassio_noauth_client, build_type, aioclient_mock: AiohttpClientMocker
) -> None:
    """Test no auth needed for ."""
    aioclient_mock.delete(

@@ -155,7 +158,7 @@ async def test_ingress_request_delete(
        text="test",
    )

    resp = await hassio_client.delete(
    resp = await hassio_noauth_client.delete(
        f"/api/hassio_ingress/{build_type[0]}/{build_type[1]}",
        headers={"X-Test-Header": "beer"},
    )

@@ -167,7 +170,8 @@ async def test_ingress_request_delete(

    # Check we forwarded command
    assert len(aioclient_mock.mock_calls) == 1
    assert aioclient_mock.mock_calls[-1][3][X_AUTH_TOKEN] == "123456"
    assert X_AUTH_TOKEN not in aioclient_mock.mock_calls[-1][3]
    assert aioclient_mock.mock_calls[-1][3]["X-Hass-Source"] == "core.ingress"
    assert (
        aioclient_mock.mock_calls[-1][3]["X-Ingress-Path"]
        == f"/api/hassio_ingress/{build_type[0]}"

@@ -189,7 +193,7 @@ async def test_ingress_request_delete(
    ],
)
async def test_ingress_request_patch(
    hassio_client, build_type, aioclient_mock: AiohttpClientMocker
    hassio_noauth_client, build_type, aioclient_mock: AiohttpClientMocker
) -> None:
    """Test no auth needed for ."""
    aioclient_mock.patch(

@@ -197,7 +201,7 @@ async def test_ingress_request_patch(
        text="test",
    )

    resp = await hassio_client.patch(
    resp = await hassio_noauth_client.patch(
        f"/api/hassio_ingress/{build_type[0]}/{build_type[1]}",
        headers={"X-Test-Header": "beer"},
    )

@@ -209,7 +213,8 @@ async def test_ingress_request_patch(

    # Check we forwarded command
    assert len(aioclient_mock.mock_calls) == 1
    assert aioclient_mock.mock_calls[-1][3][X_AUTH_TOKEN] == "123456"
    assert X_AUTH_TOKEN not in aioclient_mock.mock_calls[-1][3]
    assert aioclient_mock.mock_calls[-1][3]["X-Hass-Source"] == "core.ingress"
    assert (
        aioclient_mock.mock_calls[-1][3]["X-Ingress-Path"]
        == f"/api/hassio_ingress/{build_type[0]}"

@@ -231,7 +236,7 @@ async def test_ingress_request_patch(
    ],
)
async def test_ingress_request_options(
    hassio_client, build_type, aioclient_mock: AiohttpClientMocker
    hassio_noauth_client, build_type, aioclient_mock: AiohttpClientMocker
) -> None:
    """Test no auth needed for ."""
    aioclient_mock.options(

@@ -239,7 +244,7 @@ async def test_ingress_request_options(
        text="test",
    )

    resp = await hassio_client.options(
    resp = await hassio_noauth_client.options(
        f"/api/hassio_ingress/{build_type[0]}/{build_type[1]}",
        headers={"X-Test-Header": "beer"},
    )

@@ -251,7 +256,8 @@ async def test_ingress_request_options(

    # Check we forwarded command
    assert len(aioclient_mock.mock_calls) == 1
    assert aioclient_mock.mock_calls[-1][3][X_AUTH_TOKEN] == "123456"
    assert X_AUTH_TOKEN not in aioclient_mock.mock_calls[-1][3]
    assert aioclient_mock.mock_calls[-1][3]["X-Hass-Source"] == "core.ingress"
    assert (
        aioclient_mock.mock_calls[-1][3]["X-Ingress-Path"]
        == f"/api/hassio_ingress/{build_type[0]}"

@@ -273,20 +279,21 @@ async def test_ingress_request_options(
    ],
)
async def test_ingress_websocket(
    hassio_client, build_type, aioclient_mock: AiohttpClientMocker
    hassio_noauth_client, build_type, aioclient_mock: AiohttpClientMocker
) -> None:
    """Test no auth needed for ."""
    aioclient_mock.get(f"http://127.0.0.1/ingress/{build_type[0]}/{build_type[1]}")

    # Ignore error because we can't set up a full IO infrastructure
    await hassio_client.ws_connect(
    await hassio_noauth_client.ws_connect(
        f"/api/hassio_ingress/{build_type[0]}/{build_type[1]}",
        headers={"X-Test-Header": "beer"},
    )

    # Check we forwarded command
    assert len(aioclient_mock.mock_calls) == 1
    assert aioclient_mock.mock_calls[-1][3][X_AUTH_TOKEN] == "123456"
    assert X_AUTH_TOKEN not in aioclient_mock.mock_calls[-1][3]
    assert aioclient_mock.mock_calls[-1][3]["X-Hass-Source"] == "core.ingress"
    assert (
        aioclient_mock.mock_calls[-1][3]["X-Ingress-Path"]
        == f"/api/hassio_ingress/{build_type[0]}"

@@ -298,7 +305,9 @@ async def test_ingress_websocket(


async def test_ingress_missing_peername(
    hassio_client, aioclient_mock: AiohttpClientMocker, caplog: pytest.LogCaptureFixture
    hassio_noauth_client,
    aioclient_mock: AiohttpClientMocker,
    caplog: pytest.LogCaptureFixture,
) -> None:
    """Test handling of missing peername."""
    aioclient_mock.get(

@@ -314,7 +323,7 @@ async def test_ingress_missing_peername(
        return_value=MagicMock(),
    ) as transport_mock:
        transport_mock.get_extra_info = get_extra_info
        resp = await hassio_client.get(
        resp = await hassio_noauth_client.get(
            "/api/hassio_ingress/lorem/ipsum",
            headers={"X-Test-Header": "beer"},
        )

@@ -323,3 +332,19 @@ async def test_ingress_missing_peername(

    # Check we got right response
    assert resp.status == HTTPStatus.BAD_REQUEST


async def test_forwarding_paths_as_requested(
    hassio_noauth_client, aioclient_mock
) -> None:
    """Test incoming URLs with double encoding go out as double encoded."""
    # This double encoded string should be forwarded double-encoded too.
    aioclient_mock.get(
        "http://127.0.0.1/ingress/mock-token/hello/%252e./world",
        text="test",
    )

    resp = await hassio_noauth_client.get(
        "/api/hassio_ingress/mock-token/hello/%252e./world",
    )
    assert await resp.text() == "test"

@@ -153,6 +153,11 @@ async def test_websocket_supervisor_api(
    msg = await websocket_client.receive_json()
    assert msg["result"]["version_latest"] == "1.0.0"

    assert aioclient_mock.mock_calls[-1][3] == {
        "X-Hass-Source": "core.websocket_api",
        "Authorization": "Bearer 123456",
    }


async def test_websocket_supervisor_api_error(
    hassio_env,

@@ -49,7 +49,17 @@ async def test_ok_requests(
        ("/", {"test": "test/../../api"}, True),
        ("/", {"test": "/test/%2E%2E%2f%2E%2E%2fapi"}, True),
        ("/", {"test": "test/%2E%2E%2f%2E%2E%2fapi"}, True),
        ("/", {"test": "test/%252E%252E/api"}, True),
        ("/", {"test": "test/%252E%252E%2fapi"}, True),
        (
            "/",
            {"test": "test/%2525252E%2525252E%2525252f%2525252E%2525252E%2525252fapi"},
            True,
        ),
        ("/test/.%252E/api", {}, False),
        ("/test/%252E%252E/api", {}, False),
        ("/test/%2E%2E%2f%2E%2E%2fapi", {}, False),
        ("/test/%2525252E%2525252E%2525252f%2525252E%2525252E/api", {}, False),
        ("/", {"sql": ";UNION SELECT (a, b"}, True),
        ("/", {"sql": "UNION%20SELECT%20%28a%2C%20b"}, True),
        ("/UNION%20SELECT%20%28a%2C%20b", {}, False),

@@ -87,7 +97,7 @@ async def test_bad_requests(
        None,
        http.request,
        "GET",
        f"http://{mock_api_client.host}:{mock_api_client.port}/{request_path}{man_params}",
        f"http://{mock_api_client.host}:{mock_api_client.port}{request_path}{man_params}",
        request_params,
    )

@@ -84,6 +84,7 @@ async def test_get_triggers(
        }
        for event_type in (
            ButtonEvent.INITIAL_PRESS,
            ButtonEvent.LONG_PRESS,
            ButtonEvent.LONG_RELEASE,
            ButtonEvent.REPEAT,
            ButtonEvent.SHORT_RELEASE,

@@ -77,16 +77,17 @@ def patch_file_upload(return_value=FIXTURE_KEYRING, side_effect=None):
        side_effect=side_effect,
    ), patch(
        "pathlib.Path.mkdir"
    ) as mkdir_mock:
        file_path_mock = Mock()
        file_upload_mock.return_value.__enter__.return_value = file_path_mock
    ) as mkdir_mock, patch(
        "shutil.move"
    ) as shutil_move_mock:
        file_upload_mock.return_value.__enter__.return_value = Mock()
        yield return_value
        if side_effect:
            mkdir_mock.assert_not_called()
            file_path_mock.rename.assert_not_called()
            shutil_move_mock.assert_not_called()
        else:
            mkdir_mock.assert_called_once()
            file_path_mock.rename.assert_called_once()
            shutil_move_mock.assert_called_once()


def _gateway_descriptor(

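The fixture swap above (Path.rename patched out in favor of shutil.move) mirrors a common portability fix; whether that was the motivation for this particular change is an assumption, but the mechanics are worth noting: a plain rename raises OSError (EXDEV) when source and destination sit on different filesystems, while shutil.move falls back to copy-and-delete. A hypothetical helper showing the pattern:

import shutil
from pathlib import Path


def store_keyring(tmp_file: Path, dest: Path) -> None:
    """Hypothetical helper: move an uploaded file to its final location."""
    dest.parent.mkdir(parents=True, exist_ok=True)
    # Path.rename(dest) raises OSError (EXDEV) if tmp_file and dest live on
    # different mounts; shutil.move copies and removes in that case.
    shutil.move(str(tmp_file), dest)
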
@@ -89,6 +89,12 @@ def motion_blinds_connect_fixture(mock_get_source_ip):
    ), patch(
        "homeassistant.components.motion_blinds.config_flow.MotionDiscovery.discover",
        return_value=TEST_DISCOVERY_1,
    ), patch(
        "homeassistant.components.motion_blinds.config_flow.MotionGateway.GetDeviceList",
        return_value=True,
    ), patch(
        "homeassistant.components.motion_blinds.config_flow.MotionGateway.available",
        True,
    ), patch(
        "homeassistant.components.motion_blinds.gateway.AsyncMotionMulticast.Start_listen",
        return_value=True,

@@ -355,6 +361,46 @@ async def test_dhcp_flow(hass: HomeAssistant) -> None:
    }


async def test_dhcp_flow_abort(hass: HomeAssistant) -> None:
    """Test that DHCP discovery aborts if not Motion Blinds."""
    dhcp_data = dhcp.DhcpServiceInfo(
        ip=TEST_HOST,
        hostname="MOTION_abcdef",
        macaddress=TEST_MAC,
    )

    with patch(
        "homeassistant.components.motion_blinds.config_flow.MotionGateway.GetDeviceList",
        side_effect=socket.timeout,
    ):
        result = await hass.config_entries.flow.async_init(
            const.DOMAIN, context={"source": config_entries.SOURCE_DHCP}, data=dhcp_data
        )

    assert result["type"] == "abort"
    assert result["reason"] == "not_motionblinds"


async def test_dhcp_flow_abort_invalid_response(hass: HomeAssistant) -> None:
    """Test that DHCP discovery aborts if device responded with invalid data."""
    dhcp_data = dhcp.DhcpServiceInfo(
        ip=TEST_HOST,
        hostname="MOTION_abcdef",
        macaddress=TEST_MAC,
    )

    with patch(
        "homeassistant.components.motion_blinds.config_flow.MotionGateway.available",
        False,
    ):
        result = await hass.config_entries.flow.async_init(
            const.DOMAIN, context={"source": config_entries.SOURCE_DHCP}, data=dhcp_data
        )

    assert result["type"] == "abort"
    assert result["reason"] == "not_motionblinds"


async def test_options_flow(hass: HomeAssistant) -> None:
    """Test specifying non default settings using options flow."""
    config_entry = MockConfigEntry(

@@ -1,8 +1,27 @@
"""Tests for the Open Thread Border Router integration."""
BASE_URL = "http://core-silabs-multiprotocol:8081"
CONFIG_ENTRY_DATA = {"url": "http://core-silabs-multiprotocol:8081"}
DATASET = bytes.fromhex(

DATASET_CH15 = bytes.fromhex(
    "0E080000000000010000000300000F35060004001FFFE00208F642646DA209B1C00708FDF57B5A"
    "0FE2AAF60510DE98B5BA1A528FEE049D4B4B01835375030D4F70656E5468726561642048410102"
    "25A40410F5DD18371BFD29E1A601EF6FFAD94C030C0402A0F7F8"
)

DATASET_CH16 = bytes.fromhex(
    "0E080000000000010000000300001035060004001FFFE00208F642646DA209B1C00708FDF57B5A"
    "0FE2AAF60510DE98B5BA1A528FEE049D4B4B01835375030D4F70656E5468726561642048410102"
    "25A40410F5DD18371BFD29E1A601EF6FFAD94C030C0402A0F7F8"
)

DATASET_INSECURE_NW_KEY = bytes.fromhex(
    "0E080000000000010000000300000F35060004001FFFE0020811111111222222220708FDD24657"
    "0A336069051000112233445566778899AABBCCDDEEFF030E4F70656E54687265616444656D6F01"
    "0212340410445F2B5CA6F2A93A55CE570A70EFEECB0C0402A0F7F8"
)

DATASET_INSECURE_PASSPHRASE = bytes.fromhex(
    "0E080000000000010000000300000F35060004001FFFE0020811111111222222220708FDD24657"
    "0A336069051000112233445566778899AABBCCDDEEFA030E4F70656E54687265616444656D6F01"
    "0212340410445F2B5CA6F2A93A55CE570A70EFEECB0C0402A0F7F8"
)

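The only difference between DATASET_CH15 and DATASET_CH16 is the channel TLV buried in the hex above. A small illustrative parser — assuming the standard Thread MeshCoP TLV layout, where type 0 is the channel TLV carrying a page byte followed by a two-byte channel number — makes the naming visible:

def dataset_channel(tlvs: bytes) -> int:
    """Return the channel encoded in a Thread operational dataset (illustrative)."""
    i = 0
    while i < len(tlvs):
        tlv_type, length = tlvs[i], tlvs[i + 1]
        value = tlvs[i + 2 : i + 2 + length]
        if tlv_type == 0:  # channel TLV: 1-byte page + 2-byte channel
            return int.from_bytes(value[1:], "big")
        i += 2 + length
    raise ValueError("dataset has no channel TLV")


assert dataset_channel(DATASET_CH15) == 15
assert dataset_channel(DATASET_CH16) == 16
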
@@ -5,7 +5,7 @@ import pytest

from homeassistant.components import otbr

from . import CONFIG_ENTRY_DATA, DATASET
from . import CONFIG_ENTRY_DATA, DATASET_CH16

from tests.common import MockConfigEntry

@@ -20,7 +20,11 @@ async def otbr_config_entry_fixture(hass):
        title="Open Thread Border Router",
    )
    config_entry.add_to_hass(hass)
    with patch("python_otbr_api.OTBR.get_active_dataset_tlvs", return_value=DATASET):
    with patch(
        "python_otbr_api.OTBR.get_active_dataset_tlvs", return_value=DATASET_CH16
    ), patch(
        "homeassistant.components.otbr.compute_pskc"
    ):  # Patch to speed up tests
        assert await hass.config_entries.async_setup(config_entry.entry_id)

@@ -11,6 +11,8 @@ from homeassistant.components import hassio, otbr
from homeassistant.core import HomeAssistant
from homeassistant.data_entry_flow import FlowResultType

from . import DATASET_CH15, DATASET_CH16

from tests.common import MockConfigEntry, MockModule, mock_integration
from tests.test_util.aiohttp import AiohttpClientMocker

@@ -94,7 +96,10 @@ async def test_user_flow_router_not_setup(
    # Check we create a dataset and enable the router
    assert aioclient_mock.mock_calls[-2][0] == "POST"
    assert aioclient_mock.mock_calls[-2][1].path == "/node/dataset/active"
    assert aioclient_mock.mock_calls[-2][2] == {"NetworkName": "home-assistant"}
    assert aioclient_mock.mock_calls[-2][2] == {
        "Channel": 15,
        "NetworkName": "home-assistant",
    }

    assert aioclient_mock.mock_calls[-1][0] == "POST"
    assert aioclient_mock.mock_calls[-1][1].path == "/node/state"

@@ -226,7 +231,10 @@ async def test_hassio_discovery_flow_router_not_setup(
    # Check we create a dataset and enable the router
    assert aioclient_mock.mock_calls[-2][0] == "POST"
    assert aioclient_mock.mock_calls[-2][1].path == "/node/dataset/active"
    assert aioclient_mock.mock_calls[-2][2] == {"NetworkName": "home-assistant"}
    assert aioclient_mock.mock_calls[-2][2] == {
        "Channel": 15,
        "NetworkName": "home-assistant",
    }

    assert aioclient_mock.mock_calls[-1][0] == "POST"
    assert aioclient_mock.mock_calls[-1][1].path == "/node/state"

@@ -263,7 +271,7 @@ async def test_hassio_discovery_flow_router_not_setup_has_preferred(

    with patch(
        "homeassistant.components.otbr.config_flow.async_get_preferred_dataset",
        return_value="aa",
        return_value=DATASET_CH15.hex(),
    ), patch(
        "homeassistant.components.otbr.async_setup_entry",
        return_value=True,

@@ -275,7 +283,60 @@ async def test_hassio_discovery_flow_router_not_setup_has_preferred(
    # Check we create a dataset and enable the router
    assert aioclient_mock.mock_calls[-2][0] == "PUT"
    assert aioclient_mock.mock_calls[-2][1].path == "/node/dataset/active"
    assert aioclient_mock.mock_calls[-2][2] == "aa"
    assert aioclient_mock.mock_calls[-2][2] == DATASET_CH15.hex()

    assert aioclient_mock.mock_calls[-1][0] == "POST"
    assert aioclient_mock.mock_calls[-1][1].path == "/node/state"
    assert aioclient_mock.mock_calls[-1][2] == "enable"

    expected_data = {
        "url": f"http://{HASSIO_DATA.config['host']}:{HASSIO_DATA.config['port']}",
    }

    assert result["type"] == FlowResultType.CREATE_ENTRY
    assert result["title"] == "Open Thread Border Router"
    assert result["data"] == expected_data
    assert result["options"] == {}
    assert len(mock_setup_entry.mock_calls) == 1

    config_entry = hass.config_entries.async_entries(otbr.DOMAIN)[0]
    assert config_entry.data == expected_data
    assert config_entry.options == {}
    assert config_entry.title == "Open Thread Border Router"
    assert config_entry.unique_id == otbr.DOMAIN


async def test_hassio_discovery_flow_router_not_setup_has_preferred_2(
    hass: HomeAssistant, aioclient_mock: AiohttpClientMocker
) -> None:
    """Test the hassio discovery flow when the border router has no dataset.

    This tests the behavior when the thread integration has a preferred dataset, but
    the preferred dataset is not using channel 15.
    """
    url = "http://core-silabs-multiprotocol:8081"
    aioclient_mock.get(f"{url}/node/dataset/active", status=HTTPStatus.NO_CONTENT)
    aioclient_mock.post(f"{url}/node/dataset/active", status=HTTPStatus.ACCEPTED)
    aioclient_mock.post(f"{url}/node/state", status=HTTPStatus.OK)

    with patch(
        "homeassistant.components.otbr.config_flow.async_get_preferred_dataset",
        return_value=DATASET_CH16.hex(),
    ), patch(
        "homeassistant.components.otbr.async_setup_entry",
        return_value=True,
    ) as mock_setup_entry:
        result = await hass.config_entries.flow.async_init(
            otbr.DOMAIN, context={"source": "hassio"}, data=HASSIO_DATA
        )

    # Check we create a dataset and enable the router
    assert aioclient_mock.mock_calls[-2][0] == "POST"
    assert aioclient_mock.mock_calls[-2][1].path == "/node/dataset/active"
    assert aioclient_mock.mock_calls[-2][2] == {
        "Channel": 15,
        "NetworkName": "home-assistant",
    }

    assert aioclient_mock.mock_calls[-1][0] == "POST"
    assert aioclient_mock.mock_calls[-1][1].path == "/node/state"

@@ -10,8 +10,15 @@ import python_otbr_api
from homeassistant.components import otbr
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import issue_registry as ir

from . import BASE_URL, CONFIG_ENTRY_DATA, DATASET
from . import (
    BASE_URL,
    CONFIG_ENTRY_DATA,
    DATASET_CH16,
    DATASET_INSECURE_NW_KEY,
    DATASET_INSECURE_PASSPHRASE,
)

from tests.common import MockConfigEntry
from tests.test_util.aiohttp import AiohttpClientMocker

@@ -19,6 +26,7 @@ from tests.test_util.aiohttp import AiohttpClientMocker

async def test_import_dataset(hass: HomeAssistant) -> None:
    """Test the active dataset is imported at setup."""
    issue_registry = ir.async_get(hass)

    config_entry = MockConfigEntry(
        data=CONFIG_ENTRY_DATA,

@@ -28,13 +36,46 @@ async def test_import_dataset(hass: HomeAssistant) -> None:
    )
    config_entry.add_to_hass(hass)
    with patch(
        "python_otbr_api.OTBR.get_active_dataset_tlvs", return_value=DATASET
        "python_otbr_api.OTBR.get_active_dataset_tlvs", return_value=DATASET_CH16
    ), patch(
        "homeassistant.components.thread.dataset_store.DatasetStore.async_add"
    ) as mock_add:
        assert await hass.config_entries.async_setup(config_entry.entry_id)

    mock_add.assert_called_once_with(config_entry.title, DATASET.hex())
    mock_add.assert_called_once_with(config_entry.title, DATASET_CH16.hex())
    assert not issue_registry.async_get_issue(
        domain=otbr.DOMAIN, issue_id=f"insecure_thread_network_{config_entry.entry_id}"
    )


@pytest.mark.parametrize(
    "dataset", [DATASET_INSECURE_NW_KEY, DATASET_INSECURE_PASSPHRASE]
)
async def test_import_insecure_dataset(hass: HomeAssistant, dataset: bytes) -> None:
    """Test the active dataset is imported at setup.

    This imports a dataset with insecure settings.
    """
    issue_registry = ir.async_get(hass)

    config_entry = MockConfigEntry(
        data=CONFIG_ENTRY_DATA,
        domain=otbr.DOMAIN,
        options={},
        title="My OTBR",
    )
    config_entry.add_to_hass(hass)
    with patch(
        "python_otbr_api.OTBR.get_active_dataset_tlvs", return_value=dataset
    ), patch(
        "homeassistant.components.thread.dataset_store.DatasetStore.async_add"
    ) as mock_add:
        assert await hass.config_entries.async_setup(config_entry.entry_id)

    mock_add.assert_called_once_with(config_entry.title, dataset.hex())
    assert issue_registry.async_get_issue(
        domain=otbr.DOMAIN, issue_id=f"insecure_thread_network_{config_entry.entry_id}"
    )


@pytest.mark.parametrize(
Some files were not shown because too many files have changed in this diff.