Compare commits

...

39 Commits

Author SHA1 Message Date
Bram Kragten c10175e25c Bump version to 2025.1.0b4 2024-12-30 20:06:44 +01:00
Bram Kragten 82f0e8cc19 Update frontend to 20241230.0 (#134284) 2024-12-30 20:06:32 +01:00
Andrew Jackson 623e1b08b8 Bump aiomealie to 0.9.5 (#134274) 2024-12-30 20:06:31 +01:00
Norbert Rittel 0c73251004 Remove excessive period at end of action name (#134272) 2024-12-30 20:06:30 +01:00
Arne Keller d9057fc43e ollama: update to 0.4.5 (#134265) 2024-12-30 20:06:29 +01:00
Josef Zweck 077c9e62b4 Bump pylamarzocco to 1.4.5 (#134259)
* Bump pylamarzocco to 1.4.4

* Bump pylamarzocco to 1.4.5

---------

Co-authored-by: Joost Lekkerkerker <joostlek@outlook.com>
2024-12-30 20:06:28 +01:00
tronikos 7456ce1c01 Fix 400 This voice does not support speaking rate or pitch parameters at this time for Google Cloud Journey voices (#134255) 2024-12-30 20:06:28 +01:00
tronikos a627fa70a7 Avoid KeyError for ignored entries in async_step_zeroconf of Android TV Remote (#134250) 2024-12-30 20:06:27 +01:00
Michael c402eaec3f Bump aiopegelonline to 0.1.1 (#134230)
bump aiopegelonline to 0.1.1
2024-12-30 20:06:26 +01:00
tronikos ea51ecd384 Bump opower to 0.8.7 (#134228)
* Bump opower to 0.8.7

* update deps
2024-12-30 20:06:25 +01:00
Artur Pragacz 0873d27d7b Fix Onkyo volume rounding (#134157) 2024-12-30 20:06:23 +01:00
G Johansson 45fd7fb6d5 Fix duplicate sensor disk entities in Systemmonitor (#134139) 2024-12-30 20:06:23 +01:00
Alberto Geniola e22685640c Bump elmax-api (#133845) 2024-12-30 20:06:22 +01:00
Adam Goode 5756166545 Quickly process unavailable metrics in Prometheus (#133219) 2024-12-30 20:06:21 +01:00
Norbert Rittel 2f8a92c725 Make triggers and condition for monetary sensor consistent (#131184) 2024-12-30 20:06:20 +01:00
Paul Daumlechner cf9ccc6fb4 Bump pyvlx to 0.2.26 (#115483) 2024-12-30 20:06:19 +01:00
Paulus Schoutsen b05b9b9a33 Bump version to 2025.1.0b3 2024-12-29 18:37:17 +00:00
Paulus Schoutsen 352d5d14a3 Bump frontend to 20241229.0 (#134225) 2024-12-29 18:37:04 +00:00
Michael Hansen 52e47f55c8 Bump VoIP utils to 0.2.2 (#134219) 2024-12-29 18:37:03 +00:00
Lucas Gasenzer 0470bff9a2 Fix Wake on LAN Port input as Box instead of Slider (#134216) 2024-12-29 18:37:02 +00:00
Michael a38839b420 Make feedreader recoverable (#134202)
raise ConfigEntryNotReady on connection errors during setup
2024-12-29 18:37:01 +00:00
Michael 394b2be40a Make PEGELONLINE recoverable (#134199) 2024-12-29 18:37:00 +00:00
Matthias Alphart 291dd6dc66 Update knx-frontend to 2024.12.26.233449 (#134184) 2024-12-29 18:36:59 +00:00
G Johansson ef87366346 Add missing device classes in scrape (#134141) 2024-12-29 18:36:57 +00:00
Joost Lekkerkerker bd243f68a4 Bump yt-dlp to 2024.12.23 (#134131) 2024-12-29 18:36:57 +00:00
Aaron Bach 951baa3972 Bump pytile to 2024.12.0 (#134103) 2024-12-29 18:36:56 +00:00
Joost Lekkerkerker 1874eec8b3 Bump python-homeassistant-analytics to 0.8.1 (#134101) 2024-12-29 18:36:55 +00:00
Joost Lekkerkerker 3120a90f26 Make elevenlabs recoverable (#134094)
* Make elevenlabs recoverable

* Add tests for entry setup

* Use the same fixtures for setup and config flow

* Update tests/components/elevenlabs/test_setup.py

Co-authored-by: Simon <80467011+sorgfresser@users.noreply.github.com>

---------

Co-authored-by: Simon Sorg <simon.sorg@student.hpi.de>
Co-authored-by: G Johansson <goran.johansson@shiftit.se>
Co-authored-by: Simon <80467011+sorgfresser@users.noreply.github.com>
2024-12-29 18:36:54 +00:00
Joost Lekkerkerker 7032361bf5 Make google tasks recoverable (#134092) 2024-12-29 18:36:53 +00:00
Matthias Alphart bd786b53ee Fix KNX config flow translations and add data descriptions (#134078)
* Fix KNX config flow translations and add data descriptions

* Update strings.json

* typo
2024-12-29 18:36:53 +00:00
Noah Husby f6a9cd38c0 Remove timeout from Russound RIO initialization (#134070) 2024-12-29 18:36:51 +00:00
Aaron Bach 1a909d3a8a Change SimpliSafe websocket reconnection log to DEBUG-level (#134063)
* Change SimpliSafe websocket reconnection log to `DEBUG`-level

* revert
2024-12-29 18:36:51 +00:00
Noah Husby b84ae2abc3 Bump aiorussound to 4.1.1 (#134058)
* Bump aiorussound to 4.1.1

* Trigger Build

* Trigger Build
2024-12-29 18:36:50 +00:00
G Johansson 15b80c59fc Cleanup devices in Nord Pool from reconfiguration (#134043)
* Cleanup devices in Nord Pool from reconfiguration

* Mods

* Mod
2024-12-29 18:36:49 +00:00
G Johansson c11bdcc949 Fix Nord Pool empty response (#134033)
* Fix Nord Pool empty response

* Mods

* reset validate prices
2024-12-29 18:36:48 +00:00
Allen Porter 1957ab1ccf Improve Google Tasks error messages (#134023) 2024-12-29 18:36:47 +00:00
Josef Zweck ef2af44795 Bump pylamarzocco to 1.4.3 (#134008) 2024-12-29 18:36:47 +00:00
J. Nick Koston f0e8360401 Ensure all states have been migrated to use timestamps (#134007) 2024-12-29 18:36:46 +00:00
Cyrill Raccaud 03fb136218 Fix swiss public transport line field none (#133964)
* fix #133116

The line can theoretically be none, when no line info is available (lets say walking sections first?)

* fix line field

* add unit test with missing line field
2024-12-29 18:36:45 +00:00
60 changed files with 1334 additions and 429 deletions
@@ -7,6 +7,6 @@
"integration_type": "service",
"iot_class": "cloud_polling",
"loggers": ["python_homeassistant_analytics"],
"requirements": ["python-homeassistant-analytics==0.8.0"],
"requirements": ["python-homeassistant-analytics==0.8.1"],
"single_config_entry": true
}
@@ -156,7 +156,12 @@ class AndroidTVRemoteConfigFlow(ConfigFlow, domain=DOMAIN):
# and one of them, which could end up being in discovery_info.host, is from a
# different device. If any of the discovery_info.ip_addresses matches the
# existing host, don't update the host.
if existing_config_entry and len(discovery_info.ip_addresses) > 1:
if (
existing_config_entry
# Ignored entries don't have host
and CONF_HOST in existing_config_entry.data
and len(discovery_info.ip_addresses) > 1
):
existing_host = existing_config_entry.data[CONF_HOST]
if existing_host != self.host:
if existing_host in [
@@ -6,11 +6,16 @@ from dataclasses import dataclass
from elevenlabs import AsyncElevenLabs, Model
from elevenlabs.core import ApiError
from httpx import ConnectError
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_API_KEY, Platform
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryError
from homeassistant.exceptions import (
ConfigEntryAuthFailed,
ConfigEntryError,
ConfigEntryNotReady,
)
from homeassistant.helpers.httpx_client import get_async_client
from .const import CONF_MODEL
@@ -48,6 +53,8 @@ async def async_setup_entry(hass: HomeAssistant, entry: ElevenLabsConfigEntry) -
model_id = entry.options[CONF_MODEL]
try:
model = await get_model_by_id(client, model_id)
except ConnectError as err:
raise ConfigEntryNotReady("Failed to connect") from err
except ApiError as err:
raise ConfigEntryAuthFailed("Auth failed") from err
@@ -151,7 +151,9 @@ class ElmaxConfigFlow(ConfigFlow, domain=DOMAIN):
port=self._panel_direct_port,
)
)
ssl_context = build_direct_ssl_context(cadata=self._panel_direct_ssl_cert)
ssl_context = await self.hass.async_add_executor_job(
build_direct_ssl_context, self._panel_direct_ssl_cert
)
# Attempt the connection to make sure the pin works. Also, take the chance to retrieve the panel ID via APIs.
client_api_url = get_direct_api_url(
+1 -1
View File
@@ -6,7 +6,7 @@
"documentation": "https://www.home-assistant.io/integrations/elmax",
"iot_class": "cloud_polling",
"loggers": ["elmax_api"],
"requirements": ["elmax-api==0.0.6.3"],
"requirements": ["elmax-api==0.0.6.4rc0"],
"zeroconf": [
{
"type": "_elmax-ssl._tcp.local."
@@ -14,6 +14,7 @@ import feedparser
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant, callback
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.helpers.storage import Store
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
from homeassistant.util import dt as dt_util
@@ -101,7 +102,11 @@ class FeedReaderCoordinator(
async def async_setup(self) -> None:
"""Set up the feed manager."""
feed = await self._async_fetch_feed()
try:
feed = await self._async_fetch_feed()
except UpdateFailed as err:
raise ConfigEntryNotReady from err
self.logger.debug("Feed data fetched from %s : %s", self.url, feed["feed"])
if feed_author := feed["feed"].get("author"):
self.feed_author = html.unescape(feed_author)
@@ -20,5 +20,5 @@
"documentation": "https://www.home-assistant.io/integrations/frontend",
"integration_type": "system",
"quality_scale": "internal",
"requirements": ["home-assistant-frontend==20241224.0"]
"requirements": ["home-assistant-frontend==20241230.0"]
}
@@ -20,6 +20,10 @@ CONF_GAIN = "gain"
CONF_PROFILES = "profiles"
CONF_TEXT_TYPE = "text_type"
DEFAULT_SPEED = 1.0
DEFAULT_PITCH = 0
DEFAULT_GAIN = 0
# STT constants
CONF_STT_MODEL = "stt_model"
@@ -31,7 +31,10 @@ from .const import (
CONF_SPEED,
CONF_TEXT_TYPE,
CONF_VOICE,
DEFAULT_GAIN,
DEFAULT_LANG,
DEFAULT_PITCH,
DEFAULT_SPEED,
)
DEFAULT_VOICE = ""
@@ -104,15 +107,15 @@ def tts_options_schema(
),
vol.Optional(
CONF_SPEED,
default=defaults.get(CONF_SPEED, 1.0),
default=defaults.get(CONF_SPEED, DEFAULT_SPEED),
): NumberSelector(NumberSelectorConfig(min=0.25, max=4.0, step=0.01)),
vol.Optional(
CONF_PITCH,
default=defaults.get(CONF_PITCH, 0),
default=defaults.get(CONF_PITCH, DEFAULT_PITCH),
): NumberSelector(NumberSelectorConfig(min=-20.0, max=20.0, step=0.1)),
vol.Optional(
CONF_GAIN,
default=defaults.get(CONF_GAIN, 0),
default=defaults.get(CONF_GAIN, DEFAULT_GAIN),
): NumberSelector(NumberSelectorConfig(min=-96.0, max=16.0, step=0.1)),
vol.Optional(
CONF_PROFILES,
+18 -3
View File
@@ -35,7 +35,10 @@ from .const import (
CONF_SPEED,
CONF_TEXT_TYPE,
CONF_VOICE,
DEFAULT_GAIN,
DEFAULT_LANG,
DEFAULT_PITCH,
DEFAULT_SPEED,
DOMAIN,
)
from .helpers import async_tts_voices, tts_options_schema, tts_platform_schema
@@ -191,11 +194,23 @@ class BaseGoogleCloudProvider:
ssml_gender=gender,
name=voice,
),
# Avoid: "This voice does not support speaking rate or pitch parameters at this time."
# by not specifying the fields unless they differ from the defaults
audio_config=texttospeech.AudioConfig(
audio_encoding=encoding,
speaking_rate=options[CONF_SPEED],
pitch=options[CONF_PITCH],
volume_gain_db=options[CONF_GAIN],
speaking_rate=(
options[CONF_SPEED]
if options[CONF_SPEED] != DEFAULT_SPEED
else None
),
pitch=(
options[CONF_PITCH]
if options[CONF_PITCH] != DEFAULT_PITCH
else None
),
volume_gain_db=(
options[CONF_GAIN] if options[CONF_GAIN] != DEFAULT_GAIN else None
),
effects_profile_id=options[CONF_PROFILES],
),
)
+4 -3
View File
@@ -9,6 +9,7 @@ from google.oauth2.credentials import Credentials
from googleapiclient.discovery import Resource, build
from googleapiclient.errors import HttpError
from googleapiclient.http import BatchHttpRequest, HttpRequest
from httplib2 import ServerNotFoundError
from homeassistant.const import CONF_ACCESS_TOKEN
from homeassistant.core import HomeAssistant
@@ -115,7 +116,7 @@ class AsyncConfigEntryAuth:
def response_handler(_, response, exception: HttpError) -> None:
if exception is not None:
raise GoogleTasksApiError(
f"Google Tasks API responded with error ({exception.status_code})"
f"Google Tasks API responded with error ({exception.reason or exception.status_code})"
) from exception
if response:
data = json.loads(response)
@@ -150,9 +151,9 @@ class AsyncConfigEntryAuth:
async def _execute(self, request: HttpRequest | BatchHttpRequest) -> Any:
try:
result = await self._hass.async_add_executor_job(request.execute)
except HttpError as err:
except (HttpError, ServerNotFoundError) as err:
raise GoogleTasksApiError(
f"Google Tasks API responded with error ({err.status_code})"
f"Google Tasks API responded with: {err.reason or err.status_code})"
) from err
if result:
_raise_if_error(result)
+1 -1
View File
@@ -12,7 +12,7 @@
"requirements": [
"xknx==3.4.0",
"xknxproject==3.8.1",
"knx-frontend==2024.11.16.205004"
"knx-frontend==2024.12.26.233449"
],
"single_config_entry": true
}
+56 -28
View File
@@ -3,23 +3,30 @@
"step": {
"connection_type": {
"title": "KNX connection",
"description": "Please enter the connection type we should use for your KNX connection. \n AUTOMATIC - The integration takes care of the connectivity to your KNX Bus by performing a gateway scan. \n TUNNELING - The integration will connect to your KNX bus via tunneling. \n ROUTING - The integration will connect to your KNX bus via routing.",
"description": "'Automatic' performs a gateway scan on start, to find a KNX IP interface. It will connect via a tunnel. (Not available if a gateway scan was not successful.) \n\n 'Tunneling' will connect to a specific KNX IP interface over a tunnel. \n\n 'Routing' will use Multicast to communicate with KNX IP routers.",
"data": {
"connection_type": "KNX Connection Type"
},
"data_description": {
"connection_type": "Please select the connection type you want to use for your KNX connection."
}
},
"tunnel": {
"title": "Tunnel",
"description": "Please select a gateway from the list.",
"data": {
"gateway": "KNX Tunnel Connection"
"gateway": "Please select a gateway from the list."
},
"data_description": {
      "gateway": "Select a KNX tunneling interface you want to use for the connection."
}
},
"tcp_tunnel_endpoint": {
"title": "[%key:component::knx::config::step::knxkeys_tunnel_select::title%]",
"description": "[%key:component::knx::config::step::knxkeys_tunnel_select::description%]",
"title": "Tunnel endpoint",
"data": {
"tunnel_endpoint_ia": "[%key:component::knx::config::step::knxkeys_tunnel_select::data::user_id%]"
"tunnel_endpoint_ia": "Select the tunnel endpoint used for the connection."
},
"data_description": {
"tunnel_endpoint_ia": "'Automatic' selects a free tunnel endpoint for you when connecting. If you're unsure, this is the best option."
}
},
"manual_tunnel": {
@@ -27,23 +34,24 @@
"description": "Please enter the connection information of your tunneling device.",
"data": {
"tunneling_type": "KNX Tunneling Type",
"port": "[%key:common::config_flow::data::port%]",
"host": "[%key:common::config_flow::data::host%]",
"port": "[%key:common::config_flow::data::port%]",
"route_back": "Route back / NAT mode",
"local_ip": "Local IP interface"
},
"data_description": {
"port": "Port of the KNX/IP tunneling device.",
"tunneling_type": "Select the tunneling type of your KNX/IP tunneling device. Older interfaces may only support `UDP`.",
"host": "IP address or hostname of the KNX/IP tunneling device.",
"port": "Port used by the KNX/IP tunneling device.",
"route_back": "Enable if your KNXnet/IP tunneling server is behind NAT. Only applies for UDP connections.",
"local_ip": "Local IP or interface name used for the connection from Home Assistant. Leave blank to use auto-discovery."
}
},
"secure_key_source_menu_tunnel": {
"title": "KNX IP-Secure",
"description": "Select how you want to configure KNX/IP Secure.",
"description": "How do you want to configure KNX/IP Secure?",
"menu_options": {
"secure_knxkeys": "Use a `.knxkeys` file containing IP secure keys",
"secure_knxkeys": "Use a `.knxkeys` file providing IP secure keys",
"secure_tunnel_manual": "Configure IP secure credentials manually"
}
},
@@ -57,20 +65,23 @@
},
"secure_knxkeys": {
"title": "Import KNX Keyring",
"description": "Please select a `.knxkeys` file to import.",
"description": "The Keyring is used to encrypt and decrypt KNX IP Secure communication.",
"data": {
"knxkeys_file": "Keyring file",
"knxkeys_password": "The password to decrypt the `.knxkeys` file"
"knxkeys_password": "Keyring password"
},
"data_description": {
"knxkeys_password": "This was set when exporting the file from ETS."
"knxkeys_file": "Select a `.knxkeys` file. This can be exported from ETS.",
"knxkeys_password": "The password to open the `.knxkeys` file was set when exporting."
}
},
"knxkeys_tunnel_select": {
"title": "Tunnel endpoint",
"description": "Select the tunnel endpoint used for the connection.",
"title": "[%key:component::knx::config::step::tcp_tunnel_endpoint::title%]",
"data": {
"user_id": "'Automatic' selects a free tunnel endpoint for you when connecting. If you're unsure, this is the best option."
"tunnel_endpoint_ia": "[%key:component::knx::config::step::tcp_tunnel_endpoint::data::tunnel_endpoint_ia%]"
},
"data_description": {
"tunnel_endpoint_ia": "[%key:component::knx::config::step::tcp_tunnel_endpoint::data_description::tunnel_endpoint_ia%]"
}
},
"secure_tunnel_manual": {
@@ -82,7 +93,7 @@
"device_authentication": "Device authentication password"
},
"data_description": {
"user_id": "This is often tunnel number +1. So 'Tunnel 2' would have User-ID '3'.",
"user_id": "This usually is tunnel number +1. So first tunnel in the list presented in ETS would have User-ID `2`.",
"user_password": "Password for the specific tunnel connection set in the 'Properties' panel of the tunnel in ETS.",
"device_authentication": "This is set in the 'IP' panel of the interface in ETS."
}
@@ -95,8 +106,8 @@
"sync_latency_tolerance": "Network latency tolerance"
},
"data_description": {
"backbone_key": "Can be seen in the 'Security' report of an ETS project. Eg. '00112233445566778899AABBCCDDEEFF'",
"sync_latency_tolerance": "Default is 1000."
"backbone_key": "Can be seen in the 'Security' report of your ETS project. Eg. `00112233445566778899AABBCCDDEEFF`",
"sync_latency_tolerance": "Should be equal to the backbone configuration of your ETS project. Default is `1000`"
}
},
"routing": {
@@ -104,13 +115,16 @@
"description": "Please configure the routing options.",
"data": {
"individual_address": "Individual address",
"routing_secure": "Use KNX IP Secure",
"routing_secure": "KNX IP Secure Routing",
"multicast_group": "Multicast group",
"multicast_port": "Multicast port",
"local_ip": "[%key:component::knx::config::step::manual_tunnel::data::local_ip%]"
},
"data_description": {
"individual_address": "KNX address to be used by Home Assistant, e.g. `0.0.4`",
"routing_secure": "Select if your installation uses encrypted communication according to the KNX IP Secure standard. This setting requires compatible devices and configuration. You'll be prompted for credentials in the next step.",
"multicast_group": "Multicast group used by your installation. Default is `224.0.23.12`",
"multicast_port": "Multicast port used by your installation. Default is `3671`",
"local_ip": "[%key:component::knx::config::step::manual_tunnel::data_description::local_ip%]"
}
}
@@ -148,7 +162,7 @@
},
"data_description": {
"state_updater": "Set default for reading states from the KNX Bus. When disabled, Home Assistant will not actively retrieve entity states from the KNX Bus. Can be overridden by `sync_state` entity options.",
"rate_limit": "Maximum outgoing telegrams per second.\n`0` to disable limit. Recommended: 0 or 20 to 40",
"rate_limit": "Maximum outgoing telegrams per second.\n`0` to disable limit. Recommended: `0` or between `20` and `40`",
"telegram_log_size": "Telegrams to keep in memory for KNX panel group monitor. Maximum: {telegram_log_size_max}"
}
},
@@ -157,20 +171,27 @@
"description": "[%key:component::knx::config::step::connection_type::description%]",
"data": {
"connection_type": "[%key:component::knx::config::step::connection_type::data::connection_type%]"
},
"data_description": {
"connection_type": "[%key:component::knx::config::step::connection_type::data_description::connection_type%]"
}
},
"tunnel": {
"title": "[%key:component::knx::config::step::tunnel::title%]",
"description": "[%key:component::knx::config::step::tunnel::description%]",
"data": {
"gateway": "[%key:component::knx::config::step::tunnel::data::gateway%]"
},
"data_description": {
"gateway": "[%key:component::knx::config::step::tunnel::data_description::gateway%]"
}
},
"tcp_tunnel_endpoint": {
"title": "[%key:component::knx::config::step::knxkeys_tunnel_select::title%]",
"description": "[%key:component::knx::config::step::knxkeys_tunnel_select::description%]",
"title": "[%key:component::knx::config::step::tcp_tunnel_endpoint::title%]",
"data": {
"tunnel_endpoint_ia": "[%key:component::knx::config::step::knxkeys_tunnel_select::data::user_id%]"
"tunnel_endpoint_ia": "[%key:component::knx::config::step::tcp_tunnel_endpoint::data::tunnel_endpoint_ia%]"
},
"data_description": {
"tunnel_endpoint_ia": "[%key:component::knx::config::step::tcp_tunnel_endpoint::data_description::tunnel_endpoint_ia%]"
}
},
"manual_tunnel": {
@@ -184,6 +205,7 @@
"local_ip": "[%key:component::knx::config::step::manual_tunnel::data::local_ip%]"
},
"data_description": {
"tunneling_type": "[%key:component::knx::config::step::manual_tunnel::data_description::tunneling_type%]",
"port": "[%key:component::knx::config::step::manual_tunnel::data_description::port%]",
"host": "[%key:component::knx::config::step::manual_tunnel::data_description::host%]",
"route_back": "[%key:component::knx::config::step::manual_tunnel::data_description::route_back%]",
@@ -214,14 +236,17 @@
"knxkeys_password": "[%key:component::knx::config::step::secure_knxkeys::data::knxkeys_password%]"
},
"data_description": {
"knxkeys_file": "[%key:component::knx::config::step::secure_knxkeys::data_description::knxkeys_file%]",
"knxkeys_password": "[%key:component::knx::config::step::secure_knxkeys::data_description::knxkeys_password%]"
}
},
"knxkeys_tunnel_select": {
"title": "[%key:component::knx::config::step::knxkeys_tunnel_select::title%]",
"description": "[%key:component::knx::config::step::knxkeys_tunnel_select::description%]",
"title": "[%key:component::knx::config::step::tcp_tunnel_endpoint::title%]",
"data": {
"user_id": "[%key:component::knx::config::step::knxkeys_tunnel_select::data::user_id%]"
"tunnel_endpoint_ia": "[%key:component::knx::config::step::tcp_tunnel_endpoint::data::tunnel_endpoint_ia%]"
},
"data_description": {
"tunnel_endpoint_ia": "[%key:component::knx::config::step::tcp_tunnel_endpoint::data_description::tunnel_endpoint_ia%]"
}
},
"secure_tunnel_manual": {
@@ -262,6 +287,9 @@
},
"data_description": {
"individual_address": "[%key:component::knx::config::step::routing::data_description::individual_address%]",
"routing_secure": "[%key:component::knx::config::step::routing::data_description::routing_secure%]",
"multicast_group": "[%key:component::knx::config::step::routing::data_description::multicast_group%]",
"multicast_port": "[%key:component::knx::config::step::routing::data_description::multicast_port%]",
"local_ip": "[%key:component::knx::config::step::manual_tunnel::data_description::local_ip%]"
}
}
@@ -37,5 +37,5 @@
"iot_class": "cloud_polling",
"loggers": ["pylamarzocco"],
"quality_scale": "platinum",
"requirements": ["pylamarzocco==1.4.2"]
"requirements": ["pylamarzocco==1.4.5"]
}
@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/mealie",
"integration_type": "service",
"iot_class": "local_polling",
"requirements": ["aiomealie==0.9.4"]
"requirements": ["aiomealie==0.9.5"]
}
@@ -8,6 +8,6 @@
"iot_class": "calculated",
"loggers": ["yt_dlp"],
"quality_scale": "internal",
"requirements": ["yt-dlp[default]==2024.12.13"],
"requirements": ["yt-dlp[default]==2024.12.23"],
"single_config_entry": true
}
+32 -8
View File
@@ -5,11 +5,11 @@ from __future__ import annotations
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers import config_validation as cv, device_registry as dr
from homeassistant.helpers.typing import ConfigType
from homeassistant.util import dt as dt_util
from .const import DOMAIN, PLATFORMS
from .const import CONF_AREAS, DOMAIN, LOGGER, PLATFORMS
from .coordinator import NordPoolDataUpdateCoordinator
from .services import async_setup_services
@@ -25,10 +25,14 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
return True
async def async_setup_entry(hass: HomeAssistant, entry: NordPoolConfigEntry) -> bool:
async def async_setup_entry(
hass: HomeAssistant, config_entry: NordPoolConfigEntry
) -> bool:
"""Set up Nord Pool from a config entry."""
coordinator = NordPoolDataUpdateCoordinator(hass, entry)
await cleanup_device(hass, config_entry)
coordinator = NordPoolDataUpdateCoordinator(hass, config_entry)
await coordinator.fetch_data(dt_util.utcnow())
if not coordinator.last_update_success:
raise ConfigEntryNotReady(
@@ -36,13 +40,33 @@ async def async_setup_entry(hass: HomeAssistant, entry: NordPoolConfigEntry) ->
translation_key="initial_update_failed",
translation_placeholders={"error": str(coordinator.last_exception)},
)
entry.runtime_data = coordinator
config_entry.runtime_data = coordinator
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
await hass.config_entries.async_forward_entry_setups(config_entry, PLATFORMS)
return True
async def async_unload_entry(hass: HomeAssistant, entry: NordPoolConfigEntry) -> bool:
async def async_unload_entry(
hass: HomeAssistant, config_entry: NordPoolConfigEntry
) -> bool:
"""Unload Nord Pool config entry."""
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
return await hass.config_entries.async_unload_platforms(config_entry, PLATFORMS)
async def cleanup_device(
hass: HomeAssistant, config_entry: NordPoolConfigEntry
) -> None:
"""Cleanup device and entities."""
device_reg = dr.async_get(hass)
entries = dr.async_entries_for_config_entry(device_reg, config_entry.entry_id)
for area in config_entry.data[CONF_AREAS]:
for entry in entries:
if entry.identifiers == {(DOMAIN, area)}:
continue
LOGGER.debug("Removing device %s", entry.name)
device_reg.async_update_device(
entry.id, remove_config_entry_id=config_entry.entry_id
)
@@ -2,7 +2,6 @@
from __future__ import annotations
import asyncio
from collections.abc import Callable
from datetime import datetime, timedelta
from typing import TYPE_CHECKING
@@ -73,7 +72,7 @@ class NordPoolDataUpdateCoordinator(DataUpdateCoordinator[DeliveryPeriodsData]):
self.hass, self.fetch_data, self.get_next_interval(dt_util.utcnow())
)
data = await self.api_call()
if data:
if data and data.entries:
self.async_set_updated_data(data)
async def api_call(self, retry: int = 3) -> DeliveryPeriodsData | None:
@@ -90,18 +89,20 @@ class NordPoolDataUpdateCoordinator(DataUpdateCoordinator[DeliveryPeriodsData]):
self.config_entry.data[CONF_AREAS],
)
except (
NordPoolEmptyResponseError,
NordPoolResponseError,
NordPoolError,
) as error:
LOGGER.debug("Connection error: %s", error)
if retry > 0:
next_run = (4 - retry) * 15
LOGGER.debug("Wait %d seconds for next try", next_run)
await asyncio.sleep(next_run)
return await self.api_call(retry - 1)
self.async_set_update_error(error)
if data:
current_day = dt_util.utcnow().strftime("%Y-%m-%d")
for entry in data.entries:
if entry.requested_date == current_day:
LOGGER.debug("Data for current day found")
return data
self.async_set_update_error(NordPoolEmptyResponseError("No current day data"))
return data
def merge_price_entries(self) -> list[DeliveryPeriodEntry]:
@@ -8,6 +8,6 @@
"iot_class": "cloud_polling",
"loggers": ["pynordpool"],
"quality_scale": "platinum",
"requirements": ["pynordpool==0.2.3"],
"requirements": ["pynordpool==0.2.4"],
"single_config_entry": true
}
@@ -8,5 +8,5 @@
"documentation": "https://www.home-assistant.io/integrations/ollama",
"integration_type": "service",
"iot_class": "local_polling",
"requirements": ["ollama==0.3.3"]
"requirements": ["ollama==0.4.5"]
}
@@ -427,7 +427,7 @@ class OnkyoMediaPlayer(MediaPlayerEntity):
"""
# HA_VOL * (MAX VOL / 100) * VOL_RESOLUTION
self._update_receiver(
"volume", int(volume * (self._max_volume / 100) * self._volume_resolution)
"volume", round(volume * (self._max_volume / 100) * self._volume_resolution)
)
async def async_volume_up(self) -> None:
@@ -7,5 +7,5 @@
"documentation": "https://www.home-assistant.io/integrations/opower",
"iot_class": "cloud_polling",
"loggers": ["opower"],
"requirements": ["opower==0.8.6"]
"requirements": ["opower==0.8.7"]
}
@@ -5,10 +5,12 @@ from __future__ import annotations
import logging
from aiopegelonline import PegelOnline
from aiopegelonline.const import CONNECT_ERRORS
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import Platform
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from .const import CONF_STATION
@@ -28,7 +30,10 @@ async def async_setup_entry(hass: HomeAssistant, entry: PegelOnlineConfigEntry)
_LOGGER.debug("Setting up station with uuid %s", station_uuid)
api = PegelOnline(async_get_clientsession(hass))
station = await api.async_get_station_details(station_uuid)
try:
station = await api.async_get_station_details(station_uuid)
except CONNECT_ERRORS as err:
raise ConfigEntryNotReady("Failed to connect") from err
coordinator = PegelOnlineDataUpdateCoordinator(hass, entry.title, api, station)
@@ -7,5 +7,5 @@
"integration_type": "service",
"iot_class": "cloud_polling",
"loggers": ["aiopegelonline"],
"requirements": ["aiopegelonline==0.1.0"]
"requirements": ["aiopegelonline==0.1.1"]
}
+293 -254
View File
@@ -2,8 +2,9 @@
from __future__ import annotations
from collections import defaultdict
from collections.abc import Callable
from contextlib import suppress
from dataclasses import astuple, dataclass
import logging
import string
from typing import Any, cast
@@ -158,6 +159,22 @@ def setup(hass: HomeAssistant, config: ConfigType) -> bool:
return True
@dataclass(frozen=True, slots=True)
class MetricNameWithLabelValues:
"""Class to represent a metric with its label values.
The prometheus client library doesn't easily allow us to get back the
information we put into it. Specifically, it is very expensive to query
which label values have been set for metrics.
This class is used to hold a bit of data we need to efficiently remove
labelsets from metrics.
"""
metric_name: str
label_values: tuple[str, ...]
class PrometheusMetrics:
"""Model all of the metrics which should be exposed to Prometheus."""
@@ -191,6 +208,9 @@ class PrometheusMetrics:
else:
self.metrics_prefix = ""
self._metrics: dict[str, MetricWrapperBase] = {}
self._metrics_by_entity_id: dict[str, set[MetricNameWithLabelValues]] = (
defaultdict(set)
)
self._climate_units = climate_units
def handle_state_changed_event(self, event: Event[EventStateChangedData]) -> None:
@@ -202,10 +222,12 @@ class PrometheusMetrics:
_LOGGER.debug("Filtered out entity %s", state.entity_id)
return
if (old_state := event.data.get("old_state")) is not None and (
old_friendly_name := old_state.attributes.get(ATTR_FRIENDLY_NAME)
if (
old_state := event.data.get("old_state")
) is not None and old_state.attributes.get(
ATTR_FRIENDLY_NAME
) != state.attributes.get(ATTR_FRIENDLY_NAME):
self._remove_labelsets(old_state.entity_id, old_friendly_name)
self._remove_labelsets(old_state.entity_id)
self.handle_state(state)
@@ -215,30 +237,32 @@ class PrometheusMetrics:
_LOGGER.debug("Handling state update for %s", entity_id)
labels = self._labels(state)
state_change = self._metric(
"state_change", prometheus_client.Counter, "The number of state changes"
)
state_change.labels(**labels).inc()
entity_available = self._metric(
self._metric(
"state_change",
prometheus_client.Counter,
"The number of state changes",
labels,
).inc()
self._metric(
"entity_available",
prometheus_client.Gauge,
"Entity is available (not in the unavailable or unknown state)",
)
entity_available.labels(**labels).set(float(state.state not in IGNORED_STATES))
labels,
).set(float(state.state not in IGNORED_STATES))
last_updated_time_seconds = self._metric(
self._metric(
"last_updated_time_seconds",
prometheus_client.Gauge,
"The last_updated timestamp",
)
last_updated_time_seconds.labels(**labels).set(state.last_updated.timestamp())
labels,
).set(state.last_updated.timestamp())
if state.state in IGNORED_STATES:
self._remove_labelsets(
entity_id,
None,
{state_change, entity_available, last_updated_time_seconds},
{"state_change", "entity_available", "last_updated_time_seconds"},
)
else:
domain, _ = hacore.split_entity_id(entity_id)
@@ -274,67 +298,68 @@ class PrometheusMetrics:
def _remove_labelsets(
self,
entity_id: str,
friendly_name: str | None = None,
ignored_metrics: set[MetricWrapperBase] | None = None,
ignored_metric_names: set[str] | None = None,
) -> None:
"""Remove labelsets matching the given entity id from all non-ignored metrics."""
if ignored_metrics is None:
ignored_metrics = set()
for metric in list(self._metrics.values()):
if metric in ignored_metrics:
if ignored_metric_names is None:
ignored_metric_names = set()
metric_set = self._metrics_by_entity_id[entity_id]
removed_metrics = set()
for metric in metric_set:
metric_name, label_values = astuple(metric)
if metric_name in ignored_metric_names:
continue
for sample in cast(list[prometheus_client.Metric], metric.collect())[
0
].samples:
if sample.labels["entity"] == entity_id and (
not friendly_name or sample.labels["friendly_name"] == friendly_name
):
_LOGGER.debug(
"Removing labelset from %s for entity_id: %s",
sample.name,
entity_id,
)
with suppress(KeyError):
metric.remove(*sample.labels.values())
_LOGGER.debug(
"Removing labelset %s from %s for entity_id: %s",
label_values,
metric_name,
entity_id,
)
removed_metrics.add(metric)
self._metrics[metric_name].remove(*label_values)
metric_set -= removed_metrics
if not metric_set:
del self._metrics_by_entity_id[entity_id]
def _handle_attributes(self, state: State) -> None:
for key, value in state.attributes.items():
metric = self._metric(
try:
value = float(value)
except (ValueError, TypeError):
continue
self._metric(
f"{state.domain}_attr_{key.lower()}",
prometheus_client.Gauge,
f"{key} attribute of {state.domain} entity",
)
try:
value = float(value)
metric.labels(**self._labels(state)).set(value)
except (ValueError, TypeError):
pass
self._labels(state),
).set(value)
def _metric[_MetricBaseT: MetricWrapperBase](
self,
metric: str,
metric_name: str,
factory: type[_MetricBaseT],
documentation: str,
extra_labels: list[str] | None = None,
labels: dict[str, str],
) -> _MetricBaseT:
labels = ["entity", "friendly_name", "domain"]
if extra_labels is not None:
labels.extend(extra_labels)
try:
return cast(_MetricBaseT, self._metrics[metric])
metric = cast(_MetricBaseT, self._metrics[metric_name])
except KeyError:
full_metric_name = self._sanitize_metric_name(
f"{self.metrics_prefix}{metric}"
f"{self.metrics_prefix}{metric_name}"
)
self._metrics[metric] = factory(
self._metrics[metric_name] = factory(
full_metric_name,
documentation,
labels,
labels.keys(),
registry=prometheus_client.REGISTRY,
)
return cast(_MetricBaseT, self._metrics[metric])
metric = cast(_MetricBaseT, self._metrics[metric_name])
self._metrics_by_entity_id[labels["entity"]].add(
MetricNameWithLabelValues(metric_name, tuple(labels.values()))
)
return metric.labels(**labels)
@staticmethod
def _sanitize_metric_name(metric: str) -> str:
@@ -356,67 +381,90 @@ class PrometheusMetrics:
return value
@staticmethod
def _labels(state: State) -> dict[str, Any]:
return {
def _labels(
state: State,
extra_labels: dict[str, str] | None = None,
) -> dict[str, Any]:
if extra_labels is None:
extra_labels = {}
labels = {
"entity": state.entity_id,
"domain": state.domain,
"friendly_name": state.attributes.get(ATTR_FRIENDLY_NAME),
}
if not labels.keys().isdisjoint(extra_labels.keys()):
conflicting_keys = labels.keys() & extra_labels.keys()
raise ValueError(
f"extra_labels contains conflicting keys: {conflicting_keys}"
)
return labels | extra_labels
def _battery(self, state: State) -> None:
if (battery_level := state.attributes.get(ATTR_BATTERY_LEVEL)) is not None:
metric = self._metric(
"battery_level_percent",
prometheus_client.Gauge,
"Battery level as a percentage of its capacity",
)
try:
value = float(battery_level)
metric.labels(**self._labels(state)).set(value)
except ValueError:
pass
if (battery_level := state.attributes.get(ATTR_BATTERY_LEVEL)) is None:
return
try:
value = float(battery_level)
except ValueError:
return
self._metric(
"battery_level_percent",
prometheus_client.Gauge,
"Battery level as a percentage of its capacity",
self._labels(state),
).set(value)
def _handle_binary_sensor(self, state: State) -> None:
metric = self._metric(
if (value := self.state_as_number(state)) is None:
return
self._metric(
"binary_sensor_state",
prometheus_client.Gauge,
"State of the binary sensor (0/1)",
)
if (value := self.state_as_number(state)) is not None:
metric.labels(**self._labels(state)).set(value)
self._labels(state),
).set(value)
def _handle_input_boolean(self, state: State) -> None:
metric = self._metric(
if (value := self.state_as_number(state)) is None:
return
self._metric(
"input_boolean_state",
prometheus_client.Gauge,
"State of the input boolean (0/1)",
)
if (value := self.state_as_number(state)) is not None:
metric.labels(**self._labels(state)).set(value)
self._labels(state),
).set(value)
def _numeric_handler(self, state: State, domain: str, title: str) -> None:
if (value := self.state_as_number(state)) is None:
return
if unit := self._unit_string(state.attributes.get(ATTR_UNIT_OF_MEASUREMENT)):
metric = self._metric(
f"{domain}_state_{unit}",
prometheus_client.Gauge,
f"State of the {title} measured in {unit}",
self._labels(state),
)
else:
metric = self._metric(
f"{domain}_state",
prometheus_client.Gauge,
f"State of the {title}",
self._labels(state),
)
if (value := self.state_as_number(state)) is not None:
if (
state.attributes.get(ATTR_UNIT_OF_MEASUREMENT)
== UnitOfTemperature.FAHRENHEIT
):
value = TemperatureConverter.convert(
value, UnitOfTemperature.FAHRENHEIT, UnitOfTemperature.CELSIUS
)
metric.labels(**self._labels(state)).set(value)
if (
state.attributes.get(ATTR_UNIT_OF_MEASUREMENT)
== UnitOfTemperature.FAHRENHEIT
):
value = TemperatureConverter.convert(
value, UnitOfTemperature.FAHRENHEIT, UnitOfTemperature.CELSIUS
)
metric.set(value)
def _handle_input_number(self, state: State) -> None:
self._numeric_handler(state, "input_number", "input number")
@@ -425,88 +473,99 @@ class PrometheusMetrics:
self._numeric_handler(state, "number", "number")
def _handle_device_tracker(self, state: State) -> None:
metric = self._metric(
if (value := self.state_as_number(state)) is None:
return
self._metric(
"device_tracker_state",
prometheus_client.Gauge,
"State of the device tracker (0/1)",
)
if (value := self.state_as_number(state)) is not None:
metric.labels(**self._labels(state)).set(value)
self._labels(state),
).set(value)
def _handle_person(self, state: State) -> None:
metric = self._metric(
"person_state", prometheus_client.Gauge, "State of the person (0/1)"
)
if (value := self.state_as_number(state)) is not None:
metric.labels(**self._labels(state)).set(value)
if (value := self.state_as_number(state)) is None:
return
self._metric(
"person_state",
prometheus_client.Gauge,
"State of the person (0/1)",
self._labels(state),
).set(value)
def _handle_cover(self, state: State) -> None:
metric = self._metric(
"cover_state",
prometheus_client.Gauge,
"State of the cover (0/1)",
["state"],
)
cover_states = [STATE_CLOSED, STATE_CLOSING, STATE_OPEN, STATE_OPENING]
for cover_state in cover_states:
metric.labels(**dict(self._labels(state), state=cover_state)).set(
float(cover_state == state.state)
metric = self._metric(
"cover_state",
prometheus_client.Gauge,
"State of the cover (0/1)",
self._labels(state, {"state": cover_state}),
)
metric.set(float(cover_state == state.state))
position = state.attributes.get(ATTR_CURRENT_POSITION)
if position is not None:
position_metric = self._metric(
self._metric(
"cover_position",
prometheus_client.Gauge,
"Position of the cover (0-100)",
)
position_metric.labels(**self._labels(state)).set(float(position))
self._labels(state),
).set(float(position))
tilt_position = state.attributes.get(ATTR_CURRENT_TILT_POSITION)
if tilt_position is not None:
tilt_position_metric = self._metric(
self._metric(
"cover_tilt_position",
prometheus_client.Gauge,
"Tilt Position of the cover (0-100)",
)
tilt_position_metric.labels(**self._labels(state)).set(float(tilt_position))
self._labels(state),
).set(float(tilt_position))
def _handle_light(self, state: State) -> None:
metric = self._metric(
if (value := self.state_as_number(state)) is None:
return
brightness = state.attributes.get(ATTR_BRIGHTNESS)
if state.state == STATE_ON and brightness is not None:
value = float(brightness) / 255.0
value = value * 100
self._metric(
"light_brightness_percent",
prometheus_client.Gauge,
"Light brightness percentage (0..100)",
)
if (value := self.state_as_number(state)) is not None:
brightness = state.attributes.get(ATTR_BRIGHTNESS)
if state.state == STATE_ON and brightness is not None:
value = float(brightness) / 255.0
value = value * 100
metric.labels(**self._labels(state)).set(value)
self._labels(state),
).set(value)
def _handle_lock(self, state: State) -> None:
metric = self._metric(
"lock_state", prometheus_client.Gauge, "State of the lock (0/1)"
)
if (value := self.state_as_number(state)) is not None:
metric.labels(**self._labels(state)).set(value)
if (value := self.state_as_number(state)) is None:
return
self._metric(
"lock_state",
prometheus_client.Gauge,
"State of the lock (0/1)",
self._labels(state),
).set(value)
def _handle_climate_temp(
self, state: State, attr: str, metric_name: str, metric_description: str
) -> None:
if (temp := state.attributes.get(attr)) is not None:
if self._climate_units == UnitOfTemperature.FAHRENHEIT:
temp = TemperatureConverter.convert(
temp, UnitOfTemperature.FAHRENHEIT, UnitOfTemperature.CELSIUS
)
metric = self._metric(
metric_name,
prometheus_client.Gauge,
metric_description,
if (temp := state.attributes.get(attr)) is None:
return
if self._climate_units == UnitOfTemperature.FAHRENHEIT:
temp = TemperatureConverter.convert(
temp, UnitOfTemperature.FAHRENHEIT, UnitOfTemperature.CELSIUS
)
metric.labels(**self._labels(state)).set(temp)
self._metric(
metric_name,
prometheus_client.Gauge,
metric_description,
self._labels(state),
).set(temp)
def _handle_climate(self, state: State) -> None:
self._handle_climate_temp(
@@ -535,90 +594,75 @@ class PrometheusMetrics:
)
if current_action := state.attributes.get(ATTR_HVAC_ACTION):
metric = self._metric(
"climate_action",
prometheus_client.Gauge,
"HVAC action",
["action"],
)
for action in HVACAction:
metric.labels(**dict(self._labels(state), action=action.value)).set(
float(action == current_action)
)
self._metric(
"climate_action",
prometheus_client.Gauge,
"HVAC action",
self._labels(state, {"action": action.value}),
).set(float(action == current_action))
current_mode = state.state
available_modes = state.attributes.get(ATTR_HVAC_MODES)
if current_mode and available_modes:
metric = self._metric(
"climate_mode",
prometheus_client.Gauge,
"HVAC mode",
["mode"],
)
for mode in available_modes:
metric.labels(**dict(self._labels(state), mode=mode)).set(
float(mode == current_mode)
)
self._metric(
"climate_mode",
prometheus_client.Gauge,
"HVAC mode",
self._labels(state, {"mode": mode}),
).set(float(mode == current_mode))
preset_mode = state.attributes.get(ATTR_PRESET_MODE)
available_preset_modes = state.attributes.get(ATTR_PRESET_MODES)
if preset_mode and available_preset_modes:
preset_metric = self._metric(
"climate_preset_mode",
prometheus_client.Gauge,
"Preset mode enum",
["mode"],
)
for mode in available_preset_modes:
preset_metric.labels(**dict(self._labels(state), mode=mode)).set(
float(mode == preset_mode)
)
self._metric(
"climate_preset_mode",
prometheus_client.Gauge,
"Preset mode enum",
self._labels(state, {"mode": mode}),
).set(float(mode == preset_mode))
fan_mode = state.attributes.get(ATTR_FAN_MODE)
available_fan_modes = state.attributes.get(ATTR_FAN_MODES)
if fan_mode and available_fan_modes:
fan_mode_metric = self._metric(
"climate_fan_mode",
prometheus_client.Gauge,
"Fan mode enum",
["mode"],
)
for mode in available_fan_modes:
fan_mode_metric.labels(**dict(self._labels(state), mode=mode)).set(
float(mode == fan_mode)
)
self._metric(
"climate_fan_mode",
prometheus_client.Gauge,
"Fan mode enum",
self._labels(state, {"mode": mode}),
).set(float(mode == fan_mode))
def _handle_humidifier(self, state: State) -> None:
humidifier_target_humidity_percent = state.attributes.get(ATTR_HUMIDITY)
if humidifier_target_humidity_percent:
metric = self._metric(
self._metric(
"humidifier_target_humidity_percent",
prometheus_client.Gauge,
"Target Relative Humidity",
)
metric.labels(**self._labels(state)).set(humidifier_target_humidity_percent)
self._labels(state),
).set(humidifier_target_humidity_percent)
metric = self._metric(
"humidifier_state",
prometheus_client.Gauge,
"State of the humidifier (0/1)",
)
if (value := self.state_as_number(state)) is not None:
metric.labels(**self._labels(state)).set(value)
self._metric(
"humidifier_state",
prometheus_client.Gauge,
"State of the humidifier (0/1)",
self._labels(state),
).set(value)
current_mode = state.attributes.get(ATTR_MODE)
available_modes = state.attributes.get(ATTR_AVAILABLE_MODES)
if current_mode and available_modes:
metric = self._metric(
"humidifier_mode",
prometheus_client.Gauge,
"Humidifier Mode",
["mode"],
)
for mode in available_modes:
metric.labels(**dict(self._labels(state), mode=mode)).set(
float(mode == current_mode)
)
self._metric(
"humidifier_mode",
prometheus_client.Gauge,
"Humidifier Mode",
self._labels(state, {"mode": mode}),
).set(float(mode == current_mode))
def _handle_sensor(self, state: State) -> None:
unit = self._unit_string(state.attributes.get(ATTR_UNIT_OF_MEASUREMENT))
@@ -628,22 +672,24 @@ class PrometheusMetrics:
if metric is not None:
break
if metric is not None:
if metric is not None and (value := self.state_as_number(state)) is not None:
documentation = "State of the sensor"
if unit:
documentation = f"Sensor data measured in {unit}"
_metric = self._metric(metric, prometheus_client.Gauge, documentation)
if (value := self.state_as_number(state)) is not None:
if (
state.attributes.get(ATTR_UNIT_OF_MEASUREMENT)
== UnitOfTemperature.FAHRENHEIT
):
value = TemperatureConverter.convert(
value, UnitOfTemperature.FAHRENHEIT, UnitOfTemperature.CELSIUS
)
_metric.labels(**self._labels(state)).set(value)
if (
state.attributes.get(ATTR_UNIT_OF_MEASUREMENT)
== UnitOfTemperature.FAHRENHEIT
):
value = TemperatureConverter.convert(
value, UnitOfTemperature.FAHRENHEIT, UnitOfTemperature.CELSIUS
)
self._metric(
metric,
prometheus_client.Gauge,
documentation,
self._labels(state),
).set(value)
self._battery(state)
@@ -702,114 +748,107 @@ class PrometheusMetrics:
return units.get(unit, default)
def _handle_switch(self, state: State) -> None:
metric = self._metric(
"switch_state", prometheus_client.Gauge, "State of the switch (0/1)"
)
if (value := self.state_as_number(state)) is not None:
metric.labels(**self._labels(state)).set(value)
self._metric(
"switch_state",
prometheus_client.Gauge,
"State of the switch (0/1)",
self._labels(state),
).set(value)
self._handle_attributes(state)
def _handle_fan(self, state: State) -> None:
metric = self._metric(
"fan_state", prometheus_client.Gauge, "State of the fan (0/1)"
)
if (value := self.state_as_number(state)) is not None:
metric.labels(**self._labels(state)).set(value)
self._metric(
"fan_state",
prometheus_client.Gauge,
"State of the fan (0/1)",
self._labels(state),
).set(value)
fan_speed_percent = state.attributes.get(ATTR_PERCENTAGE)
if fan_speed_percent is not None:
fan_speed_metric = self._metric(
self._metric(
"fan_speed_percent",
prometheus_client.Gauge,
"Fan speed percent (0-100)",
)
fan_speed_metric.labels(**self._labels(state)).set(float(fan_speed_percent))
self._labels(state),
).set(float(fan_speed_percent))
fan_is_oscillating = state.attributes.get(ATTR_OSCILLATING)
if fan_is_oscillating is not None:
fan_oscillating_metric = self._metric(
self._metric(
"fan_is_oscillating",
prometheus_client.Gauge,
"Whether the fan is oscillating (0/1)",
)
fan_oscillating_metric.labels(**self._labels(state)).set(
float(fan_is_oscillating)
)
self._labels(state),
).set(float(fan_is_oscillating))
fan_preset_mode = state.attributes.get(ATTR_PRESET_MODE)
available_modes = state.attributes.get(ATTR_PRESET_MODES)
if fan_preset_mode and available_modes:
fan_preset_metric = self._metric(
"fan_preset_mode",
prometheus_client.Gauge,
"Fan preset mode enum",
["mode"],
)
for mode in available_modes:
fan_preset_metric.labels(**dict(self._labels(state), mode=mode)).set(
float(mode == fan_preset_mode)
)
self._metric(
"fan_preset_mode",
prometheus_client.Gauge,
"Fan preset mode enum",
self._labels(state, {"mode": mode}),
).set(float(mode == fan_preset_mode))
fan_direction = state.attributes.get(ATTR_DIRECTION)
if fan_direction is not None:
fan_direction_metric = self._metric(
if fan_direction in {DIRECTION_FORWARD, DIRECTION_REVERSE}:
self._metric(
"fan_direction_reversed",
prometheus_client.Gauge,
"Fan direction reversed (bool)",
)
if fan_direction == DIRECTION_FORWARD:
fan_direction_metric.labels(**self._labels(state)).set(0)
elif fan_direction == DIRECTION_REVERSE:
fan_direction_metric.labels(**self._labels(state)).set(1)
self._labels(state),
).set(float(fan_direction == DIRECTION_REVERSE))
def _handle_zwave(self, state: State) -> None:
self._battery(state)
def _handle_automation(self, state: State) -> None:
metric = self._metric(
self._metric(
"automation_triggered_count",
prometheus_client.Counter,
"Count of times an automation has been triggered",
)
metric.labels(**self._labels(state)).inc()
self._labels(state),
).inc()
def _handle_counter(self, state: State) -> None:
metric = self._metric(
if (value := self.state_as_number(state)) is None:
return
self._metric(
"counter_value",
prometheus_client.Gauge,
"Value of counter entities",
)
if (value := self.state_as_number(state)) is not None:
metric.labels(**self._labels(state)).set(value)
self._labels(state),
).set(value)
def _handle_update(self, state: State) -> None:
metric = self._metric(
if (value := self.state_as_number(state)) is None:
return
self._metric(
"update_state",
prometheus_client.Gauge,
"Update state, indicating if an update is available (0/1)",
)
if (value := self.state_as_number(state)) is not None:
metric.labels(**self._labels(state)).set(value)
self._labels(state),
).set(value)
def _handle_alarm_control_panel(self, state: State) -> None:
current_state = state.state
if current_state:
metric = self._metric(
"alarm_control_panel_state",
prometheus_client.Gauge,
"State of the alarm control panel (0/1)",
["state"],
)
for alarm_state in AlarmControlPanelState:
metric.labels(**dict(self._labels(state), state=alarm_state.value)).set(
float(alarm_state.value == current_state)
)
self._metric(
"alarm_control_panel_state",
prometheus_client.Gauge,
"State of the alarm control panel (0/1)",
self._labels(state, {"state": alarm_state.value}),
).set(float(alarm_state.value == current_state))
class PrometheusView(HomeAssistantView):
@@ -77,7 +77,7 @@ class LegacyBase(DeclarativeBase):
"""Base class for tables, used for schema migration."""
SCHEMA_VERSION = 47
SCHEMA_VERSION = 48
_LOGGER = logging.getLogger(__name__)
+15 -2
View File
@@ -1976,6 +1976,17 @@ class _SchemaVersion47Migrator(_SchemaVersionMigrator, target_version=47):
)
class _SchemaVersion48Migrator(_SchemaVersionMigrator, target_version=48):
def _apply_update(self) -> None:
"""Version specific update method."""
# https://github.com/home-assistant/core/issues/134002
# If the system has unmigrated states rows, we need to
# ensure they are migrated now so the new optimized
# queries can be used. For most systems, this should
# be very fast and nothing will be migrated.
_migrate_columns_to_timestamp(self.instance, self.session_maker, self.engine)
def _migrate_statistics_columns_to_timestamp_removing_duplicates(
hass: HomeAssistant,
instance: Recorder,
@@ -2109,7 +2120,8 @@ def _migrate_columns_to_timestamp(
connection.execute(
text(
'UPDATE events set time_fired_ts=strftime("%s",time_fired) + '
"cast(substr(time_fired,-7) AS FLOAT);"
"cast(substr(time_fired,-7) AS FLOAT) "
"WHERE time_fired_ts is NULL;"
)
)
connection.execute(
@@ -2117,7 +2129,8 @@ def _migrate_columns_to_timestamp(
'UPDATE states set last_updated_ts=strftime("%s",last_updated) + '
"cast(substr(last_updated,-7) AS FLOAT), "
'last_changed_ts=strftime("%s",last_changed) + '
"cast(substr(last_changed,-7) AS FLOAT);"
"cast(substr(last_changed,-7) AS FLOAT) "
" WHERE last_updated_ts is NULL;"
)
)
elif engine.dialect.name == SupportedDialect.MYSQL:
@@ -1,6 +1,5 @@
"""The russound_rio component."""
import asyncio
import logging
from aiorussound import RussoundClient, RussoundTcpConnectionHandler
@@ -11,7 +10,7 @@ from homeassistant.const import CONF_HOST, CONF_PORT, Platform
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryNotReady
from .const import CONNECT_TIMEOUT, DOMAIN, RUSSOUND_RIO_EXCEPTIONS
from .const import DOMAIN, RUSSOUND_RIO_EXCEPTIONS
PLATFORMS = [Platform.MEDIA_PLAYER]
@@ -40,8 +39,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: RussoundConfigEntry) ->
await client.register_state_update_callbacks(_connection_update_callback)
try:
async with asyncio.timeout(CONNECT_TIMEOUT):
await client.connect()
await client.connect()
except RUSSOUND_RIO_EXCEPTIONS as err:
raise ConfigEntryNotReady(
translation_domain=DOMAIN,
@@ -2,7 +2,6 @@
from __future__ import annotations
import asyncio
import logging
from typing import Any
@@ -17,7 +16,7 @@ from homeassistant.config_entries import (
from homeassistant.const import CONF_HOST, CONF_PORT
from homeassistant.helpers import config_validation as cv
from .const import CONNECT_TIMEOUT, DOMAIN, RUSSOUND_RIO_EXCEPTIONS
from .const import DOMAIN, RUSSOUND_RIO_EXCEPTIONS
DATA_SCHEMA = vol.Schema(
{
@@ -45,10 +44,9 @@ class FlowHandler(ConfigFlow, domain=DOMAIN):
client = RussoundClient(RussoundTcpConnectionHandler(host, port))
try:
async with asyncio.timeout(CONNECT_TIMEOUT):
await client.connect()
controller = client.controllers[1]
await client.disconnect()
await client.connect()
controller = client.controllers[1]
await client.disconnect()
except RUSSOUND_RIO_EXCEPTIONS:
_LOGGER.exception("Could not connect to Russound RIO")
errors["base"] = "cannot_connect"
@@ -90,10 +88,9 @@ class FlowHandler(ConfigFlow, domain=DOMAIN):
# Connection logic is repeated here since this method will be removed in future releases
client = RussoundClient(RussoundTcpConnectionHandler(host, port))
try:
async with asyncio.timeout(CONNECT_TIMEOUT):
await client.connect()
controller = client.controllers[1]
await client.disconnect()
await client.connect()
controller = client.controllers[1]
await client.disconnect()
except RUSSOUND_RIO_EXCEPTIONS:
_LOGGER.exception("Could not connect to Russound RIO")
return self.async_abort(
@@ -16,9 +16,6 @@ RUSSOUND_RIO_EXCEPTIONS = (
asyncio.CancelledError,
)
CONNECT_TIMEOUT = 15
MP_FEATURES_BY_FLAG = {
FeatureFlag.COMMANDS_ZONE_MUTE_OFF_ON: MediaPlayerEntityFeature.VOLUME_MUTE
}
@@ -7,5 +7,5 @@
"iot_class": "local_push",
"loggers": ["aiorussound"],
"quality_scale": "silver",
"requirements": ["aiorussound==4.1.0"]
"requirements": ["aiorussound==4.1.1"]
}
@@ -141,8 +141,10 @@
"options": {
"apparent_power": "[%key:component::sensor::entity_component::apparent_power::name%]",
"aqi": "[%key:component::sensor::entity_component::aqi::name%]",
"area": "[%key:component::sensor::entity_component::area::name%]",
"atmospheric_pressure": "[%key:component::sensor::entity_component::atmospheric_pressure::name%]",
"battery": "[%key:component::sensor::entity_component::battery::name%]",
"blood_glucose_concentration": "[%key:component::sensor::entity_component::blood_glucose_concentration::name%]",
"carbon_dioxide": "[%key:component::sensor::entity_component::carbon_dioxide::name%]",
"carbon_monoxide": "[%key:component::sensor::entity_component::carbon_monoxide::name%]",
"conductivity": "[%key:component::sensor::entity_component::conductivity::name%]",
+2 -2
View File
@@ -23,7 +23,7 @@
"is_illuminance": "Current {entity_name} illuminance",
"is_irradiance": "Current {entity_name} irradiance",
"is_moisture": "Current {entity_name} moisture",
"is_monetary": "Current {entity_name} money",
"is_monetary": "Current {entity_name} balance",
"is_nitrogen_dioxide": "Current {entity_name} nitrogen dioxide concentration level",
"is_nitrogen_monoxide": "Current {entity_name} nitrogen monoxide concentration level",
"is_nitrous_oxide": "Current {entity_name} nitrous oxide concentration level",
@@ -75,7 +75,7 @@
"illuminance": "{entity_name} illuminance changes",
"irradiance": "{entity_name} irradiance changes",
"moisture": "{entity_name} moisture changes",
"monetary": "{entity_name} money changes",
"monetary": "{entity_name} balance changes",
"nitrogen_dioxide": "{entity_name} nitrogen dioxide concentration changes",
"nitrogen_monoxide": "{entity_name} nitrogen monoxide concentration changes",
"nitrous_oxide": "{entity_name} nitrous oxide concentration changes",
@@ -485,7 +485,7 @@ class SimpliSafe:
except Exception as err: # noqa: BLE001
LOGGER.error("Unknown exception while connecting to websocket: %s", err)
LOGGER.warning("Reconnecting to websocket")
LOGGER.debug("Reconnecting to websocket")
await self._async_cancel_websocket_loop()
self._websocket_reconnect_task = self._hass.async_create_task(
self._async_start_websocket_loop()
@@ -113,7 +113,7 @@ class SwissPublicTransportDataUpdateCoordinator(
destination=self._opendata.to_name,
remaining_time=str(self.remaining_time(connections[i]["departure"])),
delay=connections[i]["delay"],
line=connections[i]["line"],
line=connections[i].get("line"),
)
for i in range(limit)
if len(connections) > i and connections[i] is not None
@@ -134,7 +134,7 @@ class SwissPublicTransportDataUpdateCoordinator(
"train_number": connection["train_number"],
"transfers": connection["transfers"],
"delay": connection["delay"],
"line": connection["line"],
"line": connection.get("line"),
}
for connection in await self.fetch_connections(limit)
]
@@ -429,16 +429,17 @@ async def async_setup_entry(
is_enabled = check_legacy_resource(
f"{_type}_{argument}", legacy_resources
)
loaded_resources.add(slugify(f"{_type}_{argument}"))
entities.append(
SystemMonitorSensor(
coordinator,
sensor_description,
entry.entry_id,
argument,
is_enabled,
if (_add := slugify(f"{_type}_{argument}")) not in loaded_resources:
loaded_resources.add(_add)
entities.append(
SystemMonitorSensor(
coordinator,
sensor_description,
entry.entry_id,
argument,
is_enabled,
)
)
)
continue
if _type.startswith("ipv"):
+1 -1
View File
@@ -7,5 +7,5 @@
"integration_type": "hub",
"iot_class": "cloud_polling",
"loggers": ["pytile"],
"requirements": ["pytile==2023.12.0"]
"requirements": ["pytile==2024.12.0"]
}
+1 -1
View File
@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/velux",
"iot_class": "local_polling",
"loggers": ["pyvlx"],
"requirements": ["pyvlx==0.2.21"]
"requirements": ["pyvlx==0.2.26"]
}
+1 -1
View File
@@ -7,5 +7,5 @@
"documentation": "https://www.home-assistant.io/integrations/voip",
"iot_class": "local_push",
"quality_scale": "internal",
"requirements": ["voip-utils==0.2.1"]
"requirements": ["voip-utils==0.2.2"]
}
@@ -15,3 +15,4 @@ send_magic_packet:
number:
min: 1
max: 65535
mode: "box"
@@ -290,7 +290,7 @@
"name": "[%key:component::zwave_js::services::set_config_parameter::fields::value::name%]"
}
},
"name": "Bulk set partial configuration parameters (advanced)."
"name": "Bulk set partial configuration parameters (advanced)"
},
"clear_lock_usercode": {
"description": "Clears a user code from a lock.",
+1 -1
View File
@@ -25,7 +25,7 @@ if TYPE_CHECKING:
APPLICATION_NAME: Final = "HomeAssistant"
MAJOR_VERSION: Final = 2025
MINOR_VERSION: Final = 1
PATCH_VERSION: Final = "0b2"
PATCH_VERSION: Final = "0b4"
__short_version__: Final = f"{MAJOR_VERSION}.{MINOR_VERSION}"
__version__: Final = f"{__short_version__}.{PATCH_VERSION}"
REQUIRED_PYTHON_VER: Final[tuple[int, int, int]] = (3, 12, 0)
+1 -1
View File
@@ -35,7 +35,7 @@ habluetooth==3.6.0
hass-nabucasa==0.87.0
hassil==2.0.5
home-assistant-bluetooth==1.13.0
home-assistant-frontend==20241224.0
home-assistant-frontend==20241230.0
home-assistant-intents==2024.12.20
httpx==0.27.2
ifaddr==0.2.0
+1 -1
View File
@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
[project]
name = "homeassistant"
version = "2025.1.0b2"
version = "2025.1.0b4"
license = {text = "Apache-2.0"}
description = "Open-source home automation platform running on Python 3."
readme = "README.rst"
+15 -15
View File
@@ -294,7 +294,7 @@ aiolookin==1.0.0
aiolyric==2.0.1
# homeassistant.components.mealie
aiomealie==0.9.4
aiomealie==0.9.5
# homeassistant.components.modern_forms
aiomodernforms==0.1.8
@@ -321,7 +321,7 @@ aioopenexchangerates==0.6.8
aiooui==0.1.7
# homeassistant.components.pegel_online
aiopegelonline==0.1.0
aiopegelonline==0.1.1
# homeassistant.components.acmeda
aiopulse==0.4.6
@@ -356,7 +356,7 @@ aioridwell==2024.01.0
aioruckus==0.42
# homeassistant.components.russound_rio
aiorussound==4.1.0
aiorussound==4.1.1
# homeassistant.components.ruuvi_gateway
aioruuvigateway==0.1.0
@@ -827,7 +827,7 @@ eliqonline==1.2.2
elkm1-lib==2.2.10
# homeassistant.components.elmax
elmax-api==0.0.6.3
elmax-api==0.0.6.4rc0
# homeassistant.components.elvia
elvia==0.1.0
@@ -1134,7 +1134,7 @@ hole==0.8.0
holidays==0.63
# homeassistant.components.frontend
home-assistant-frontend==20241224.0
home-assistant-frontend==20241230.0
# homeassistant.components.conversation
home-assistant-intents==2024.12.20
@@ -1260,7 +1260,7 @@ kiwiki-client==0.1.1
knocki==0.4.2
# homeassistant.components.knx
knx-frontend==2024.11.16.205004
knx-frontend==2024.12.26.233449
# homeassistant.components.konnected
konnected==1.2.0
@@ -1528,7 +1528,7 @@ oemthermostat==1.1.1
ohme==1.2.0
# homeassistant.components.ollama
ollama==0.3.3
ollama==0.4.5
# homeassistant.components.omnilogic
omnilogic==0.4.5
@@ -1570,7 +1570,7 @@ openwrt-luci-rpc==1.1.17
openwrt-ubus-rpc==0.0.2
# homeassistant.components.opower
opower==0.8.6
opower==0.8.7
# homeassistant.components.oralb
oralb-ble==0.17.6
@@ -2043,7 +2043,7 @@ pykwb==0.0.8
pylacrosse==0.4
# homeassistant.components.lamarzocco
pylamarzocco==1.4.2
pylamarzocco==1.4.5
# homeassistant.components.lastfm
pylast==5.1.0
@@ -2118,7 +2118,7 @@ pynetio==0.1.9.1
pynobo==1.8.1
# homeassistant.components.nordpool
pynordpool==0.2.3
pynordpool==0.2.4
# homeassistant.components.nuki
pynuki==1.6.3
@@ -2360,7 +2360,7 @@ python-gc100==1.0.3a0
python-gitlab==1.6.0
# homeassistant.components.analytics_insights
python-homeassistant-analytics==0.8.0
python-homeassistant-analytics==0.8.1
# homeassistant.components.homewizard
python-homewizard-energy==v7.0.0
@@ -2442,7 +2442,7 @@ python-vlc==3.0.18122
pythonegardia==1.0.52
# homeassistant.components.tile
pytile==2023.12.0
pytile==2024.12.0
# homeassistant.components.tomorrowio
pytomorrowio==0.3.6
@@ -2491,7 +2491,7 @@ pyvesync==2.1.12
pyvizio==0.1.61
# homeassistant.components.velux
pyvlx==0.2.21
pyvlx==0.2.26
# homeassistant.components.volumio
pyvolumio==0.1.5
@@ -2960,7 +2960,7 @@ venstarcolortouch==0.19
vilfo-api-client==0.5.0
# homeassistant.components.voip
voip-utils==0.2.1
voip-utils==0.2.2
# homeassistant.components.volkszaehler
volkszaehler==0.4.0
@@ -3082,7 +3082,7 @@ youless-api==2.1.2
youtubeaio==1.1.5
# homeassistant.components.media_extractor
yt-dlp[default]==2024.12.13
yt-dlp[default]==2024.12.23
# homeassistant.components.zabbix
zabbix-utils==2.0.1
+15 -15
View File
@@ -276,7 +276,7 @@ aiolookin==1.0.0
aiolyric==2.0.1
# homeassistant.components.mealie
aiomealie==0.9.4
aiomealie==0.9.5
# homeassistant.components.modern_forms
aiomodernforms==0.1.8
@@ -303,7 +303,7 @@ aioopenexchangerates==0.6.8
aiooui==0.1.7
# homeassistant.components.pegel_online
aiopegelonline==0.1.0
aiopegelonline==0.1.1
# homeassistant.components.acmeda
aiopulse==0.4.6
@@ -338,7 +338,7 @@ aioridwell==2024.01.0
aioruckus==0.42
# homeassistant.components.russound_rio
aiorussound==4.1.0
aiorussound==4.1.1
# homeassistant.components.ruuvi_gateway
aioruuvigateway==0.1.0
@@ -702,7 +702,7 @@ elgato==5.1.2
elkm1-lib==2.2.10
# homeassistant.components.elmax
elmax-api==0.0.6.3
elmax-api==0.0.6.4rc0
# homeassistant.components.elvia
elvia==0.1.0
@@ -963,7 +963,7 @@ hole==0.8.0
holidays==0.63
# homeassistant.components.frontend
home-assistant-frontend==20241224.0
home-assistant-frontend==20241230.0
# homeassistant.components.conversation
home-assistant-intents==2024.12.20
@@ -1062,7 +1062,7 @@ kegtron-ble==0.4.0
knocki==0.4.2
# homeassistant.components.knx
knx-frontend==2024.11.16.205004
knx-frontend==2024.12.26.233449
# homeassistant.components.konnected
konnected==1.2.0
@@ -1276,7 +1276,7 @@ odp-amsterdam==6.0.2
ohme==1.2.0
# homeassistant.components.ollama
ollama==0.3.3
ollama==0.4.5
# homeassistant.components.omnilogic
omnilogic==0.4.5
@@ -1306,7 +1306,7 @@ openhomedevice==2.2.0
openwebifpy==4.3.0
# homeassistant.components.opower
opower==0.8.6
opower==0.8.7
# homeassistant.components.oralb
oralb-ble==0.17.6
@@ -1657,7 +1657,7 @@ pykrakenapi==0.1.8
pykulersky==0.5.2
# homeassistant.components.lamarzocco
pylamarzocco==1.4.2
pylamarzocco==1.4.5
# homeassistant.components.lastfm
pylast==5.1.0
@@ -1720,7 +1720,7 @@ pynetgear==0.10.10
pynobo==1.8.1
# homeassistant.components.nordpool
pynordpool==0.2.3
pynordpool==0.2.4
# homeassistant.components.nuki
pynuki==1.6.3
@@ -1902,7 +1902,7 @@ python-fullykiosk==0.0.14
# python-gammu==3.2.4
# homeassistant.components.analytics_insights
python-homeassistant-analytics==0.8.0
python-homeassistant-analytics==0.8.1
# homeassistant.components.homewizard
python-homewizard-energy==v7.0.0
@@ -1966,7 +1966,7 @@ python-technove==1.3.1
python-telegram-bot[socks]==21.5
# homeassistant.components.tile
pytile==2023.12.0
pytile==2024.12.0
# homeassistant.components.tomorrowio
pytomorrowio==0.3.6
@@ -2006,7 +2006,7 @@ pyvesync==2.1.12
pyvizio==0.1.61
# homeassistant.components.velux
pyvlx==0.2.21
pyvlx==0.2.26
# homeassistant.components.volumio
pyvolumio==0.1.5
@@ -2376,7 +2376,7 @@ venstarcolortouch==0.19
vilfo-api-client==0.5.0
# homeassistant.components.voip
voip-utils==0.2.1
voip-utils==0.2.2
# homeassistant.components.volvooncall
volvooncall==0.10.3
@@ -2477,7 +2477,7 @@ youless-api==2.1.2
youtubeaio==1.1.5
# homeassistant.components.media_extractor
yt-dlp[default]==2024.12.13
yt-dlp[default]==2024.12.23
# homeassistant.components.zamg
zamg==0.3.6
+45 -10
View File
@@ -5,6 +5,7 @@ from unittest.mock import AsyncMock, patch
from elevenlabs.core import ApiError
from elevenlabs.types import GetVoicesResponse
from httpx import ConnectError
import pytest
from homeassistant.components.elevenlabs.const import CONF_MODEL, CONF_VOICE
@@ -34,21 +35,55 @@ def _client_mock():
@pytest.fixture
def mock_async_client() -> Generator[AsyncMock]:
"""Override async ElevenLabs client."""
with patch(
"homeassistant.components.elevenlabs.config_flow.AsyncElevenLabs",
return_value=_client_mock(),
) as mock_async_client:
with (
patch(
"homeassistant.components.elevenlabs.AsyncElevenLabs",
return_value=_client_mock(),
) as mock_async_client,
patch(
"homeassistant.components.elevenlabs.config_flow.AsyncElevenLabs",
new=mock_async_client,
),
):
yield mock_async_client
@pytest.fixture
def mock_async_client_fail() -> Generator[AsyncMock]:
def mock_async_client_api_error() -> Generator[AsyncMock]:
"""Override async ElevenLabs client with ApiError side effect."""
client_mock = _client_mock()
client_mock.models.get_all.side_effect = ApiError
client_mock.voices.get_all.side_effect = ApiError
with (
patch(
"homeassistant.components.elevenlabs.AsyncElevenLabs",
return_value=client_mock,
) as mock_async_client,
patch(
"homeassistant.components.elevenlabs.config_flow.AsyncElevenLabs",
new=mock_async_client,
),
):
yield mock_async_client
@pytest.fixture
def mock_async_client_connect_error() -> Generator[AsyncMock]:
"""Override async ElevenLabs client."""
with patch(
"homeassistant.components.elevenlabs.config_flow.AsyncElevenLabs",
return_value=_client_mock(),
) as mock_async_client:
mock_async_client.side_effect = ApiError
client_mock = _client_mock()
client_mock.models.get_all.side_effect = ConnectError("Unknown")
client_mock.voices.get_all.side_effect = ConnectError("Unknown")
with (
patch(
"homeassistant.components.elevenlabs.AsyncElevenLabs",
return_value=client_mock,
) as mock_async_client,
patch(
"homeassistant.components.elevenlabs.config_flow.AsyncElevenLabs",
new=mock_async_client,
),
):
yield mock_async_client
@@ -2,6 +2,8 @@
from unittest.mock import AsyncMock
import pytest
from homeassistant.components.elevenlabs.const import (
CONF_CONFIGURE_VOICE,
CONF_MODEL,
@@ -56,7 +58,10 @@ async def test_user_step(
async def test_invalid_api_key(
hass: HomeAssistant, mock_setup_entry: AsyncMock, mock_async_client_fail: AsyncMock
hass: HomeAssistant,
mock_setup_entry: AsyncMock,
mock_async_client_api_error: AsyncMock,
request: pytest.FixtureRequest,
) -> None:
"""Test user step with invalid api key."""
@@ -77,8 +82,8 @@ async def test_invalid_api_key(
mock_setup_entry.assert_not_called()
# Reset the side effect
mock_async_client_fail.side_effect = None
# Use a working client
request.getfixturevalue("mock_async_client")
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
+36
View File
@@ -0,0 +1,36 @@
"""Tests for the ElevenLabs TTS entity."""
from __future__ import annotations
from unittest.mock import MagicMock
from homeassistant.config_entries import ConfigEntryState
from homeassistant.core import HomeAssistant
from tests.common import MockConfigEntry
async def test_setup(
hass: HomeAssistant,
mock_async_client: MagicMock,
mock_entry: MockConfigEntry,
) -> None:
"""Test entry setup without any exceptions."""
mock_entry.add_to_hass(hass)
await hass.config_entries.async_setup(mock_entry.entry_id)
assert mock_entry.state == ConfigEntryState.LOADED
# Unload
await hass.config_entries.async_unload(mock_entry.entry_id)
assert mock_entry.state == ConfigEntryState.NOT_LOADED
async def test_setup_connect_error(
hass: HomeAssistant,
mock_async_client_connect_error: MagicMock,
mock_entry: MockConfigEntry,
) -> None:
"""Test entry setup with a connection error."""
mock_entry.add_to_hass(hass)
await hass.config_entries.async_setup(mock_entry.entry_id)
# Ensure is not ready
assert mock_entry.state == ConfigEntryState.SETUP_RETRY
+18
View File
@@ -11,6 +11,7 @@ from freezegun.api import FrozenDateTimeFactory
import pytest
from homeassistant.components.feedreader.const import DOMAIN
from homeassistant.config_entries import ConfigEntryState
from homeassistant.core import Event, HomeAssistant
from homeassistant.helpers import device_registry as dr
import homeassistant.util.dt as dt_util
@@ -52,6 +53,23 @@ async def test_setup(
assert not events
async def test_setup_error(
hass: HomeAssistant,
feed_one_event,
) -> None:
"""Test setup error."""
entry = create_mock_entry(VALID_CONFIG_DEFAULT)
entry.add_to_hass(hass)
with patch(
"homeassistant.components.feedreader.coordinator.feedparser.http.get"
) as feedreader:
feedreader.side_effect = urllib.error.URLError("Test")
feedreader.return_value = feed_one_event
await hass.config_entries.async_setup(entry.entry_id)
assert entry.state is ConfigEntryState.SETUP_RETRY
async def test_storage_data_writing(
hass: HomeAssistant,
events: list[Event],
-8
View File
@@ -5,7 +5,6 @@ from __future__ import annotations
from collections.abc import AsyncGenerator
import json
from typing import Any
from unittest.mock import patch
from pynordpool import API, NordPoolClient
import pytest
@@ -20,13 +19,6 @@ from tests.common import MockConfigEntry, load_fixture
from tests.test_util.aiohttp import AiohttpClientMocker
@pytest.fixture(autouse=True)
async def no_sleep() -> AsyncGenerator[None]:
"""No sleeping."""
with patch("homeassistant.components.nordpool.coordinator.asyncio.sleep"):
yield
@pytest.fixture
async def load_int(hass: HomeAssistant, get_client: NordPoolClient) -> MockConfigEntry:
"""Set up the Nord Pool integration in Home Assistant."""
@@ -0,0 +1,229 @@
{
"deliveryDateCET": "2024-11-05",
"version": 2,
"updatedAt": "2024-11-04T11:58:10.7711584Z",
"deliveryAreas": ["NL"],
"market": "DayAhead",
"multiAreaEntries": [
{
"deliveryStart": "2024-11-04T23:00:00Z",
"deliveryEnd": "2024-11-05T00:00:00Z",
"entryPerArea": {
"NL": 83.63
}
},
{
"deliveryStart": "2024-11-05T00:00:00Z",
"deliveryEnd": "2024-11-05T01:00:00Z",
"entryPerArea": {
"NL": 94.0
}
},
{
"deliveryStart": "2024-11-05T01:00:00Z",
"deliveryEnd": "2024-11-05T02:00:00Z",
"entryPerArea": {
"NL": 90.68
}
},
{
"deliveryStart": "2024-11-05T02:00:00Z",
"deliveryEnd": "2024-11-05T03:00:00Z",
"entryPerArea": {
"NL": 91.3
}
},
{
"deliveryStart": "2024-11-05T03:00:00Z",
"deliveryEnd": "2024-11-05T04:00:00Z",
"entryPerArea": {
"NL": 94.0
}
},
{
"deliveryStart": "2024-11-05T04:00:00Z",
"deliveryEnd": "2024-11-05T05:00:00Z",
"entryPerArea": {
"NL": 96.09
}
},
{
"deliveryStart": "2024-11-05T05:00:00Z",
"deliveryEnd": "2024-11-05T06:00:00Z",
"entryPerArea": {
"NL": 106.0
}
},
{
"deliveryStart": "2024-11-05T06:00:00Z",
"deliveryEnd": "2024-11-05T07:00:00Z",
"entryPerArea": {
"NL": 135.99
}
},
{
"deliveryStart": "2024-11-05T07:00:00Z",
"deliveryEnd": "2024-11-05T08:00:00Z",
"entryPerArea": {
"NL": 136.21
}
},
{
"deliveryStart": "2024-11-05T08:00:00Z",
"deliveryEnd": "2024-11-05T09:00:00Z",
"entryPerArea": {
"NL": 118.23
}
},
{
"deliveryStart": "2024-11-05T09:00:00Z",
"deliveryEnd": "2024-11-05T10:00:00Z",
"entryPerArea": {
"NL": 105.87
}
},
{
"deliveryStart": "2024-11-05T10:00:00Z",
"deliveryEnd": "2024-11-05T11:00:00Z",
"entryPerArea": {
"NL": 95.28
}
},
{
"deliveryStart": "2024-11-05T11:00:00Z",
"deliveryEnd": "2024-11-05T12:00:00Z",
"entryPerArea": {
"NL": 94.92
}
},
{
"deliveryStart": "2024-11-05T12:00:00Z",
"deliveryEnd": "2024-11-05T13:00:00Z",
"entryPerArea": {
"NL": 99.25
}
},
{
"deliveryStart": "2024-11-05T13:00:00Z",
"deliveryEnd": "2024-11-05T14:00:00Z",
"entryPerArea": {
"NL": 107.98
}
},
{
"deliveryStart": "2024-11-05T14:00:00Z",
"deliveryEnd": "2024-11-05T15:00:00Z",
"entryPerArea": {
"NL": 149.86
}
},
{
"deliveryStart": "2024-11-05T15:00:00Z",
"deliveryEnd": "2024-11-05T16:00:00Z",
"entryPerArea": {
"NL": 303.24
}
},
{
"deliveryStart": "2024-11-05T16:00:00Z",
"deliveryEnd": "2024-11-05T17:00:00Z",
"entryPerArea": {
"NL": 472.99
}
},
{
"deliveryStart": "2024-11-05T17:00:00Z",
"deliveryEnd": "2024-11-05T18:00:00Z",
"entryPerArea": {
"NL": 431.02
}
},
{
"deliveryStart": "2024-11-05T18:00:00Z",
"deliveryEnd": "2024-11-05T19:00:00Z",
"entryPerArea": {
"NL": 320.33
}
},
{
"deliveryStart": "2024-11-05T19:00:00Z",
"deliveryEnd": "2024-11-05T20:00:00Z",
"entryPerArea": {
"NL": 169.7
}
},
{
"deliveryStart": "2024-11-05T20:00:00Z",
"deliveryEnd": "2024-11-05T21:00:00Z",
"entryPerArea": {
"NL": 129.9
}
},
{
"deliveryStart": "2024-11-05T21:00:00Z",
"deliveryEnd": "2024-11-05T22:00:00Z",
"entryPerArea": {
"NL": 117.77
}
},
{
"deliveryStart": "2024-11-05T22:00:00Z",
"deliveryEnd": "2024-11-05T23:00:00Z",
"entryPerArea": {
"NL": 110.03
}
}
],
"blockPriceAggregates": [
{
"blockName": "Off-peak 1",
"deliveryStart": "2024-11-04T23:00:00Z",
"deliveryEnd": "2024-11-05T07:00:00Z",
"averagePricePerArea": {
"NL": {
"average": 98.96,
"min": 83.63,
"max": 135.99
}
}
},
{
"blockName": "Peak",
"deliveryStart": "2024-11-05T07:00:00Z",
"deliveryEnd": "2024-11-05T19:00:00Z",
"averagePricePerArea": {
"NL": {
"average": 202.93,
"min": 94.92,
"max": 472.99
}
}
},
{
"blockName": "Off-peak 2",
"deliveryStart": "2024-11-05T19:00:00Z",
"deliveryEnd": "2024-11-05T23:00:00Z",
"averagePricePerArea": {
"NL": {
"average": 131.85,
"min": 110.03,
"max": 169.7
}
}
}
],
"currency": "EUR",
"exchangeRate": 1,
"areaStates": [
{
"state": "Final",
"areas": ["NL"]
}
],
"areaAverages": [
{
"areaCode": "NL",
"price": 156.43
}
]
}
@@ -55,7 +55,7 @@ async def test_coordinator(
freezer.tick(timedelta(hours=1))
async_fire_time_changed(hass)
await hass.async_block_till_done(wait_background_tasks=True)
assert mock_data.call_count == 4
assert mock_data.call_count == 1
state = hass.states.get("sensor.nord_pool_se3_current_price")
assert state.state == STATE_UNAVAILABLE
@@ -69,7 +69,7 @@ async def test_coordinator(
freezer.tick(timedelta(hours=1))
async_fire_time_changed(hass)
await hass.async_block_till_done(wait_background_tasks=True)
assert mock_data.call_count == 4
assert mock_data.call_count == 1
state = hass.states.get("sensor.nord_pool_se3_current_price")
assert state.state == STATE_UNAVAILABLE
assert "Authentication error" in caplog.text
@@ -84,7 +84,8 @@ async def test_coordinator(
freezer.tick(timedelta(hours=1))
async_fire_time_changed(hass)
await hass.async_block_till_done(wait_background_tasks=True)
assert mock_data.call_count == 4
# Empty responses does not raise
assert mock_data.call_count == 3
state = hass.states.get("sensor.nord_pool_se3_current_price")
assert state.state == STATE_UNAVAILABLE
assert "Empty response" in caplog.text
@@ -99,7 +100,7 @@ async def test_coordinator(
freezer.tick(timedelta(hours=1))
async_fire_time_changed(hass)
await hass.async_block_till_done(wait_background_tasks=True)
assert mock_data.call_count == 4
assert mock_data.call_count == 1
state = hass.states.get("sensor.nord_pool_se3_current_price")
assert state.state == STATE_UNAVAILABLE
assert "Response error" in caplog.text
+105 -2
View File
@@ -2,9 +2,11 @@
from __future__ import annotations
import json
from unittest.mock import patch
from pynordpool import (
API,
NordPoolClient,
NordPoolConnectionError,
NordPoolEmptyResponseError,
@@ -13,13 +15,17 @@ from pynordpool import (
)
import pytest
from homeassistant.components.nordpool.const import DOMAIN
from homeassistant.components.nordpool.const import CONF_AREAS, DOMAIN
from homeassistant.config_entries import SOURCE_USER, ConfigEntryState
from homeassistant.const import CONF_CURRENCY
from homeassistant.core import HomeAssistant
from homeassistant.data_entry_flow import FlowResultType
from homeassistant.helpers import device_registry as dr, entity_registry as er
from . import ENTRY_CONFIG
from tests.common import MockConfigEntry
from tests.common import MockConfigEntry, load_fixture
from tests.test_util.aiohttp import AiohttpClientMocker
@pytest.mark.freeze_time("2024-11-05T10:00:00+00:00")
@@ -71,3 +77,100 @@ async def test_initial_startup_fails(
await hass.async_block_till_done(wait_background_tasks=True)
assert entry.state is ConfigEntryState.SETUP_RETRY
@pytest.mark.freeze_time("2024-11-05T10:00:00+00:00")
async def test_reconfigure_cleans_up_device(
hass: HomeAssistant,
aioclient_mock: AiohttpClientMocker,
get_client: NordPoolClient,
device_registry: dr.DeviceRegistry,
entity_registry: er.EntityRegistry,
) -> None:
"""Test clean up devices due to reconfiguration."""
nl_json_file = load_fixture("delivery_period_nl.json", DOMAIN)
load_nl_json = json.loads(nl_json_file)
entry = MockConfigEntry(
domain=DOMAIN,
source=SOURCE_USER,
data=ENTRY_CONFIG,
)
entry.add_to_hass(hass)
await hass.config_entries.async_setup(entry.entry_id)
await hass.async_block_till_done(wait_background_tasks=True)
assert entry.state is ConfigEntryState.LOADED
assert device_registry.async_get_device(identifiers={(DOMAIN, "SE3")})
assert device_registry.async_get_device(identifiers={(DOMAIN, "SE4")})
assert entity_registry.async_get("sensor.nord_pool_se3_current_price")
assert entity_registry.async_get("sensor.nord_pool_se4_current_price")
assert hass.states.get("sensor.nord_pool_se3_current_price")
assert hass.states.get("sensor.nord_pool_se4_current_price")
aioclient_mock.clear_requests()
aioclient_mock.request(
"GET",
url=API + "/DayAheadPrices",
params={
"date": "2024-11-04",
"market": "DayAhead",
"deliveryArea": "NL",
"currency": "EUR",
},
json=load_nl_json,
)
aioclient_mock.request(
"GET",
url=API + "/DayAheadPrices",
params={
"date": "2024-11-05",
"market": "DayAhead",
"deliveryArea": "NL",
"currency": "EUR",
},
json=load_nl_json,
)
aioclient_mock.request(
"GET",
url=API + "/DayAheadPrices",
params={
"date": "2024-11-06",
"market": "DayAhead",
"deliveryArea": "NL",
"currency": "EUR",
},
json=load_nl_json,
)
result = await entry.start_reconfigure_flow(hass)
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{
CONF_AREAS: ["NL"],
CONF_CURRENCY: "EUR",
},
)
assert result["type"] is FlowResultType.ABORT
assert result["reason"] == "reconfigure_successful"
assert entry.data == {
"areas": [
"NL",
],
"currency": "EUR",
}
await hass.async_block_till_done(wait_background_tasks=True)
assert device_registry.async_get_device(identifiers={(DOMAIN, "NL")})
assert entity_registry.async_get("sensor.nord_pool_nl_current_price")
assert hass.states.get("sensor.nord_pool_nl_current_price")
assert not device_registry.async_get_device(identifiers={(DOMAIN, "SE3")})
assert not entity_registry.async_get("sensor.nord_pool_se3_current_price")
assert not hass.states.get("sensor.nord_pool_se3_current_price")
assert not device_registry.async_get_device(identifiers={(DOMAIN, "SE4")})
assert not entity_registry.async_get("sensor.nord_pool_se4_current_price")
assert not hass.states.get("sensor.nord_pool_se4_current_price")
+138 -1
View File
@@ -2,14 +2,22 @@
from __future__ import annotations
from datetime import timedelta
from http import HTTPStatus
from typing import Any
from freezegun.api import FrozenDateTimeFactory
from pynordpool import API
import pytest
from syrupy.assertion import SnapshotAssertion
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import STATE_UNKNOWN
from homeassistant.core import HomeAssistant
from homeassistant.helpers import entity_registry as er
from tests.common import snapshot_platform
from tests.common import async_fire_time_changed, snapshot_platform
from tests.test_util.aiohttp import AiohttpClientMocker
@pytest.mark.freeze_time("2024-11-05T18:00:00+00:00")
@@ -59,3 +67,132 @@ async def test_sensor_no_previous_price(
assert current_price.state == "0.12666" # SE3 2024-11-05T23:00:00Z
assert last_price.state == "0.28914" # SE3 2024-11-05T22:00:00Z
assert next_price.state == "0.07406" # SE3 2024-11-06T00:00:00Z
@pytest.mark.freeze_time("2024-11-05T11:00:01+01:00")
@pytest.mark.usefixtures("entity_registry_enabled_by_default")
async def test_sensor_empty_response(
hass: HomeAssistant,
load_int: ConfigEntry,
load_json: list[dict[str, Any]],
aioclient_mock: AiohttpClientMocker,
freezer: FrozenDateTimeFactory,
) -> None:
"""Test the Nord Pool sensor with empty response."""
responses = list(load_json)
current_price = hass.states.get("sensor.nord_pool_se3_current_price")
last_price = hass.states.get("sensor.nord_pool_se3_previous_price")
next_price = hass.states.get("sensor.nord_pool_se3_next_price")
assert current_price is not None
assert last_price is not None
assert next_price is not None
assert current_price.state == "0.92737"
assert last_price.state == "1.03132"
assert next_price.state == "0.92505"
aioclient_mock.clear_requests()
aioclient_mock.request(
"GET",
url=API + "/DayAheadPrices",
params={
"date": "2024-11-04",
"market": "DayAhead",
"deliveryArea": "SE3,SE4",
"currency": "SEK",
},
json=responses[1],
)
aioclient_mock.request(
"GET",
url=API + "/DayAheadPrices",
params={
"date": "2024-11-05",
"market": "DayAhead",
"deliveryArea": "SE3,SE4",
"currency": "SEK",
},
json=responses[0],
)
# Future date without data should return 204
aioclient_mock.request(
"GET",
url=API + "/DayAheadPrices",
params={
"date": "2024-11-06",
"market": "DayAhead",
"deliveryArea": "SE3,SE4",
"currency": "SEK",
},
status=HTTPStatus.NO_CONTENT,
)
freezer.tick(timedelta(hours=1))
async_fire_time_changed(hass)
await hass.async_block_till_done(wait_background_tasks=True)
# All prices should be known as tomorrow is not loaded by sensors
current_price = hass.states.get("sensor.nord_pool_se3_current_price")
last_price = hass.states.get("sensor.nord_pool_se3_previous_price")
next_price = hass.states.get("sensor.nord_pool_se3_next_price")
assert current_price is not None
assert last_price is not None
assert next_price is not None
assert current_price.state == "0.92505"
assert last_price.state == "0.92737"
assert next_price.state == "0.94949"
aioclient_mock.clear_requests()
aioclient_mock.request(
"GET",
url=API + "/DayAheadPrices",
params={
"date": "2024-11-04",
"market": "DayAhead",
"deliveryArea": "SE3,SE4",
"currency": "SEK",
},
json=responses[1],
)
aioclient_mock.request(
"GET",
url=API + "/DayAheadPrices",
params={
"date": "2024-11-05",
"market": "DayAhead",
"deliveryArea": "SE3,SE4",
"currency": "SEK",
},
json=responses[0],
)
# Future date without data should return 204
aioclient_mock.request(
"GET",
url=API + "/DayAheadPrices",
params={
"date": "2024-11-06",
"market": "DayAhead",
"deliveryArea": "SE3,SE4",
"currency": "SEK",
},
status=HTTPStatus.NO_CONTENT,
)
freezer.move_to("2024-11-05T22:00:01+00:00")
async_fire_time_changed(hass)
await hass.async_block_till_done(wait_background_tasks=True)
# Current and last price should be known, next price should be unknown
# as api responds with empty data (204)
current_price = hass.states.get("sensor.nord_pool_se3_current_price")
last_price = hass.states.get("sensor.nord_pool_se3_previous_price")
next_price = hass.states.get("sensor.nord_pool_se3_next_price")
assert current_price is not None
assert last_price is not None
assert next_price is not None
assert current_price.state == "0.28914"
assert last_price.state == "0.5223"
assert next_price.state == STATE_UNKNOWN
+2 -2
View File
@@ -51,8 +51,8 @@ async def test_chat(
assert args["model"] == "test model"
assert args["messages"] == [
Message({"role": "system", "content": prompt}),
Message({"role": "user", "content": "test message"}),
Message(role="system", content=prompt),
Message(role="user", content="test message"),
]
assert (
@@ -10,6 +10,7 @@ from homeassistant.components.pegel_online.const import (
DOMAIN,
MIN_TIME_BETWEEN_UPDATES,
)
from homeassistant.config_entries import ConfigEntryState
from homeassistant.const import STATE_UNAVAILABLE
from homeassistant.core import HomeAssistant
from homeassistant.util import utcnow
@@ -24,6 +25,27 @@ from .const import (
from tests.common import MockConfigEntry, async_fire_time_changed
async def test_setup_error(
hass: HomeAssistant, caplog: pytest.LogCaptureFixture
) -> None:
"""Tests error during config entry setup."""
entry = MockConfigEntry(
domain=DOMAIN,
data=MOCK_CONFIG_ENTRY_DATA_DRESDEN,
unique_id=MOCK_CONFIG_ENTRY_DATA_DRESDEN[CONF_STATION],
)
entry.add_to_hass(hass)
with patch("homeassistant.components.pegel_online.PegelOnline") as pegelonline:
pegelonline.return_value = PegelOnlineMock(
station_details=MOCK_STATION_DETAILS_DRESDEN,
station_measurements=MOCK_STATION_MEASUREMENT_DRESDEN,
)
pegelonline().override_side_effect(ClientError("Boom"))
await hass.config_entries.async_setup(entry.entry_id)
assert entry.state is ConfigEntryState.SETUP_RETRY
async def test_update_error(
hass: HomeAssistant, caplog: pytest.LogCaptureFixture
) -> None:
@@ -2142,3 +2142,143 @@ async def test_stats_migrate_times(
)
await hass.async_stop()
@pytest.mark.parametrize("persistent_database", [True])
@pytest.mark.usefixtures("hass_storage") # Prevent test hass from writing to storage
async def test_cleanup_unmigrated_state_timestamps(
async_test_recorder: RecorderInstanceGenerator,
) -> None:
"""Ensure schema 48 migration cleans up any unmigrated state timestamps."""
importlib.import_module(SCHEMA_MODULE_32)
old_db_schema = sys.modules[SCHEMA_MODULE_32]
test_uuid = uuid.uuid4()
uuid_hex = test_uuid.hex
def _object_as_dict(obj):
return {c.key: getattr(obj, c.key) for c in inspect(obj).mapper.column_attrs}
def _insert_states():
with session_scope(hass=hass) as session:
state1 = old_db_schema.States(
entity_id="state.test_state1",
last_updated=datetime.datetime(
2016, 10, 28, 20, 13, 52, 452529, tzinfo=datetime.UTC
),
last_updated_ts=None,
last_changed=datetime.datetime(
2016, 10, 28, 20, 13, 52, 452529, tzinfo=datetime.UTC
),
last_changed_ts=None,
context_id=uuid_hex,
context_id_bin=None,
context_user_id=None,
context_user_id_bin=None,
context_parent_id=None,
context_parent_id_bin=None,
)
state2 = old_db_schema.States(
entity_id="state.test_state2",
last_updated=datetime.datetime(
2016, 10, 28, 20, 13, 52, 552529, tzinfo=datetime.UTC
),
last_updated_ts=None,
last_changed=datetime.datetime(
2016, 10, 28, 20, 13, 52, 452529, tzinfo=datetime.UTC
),
last_changed_ts=None,
context_id=None,
context_id_bin=None,
context_user_id=None,
context_user_id_bin=None,
context_parent_id=None,
context_parent_id_bin=None,
)
session.add_all((state1, state2))
# There is a default of now() for last_updated_ts so make sure it's not set
session.query(old_db_schema.States).update(
{old_db_schema.States.last_updated_ts: None}
)
state3 = old_db_schema.States(
entity_id="state.already_migrated",
last_updated=None,
last_updated_ts=1477685632.452529,
last_changed=None,
last_changed_ts=1477685632.452529,
context_id=uuid_hex,
context_id_bin=None,
context_user_id=None,
context_user_id_bin=None,
context_parent_id=None,
context_parent_id_bin=None,
)
session.add_all((state3,))
with session_scope(hass=hass, read_only=True) as session:
states = session.query(old_db_schema.States).all()
assert len(states) == 3
# Create database with old schema
with (
patch.object(recorder, "db_schema", old_db_schema),
patch.object(migration, "SCHEMA_VERSION", old_db_schema.SCHEMA_VERSION),
patch(CREATE_ENGINE_TARGET, new=_create_engine_test),
):
async with (
async_test_home_assistant() as hass,
async_test_recorder(hass) as instance,
):
await instance.async_add_executor_job(_insert_states)
await async_wait_recording_done(hass)
now = dt_util.utcnow()
await _async_wait_migration_done(hass)
await async_wait_recording_done(hass)
await hass.async_stop()
await hass.async_block_till_done()
def _fetch_migrated_states():
with session_scope(hass=hass) as session:
states = session.query(States).all()
assert len(states) == 3
return {state.state_id: _object_as_dict(state) for state in states}
# Run again with new schema, let migration run
async with async_test_home_assistant() as hass:
with (
freeze_time(now),
instrument_migration(hass) as instrumented_migration,
):
async with async_test_recorder(
hass, wait_recorder=False, wait_recorder_setup=False
) as instance:
# Check the context ID migrator is considered non-live
assert recorder.util.async_migration_is_live(hass) is False
instrumented_migration.migration_stall.set()
instance.recorder_and_worker_thread_ids.add(threading.get_ident())
await hass.async_block_till_done()
await async_wait_recording_done(hass)
await async_wait_recording_done(hass)
states_by_metadata_id = await instance.async_add_executor_job(
_fetch_migrated_states
)
await hass.async_stop()
await hass.async_block_till_done()
assert len(states_by_metadata_id) == 3
for state in states_by_metadata_id.values():
assert state["last_updated_ts"] is not None
by_entity_id = {
state["entity_id"]: state for state in states_by_metadata_id.values()
}
assert by_entity_id["state.test_state1"]["last_updated_ts"] == 1477685632.452529
assert by_entity_id["state.test_state2"]["last_updated_ts"] == 1477685632.552529
assert (
by_entity_id["state.already_migrated"]["last_updated_ts"] == 1477685632.452529
)
@@ -23,8 +23,7 @@
"platform": 2,
"transfers": 0,
"duration": "10",
"delay": 0,
"line": "T10"
"delay": 0
},
{
"departure": "2024-01-06T18:06:00+0100",
+41 -1
View File
@@ -5,7 +5,7 @@ import socket
from unittest.mock import Mock, patch
from freezegun.api import FrozenDateTimeFactory
from psutil._common import sdiskusage, shwtemp, snetio, snicaddr
from psutil._common import sdiskpart, sdiskusage, shwtemp, snetio, snicaddr
import pytest
from syrupy.assertion import SnapshotAssertion
@@ -504,3 +504,43 @@ async def test_remove_obsolete_entities(
entity_registry.async_get("sensor.systemmonitor_network_out_veth54321")
is not None
)
@pytest.mark.usefixtures("entity_registry_enabled_by_default")
async def test_no_duplicate_disk_entities(
hass: HomeAssistant,
mock_psutil: Mock,
mock_os: Mock,
caplog: pytest.LogCaptureFixture,
) -> None:
"""Test the sensor."""
mock_psutil.disk_usage.return_value = sdiskusage(
500 * 1024**3, 300 * 1024**3, 200 * 1024**3, 60.0
)
mock_psutil.disk_partitions.return_value = [
sdiskpart("test", "/", "ext4", ""),
sdiskpart("test2", "/media/share", "ext4", ""),
sdiskpart("test3", "/incorrect", "", ""),
sdiskpart("test4", "/media/frigate", "ext4", ""),
sdiskpart("test4", "/media/FRIGATE", "ext4", ""),
sdiskpart("hosts", "/etc/hosts", "bind", ""),
sdiskpart("proc", "/proc/run", "proc", ""),
]
mock_config_entry = MockConfigEntry(
title="System Monitor",
domain=DOMAIN,
data={},
options={
"binary_sensor": {"process": ["python3", "pip"]},
},
)
mock_config_entry.add_to_hass(hass)
await hass.config_entries.async_setup(mock_config_entry.entry_id)
await hass.async_block_till_done()
disk_sensor = hass.states.get("sensor.system_monitor_disk_usage_media_frigate")
assert disk_sensor is not None
assert disk_sensor.state == "60.0"
assert "Platform systemmonitor does not generate unique IDs." not in caplog.text