Compare commits

...

66 Commits

Author SHA1 Message Date
Franck Nijhof
3e6bc29a6a 2026.2.2 (#162950) 2026-02-13 21:05:06 +01:00
Franck Nijhof
ec8067a5a8 Bump version to 2026.2.2 2026-02-13 19:25:16 +00:00
Josef Zweck
6f47716d0a Log remaining token duration in onedrive (#162933) 2026-02-13 19:24:25 +00:00
puddly
efba5c6bcc Bump ZHA to 0.0.90 (#162894) 2026-02-13 19:24:24 +00:00
Sammy [Andrei Marinache]
d10e78079f Add Miele TQ1000WP tumble dryer programs and program phases (#162871)
Co-authored-by: Joost Lekkerkerker <joostlek@outlook.com>
Co-authored-by: Åke Strandberg <ake@strandberg.eu>
2026-02-13 19:24:23 +00:00
Jon Seager
6d4581580f Bump pytouchlinesl to 0.6.0 (#162856) 2026-02-13 19:24:21 +00:00
Yoshi Walsh
0d9a41a540 Bump pydaikin to 2.17.2 (#162846) 2026-02-13 19:24:20 +00:00
Vicx
cd69e6db73 Bump slixmpp to 1.13.2 (#162837) 2026-02-13 19:24:19 +00:00
Xitee
1320367d0d Filter out transient zero values from qBittorrent alltime stats (#162821)
Co-authored-by: Claude Opus 4.6 <noreply@anthropic.com>
2026-02-13 19:24:18 +00:00
Joost Lekkerkerker
dfa4698887 Bump pySmartThings to 3.5.2 (#162809)
Co-authored-by: Josef Zweck <josef@zweck.dev>
2026-02-13 19:24:17 +00:00
Robert Resch
b426115de7 Bump cryptography to 46.0.5 (#162783) 2026-02-13 19:24:15 +00:00
hanwg
fb79fa37f8 Fix bug in edit_message_media action for Telegram bot (#162762) 2026-02-13 19:24:14 +00:00
Simone Chemelli
6a5f7bf424 Fix image platform state for Vodafone Station (#162747)
Co-authored-by: Joostlek <joostlek@outlook.com>
2026-02-13 19:24:13 +00:00
Simone Chemelli
142ca6dec1 Fix alarm refresh warning for Comelit SimpleHome (#162710) 2026-02-13 19:24:12 +00:00
epenet
0f986c24d0 Fix unavailable status in Tuya (#162709) 2026-02-13 19:24:11 +00:00
Josef Zweck
01f2b7b6f6 Bump onedrive-personal-sdk to 0.1.2 (#162689) 2026-02-13 19:24:09 +00:00
Michael
b9469027f5 Fix handling when FRITZ!Box reboots in FRITZ!Box Tools (#162679) 2026-02-13 19:24:08 +00:00
Tomás Correia
fbb94af748 fix to cloudflare r2 setup screen info (#162677) 2026-02-13 19:24:07 +00:00
Michael
148bdf6e3a Fix handling when FRITZ!Box reboots in FRITZ!Smarthome (#162676) 2026-02-13 19:24:05 +00:00
starkillerOG
91999f8871 Bump reolink-aio to 0.19.0 (#162672) 2026-02-13 19:24:04 +00:00
Jeef
aecca4eb99 Bump intellifire4py to 4.3.1 (#162659) 2026-02-13 19:24:03 +00:00
Allen Porter
bf8aa49bae Improve MCP SSE fallback error handling (#162655) 2026-02-13 19:24:02 +00:00
Joost Lekkerkerker
4423425683 Pin setuptools to 81.0.0 (#162589)
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2026-02-13 19:24:01 +00:00
Aaron Godfrey
44202da53d Increase max tasks retrieved per page to prevent timeout (#162587) 2026-02-13 19:23:59 +00:00
Thomas55555
9f7dfb72c4 Bump aioautomower to 2.7.3 (#162583)
Co-authored-by: Joost Lekkerkerker <joostlek@outlook.com>
2026-02-13 19:23:58 +00:00
Michael
de07a69e4f Bump aioimmich to 0.12.0 (#162573)
Co-authored-by: Joost Lekkerkerker <joostlek@outlook.com>
2026-02-13 19:23:57 +00:00
Maikel Punie
bbf4c38115 migrate velbus config entries (#162565) 2026-02-13 19:23:56 +00:00
ElCruncharino
e1bb5d52ef Add timeout to B2 metadata downloads to prevent backup hang (#162562) 2026-02-13 19:23:54 +00:00
hanwg
eb64b6bdee Fix config flow bug for Telegram bot (#162555) 2026-02-13 19:23:53 +00:00
Andrea Turri
ecb288b735 Add new Miele mappings (#162544) 2026-02-13 19:23:52 +00:00
Norbert Rittel
a419c9c420 Sentence-case "speech-to-text" in google_cloud (#162534) 2026-02-13 19:23:51 +00:00
Brett Adams
dd29133324 Fix Tesla Fleet partner registration to use all regions (#162525)
Co-authored-by: Claude Opus 4.6 <noreply@anthropic.com>
2026-02-13 19:23:50 +00:00
Allen Porter
90f22ea516 Bump grpc to 1.78.0 (#162520) 2026-02-13 19:23:48 +00:00
Peter Grauvogel
9db1428265 Fix Green Planet Energy price unit conversion (#162511) 2026-02-13 19:23:47 +00:00
Denis Shulyaka
a696b05b0d Fix JSON serialization of time objects in Cloud conversation tool results (#162506) 2026-02-13 19:23:46 +00:00
Denis Shulyaka
77ddb63b73 Fix JSON serialization of time objects in Open Router tool results (#162505) 2026-02-13 19:23:44 +00:00
Denis Shulyaka
4180a6e176 Fix JSON serialization of time objects in Ollama tool results (#162502)
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2026-02-13 19:23:43 +00:00
Denis Shulyaka
6d74c912d2 Fix JSON serialization of datetime objects in Google Generative AI tool results (#162495) 2026-02-13 19:23:42 +00:00
Denis Shulyaka
8a01dfcc00 Fix JSON serialization of time objects in OpenAI tool results (#162490) 2026-02-13 19:23:40 +00:00
Brett Adams
9722898dc6 Fix device_class of backup reserve sensor in Tessie (#162459) 2026-02-13 19:23:39 +00:00
Brett Adams
7438c71fcb Fix device_class of backup reserve sensor in teslemetry (#162458) 2026-02-13 19:23:38 +00:00
Christian Lackas
0b5e55b923 Fix absolute humidity sensor on HmIP-WGT glass thermostats (#162455) 2026-02-13 19:23:37 +00:00
ElCruncharino
61ed959e8e Fix AsyncIteratorReader blocking after stream exhaustion (#161731) 2026-02-13 19:17:20 +00:00
Jaap Pieroen
3989532465 Bump essent-dynamic-pricing to 0.3.1 (#160958)
Co-authored-by: epenet <6771947+epenet@users.noreply.github.com>
2026-02-13 19:17:18 +00:00
Franck Nijhof
28027ddca4 2026.2.1 (#162450) 2026-02-06 22:44:07 +01:00
Franck Nijhof
fe0d7b3cca Bump version to 2026.2.1 2026-02-06 20:49:26 +00:00
jameson_uk
0dcc4e9527 dep: bump aioamazondevices to 11.1.3 (#162437) 2026-02-06 20:47:38 +00:00
Artur Pragacz
b13b189703 Make bad entity ID detection more lenient (#162425) 2026-02-06 20:47:37 +00:00
epenet
150829f599 Fix invalid yardian snaphots (#162422) 2026-02-06 20:47:36 +00:00
Joost Lekkerkerker
57dd9d9c23 Remove double unit of measurement for yardian (#162412) 2026-02-06 20:47:34 +00:00
Sab44
e2056cb12c Bump librehardwaremonitor-api to version 1.9.1 (#162409) 2026-02-06 20:47:33 +00:00
Joost Lekkerkerker
fa2c8992cf Remove entity id overwrite for ambient station (#162403) 2026-02-06 20:47:32 +00:00
Matt Zimmerman
ddf5c7fe3a Add missing config flow strings to SmartTub (#162375)
Co-authored-by: Cursor <cursoragent@cursor.com>
2026-02-06 20:47:31 +00:00
Matt Zimmerman
7034ed6d3f Bump python-smarttub to 0.0.47 (#162367)
Co-authored-by: Cursor <cursoragent@cursor.com>
2026-02-06 20:47:29 +00:00
Aaron Godfrey
9015b53c1b Fix conversion of data for todo.* actions (#162366) 2026-02-06 20:47:28 +00:00
Jordan Harvey
1cfa6561f7 Update pynintendoparental requirement to version 2.3.2.1 (#162362) 2026-02-06 20:47:27 +00:00
Shay Levy
eead02dcca Fix Shelly Linkedgo Thermostat status update (#162339) 2026-02-06 20:47:26 +00:00
Arie Catsman
456e51a221 Bump pyenphase to 2.4.5 (#162324) 2026-02-06 20:47:25 +00:00
Luo Chen
5d984ce186 Fix unicode escaping in MCP server tool response (#162319)
Co-authored-by: Franck Nijhof <git@frenck.dev>
Co-authored-by: Franck Nijhof <frenck@frenck.nl>
Co-authored-by: epenet <6771947+epenet@users.noreply.github.com>
2026-02-06 20:47:24 +00:00
Oliver
61f45489ac Add mapping for stopped state to denonavr media player (#162283) 2026-02-06 20:47:23 +00:00
Tomás Correia
f72c643b38 Fix multipart upload to use consistent part sizes for R2/S3 (#162278) 2026-02-06 20:47:22 +00:00
Oliver
27bc26e886 Bump denonavr to 1.3.2 (#162271) 2026-02-06 20:47:20 +00:00
Thomas55555
0e9f03cbc1 Bump google_air_quality_api to 3.0.1 (#162233) 2026-02-06 20:47:19 +00:00
David Bonnes
9480c33fb0 Bump evohome-async to 1.1.3 (#162232) 2026-02-06 20:47:18 +00:00
Jonathan
3e6b8663e8 Fix device_class of backup reserve sensor (#161178) 2026-02-06 20:47:17 +00:00
epenet
1c69a83793 Fix redundant off preset in Tuya climate (#161040) 2026-02-06 20:47:16 +00:00
121 changed files with 1941 additions and 536 deletions

View File

@@ -8,5 +8,5 @@
"iot_class": "cloud_polling",
"loggers": ["aioamazondevices"],
"quality_scale": "platinum",
"requirements": ["aioamazondevices==11.1.1"]
"requirements": ["aioamazondevices==11.1.3"]
}

View File

@@ -26,10 +26,9 @@ from homeassistant.const import (
UnitOfVolumetricFlux,
)
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.entity import EntityDescription
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from . import AmbientStation, AmbientStationConfigEntry
from . import AmbientStationConfigEntry
from .const import ATTR_LAST_DATA, TYPE_SOLARRADIATION, TYPE_SOLARRADIATION_LX
from .entity import AmbientWeatherEntity
@@ -683,22 +682,6 @@ async def async_setup_entry(
class AmbientWeatherSensor(AmbientWeatherEntity, SensorEntity):
"""Define an Ambient sensor."""
def __init__(
self,
ambient: AmbientStation,
mac_address: str,
station_name: str,
description: EntityDescription,
) -> None:
"""Initialize the sensor."""
super().__init__(ambient, mac_address, station_name, description)
if description.key == TYPE_SOLARRADIATION_LX:
# Since TYPE_SOLARRADIATION and TYPE_SOLARRADIATION_LX will have the same
# name in the UI, we influence the entity ID of TYPE_SOLARRADIATION_LX here
# to differentiate them:
self.entity_id = f"sensor.{station_name}_solar_rad_lx"
@callback
def update_from_latest_data(self) -> None:
"""Fetch new state data for the sensor."""

View File

@@ -16,12 +16,18 @@ CONNECTION_TIMEOUT = 120 # 2 minutes
# Default TIMEOUT_FOR_UPLOAD is 128 seconds, which is too short for large backups
TIMEOUT_FOR_UPLOAD = 43200 # 12 hours
# Reduced retry count for download operations
# Default is 20 retries with exponential backoff, which can hang for 30+ minutes
# when there are persistent connection errors (e.g., SSL failures)
TRY_COUNT_DOWNLOAD = 3
class B2Http(BaseB2Http): # type: ignore[misc]
"""B2Http with extended timeouts for backup operations."""
CONNECTION_TIMEOUT = CONNECTION_TIMEOUT
TIMEOUT_FOR_UPLOAD = TIMEOUT_FOR_UPLOAD
TRY_COUNT_DOWNLOAD = TRY_COUNT_DOWNLOAD
class B2Session(BaseB2Session): # type: ignore[misc]

View File

@@ -40,6 +40,10 @@ CACHE_TTL = 300
# This prevents uploads from hanging indefinitely
UPLOAD_TIMEOUT = 43200 # 12 hours (matches B2 HTTP timeout)
# Timeout for metadata download operations (in seconds)
# This prevents the backup system from hanging when B2 connections fail
METADATA_DOWNLOAD_TIMEOUT = 60
def suggested_filenames(backup: AgentBackup) -> tuple[str, str]:
"""Return the suggested filenames for the backup and metadata files."""
@@ -413,12 +417,21 @@ class BackblazeBackupAgent(BackupAgent):
backups = {}
for file_name, file_version in all_files_in_prefix.items():
if file_name.endswith(METADATA_FILE_SUFFIX):
backup = await self._hass.async_add_executor_job(
self._process_metadata_file_sync,
file_name,
file_version,
all_files_in_prefix,
)
try:
backup = await asyncio.wait_for(
self._hass.async_add_executor_job(
self._process_metadata_file_sync,
file_name,
file_version,
all_files_in_prefix,
),
timeout=METADATA_DOWNLOAD_TIMEOUT,
)
except TimeoutError:
_LOGGER.warning(
"Timeout downloading metadata file %s", file_name
)
continue
if backup:
backups[backup.backup_id] = backup
self._backup_list_cache = backups
@@ -442,10 +455,18 @@ class BackblazeBackupAgent(BackupAgent):
if not file or not metadata_file_version:
raise BackupNotFound(f"Backup {backup_id} not found")
metadata_content = await self._hass.async_add_executor_job(
self._download_and_parse_metadata_sync,
metadata_file_version,
)
try:
metadata_content = await asyncio.wait_for(
self._hass.async_add_executor_job(
self._download_and_parse_metadata_sync,
metadata_file_version,
),
timeout=METADATA_DOWNLOAD_TIMEOUT,
)
except TimeoutError:
raise BackupAgentError(
f"Timeout downloading metadata for backup {backup_id}"
) from None
_LOGGER.debug(
"Successfully retrieved metadata for backup ID %s from file %s",
@@ -468,16 +489,27 @@ class BackblazeBackupAgent(BackupAgent):
# Process metadata files sequentially to avoid exhausting executor pool
for file_name, file_version in all_files_in_prefix.items():
if file_name.endswith(METADATA_FILE_SUFFIX):
(
result_backup_file,
result_metadata_file_version,
) = await self._hass.async_add_executor_job(
self._process_metadata_file_for_id_sync,
file_name,
file_version,
backup_id,
all_files_in_prefix,
)
try:
(
result_backup_file,
result_metadata_file_version,
) = await asyncio.wait_for(
self._hass.async_add_executor_job(
self._process_metadata_file_for_id_sync,
file_name,
file_version,
backup_id,
all_files_in_prefix,
),
timeout=METADATA_DOWNLOAD_TIMEOUT,
)
except TimeoutError:
_LOGGER.warning(
"Timeout downloading metadata file %s while searching for backup %s",
file_name,
backup_id,
)
continue
if result_backup_file and result_metadata_file_version:
return result_backup_file, result_metadata_file_version
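
The pattern above bounds a blocking executor call with asyncio.wait_for so a stuck B2 connection cannot hang the backup listing. A minimal standalone sketch of that idea (hass and the 60-second limit mirror the diff; the download stub is hypothetical):

import asyncio
import logging

_LOGGER = logging.getLogger(__name__)

METADATA_DOWNLOAD_TIMEOUT = 60  # seconds, mirrors the constant introduced above


def _download_metadata_sync(file_name: str) -> dict:
    """Blocking download; stands in for the real B2 SDK call."""
    ...


async def fetch_metadata(hass, file_name: str) -> dict | None:
    """Run the blocking download in the executor, bounded by a timeout."""
    try:
        return await asyncio.wait_for(
            hass.async_add_executor_job(_download_metadata_sync, file_name),
            timeout=METADATA_DOWNLOAD_TIMEOUT,
        )
    except TimeoutError:
        _LOGGER.warning("Timeout downloading metadata file %s", file_name)
        return None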

View File

@@ -50,6 +50,7 @@ from homeassistant.config_entries import ConfigEntry
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import llm
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.json import json_dumps
from homeassistant.util import slugify
from .client import CloudClient
@@ -93,7 +94,7 @@ def _convert_content_to_param(
{
"type": "function_call_output",
"call_id": content.tool_call_id,
"output": json.dumps(content.tool_result),
"output": json_dumps(content.tool_result),
}
)
continue
@@ -125,7 +126,7 @@ def _convert_content_to_param(
{
"type": "function_call",
"name": tool_call.tool_name,
"arguments": json.dumps(tool_call.tool_args),
"arguments": json_dumps(tool_call.tool_args),
"call_id": tool_call.id,
}
)

View File

@@ -196,44 +196,46 @@ class R2BackupAgent(BackupAgent):
)
upload_id = multipart_upload["UploadId"]
try:
parts = []
parts: list[dict[str, Any]] = []
part_number = 1
buffer_size = 0 # bytes
buffer: list[bytes] = []
buffer = bytearray() # bytes buffer to store the data
stream = await open_stream()
async for chunk in stream:
buffer_size += len(chunk)
buffer.append(chunk)
buffer.extend(chunk)
# upload parts of exactly MULTIPART_MIN_PART_SIZE_BYTES to ensure
# all non-trailing parts have the same size (required by S3/R2)
while len(buffer) >= MULTIPART_MIN_PART_SIZE_BYTES:
part_data = bytes(buffer[:MULTIPART_MIN_PART_SIZE_BYTES])
del buffer[:MULTIPART_MIN_PART_SIZE_BYTES]
# If buffer size meets minimum part size, upload it as a part
if buffer_size >= MULTIPART_MIN_PART_SIZE_BYTES:
_LOGGER.debug(
"Uploading part number %d, size %d", part_number, buffer_size
"Uploading part number %d, size %d",
part_number,
len(part_data),
)
part = await self._client.upload_part(
Bucket=self._bucket,
Key=self._with_prefix(tar_filename),
PartNumber=part_number,
UploadId=upload_id,
Body=b"".join(buffer),
Body=part_data,
)
parts.append({"PartNumber": part_number, "ETag": part["ETag"]})
part_number += 1
buffer_size = 0
buffer = []
# Upload the final buffer as the last part (no minimum size requirement)
if buffer:
_LOGGER.debug(
"Uploading final part number %d, size %d", part_number, buffer_size
"Uploading final part number %d, size %d", part_number, len(buffer)
)
part = await self._client.upload_part(
Bucket=self._bucket,
Key=self._with_prefix(tar_filename),
PartNumber=part_number,
UploadId=upload_id,
Body=b"".join(buffer),
Body=bytes(buffer),
)
parts.append({"PartNumber": part_number, "ETag": part["ETag"]})
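
The rewrite keeps a single bytearray and slices off parts of exactly MULTIPART_MIN_PART_SIZE_BYTES, since S3/R2 require all non-trailing parts of a multipart upload to have the same size. A minimal sketch of just the buffering logic, with a hypothetical upload_part coroutine standing in for the aiobotocore client call and an assumed 5 MiB minimum:

from collections.abc import AsyncIterator, Awaitable, Callable

MULTIPART_MIN_PART_SIZE_BYTES = 5 * 1024 * 1024  # assumed minimum part size


async def upload_in_fixed_parts(
    stream: AsyncIterator[bytes],
    upload_part: Callable[[int, bytes], Awaitable[None]],
) -> None:
    """Emit equally sized parts from an async byte stream, plus one trailing part."""
    buffer = bytearray()
    part_number = 1
    async for chunk in stream:
        buffer.extend(chunk)
        # Slice off exact-size parts so every non-trailing part matches
        while len(buffer) >= MULTIPART_MIN_PART_SIZE_BYTES:
            part = bytes(buffer[:MULTIPART_MIN_PART_SIZE_BYTES])
            del buffer[:MULTIPART_MIN_PART_SIZE_BYTES]
            await upload_part(part_number, part)
            part_number += 1
    # Whatever is left becomes the final, possibly smaller, part
    if buffer:
        await upload_part(part_number, bytes(buffer))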

View File

@@ -19,11 +19,11 @@
"secret_access_key": "Secret access key"
},
"data_description": {
"access_key_id": "Access key ID to connect to Cloudflare R2 (this is your Account ID)",
"access_key_id": "Access key ID to connect to Cloudflare R2",
"bucket": "Bucket must already exist and be writable by the provided credentials.",
"endpoint_url": "Cloudflare R2 S3-compatible endpoint.",
"prefix": "Optional folder path inside the bucket. Example: backups/homeassistant",
"secret_access_key": "Secret access key to connect to Cloudflare R2. See [Docs]({auth_docs_url})"
"secret_access_key": "Secret access key to connect to Cloudflare R2. See [Cloudflare documentation]({auth_docs_url})"
},
"title": "Add Cloudflare R2 bucket"
}

View File

@@ -144,7 +144,7 @@ class ComelitAlarmEntity(
"""Update state after action."""
self._area.human_status = area_state
self._area.armed = armed
await self.async_update_ha_state()
self.async_write_ha_state()
async def async_alarm_disarm(self, code: str | None = None) -> None:
"""Send disarm command."""

View File

@@ -7,6 +7,6 @@
"integration_type": "device",
"iot_class": "local_polling",
"loggers": ["pydaikin"],
"requirements": ["pydaikin==2.17.1"],
"requirements": ["pydaikin==2.17.2"],
"zeroconf": ["_dkapi._tcp.local."]
}

View File

@@ -7,7 +7,7 @@
"integration_type": "device",
"iot_class": "local_push",
"loggers": ["denonavr"],
"requirements": ["denonavr==1.3.1"],
"requirements": ["denonavr==1.3.2"],
"ssdp": [
{
"deviceType": "urn:schemas-upnp-org:device:MediaRenderer:1",

View File

@@ -17,6 +17,7 @@ from denonavr.const import (
STATE_ON,
STATE_PAUSED,
STATE_PLAYING,
STATE_STOPPED,
)
from denonavr.exceptions import (
AvrCommandError,
@@ -103,6 +104,7 @@ DENON_STATE_MAPPING = {
STATE_OFF: MediaPlayerState.OFF,
STATE_PLAYING: MediaPlayerState.PLAYING,
STATE_PAUSED: MediaPlayerState.PAUSED,
STATE_STOPPED: MediaPlayerState.IDLE,
}

View File

@@ -8,7 +8,7 @@
"iot_class": "local_polling",
"loggers": ["pyenphase"],
"quality_scale": "platinum",
"requirements": ["pyenphase==2.4.3"],
"requirements": ["pyenphase==2.4.5"],
"zeroconf": [
{
"type": "_enphase-envoy._tcp.local."

View File

@@ -7,6 +7,6 @@
"integration_type": "service",
"iot_class": "cloud_polling",
"quality_scale": "silver",
"requirements": ["essent-dynamic-pricing==0.2.7"],
"requirements": ["essent-dynamic-pricing==0.3.1"],
"single_config_entry": true
}

View File

@@ -4,7 +4,7 @@
"codeowners": ["@zxdavb"],
"documentation": "https://www.home-assistant.io/integrations/evohome",
"iot_class": "cloud_polling",
"loggers": ["evohome", "evohomeasync", "evohomeasync2"],
"loggers": ["evohomeasync", "evohomeasync2"],
"quality_scale": "legacy",
"requirements": ["evohome-async==1.0.6"]
"requirements": ["evohome-async==1.1.3"]
}

View File

@@ -278,6 +278,12 @@ class FritzBoxTools(DataUpdateCoordinator[UpdateCoordinatorDataType]):
"call_deflections"
] = await self.async_update_call_deflections()
except FRITZ_EXCEPTIONS as ex:
_LOGGER.debug(
"Reload %s due to error '%s' to ensure proper re-login",
self.config_entry.title,
ex,
)
self.hass.config_entries.async_schedule_reload(self.config_entry.entry_id)
raise UpdateFailed(
translation_domain=DOMAIN,
translation_key="update_failed",

View File

@@ -2,6 +2,8 @@
from __future__ import annotations
from requests.exceptions import ConnectionError as RequestConnectionError, HTTPError
from homeassistant.components.binary_sensor import DOMAIN as BINARY_SENSOR_DOMAIN
from homeassistant.const import EVENT_HOMEASSISTANT_STOP, UnitOfTemperature
from homeassistant.core import Event, HomeAssistant
@@ -57,7 +59,10 @@ async def async_setup_entry(hass: HomeAssistant, entry: FritzboxConfigEntry) ->
async def async_unload_entry(hass: HomeAssistant, entry: FritzboxConfigEntry) -> bool:
"""Unloading the AVM FRITZ!SmartHome platforms."""
await hass.async_add_executor_job(entry.runtime_data.fritz.logout)
try:
await hass.async_add_executor_job(entry.runtime_data.fritz.logout)
except (RequestConnectionError, HTTPError) as ex:
LOGGER.debug("logout failed with '%s', anyway continue with unload", ex)
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)

View File

@@ -121,26 +121,11 @@ class FritzboxDataUpdateCoordinator(DataUpdateCoordinator[FritzboxCoordinatorDat
def _update_fritz_devices(self) -> FritzboxCoordinatorData:
"""Update all fritzbox device data."""
try:
self.fritz.update_devices(ignore_removed=False)
if self.has_templates:
self.fritz.update_templates(ignore_removed=False)
if self.has_triggers:
self.fritz.update_triggers(ignore_removed=False)
except RequestConnectionError as ex:
raise UpdateFailed from ex
except HTTPError:
# If the device rebooted, login again
try:
self.fritz.login()
except LoginError as ex:
raise ConfigEntryAuthFailed from ex
self.fritz.update_devices(ignore_removed=False)
if self.has_templates:
self.fritz.update_templates(ignore_removed=False)
if self.has_triggers:
self.fritz.update_triggers(ignore_removed=False)
self.fritz.update_devices(ignore_removed=False)
if self.has_templates:
self.fritz.update_templates(ignore_removed=False)
if self.has_triggers:
self.fritz.update_triggers(ignore_removed=False)
devices = self.fritz.get_devices()
device_data = {}
@@ -193,7 +178,18 @@ class FritzboxDataUpdateCoordinator(DataUpdateCoordinator[FritzboxCoordinatorDat
async def _async_update_data(self) -> FritzboxCoordinatorData:
"""Fetch all device data."""
new_data = await self.hass.async_add_executor_job(self._update_fritz_devices)
try:
new_data = await self.hass.async_add_executor_job(
self._update_fritz_devices
)
except (RequestConnectionError, HTTPError) as ex:
LOGGER.debug(
"Reload %s due to error '%s' to ensure proper re-login",
self.config_entry.title,
ex,
)
self.hass.config_entries.async_schedule_reload(self.config_entry.entry_id)
raise UpdateFailed from ex
for device in new_data.devices.values():
# create device registry entry for new main devices

View File

@@ -8,5 +8,5 @@
"iot_class": "cloud_polling",
"loggers": ["google_air_quality_api"],
"quality_scale": "bronze",
"requirements": ["google_air_quality_api==3.0.0"]
"requirements": ["google_air_quality_api==3.0.1"]
}

View File

@@ -23,7 +23,7 @@
"pitch": "Default pitch of the voice",
"profiles": "Default audio profiles",
"speed": "Default rate/speed of the voice",
"stt_model": "Speech-to-Text model",
"stt_model": "Speech-to-text model",
"text_type": "Default text type",
"voice": "Default voice name (overrides language and gender)"
}

View File

@@ -7,6 +7,7 @@ import base64
import codecs
from collections.abc import AsyncGenerator, AsyncIterator, Callable
from dataclasses import dataclass, replace
import datetime
import mimetypes
from pathlib import Path
from typing import TYPE_CHECKING, Any, Literal, cast
@@ -181,13 +182,25 @@ def _escape_decode(value: Any) -> Any:
return value
def _validate_tool_results(value: Any) -> Any:
"""Recursively convert non-json-serializable types."""
if isinstance(value, (datetime.time, datetime.date)):
return value.isoformat()
if isinstance(value, list):
return [_validate_tool_results(item) for item in value]
if isinstance(value, dict):
return {k: _validate_tool_results(v) for k, v in value.items()}
return value
def _create_google_tool_response_parts(
parts: list[conversation.ToolResultContent],
) -> list[Part]:
"""Create Google tool response parts."""
return [
Part.from_function_response(
name=tool_result.tool_name, response=tool_result.tool_result
name=tool_result.tool_name,
response=_validate_tool_results(tool_result.tool_result),
)
for tool_result in parts
]
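
The helper converts date and time values to ISO-8601 strings before the result is handed to the SDK, because those objects are not JSON serializable. A small self-contained sketch of the same idea (names here are illustrative, not the integration's):

import datetime
import json
from typing import Any


def to_json_safe(value: Any) -> Any:
    """Recursively replace date/time objects with their ISO-8601 strings."""
    if isinstance(value, (datetime.time, datetime.date)):
        # datetime.datetime is a subclass of datetime.date, so it is covered too
        return value.isoformat()
    if isinstance(value, list):
        return [to_json_safe(item) for item in value]
    if isinstance(value, dict):
        return {key: to_json_safe(val) for key, val in value.items()}
    return value


result = {"next_alarm": datetime.time(7, 30)}
# json.dumps(result) would raise TypeError; the converted copy serializes cleanly
print(json.dumps(to_json_safe(result)))  # {"next_alarm": "07:30:00"}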

View File

@@ -38,7 +38,11 @@ SENSOR_DESCRIPTIONS: list[GreenPlanetEnergySensorEntityDescription] = [
translation_key="highest_price_today",
native_unit_of_measurement=f"{CURRENCY_EURO}/{UnitOfEnergy.KILO_WATT_HOUR}",
suggested_display_precision=4,
value_fn=lambda api, data: api.get_highest_price_today(data),
value_fn=lambda api, data: (
price / 100
if (price := api.get_highest_price_today(data)) is not None
else None
),
),
GreenPlanetEnergySensorEntityDescription(
key="gpe_lowest_price_day",
@@ -46,7 +50,11 @@ SENSOR_DESCRIPTIONS: list[GreenPlanetEnergySensorEntityDescription] = [
native_unit_of_measurement=f"{CURRENCY_EURO}/{UnitOfEnergy.KILO_WATT_HOUR}",
suggested_display_precision=4,
translation_placeholders={"time_range": "(06:00-18:00)"},
value_fn=lambda api, data: api.get_lowest_price_day(data),
value_fn=lambda api, data: (
price / 100
if (price := api.get_lowest_price_day(data)) is not None
else None
),
),
GreenPlanetEnergySensorEntityDescription(
key="gpe_lowest_price_night",
@@ -54,14 +62,22 @@ SENSOR_DESCRIPTIONS: list[GreenPlanetEnergySensorEntityDescription] = [
native_unit_of_measurement=f"{CURRENCY_EURO}/{UnitOfEnergy.KILO_WATT_HOUR}",
suggested_display_precision=4,
translation_placeholders={"time_range": "(18:00-06:00)"},
value_fn=lambda api, data: api.get_lowest_price_night(data),
value_fn=lambda api, data: (
price / 100
if (price := api.get_lowest_price_night(data)) is not None
else None
),
),
GreenPlanetEnergySensorEntityDescription(
key="gpe_current_price",
translation_key="current_price",
native_unit_of_measurement=f"{CURRENCY_EURO}/{UnitOfEnergy.KILO_WATT_HOUR}",
suggested_display_precision=4,
value_fn=lambda api, data: api.get_current_price(data, dt_util.now().hour),
value_fn=lambda api, data: (
price / 100
if (price := api.get_current_price(data, dt_util.now().hour)) is not None
else None
),
),
]
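
All four value_fn lambdas share one pattern: divide the raw price by 100 only when the API actually returned a value. Assuming the upstream library reports prices in ct/kWh (not confirmed by this diff), the equivalent standalone helper would be:

def to_eur_per_kwh(raw_price: float | None) -> float | None:
    """Convert a raw price (assumed ct/kWh) to EUR/kWh, passing None through unchanged."""
    return raw_price / 100 if raw_price is not None else None


print(to_eur_per_kwh(30))    # 0.3
print(to_eur_per_kwh(None))  # None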

View File

@@ -556,16 +556,8 @@ class HomematicipAbsoluteHumiditySensor(HomematicipGenericEntity, SensorEntity):
@property
def native_value(self) -> float | None:
"""Return the state."""
if self.functional_channel is None:
return None
value = self.functional_channel.vaporAmount
# Handle case where value might be None
if (
self.functional_channel.vaporAmount is None
or self.functional_channel.vaporAmount == ""
):
value = self._device.vaporAmount
if value is None or value == "":
return None
return round(value, 3)

View File

@@ -9,5 +9,5 @@
"iot_class": "cloud_push",
"loggers": ["aioautomower"],
"quality_scale": "silver",
"requirements": ["aioautomower==2.7.1"]
"requirements": ["aioautomower==2.7.3"]
}

View File

@@ -9,5 +9,5 @@
"iot_class": "local_polling",
"loggers": ["aioimmich"],
"quality_scale": "silver",
"requirements": ["aioimmich==0.11.1"]
"requirements": ["aioimmich==0.12.0"]
}

View File

@@ -12,5 +12,5 @@
"integration_type": "device",
"iot_class": "local_polling",
"loggers": ["intellifire4py"],
"requirements": ["intellifire4py==4.2.1"]
"requirements": ["intellifire4py==4.3.1"]
}

View File

@@ -7,5 +7,5 @@
"integration_type": "device",
"iot_class": "local_polling",
"quality_scale": "silver",
"requirements": ["librehardwaremonitor-api==1.8.4"]
"requirements": ["librehardwaremonitor-api==1.9.1"]
}

View File

@@ -7,6 +7,7 @@ import datetime
import logging
import httpx
from mcp import McpError
from mcp.client.session import ClientSession
from mcp.client.sse import sse_client
from mcp.client.streamable_http import streamable_http_client
@@ -63,10 +64,15 @@ async def mcp_client(
# Method not Allowed likely means this is not a streamable HTTP server,
# but it may be an SSE server. This is part of the MCP Transport
# backwards compatibility specification.
# We also handle other generic McpErrors since proxies may not respond
# consistently with a 405.
if (
isinstance(main_error, httpx.HTTPStatusError)
and main_error.response.status_code == 405
):
) or isinstance(main_error, McpError):
_LOGGER.debug(
"Streamable HTTP client failed, attempting SSE client: %s", main_error
)
try:
async with (
sse_client(url=url, headers=headers) as streams,

View File

@@ -110,7 +110,7 @@ async def create_server(
return [
types.TextContent(
type="text",
text=json.dumps(tool_response),
text=json.dumps(tool_response, ensure_ascii=False),
)
]
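
Passing ensure_ascii=False stops json.dumps from turning non-ASCII characters into \uXXXX escapes, so MCP tool responses stay readable. A quick illustration:

import json

payload = {"room": "Küche", "temperature": "21.5 °C"}

print(json.dumps(payload))
# default: non-ASCII escaped -> {"room": "K\u00fcche", "temperature": "21.5 \u00b0C"}
print(json.dumps(payload, ensure_ascii=False))
# characters kept as-is -> {"room": "Küche", "temperature": "21.5 °C"}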

View File

@@ -177,15 +177,15 @@ class ProgramPhaseTumbleDryer(MieleEnum, missing_to_none=True):
not_running = 0, 512, 535, 536, 537, 65535
program_running = 513
drying = 514
drying = 514, 11018
machine_iron = 515
hand_iron_2 = 516
normal = 517
normal_plus = 518
cooling_down = 519
hand_iron_1 = 520
anti_crease = 521
finished = 522
anti_crease = 521, 11029
finished = 522, 11012
extra_dry = 523
hand_iron = 524
moisten = 526
@@ -193,12 +193,14 @@ class ProgramPhaseTumbleDryer(MieleEnum, missing_to_none=True):
timed_drying = 528
warm_air = 529
steam_smoothing = 530
comfort_cooling = 531
comfort_cooling = 531, 11055
rinse_out_lint = 532
rinses = 533
smoothing = 534
slightly_dry = 538
safety_cooling = 539
automatic_start = 11044
perfect_dry_active = 11054
class ProgramPhaseWasherDryer(MieleEnum, missing_to_none=True):
@@ -265,6 +267,8 @@ class ProgramPhaseOven(MieleEnum, missing_to_none=True):
heating_up = 3073
process_running = 3074
process_finished = 3078
searing = 3080
roasting = 3081
energy_save = 3084
pre_heating = 3099
@@ -357,6 +361,8 @@ class ProgramPhaseSteamOvenCombi(MieleEnum, missing_to_none=True):
heating_up = 3073
process_running = 3074, 7938
process_finished = 3078, 7942
searing = 3080
roasting = 3081
energy_save = 3084
pre_heating = 3099
@@ -505,30 +511,58 @@ class TumbleDryerProgramId(MieleEnum, missing_to_none=True):
no_program = 0, -1
automatic_plus = 1
cottons = 2, 20, 90
minimum_iron = 3, 30
woollens_handcare = 4, 40
delicates = 5, 50
warm_air = 6, 60
cool_air = 7, 70
express = 8, 80
cottons = 2, 20, 90, 10001
minimum_iron = 3, 30, 10016
woollens_handcare = 4, 40, 10081
woollens = 10040
delicates = 5, 50, 10022
warm_air = 6, 60, 10025
cool_air = 7, 70, 10027
express = 8, 80, 10028
cottons_eco = 9, 99003
proofing = 12, 120
denim = 13, 130
proofing = 12, 120, 10057
denim = 13, 130, 10039
shirts = 14, 99004
sportswear = 15, 150
outerwear = 16, 160
silks_handcare = 17, 170
sportswear = 15, 150, 10052
outerwear = 16, 160, 10049
silks_handcare = 17, 170, 10082
standard_pillows = 19, 190
basket_program = 22, 220
basket_program = 22, 220, 10072
cottons_hygiene = 11, 23
smoothing = 24, 240
bed_linen = 31, 99002
eco = 66
smoothing = 24, 240, 10073
bed_linen = 31, 99002, 10047
eco = 66, 10079
gentle_smoothing = 10, 100
gentle_denim = 131
steam_smoothing = 99001
large_pillows = 99005
downs_duvets = 10050
curtains = 10055
quick_power_dry = 10032
automatic = 10044
quick_hygiene = 10076
hygiene = 10080
pillows_sanitize = 10092
custom_program_1 = 13901
custom_program_2 = 13902
custom_program_3 = 13903
custom_program_4 = 13904
custom_program_5 = 13905
custom_program_6 = 13906
custom_program_7 = 13907
custom_program_8 = 13908
custom_program_9 = 13909
custom_program_10 = 13910
custom_program_11 = 13911
custom_program_12 = 13912
custom_program_13 = 13913
custom_program_14 = 13914
custom_program_15 = 13915
custom_program_16 = 13916
custom_program_17 = 13917
custom_program_18 = 13918
custom_program_19 = 13919
custom_program_20 = 13920
class OvenProgramId(MieleEnum, missing_to_none=True):

View File

@@ -461,6 +461,7 @@
"dissolve_gelatine": "Dissolve gelatine",
"down_duvets": "Down duvets",
"down_filled_items": "Down-filled items",
"downs_duvets": "Downs/Duvets",
"drain_spin": "Drain/spin",
"drop_cookies_1_tray": "Drop cookies (1 tray)",
"drop_cookies_2_trays": "Drop cookies (2 trays)",
@@ -665,6 +666,7 @@
"pike_fillet": "Pike (fillet)",
"pike_piece": "Pike (piece)",
"pillows": "Pillows",
"pillows_sanitize": "Pillows sanitize",
"pinto_beans": "Pinto beans",
"pizza_oil_cheese_dough_baking_tray": "Pizza, oil cheese dough (baking tray)",
"pizza_oil_cheese_dough_round_baking_tine": "Pizza, oil cheese dough (round baking tine)",
@@ -732,8 +734,8 @@
"potatoes_waxy_whole_small": "Potatoes (waxy, whole, small)",
"poularde_breast": "Poularde breast",
"poularde_whole": "Poularde (whole)",
"power_fresh": "PowerFresh",
"power_wash": "PowerWash",
"powerfresh": "PowerFresh",
"prawns": "Prawns",
"pre_ironing": "Pre-ironing",
"proofing": "Proofing",
@@ -746,7 +748,9 @@
"pumpkin_soup": "Pumpkin soup",
"pyrolytic": "Pyrolytic",
"quiche_lorraine": "Quiche Lorraine",
"quick_hygiene": "QuickHygiene",
"quick_mw": "Quick MW",
"quick_power_dry": "QuickPowerDry",
"quick_power_wash": "QuickPowerWash",
"quinces_diced": "Quinces (diced)",
"quinoa": "Quinoa",
@@ -1004,6 +1008,7 @@
"normal": "Normal",
"normal_plus": "Normal plus",
"not_running": "Not running",
"perfect_dry_active": "PerfectDry active",
"pre_brewing": "Pre-brewing",
"pre_dishwash": "Pre-cleaning",
"pre_heating": "Pre-heating",
@@ -1018,7 +1023,9 @@
"rinse_hold": "Rinse hold",
"rinse_out_lint": "Rinse out lint",
"rinses": "Rinses",
"roasting": "Roasting",
"safety_cooling": "Safety cooling",
"searing": "Searing",
"slightly_dry": "Slightly dry",
"slow_roasting": "Slow roasting",
"smoothing": "Smoothing",

View File

@@ -8,5 +8,5 @@
"iot_class": "cloud_polling",
"loggers": ["pynintendoauth", "pynintendoparental"],
"quality_scale": "bronze",
"requirements": ["pynintendoauth==1.0.2", "pynintendoparental==2.3.2"]
"requirements": ["pynintendoauth==1.0.2", "pynintendoparental==2.3.2.1"]
}

View File

@@ -16,6 +16,7 @@ from homeassistant.config_entries import ConfigSubentry
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import device_registry as dr, llm
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.json import json_dumps
from . import OllamaConfigEntry
from .const import (
@@ -93,7 +94,7 @@ def _convert_content(
if isinstance(chat_content, conversation.ToolResultContent):
return ollama.Message(
role=MessageRole.TOOL.value,
content=json.dumps(chat_content.tool_result),
content=json_dumps(chat_content.tool_result),
)
if isinstance(chat_content, conversation.AssistantContent):
return ollama.Message(

View File

@@ -148,6 +148,12 @@ class OneDriveBackupAgent(BackupAgent):
**kwargs: Any,
) -> None:
"""Upload a backup."""
expires_at = self._entry.data["token"]["expires_at"]
_LOGGER.debug(
"Starting backup upload, token expiry: %s (in %s seconds)",
expires_at,
expires_at - time(),
)
backup_filename, metadata_filename = suggested_filenames(backup)
file = FileInfo(
backup_filename,

View File

@@ -6,6 +6,7 @@ from collections.abc import Awaitable, Callable
from dataclasses import dataclass
from datetime import timedelta
import logging
from time import time
from onedrive_personal_sdk import OneDriveClient
from onedrive_personal_sdk.const import DriveState
@@ -58,6 +59,12 @@ class OneDriveUpdateCoordinator(DataUpdateCoordinator[Drive]):
async def _async_update_data(self) -> Drive:
"""Fetch data from API endpoint."""
expires_at = self.config_entry.data["token"]["expires_at"]
_LOGGER.debug(
"Token expiry: %s (in %s seconds)",
expires_at,
expires_at - time(),
)
try:
drive = await self._client.get_drive()

View File

@@ -10,5 +10,5 @@
"iot_class": "cloud_polling",
"loggers": ["onedrive_personal_sdk"],
"quality_scale": "platinum",
"requirements": ["onedrive-personal-sdk==0.1.1"]
"requirements": ["onedrive-personal-sdk==0.1.2"]
}

View File

@@ -34,6 +34,7 @@ from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import device_registry as dr, llm
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.json import json_dumps
from . import OpenRouterConfigEntry
from .const import DOMAIN, LOGGER
@@ -109,7 +110,7 @@ def _convert_content_to_chat_message(
return ChatCompletionToolMessageParam(
role="tool",
tool_call_id=content.tool_call_id,
content=json.dumps(content.tool_result),
content=json_dumps(content.tool_result),
)
role: Literal["user", "assistant", "system"] = content.role
@@ -130,7 +131,7 @@ def _convert_content_to_chat_message(
type="function",
id=tool_call.id,
function=Function(
arguments=json.dumps(tool_call.tool_args),
arguments=json_dumps(tool_call.tool_args),
name=tool_call.tool_name,
),
)

View File

@@ -64,6 +64,7 @@ from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import device_registry as dr, issue_registry as ir, llm
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.json import json_dumps
from homeassistant.util import slugify
from .const import (
@@ -183,7 +184,7 @@ def _convert_content_to_param(
FunctionCallOutput(
type="function_call_output",
call_id=content.tool_call_id,
output=json.dumps(content.tool_result),
output=json_dumps(content.tool_result),
)
)
continue
@@ -217,7 +218,7 @@ def _convert_content_to_param(
ResponseFunctionToolCallParam(
type="function_call",
name=tool_call.tool_name,
arguments=json.dumps(tool_call.tool_args),
arguments=json_dumps(tool_call.tool_args),
call_id=tool_call.id,
)
)

View File

@@ -76,27 +76,29 @@ def get_upload_speed(coordinator: QBittorrentDataCoordinator) -> int:
def get_download_speed_limit(coordinator: QBittorrentDataCoordinator) -> int:
"""Get current download speed."""
"""Get current download speed limit."""
server_state = cast(Mapping, coordinator.data.get("server_state"))
return cast(int, server_state.get("dl_rate_limit"))
def get_upload_speed_limit(coordinator: QBittorrentDataCoordinator) -> int:
"""Get current upload speed."""
"""Get current upload speed limit."""
server_state = cast(Mapping[str, Any], coordinator.data.get("server_state"))
return cast(int, server_state.get("up_rate_limit"))
def get_alltime_download(coordinator: QBittorrentDataCoordinator) -> int:
"""Get current download speed."""
def get_alltime_download(coordinator: QBittorrentDataCoordinator) -> int | None:
"""Get all-time download volume."""
server_state = cast(Mapping, coordinator.data.get("server_state"))
return cast(int, server_state.get("alltime_dl"))
value = cast(int, server_state.get("alltime_dl"))
return value or None
def get_alltime_upload(coordinator: QBittorrentDataCoordinator) -> int:
"""Get current download speed."""
def get_alltime_upload(coordinator: QBittorrentDataCoordinator) -> int | None:
"""Get all-time upload volume."""
server_state = cast(Mapping, coordinator.data.get("server_state"))
return cast(int, server_state.get("alltime_ul"))
value = cast(int, server_state.get("alltime_ul"))
return value or None
def get_global_ratio(coordinator: QBittorrentDataCoordinator) -> float:

View File

@@ -20,5 +20,5 @@
"iot_class": "local_push",
"loggers": ["reolink_aio"],
"quality_scale": "platinum",
"requirements": ["reolink-aio==0.18.2"]
"requirements": ["reolink-aio==0.19.0"]
}

View File

@@ -87,11 +87,12 @@ NUMBER_ENTITIES = (
ReolinkNumberEntityDescription(
key="zoom",
cmd_key="GetZoomFocus",
cmd_id=294,
translation_key="zoom",
mode=NumberMode.SLIDER,
native_step=1,
get_min_value=lambda api, ch: api.zoom_range(ch)["zoom"]["pos"]["min"],
get_max_value=lambda api, ch: api.zoom_range(ch)["zoom"]["pos"]["max"],
get_min_value=lambda api, ch: api.zoom_range(ch)["zoom"]["min"],
get_max_value=lambda api, ch: api.zoom_range(ch)["zoom"]["max"],
supported=lambda api, ch: api.supported(ch, "zoom"),
value=lambda api, ch: api.get_zoom(ch),
method=lambda api, ch, value: api.set_zoom(ch, int(value)),
@@ -99,11 +100,12 @@ NUMBER_ENTITIES = (
ReolinkNumberEntityDescription(
key="focus",
cmd_key="GetZoomFocus",
cmd_id=294,
translation_key="focus",
mode=NumberMode.SLIDER,
native_step=1,
get_min_value=lambda api, ch: api.zoom_range(ch)["focus"]["pos"]["min"],
get_max_value=lambda api, ch: api.zoom_range(ch)["focus"]["pos"]["max"],
get_min_value=lambda api, ch: api.zoom_range(ch)["focus"]["min"],
get_max_value=lambda api, ch: api.zoom_range(ch)["focus"]["max"],
supported=lambda api, ch: api.supported(ch, "focus"),
value=lambda api, ch: api.get_focus(ch),
method=lambda api, ch, value: api.set_focus(ch, int(value)),

View File

@@ -61,6 +61,7 @@ class ReolinkHostSensorEntityDescription(
SENSORS = (
ReolinkSensorEntityDescription(
key="ptz_pan_position",
cmd_id=433,
cmd_key="GetPtzCurPos",
translation_key="ptz_pan_position",
state_class=SensorStateClass.MEASUREMENT,
@@ -70,6 +71,7 @@ SENSORS = (
),
ReolinkSensorEntityDescription(
key="ptz_tilt_position",
cmd_id=433,
cmd_key="GetPtzCurPos",
translation_key="ptz_tilt_position",
state_class=SensorStateClass.MEASUREMENT,

View File

@@ -104,7 +104,6 @@ class RpcLinkedgoThermostatClimate(ShellyRpcAttributeEntity, ClimateEntity):
)
config = coordinator.device.config
self._status = coordinator.device.status
self._attr_min_temp = config[key]["min"]
self._attr_max_temp = config[key]["max"]
@@ -142,6 +141,11 @@ class RpcLinkedgoThermostatClimate(ShellyRpcAttributeEntity, ClimateEntity):
THERMOSTAT_TO_HA_MODE[mode] for mode in modes
]
@property
def _status(self) -> dict[str, Any]:
"""Return the full device status."""
return self.coordinator.device.status
@property
def current_humidity(self) -> float | None:
"""Return the current humidity."""

View File

@@ -31,5 +31,5 @@
"iot_class": "cloud_push",
"loggers": ["pysmartthings"],
"quality_scale": "bronze",
"requirements": ["pysmartthings==3.5.1"]
"requirements": ["pysmartthings==3.5.2"]
}

View File

@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/smarttub",
"iot_class": "cloud_polling",
"loggers": ["smarttub"],
"requirements": ["python-smarttub==0.0.46"]
"requirements": ["python-smarttub==0.0.47"]
}

View File

@@ -9,6 +9,14 @@
},
"step": {
"reauth_confirm": {
"data": {
"email": "[%key:common::config_flow::data::email%]",
"password": "[%key:common::config_flow::data::password%]"
},
"data_description": {
"email": "[%key:component::smarttub::config::step::user::data_description::email%]",
"password": "[%key:component::smarttub::config::step::user::data_description::password%]"
},
"description": "The SmartTub integration needs to re-authenticate your account",
"title": "[%key:common::config_flow::title::reauth%]"
},
@@ -17,6 +25,10 @@
"email": "[%key:common::config_flow::data::email%]",
"password": "[%key:common::config_flow::data::password%]"
},
"data_description": {
"email": "The email address associated with your SmartTub account",
"password": "The password for your SmartTub account"
},
"description": "Enter your SmartTub email address and password to log in",
"title": "Login"
}

View File

@@ -659,23 +659,41 @@ class TelegramNotificationService:
media: InputMedia
if media_type == InputMediaType.ANIMATION:
media = InputMediaAnimation(file_content, caption=kwargs.get(ATTR_CAPTION))
media = InputMediaAnimation(
file_content,
caption=kwargs.get(ATTR_CAPTION),
parse_mode=params[ATTR_PARSER],
)
elif media_type == InputMediaType.AUDIO:
media = InputMediaAudio(file_content, caption=kwargs.get(ATTR_CAPTION))
media = InputMediaAudio(
file_content,
caption=kwargs.get(ATTR_CAPTION),
parse_mode=params[ATTR_PARSER],
)
elif media_type == InputMediaType.DOCUMENT:
media = InputMediaDocument(file_content, caption=kwargs.get(ATTR_CAPTION))
media = InputMediaDocument(
file_content,
caption=kwargs.get(ATTR_CAPTION),
parse_mode=params[ATTR_PARSER],
)
elif media_type == InputMediaType.PHOTO:
media = InputMediaPhoto(file_content, caption=kwargs.get(ATTR_CAPTION))
media = InputMediaPhoto(
file_content,
caption=kwargs.get(ATTR_CAPTION),
parse_mode=params[ATTR_PARSER],
)
else:
media = InputMediaVideo(file_content, caption=kwargs.get(ATTR_CAPTION))
media = InputMediaVideo(
file_content,
caption=kwargs.get(ATTR_CAPTION),
parse_mode=params[ATTR_PARSER],
)
return await self._send_msg(
self.bot.edit_message_media,
"Error editing message media",
params[ATTR_MESSAGE_TAG],
media=media,
caption=kwargs.get(ATTR_CAPTION),
parse_mode=params[ATTR_PARSER],
chat_id=chat_id,
message_id=message_id,
inline_message_id=inline_message_id,

View File

@@ -237,9 +237,9 @@ class TelgramBotConfigFlow(ConfigFlow, domain=DOMAIN):
# validate connection to Telegram API
errors: dict[str, str] = {}
user_input[CONF_API_ENDPOINT] = (
user_input[SECTION_ADVANCED_SETTINGS][CONF_API_ENDPOINT],
)
user_input[CONF_API_ENDPOINT] = user_input[SECTION_ADVANCED_SETTINGS][
CONF_API_ENDPOINT
]
user_input[CONF_PROXY_URL] = user_input[SECTION_ADVANCED_SETTINGS].get(
CONF_PROXY_URL
)

View File

@@ -10,11 +10,7 @@ from typing import Any, cast
import jwt
from tesla_fleet_api import TeslaFleetApi
from tesla_fleet_api.const import SERVERS
from tesla_fleet_api.exceptions import (
InvalidResponse,
PreconditionFailed,
TeslaFleetError,
)
from tesla_fleet_api.exceptions import PreconditionFailed, TeslaFleetError
import voluptuous as vol
from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlowResult
@@ -41,12 +37,9 @@ class OAuth2FlowHandler(
"""Initialize config flow."""
super().__init__()
self.domain: str | None = None
self.registration_status: dict[str, bool] = {}
self.tesla_apis: dict[str, TeslaFleetApi] = {}
self.failed_regions: list[str] = []
self.data: dict[str, Any] = {}
self.uid: str | None = None
self.api: TeslaFleetApi | None = None
self.apis: list[TeslaFleetApi] = []
@property
def logger(self) -> logging.Logger:
@@ -64,7 +57,6 @@ class OAuth2FlowHandler(
self.data = data
self.uid = token["sub"]
server = SERVERS[token["ou_code"].lower()]
await self.async_set_unique_id(self.uid)
if self.source == SOURCE_REAUTH:
@@ -74,24 +66,28 @@ class OAuth2FlowHandler(
)
self._abort_if_unique_id_configured()
# OAuth done, setup a Partner API connection
# OAuth done, setup Partner API connections for all regions
implementation = cast(TeslaUserImplementation, self.flow_impl)
session = async_get_clientsession(self.hass)
self.api = TeslaFleetApi(
access_token="",
session=session,
server=server,
partner_scope=True,
charging_scope=False,
energy_scope=False,
user_scope=False,
vehicle_scope=False,
)
await self.api.get_private_key(self.hass.config.path("tesla_fleet.key"))
await self.api.partner_login(
implementation.client_id, implementation.client_secret
)
for region, server_url in SERVERS.items():
if region == "cn":
continue
api = TeslaFleetApi(
session=session,
access_token="",
server=server_url,
partner_scope=True,
charging_scope=False,
energy_scope=False,
user_scope=False,
vehicle_scope=False,
)
await api.get_private_key(self.hass.config.path("tesla_fleet.key"))
await api.partner_login(
implementation.client_id, implementation.client_secret
)
self.apis.append(api)
return await self.async_step_domain_input()
@@ -130,44 +126,67 @@ class OAuth2FlowHandler(
async def async_step_domain_registration(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Handle domain registration for both regions."""
"""Handle domain registration for all regions."""
assert self.api
assert self.api.private_key
assert self.apis
assert self.apis[0].private_key
assert self.domain
errors = {}
errors: dict[str, str] = {}
description_placeholders = {
"public_key_url": f"https://{self.domain}/.well-known/appspecific/com.tesla.3p.public-key.pem",
"pem": self.api.public_pem,
"pem": self.apis[0].public_pem,
}
try:
register_response = await self.api.partner.register(self.domain)
except PreconditionFailed:
return await self.async_step_domain_input(
errors={CONF_DOMAIN: "precondition_failed"}
)
except InvalidResponse:
successful_response: dict[str, Any] | None = None
failed_regions: list[str] = []
for api in self.apis:
try:
register_response = await api.partner.register(self.domain)
except PreconditionFailed:
return await self.async_step_domain_input(
errors={CONF_DOMAIN: "precondition_failed"}
)
except TeslaFleetError as e:
LOGGER.warning(
"Partner registration failed for %s: %s",
api.server,
e.message,
)
failed_regions.append(api.server or "unknown")
else:
if successful_response is None:
successful_response = register_response
if successful_response is None:
errors["base"] = "invalid_response"
except TeslaFleetError as e:
errors["base"] = "unknown_error"
description_placeholders["error"] = e.message
else:
# Get public key from response
registered_public_key = register_response.get("response", {}).get(
"public_key"
return self.async_show_form(
step_id="domain_registration",
description_placeholders=description_placeholders,
errors=errors,
)
if not registered_public_key:
errors["base"] = "public_key_not_found"
elif (
registered_public_key.lower()
!= self.api.public_uncompressed_point.lower()
):
errors["base"] = "public_key_mismatch"
else:
return await self.async_step_registration_complete()
if failed_regions:
LOGGER.warning(
"Partner registration succeeded on some regions but failed on: %s",
", ".join(failed_regions),
)
# Verify public key from the successful response
registered_public_key = successful_response.get("response", {}).get(
"public_key"
)
if not registered_public_key:
errors["base"] = "public_key_not_found"
elif (
registered_public_key.lower()
!= self.apis[0].public_uncompressed_point.lower()
):
errors["base"] = "public_key_mismatch"
else:
return await self.async_step_registration_complete()
return self.async_show_form(
step_id="domain_registration",

View File

@@ -436,7 +436,6 @@ ENERGY_INFO_DESCRIPTIONS: tuple[SensorEntityDescription, ...] = (
SensorEntityDescription(
key="vpp_backup_reserve_percent",
entity_category=EntityCategory.DIAGNOSTIC,
device_class=SensorDeviceClass.BATTERY,
native_unit_of_measurement=PERCENTAGE,
),
SensorEntityDescription(key="version"),

View File

@@ -1529,7 +1529,6 @@ ENERGY_INFO_DESCRIPTIONS: tuple[SensorEntityDescription, ...] = (
SensorEntityDescription(
key="vpp_backup_reserve_percent",
entity_category=EntityCategory.DIAGNOSTIC,
device_class=SensorDeviceClass.BATTERY,
native_unit_of_measurement=PERCENTAGE,
),
SensorEntityDescription(

View File

@@ -364,7 +364,6 @@ ENERGY_INFO_DESCRIPTIONS: tuple[TessieSensorEntityDescription, ...] = (
TessieSensorEntityDescription(
key="vpp_backup_reserve_percent",
entity_category=EntityCategory.DIAGNOSTIC,
device_class=SensorDeviceClass.BATTERY,
native_unit_of_measurement=PERCENTAGE,
),
)

View File

@@ -93,4 +93,7 @@ COLLABORATORS: Final = "collaborators"
DOMAIN: Final = "todoist"
# Maximum number of items per page for Todoist API requests
MAX_PAGE_SIZE: Final = 200
SERVICE_NEW_TASK: Final = "new_task"

View File

@@ -1,5 +1,6 @@
"""DataUpdateCoordinator for the Todoist component."""
import asyncio
from collections.abc import AsyncGenerator
from datetime import timedelta
import logging
@@ -12,6 +13,8 @@ from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
from .const import MAX_PAGE_SIZE
T = TypeVar("T")
@@ -53,26 +56,30 @@ class TodoistCoordinator(DataUpdateCoordinator[list[Task]]):
async def _async_update_data(self) -> list[Task]:
"""Fetch tasks from the Todoist API."""
try:
tasks_async = await self.api.get_tasks()
tasks_async = await self.api.get_tasks(limit=MAX_PAGE_SIZE)
return await flatten_async_pages(tasks_async)
except asyncio.CancelledError:
raise
except Exception as err:
raise UpdateFailed(f"Error communicating with API: {err}") from err
return await flatten_async_pages(tasks_async)
async def async_get_projects(self) -> list[Project]:
"""Return todoist projects fetched at most once."""
if self._projects is None:
projects_async = await self.api.get_projects()
projects_async = await self.api.get_projects(limit=MAX_PAGE_SIZE)
self._projects = await flatten_async_pages(projects_async)
return self._projects
async def async_get_sections(self, project_id: str) -> list[Section]:
"""Return todoist sections for a given project ID."""
sections_async = await self.api.get_sections(project_id=project_id)
sections_async = await self.api.get_sections(
project_id=project_id, limit=MAX_PAGE_SIZE
)
return await flatten_async_pages(sections_async)
async def async_get_labels(self) -> list[Label]:
"""Return todoist labels fetched at most once."""
if self._labels is None:
labels_async = await self.api.get_labels()
labels_async = await self.api.get_labels(limit=MAX_PAGE_SIZE)
self._labels = await flatten_async_pages(labels_async)
return self._labels
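
Each call now asks the Todoist API for pages of MAX_PAGE_SIZE (200) items, cutting the number of round-trips that previously caused timeouts. flatten_async_pages is not shown in this diff, but its job is presumably to drain the paginated async iterator into one list, roughly like this sketch:

from collections.abc import AsyncIterator
from typing import TypeVar

T = TypeVar("T")


async def flatten_pages(pages: AsyncIterator[list[T]]) -> list[T]:
    """Collect every item from an async iterator of page lists into one flat list."""
    items: list[T] = []
    async for page in pages:
        items.extend(page)
    return items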

View File

@@ -45,9 +45,9 @@ def _task_api_data(item: TodoItem, api_data: Task | None = None) -> dict[str, An
}
if due := item.due:
if isinstance(due, datetime.datetime):
item_data["due_datetime"] = due.isoformat()
item_data["due_datetime"] = due
else:
item_data["due_date"] = due.isoformat()
item_data["due_date"] = due
# In order to not lose any recurrence metadata for the task, we need to
# ensure that we send the `due_string` param if the task has it set.
# NOTE: It's ok to send stale data for non-recurring tasks. Any provided

View File

@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/touchline_sl",
"integration_type": "hub",
"iot_class": "cloud_polling",
"requirements": ["pytouchlinesl==0.5.0"]
"requirements": ["pytouchlinesl==0.6.0"]
}

View File

@@ -48,6 +48,7 @@ TUYA_HVAC_TO_HA = {
"heat": HVACMode.HEAT,
"hot": HVACMode.HEAT,
"manual": HVACMode.HEAT_COOL,
"off": HVACMode.OFF,
"wet": HVACMode.DRY,
"wind": HVACMode.FAN_ONLY,
}
@@ -442,7 +443,9 @@ class TuyaClimateEntity(TuyaEntity, ClimateEntity):
if hvac_mode_wrapper:
self._attr_hvac_modes = [HVACMode.OFF]
for mode in hvac_mode_wrapper.options:
self._attr_hvac_modes.append(HVACMode(mode))
if mode != HVACMode.OFF:
# OFF is always added first
self._attr_hvac_modes.append(HVACMode(mode))
elif switch_wrapper:
self._attr_hvac_modes = [

View File

@@ -51,9 +51,14 @@ class DeviceWrapper[T]:
) -> bool:
"""Determine if the wrapper should skip an update.
The default is to always skip, unless overridden in subclasses.
The default is to always skip if updated properties is given,
unless overridden in subclasses.
"""
return True
# If updated_status_properties is None, we should not skip,
# as we don't have information on what was updated
# This happens for example on online/offline updates, where
# we still want to update the entity state
return updated_status_properties is not None
def read_device_status(self, device: CustomerDevice) -> T | None:
"""Read device status and convert to a Home Assistant value."""
@@ -88,9 +93,13 @@ class DPCodeWrapper(DeviceWrapper):
By default, skip if updated_status_properties is given and
does not include this dpcode.
"""
# If updated_status_properties is None, we should not skip,
# as we don't have information on what was updated
# This happens for example on online/offline updates, where
# we still want to update the entity state
return (
updated_status_properties is None
or self.dpcode not in updated_status_properties
updated_status_properties is not None
and self.dpcode not in updated_status_properties
)
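
The corrected condition only skips when a property list was supplied and this wrapper's dpcode is absent from it; a None list (for example an online/offline update) must still refresh the entity. A minimal sketch of the behavior, outside the wrapper class:

def should_skip(dpcode: str, updated_status_properties: list[str] | None) -> bool:
    """Skip only when we know what changed and this dpcode is not part of it."""
    return (
        updated_status_properties is not None
        and dpcode not in updated_status_properties
    )


print(should_skip("switch_1", None))              # False: unknown change set, refresh
print(should_skip("switch_1", ["switch_1"]))      # False: this dpcode changed
print(should_skip("switch_1", ["temp_current"]))  # True: unrelated change, skip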
def _convert_value_to_raw_value(self, device: CustomerDevice, value: Any) -> Any:
@@ -250,6 +259,13 @@ class DPCodeDeltaIntegerWrapper(DPCodeIntegerWrapper):
Processes delta accumulation before determining if update should be skipped.
"""
# If updated_status_properties is None, we should not skip,
# as we don't have information on what was updated
# This happens for example on online/offline updates, where
# we still want to update the entity state but we have nothing
# to accumulate, so we return False to not skip the update
if updated_status_properties is None:
return False
if (
super().skip_update(device, updated_status_properties, dp_timestamps)
or dp_timestamps is None

View File

@@ -143,16 +143,6 @@ async def async_migrate_entry(
"Migrating from version %s.%s", config_entry.version, config_entry.minor_version
)
# This is the config entry migration for adding the new program selection
# migrate from 1.x to 2.1
if config_entry.version < 2:
# clean the velbusCache
cache_path = hass.config.path(
STORAGE_DIR, f"velbuscache-{config_entry.entry_id}/"
)
if os.path.isdir(cache_path):
await hass.async_add_executor_job(shutil.rmtree, cache_path)
# This is the config entry migration for swapping the usb unique id to the serial number
# migrate from 2.1 to 2.2
if (
@@ -166,8 +156,20 @@ async def async_migrate_entry(
if len(parts) == 4:
hass.config_entries.async_update_entry(config_entry, unique_id=parts[1])
# This is the config entry migration for adding the new program selection
# migrate from < 2 to 2.1
# This is the config entry migration for adding the new properties
# migrate from < 3 to 3.2
if config_entry.version < 3:
# clean the velbusCache
cache_path = hass.config.path(
STORAGE_DIR, f"velbuscache-{config_entry.entry_id}/"
)
if os.path.isdir(cache_path):
await hass.async_add_executor_job(shutil.rmtree, cache_path)
# update the config entry
hass.config_entries.async_update_entry(config_entry, version=2, minor_version=2)
hass.config_entries.async_update_entry(config_entry, version=3, minor_version=2)
_LOGGER.error(
"Migration to version %s.%s successful",

View File

@@ -36,7 +36,7 @@ class InvalidVlpFile(HomeAssistantError):
class VelbusConfigFlow(ConfigFlow, domain=DOMAIN):
"""Handle a config flow."""
VERSION = 2
VERSION = 3
MINOR_VERSION = 2
def __init__(self) -> None:

View File

@@ -12,6 +12,7 @@ from homeassistant.const import EntityCategory
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.update_coordinator import CoordinatorEntity
from homeassistant.util import dt as dt_util
from .const import _LOGGER
from .coordinator import VodafoneConfigEntry, VodafoneStationRouter
@@ -75,9 +76,11 @@ class VodafoneGuestWifiQRImage(
self.entity_description = description
self._attr_device_info = coordinator.device_info
self._attr_unique_id = f"{coordinator.serial_number}-{description.key}-qr-code"
self._cached_qr_code: bytes | None = None
async def async_image(self) -> bytes | None:
"""Return QR code image bytes."""
@property
def _qr_code(self) -> bytes:
"""Return QR code bytes."""
qr_code = cast(
BytesIO,
self.coordinator.data.wifi[WIFI_DATA][self.entity_description.key][
@@ -85,3 +88,24 @@ class VodafoneGuestWifiQRImage(
],
)
return qr_code.getvalue()
async def async_added_to_hass(self) -> None:
"""Set the update time."""
self._attr_image_last_updated = dt_util.utcnow()
await super().async_added_to_hass()
def _handle_coordinator_update(self) -> None:
"""Handle updated data from the coordinator.
If the coordinator has updated the QR code, we can update the image.
"""
qr_code = self._qr_code
if self._cached_qr_code != qr_code:
self._cached_qr_code = qr_code
self._attr_image_last_updated = dt_util.utcnow()
super()._handle_coordinator_update()
async def async_image(self) -> bytes | None:
"""Return QR code image."""
return self._qr_code

View File
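The Vodafone Station change above caches the QR-code bytes and only advances image_last_updated when the coordinator actually delivers different bytes, so the frontend re-fetches the image exactly when it changed rather than on every poll. A minimal, framework-free sketch of that compare-before-bump pattern (the real entity reads the bytes from coordinator data and uses dt_util.utcnow):

from datetime import datetime, timezone


class _QRImageSketch:
    """Minimal stand-in showing the cache-and-compare update pattern."""

    def __init__(self) -> None:
        self._cached_qr_code: bytes | None = None
        self._attr_image_last_updated: datetime | None = None

    def _current_qr_code(self) -> bytes:
        # The real entity pulls this from coordinator.data.wifi[...] as above.
        return b"example-qr-bytes"

    def handle_coordinator_update(self) -> None:
        qr_code = self._current_qr_code()
        if self._cached_qr_code != qr_code:
            # Only a changed payload invalidates the cached image, so image
            # consumers re-download it exactly when it is new.
            self._cached_qr_code = qr_code
            self._attr_image_last_updated = datetime.now(timezone.utc)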

@@ -6,5 +6,5 @@
"iot_class": "cloud_push",
"loggers": ["pyasn1", "slixmpp"],
"quality_scale": "legacy",
"requirements": ["slixmpp==1.12.0", "emoji==2.8.0"]
"requirements": ["slixmpp==1.13.2", "emoji==2.8.0"]
}

View File

@@ -61,7 +61,6 @@ SENSOR_DESCRIPTIONS: tuple[YardianSensorEntityDescription, ...] = (
YardianSensorEntityDescription(
key="active_zone_count",
translation_key="active_zone_count",
native_unit_of_measurement="zones",
state_class=SensorStateClass.MEASUREMENT,
value_fn=lambda coordinator: len(coordinator.data.active_zones),
),

View File

@@ -38,7 +38,7 @@
"sensor": {
"active_zone_count": {
"name": "Active zones",
"unit_of_measurement": "Zones"
"unit_of_measurement": "zones"
},
"rain_delay": {
"name": "Rain delay"

View File

@@ -23,7 +23,7 @@
"universal_silabs_flasher",
"serialx"
],
"requirements": ["zha==0.0.89", "serialx==0.6.2"],
"requirements": ["zha==0.0.90", "serialx==0.6.2"],
"usb": [
{
"description": "*2652*",

View File

@@ -17,7 +17,7 @@ if TYPE_CHECKING:
APPLICATION_NAME: Final = "HomeAssistant"
MAJOR_VERSION: Final = 2026
MINOR_VERSION: Final = 2
PATCH_VERSION: Final = "0"
PATCH_VERSION: Final = "2"
__short_version__: Final = f"{MAJOR_VERSION}.{MINOR_VERSION}"
__version__: Final = f"{__short_version__}.{PATCH_VERSION}"
REQUIRED_PYTHON_VER: Final[tuple[int, int, int]] = (3, 13, 2)

View File

@@ -42,6 +42,7 @@ from . import device_registry as dr, entity_registry as er, service, translation
from .deprecation import deprecated_function
from .entity_registry import EntityRegistry, RegistryEntryDisabler, RegistryEntryHider
from .event import async_call_later
from .frame import report_usage
from .issue_registry import IssueSeverity, async_create_issue
from .typing import UNDEFINED, ConfigType, DiscoveryInfoType, VolDictType, VolSchemaType
@@ -822,13 +823,28 @@ class EntityPlatform:
# An entity may suggest the entity_id by setting entity_id itself
if not hasattr(entity, "internal_integration_suggested_object_id"):
if entity.entity_id is not None and not valid_entity_id(entity.entity_id):
if entity.unique_id is not None:
report_usage(
f"sets an invalid entity ID: '{entity.entity_id}'. "
"In most cases, entities should not set entity_id,"
" but if they do, it should be a valid entity ID.",
integration_domain=self.platform_name,
breaks_in_ha_version="2027.2.0",
)
else:
entity.add_to_platform_abort()
raise HomeAssistantError(f"Invalid entity ID: {entity.entity_id}")
try:
entity.internal_integration_suggested_object_id = (
split_entity_id(entity.entity_id)[1]
if entity.entity_id is not None
else None
)
except ValueError:
entity.add_to_platform_abort()
raise HomeAssistantError(f"Invalid entity ID: {entity.entity_id}")
entity.internal_integration_suggested_object_id = (
split_entity_id(entity.entity_id)[1]
if entity.entity_id is not None
else None
)
raise HomeAssistantError(
f"Invalid entity ID: {entity.entity_id}"
) from None
# Get entity_id from unique ID registration
if entity.unique_id is not None:

View File
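The core change above softens the failure mode for integrations that assign an invalid entity_id themselves: entities that also have a unique_id now get a report_usage warning (flagged to break in 2027.2.0), while entities without one still abort and raise. An entity ID is simply "<domain>.<object_id>"; a simplified, self-contained sketch of the validity check and split (the real helpers live in homeassistant.core and accept a wider grammar):

import re

# Simplified stand-ins for homeassistant.core.valid_entity_id / split_entity_id.
_ENTITY_ID = re.compile(r"^[a-z0-9_]+\.[a-z0-9_]+$")


def valid_entity_id(entity_id: str) -> bool:
    return _ENTITY_ID.match(entity_id) is not None


def split_entity_id(entity_id: str) -> tuple[str, str]:
    domain, _, object_id = entity_id.partition(".")
    if not domain or not object_id:
        raise ValueError(f"Invalid entity ID {entity_id}")
    return domain, object_id


print(valid_entity_id("sensor.cpu_fan_speed"))  # True
print(valid_entity_id("sensor.CPU Fan"))        # False: uppercase and space
print(split_entity_id("sensor.cpu_fan_speed"))  # ('sensor', 'cpu_fan_speed')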

@@ -29,7 +29,7 @@ cached-ipaddress==1.0.1
certifi>=2021.5.30
ciso8601==2.3.3
cronsim==2.7
cryptography==46.0.2
cryptography==46.0.5
dbus-fast==3.1.2
file-read-backwards==2.0.0
fnv-hash-fast==1.6.0
@@ -89,9 +89,9 @@ httplib2>=0.19.0
# gRPC is an implicit dependency that we want to make explicit so we manage
# upgrades intentionally. It is a large package to build from source and we
# want to ensure we have wheels built.
grpcio==1.75.1
grpcio-status==1.75.1
grpcio-reflection==1.75.1
grpcio==1.78.0
grpcio-status==1.78.0
grpcio-reflection==1.78.0
# This is an old unmaintained library and is replaced with pycryptodome
pycrypto==1000000000.0.0
@@ -235,3 +235,6 @@ aiomqtt>=2.5.0
# used by sharkiq==1.5.0
# https://github.com/auth0/auth0-python/releases/tag/5.0.0
auth0-python<5.0
# Setuptools >=82.0.0 doesn't contain pkg_resources anymore
setuptools<82.0.0

View File
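Per the new constraint comment above, setuptools 82.0.0 and later no longer ship the legacy pkg_resources module that some of the pinned libraries still import, hence the setuptools<82.0.0 cap. A quick, hedged way to check whether a given environment is affected:

import importlib.util

# pkg_resources ships with setuptools < 82; on newer releases the module is
# gone, which is exactly what the setuptools<82.0.0 constraint above avoids.
if importlib.util.find_spec("pkg_resources") is None:
    print("pkg_resources unavailable - pin setuptools<82.0.0 or migrate the caller")
else:
    import pkg_resources  # noqa: F401
    print("pkg_resources importable")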

@@ -28,6 +28,7 @@ class AsyncIteratorReader:
) -> None:
"""Initialize the wrapper."""
self._aborted = False
self._exhausted = False
self._loop = loop
self._stream = stream
self._buffer: bytes | None = None
@@ -51,6 +52,8 @@ class AsyncIteratorReader:
"""
result = bytearray()
while n < 0 or len(result) < n:
if self._exhausted:
break
if not self._buffer:
self._next_future = asyncio.run_coroutine_threadsafe(
self._next(), self._loop
@@ -65,6 +68,7 @@ class AsyncIteratorReader:
self._pos = 0
if not self._buffer:
# The stream is exhausted
self._exhausted = True
break
chunk = self._buffer[self._pos : self._pos + n]
result.extend(chunk)

View File
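The backup reader change above adds an _exhausted flag: once the async stream yields an empty chunk, later read() calls bail out immediately instead of scheduling another coroutine on the event loop just to receive an empty result again. A simplified, self-contained sketch of that pattern (no abort handling, plain bytes buffer; the reader is meant to be called from a worker thread while the loop runs elsewhere):

import asyncio
from collections.abc import AsyncIterator


class AsyncIteratorReaderSketch:
    """Blocking file-like reader over an async byte stream (simplified)."""

    def __init__(
        self, loop: asyncio.AbstractEventLoop, stream: AsyncIterator[bytes]
    ) -> None:
        self._loop = loop
        self._stream = stream
        self._buffer = b""
        self._exhausted = False

    async def _next(self) -> bytes:
        return await anext(self._stream, b"")

    def read(self, n: int = -1) -> bytes:
        result = bytearray()
        while n < 0 or len(result) < n:
            if self._exhausted:
                # Once the stream has ended, never schedule another fetch.
                break
            if not self._buffer:
                self._buffer = asyncio.run_coroutine_threadsafe(
                    self._next(), self._loop
                ).result()
                if not self._buffer:
                    self._exhausted = True
                    break
            take = len(self._buffer) if n < 0 else n - len(result)
            chunk, self._buffer = self._buffer[:take], self._buffer[take:]
            result.extend(chunk)
        return bytes(result)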

@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
[project]
name = "homeassistant"
version = "2026.2.0"
version = "2026.2.2"
license = "Apache-2.0"
license-files = ["LICENSE*", "homeassistant/backports/LICENSE*"]
description = "Open-source home automation platform running on Python 3."
@@ -58,7 +58,7 @@ dependencies = [
"lru-dict==1.3.0",
"PyJWT==2.10.1",
# PyJWT has loose dependency. We want the latest one.
"cryptography==46.0.2",
"cryptography==46.0.5",
"Pillow==12.0.0",
"propcache==0.4.1",
"pyOpenSSL==25.3.0",

requirements.txt generated
View File

@@ -21,7 +21,7 @@ bcrypt==5.0.0
certifi>=2021.5.30
ciso8601==2.3.3
cronsim==2.7
cryptography==46.0.2
cryptography==46.0.5
fnv-hash-fast==1.6.0
ha-ffmpeg==3.2.2
hass-nabucasa==1.12.0

requirements_all.txt generated
View File

@@ -190,7 +190,7 @@ aioairzone-cloud==0.7.2
aioairzone==1.0.5
# homeassistant.components.alexa_devices
aioamazondevices==11.1.1
aioamazondevices==11.1.3
# homeassistant.components.ambient_network
# homeassistant.components.ambient_station
@@ -209,7 +209,7 @@ aioaseko==1.0.0
aioasuswrt==1.5.4
# homeassistant.components.husqvarna_automower
aioautomower==2.7.1
aioautomower==2.7.3
# homeassistant.components.azure_devops
aioazuredevops==2.2.2
@@ -293,7 +293,7 @@ aiohue==4.8.0
aioimaplib==2.0.1
# homeassistant.components.immich
aioimmich==0.11.1
aioimmich==0.12.0
# homeassistant.components.apache_kafka
aiokafka==0.10.0
@@ -797,7 +797,7 @@ deluge-client==1.10.2
demetriek==1.3.0
# homeassistant.components.denonavr
denonavr==1.3.1
denonavr==1.3.2
# homeassistant.components.devialet
devialet==1.5.7
@@ -926,7 +926,7 @@ eq3btsmart==2.3.0
esphome-dashboard-api==1.3.0
# homeassistant.components.essent
essent-dynamic-pricing==0.2.7
essent-dynamic-pricing==0.3.1
# homeassistant.components.netgear_lte
eternalegypt==0.0.18
@@ -938,7 +938,7 @@ eufylife-ble-client==0.1.8
# evdev==1.6.1
# homeassistant.components.evohome
evohome-async==1.0.6
evohome-async==1.1.3
# homeassistant.components.bryant_evolution
evolutionhttp==0.0.18
@@ -1105,7 +1105,7 @@ google-nest-sdm==9.1.2
google-photos-library-api==0.12.1
# homeassistant.components.google_air_quality
google_air_quality_api==3.0.0
google_air_quality_api==3.0.1
# homeassistant.components.slide
# homeassistant.components.slide_local
@@ -1303,7 +1303,7 @@ inkbird-ble==1.1.1
insteon-frontend-home-assistant==0.6.1
# homeassistant.components.intellifire
intellifire4py==4.2.1
intellifire4py==4.3.1
# homeassistant.components.iometer
iometer==0.3.0
@@ -1397,7 +1397,7 @@ libpyfoscamcgi==0.0.9
libpyvivotek==0.6.1
# homeassistant.components.libre_hardware_monitor
librehardwaremonitor-api==1.8.4
librehardwaremonitor-api==1.9.1
# homeassistant.components.mikrotik
librouteros==3.2.0
@@ -1653,7 +1653,7 @@ omnilogic==0.4.5
ondilo==0.5.0
# homeassistant.components.onedrive
onedrive-personal-sdk==0.1.1
onedrive-personal-sdk==0.1.2
# homeassistant.components.onvif
onvif-zeep-async==4.0.4
@@ -1974,7 +1974,7 @@ pycsspeechtts==1.0.8
pycync==0.5.0
# homeassistant.components.daikin
pydaikin==2.17.1
pydaikin==2.17.2
# homeassistant.components.danfoss_air
pydanfossair==0.1.0
@@ -2032,7 +2032,7 @@ pyegps==0.2.5
pyemoncms==0.1.3
# homeassistant.components.enphase_envoy
pyenphase==2.4.3
pyenphase==2.4.5
# homeassistant.components.envisalink
pyenvisalink==4.7
@@ -2251,7 +2251,7 @@ pynina==1.0.2
pynintendoauth==1.0.2
# homeassistant.components.nintendo_parental_controls
pynintendoparental==2.3.2
pynintendoparental==2.3.2.1
# homeassistant.components.nobo_hub
pynobo==1.8.1
@@ -2434,7 +2434,7 @@ pysmappee==0.2.29
pysmarlaapi==0.9.3
# homeassistant.components.smartthings
pysmartthings==3.5.1
pysmartthings==3.5.2
# homeassistant.components.smarty
pysmarty2==0.10.3
@@ -2597,7 +2597,7 @@ python-ripple-api==0.0.3
python-roborock==4.8.0
# homeassistant.components.smarttub
python-smarttub==0.0.46
python-smarttub==0.0.47
# homeassistant.components.snoo
python-snoo==0.8.3
@@ -2636,7 +2636,7 @@ pytomorrowio==0.3.6
pytouchline_extended==0.4.5
# homeassistant.components.touchline_sl
pytouchlinesl==0.5.0
pytouchlinesl==0.6.0
# homeassistant.components.traccar
# homeassistant.components.traccar_server
@@ -2754,7 +2754,7 @@ renault-api==0.5.3
renson-endura-delta==1.7.2
# homeassistant.components.reolink
reolink-aio==0.18.2
reolink-aio==0.19.0
# homeassistant.components.idteck_prox
rfk101py==0.0.1
@@ -2891,7 +2891,7 @@ skyboxremote==0.0.6
slack_sdk==3.33.4
# homeassistant.components.xmpp
slixmpp==1.12.0
slixmpp==1.13.2
# homeassistant.components.smart_meter_texas
smart-meter-texas==0.5.5
@@ -3296,7 +3296,7 @@ zeroconf==0.148.0
zeversolar==0.3.2
# homeassistant.components.zha
zha==0.0.89
zha==0.0.90
# homeassistant.components.zhong_hong
zhong-hong-hvac==1.0.13

View File

@@ -181,7 +181,7 @@ aioairzone-cloud==0.7.2
aioairzone==1.0.5
# homeassistant.components.alexa_devices
aioamazondevices==11.1.1
aioamazondevices==11.1.3
# homeassistant.components.ambient_network
# homeassistant.components.ambient_station
@@ -200,7 +200,7 @@ aioaseko==1.0.0
aioasuswrt==1.5.4
# homeassistant.components.husqvarna_automower
aioautomower==2.7.1
aioautomower==2.7.3
# homeassistant.components.azure_devops
aioazuredevops==2.2.2
@@ -281,7 +281,7 @@ aiohue==4.8.0
aioimaplib==2.0.1
# homeassistant.components.immich
aioimmich==0.11.1
aioimmich==0.12.0
# homeassistant.components.apache_kafka
aiokafka==0.10.0
@@ -706,7 +706,7 @@ deluge-client==1.10.2
demetriek==1.3.0
# homeassistant.components.denonavr
denonavr==1.3.1
denonavr==1.3.2
# homeassistant.components.devialet
devialet==1.5.7
@@ -817,7 +817,7 @@ eq3btsmart==2.3.0
esphome-dashboard-api==1.3.0
# homeassistant.components.essent
essent-dynamic-pricing==0.2.7
essent-dynamic-pricing==0.3.1
# homeassistant.components.netgear_lte
eternalegypt==0.0.18
@@ -826,7 +826,7 @@ eternalegypt==0.0.18
eufylife-ble-client==0.1.8
# homeassistant.components.evohome
evohome-async==1.0.6
evohome-async==1.1.3
# homeassistant.components.bryant_evolution
evolutionhttp==0.0.18
@@ -981,7 +981,7 @@ google-nest-sdm==9.1.2
google-photos-library-api==0.12.1
# homeassistant.components.google_air_quality
google_air_quality_api==3.0.0
google_air_quality_api==3.0.1
# homeassistant.components.slide
# homeassistant.components.slide_local
@@ -1149,7 +1149,7 @@ inkbird-ble==1.1.1
insteon-frontend-home-assistant==0.6.1
# homeassistant.components.intellifire
intellifire4py==4.2.1
intellifire4py==4.3.1
# homeassistant.components.iometer
iometer==0.3.0
@@ -1228,7 +1228,7 @@ libpyfoscamcgi==0.0.9
libpyvivotek==0.6.1
# homeassistant.components.libre_hardware_monitor
librehardwaremonitor-api==1.8.4
librehardwaremonitor-api==1.9.1
# homeassistant.components.mikrotik
librouteros==3.2.0
@@ -1436,7 +1436,7 @@ omnilogic==0.4.5
ondilo==0.5.0
# homeassistant.components.onedrive
onedrive-personal-sdk==0.1.1
onedrive-personal-sdk==0.1.2
# homeassistant.components.onvif
onvif-zeep-async==4.0.4
@@ -1687,7 +1687,7 @@ pycsspeechtts==1.0.8
pycync==0.5.0
# homeassistant.components.daikin
pydaikin==2.17.1
pydaikin==2.17.2
# homeassistant.components.deako
pydeako==0.6.0
@@ -1730,7 +1730,7 @@ pyegps==0.2.5
pyemoncms==0.1.3
# homeassistant.components.enphase_envoy
pyenphase==2.4.3
pyenphase==2.4.5
# homeassistant.components.everlights
pyeverlights==0.1.0
@@ -1907,7 +1907,7 @@ pynina==1.0.2
pynintendoauth==1.0.2
# homeassistant.components.nintendo_parental_controls
pynintendoparental==2.3.2
pynintendoparental==2.3.2.1
# homeassistant.components.nobo_hub
pynobo==1.8.1
@@ -2060,7 +2060,7 @@ pysmappee==0.2.29
pysmarlaapi==0.9.3
# homeassistant.components.smartthings
pysmartthings==3.5.1
pysmartthings==3.5.2
# homeassistant.components.smarty
pysmarty2==0.10.3
@@ -2187,7 +2187,7 @@ python-rabbitair==0.0.8
python-roborock==4.8.0
# homeassistant.components.smarttub
python-smarttub==0.0.46
python-smarttub==0.0.47
# homeassistant.components.snoo
python-snoo==0.8.3
@@ -2217,7 +2217,7 @@ pytile==2024.12.0
pytomorrowio==0.3.6
# homeassistant.components.touchline_sl
pytouchlinesl==0.5.0
pytouchlinesl==0.6.0
# homeassistant.components.traccar
# homeassistant.components.traccar_server
@@ -2320,7 +2320,7 @@ renault-api==0.5.3
renson-endura-delta==1.7.2
# homeassistant.components.reolink
reolink-aio==0.18.2
reolink-aio==0.19.0
# homeassistant.components.rflink
rflink==0.0.67
@@ -2763,7 +2763,7 @@ zeroconf==0.148.0
zeversolar==0.3.2
# homeassistant.components.zha
zha==0.0.89
zha==0.0.90
# homeassistant.components.zwave_js
zwave-js-server-python==0.68.0

View File

@@ -79,9 +79,9 @@ httplib2>=0.19.0
# gRPC is an implicit dependency that we want to make explicit so we manage
# upgrades intentionally. It is a large package to build from source and we
# want to ensure we have wheels built.
grpcio==1.75.1
grpcio-status==1.75.1
grpcio-reflection==1.75.1
grpcio==1.78.0
grpcio-status==1.78.0
grpcio-reflection==1.78.0
# This is an old unmaintained library and is replaced with pycryptodome
pycrypto==1000000000.0.0
@@ -225,6 +225,9 @@ aiomqtt>=2.5.0
# used by sharkiq==1.5.0
# https://github.com/auth0/auth0-python/releases/tag/5.0.0
auth0-python<5.0
# Setuptools >=82.0.0 doesn't contain pkg_resources anymore
setuptools<82.0.0
"""
GENERATED_MESSAGE = (

View File

@@ -955,3 +955,199 @@ async def test_upload_cancelled(
# CancelledError propagates up and causes a 500 error
assert resp.status == 500
assert any("cancelled" in msg for msg in caplog.messages)
async def test_metadata_download_timeout_during_list(
hass: HomeAssistant,
hass_ws_client: WebSocketGenerator,
caplog: pytest.LogCaptureFixture,
) -> None:
"""Test that metadata download timeout during list is handled gracefully."""
client = await hass_ws_client(hass)
mock_metadata = Mock()
mock_metadata.file_name = "testprefix/slow.metadata.json"
mock_tar = Mock()
mock_tar.file_name = "testprefix/slow.tar"
mock_tar.size = TEST_BACKUP.size
def mock_ls(_self, _prefix=""):
return iter([(mock_metadata, None), (mock_tar, None)])
with (
patch.object(BucketSimulator, "ls", mock_ls),
patch(
"homeassistant.components.backblaze_b2.backup.asyncio.wait_for",
side_effect=TimeoutError,
),
caplog.at_level(logging.WARNING),
):
await client.send_json_auto_id({"type": "backup/info"})
response = await client.receive_json()
assert response["success"]
# The backup should not appear in the list due to timeout
assert len(response["result"]["backups"]) == 0
assert any("Timeout downloading metadata file" in msg for msg in caplog.messages)
async def test_metadata_download_timeout_during_find_by_id(
hass: HomeAssistant,
hass_ws_client: WebSocketGenerator,
caplog: pytest.LogCaptureFixture,
) -> None:
"""Test that metadata download timeout during find by ID is handled gracefully."""
client = await hass_ws_client(hass)
mock_metadata = Mock()
mock_metadata.file_name = f"testprefix/{TEST_BACKUP.backup_id}.metadata.json"
mock_tar = Mock()
mock_tar.file_name = f"testprefix/{TEST_BACKUP.backup_id}.tar"
mock_tar.size = TEST_BACKUP.size
def mock_ls(_self, _prefix=""):
return iter([(mock_metadata, None), (mock_tar, None)])
with (
patch.object(BucketSimulator, "ls", mock_ls),
patch(
"homeassistant.components.backblaze_b2.backup.asyncio.wait_for",
side_effect=TimeoutError,
),
caplog.at_level(logging.WARNING),
):
await client.send_json_auto_id(
{"type": "backup/details", "backup_id": TEST_BACKUP.backup_id}
)
response = await client.receive_json()
assert response["success"]
# The backup should not be found due to timeout
assert response["result"]["backup"] is None
assert any(
"Timeout downloading metadata file" in msg
and "while searching for backup" in msg
for msg in caplog.messages
)
async def test_metadata_timeout_does_not_block_healthy_backups(
hass: HomeAssistant,
hass_ws_client: WebSocketGenerator,
caplog: pytest.LogCaptureFixture,
) -> None:
"""Test that a timed out metadata download doesn't prevent listing other backups."""
client = await hass_ws_client(hass)
mock_hanging_metadata = Mock()
mock_hanging_metadata.file_name = "testprefix/hanging_backup.metadata.json"
mock_hanging_metadata.download = Mock(side_effect=B2Error("SSL failure"))
mock_hanging_tar = Mock()
mock_hanging_tar.file_name = "testprefix/hanging_backup.tar"
mock_hanging_tar.size = 1000
mock_healthy_metadata = Mock()
mock_healthy_metadata.file_name = (
f"testprefix/{TEST_BACKUP.backup_id}.metadata.json"
)
mock_healthy_download = Mock()
mock_healthy_response = Mock()
mock_healthy_response.content = json.dumps(BACKUP_METADATA).encode()
mock_healthy_download.response = mock_healthy_response
mock_healthy_metadata.download = Mock(return_value=mock_healthy_download)
mock_healthy_tar = Mock()
mock_healthy_tar.file_name = f"testprefix/{TEST_BACKUP.backup_id}.tar"
mock_healthy_tar.size = TEST_BACKUP.size
def mock_ls(_self, _prefix=""):
return iter(
[
(mock_hanging_metadata, None),
(mock_hanging_tar, None),
(mock_healthy_metadata, None),
(mock_healthy_tar, None),
]
)
call_count = 0
original_wait_for = asyncio.wait_for
async def wait_for_first_timeout(coro, *, timeout=None):
nonlocal call_count
call_count += 1
if call_count == 1:
raise TimeoutError
return await original_wait_for(coro, timeout=timeout)
with (
patch.object(BucketSimulator, "ls", mock_ls),
patch(
"homeassistant.components.backblaze_b2.backup.asyncio.wait_for",
wait_for_first_timeout,
),
caplog.at_level(logging.WARNING),
):
await client.send_json_auto_id({"type": "backup/info"})
response = await client.receive_json()
assert response["success"]
backups = response["result"]["backups"]
assert len(backups) == 1
assert backups[0]["backup_id"] == TEST_BACKUP.backup_id
assert any("Timeout downloading metadata file" in msg for msg in caplog.messages)
async def test_metadata_download_timeout_during_get_backup(
hass: HomeAssistant,
hass_ws_client: WebSocketGenerator,
mock_config_entry: MockConfigEntry,
) -> None:
"""Test timeout on metadata re-download after file is found."""
client = await hass_ws_client(hass)
mock_metadata = Mock()
mock_metadata.file_name = f"testprefix/{TEST_BACKUP.backup_id}.metadata.json"
mock_download = Mock()
mock_response = Mock()
mock_response.content = json.dumps(BACKUP_METADATA).encode()
mock_download.response = mock_response
mock_metadata.download = Mock(return_value=mock_download)
mock_tar = Mock()
mock_tar.file_name = f"testprefix/{TEST_BACKUP.backup_id}.tar"
mock_tar.size = TEST_BACKUP.size
def mock_ls(_self, _prefix=""):
return iter([(mock_metadata, None), (mock_tar, None)])
call_count = 0
original_wait_for = asyncio.wait_for
async def wait_for_second_timeout(coro, *, timeout=None):
nonlocal call_count
call_count += 1
if call_count >= 2:
raise TimeoutError
return await original_wait_for(coro, timeout=timeout)
with (
patch.object(BucketSimulator, "ls", mock_ls),
patch(
"homeassistant.components.backblaze_b2.backup.asyncio.wait_for",
wait_for_second_timeout,
),
patch("homeassistant.components.backblaze_b2.backup.CACHE_TTL", 0),
):
await client.send_json_auto_id(
{"type": "backup/details", "backup_id": TEST_BACKUP.backup_id}
)
response = await client.receive_json()
assert response["success"]
assert (
f"{DOMAIN}.{mock_config_entry.entry_id}" in response["result"]["agent_errors"]
)

View File
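The four tests above simulate asyncio.wait_for timing out around the B2 metadata download. The production pattern they exercise is roughly: bound the (executor-run) SDK download with a timeout, and on TimeoutError log the warning the tests assert on and drop that one backup from the result instead of hanging backup/info. A hedged sketch with placeholder names rather than the real B2 SDK API:

import asyncio
import logging
from collections.abc import Awaitable, Callable

_LOGGER = logging.getLogger(__name__)

METADATA_DOWNLOAD_TIMEOUT = 30  # seconds; assumed value for this sketch


async def load_metadata_or_skip(
    file_name: str, download: Callable[[], Awaitable[bytes]]
) -> bytes | None:
    """Bound a metadata download with a timeout; return None to skip the backup."""
    try:
        return await asyncio.wait_for(download(), timeout=METADATA_DOWNLOAD_TIMEOUT)
    except TimeoutError:
        # Matches the warning the tests above assert on; only the affected
        # backup is omitted, the rest of the listing still goes through.
        _LOGGER.warning("Timeout downloading metadata file %s", file_name)
        return None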

@@ -3,6 +3,7 @@
from __future__ import annotations
import base64
import datetime
from pathlib import Path
from unittest.mock import AsyncMock, MagicMock, patch
@@ -223,9 +224,40 @@ async def test_prepare_chat_for_generation_passes_messages_through(
) -> None:
"""Test that prepared messages are forwarded unchanged."""
chat_log = conversation.ChatLog(hass, "conversation-id")
chat_log.async_add_assistant_content_without_tools(
conversation.AssistantContent(agent_id="agent", content="Ready")
chat_log.async_add_user_content(
conversation.UserContent(content="What time is it?")
)
chat_log.async_add_assistant_content_without_tools(
conversation.AssistantContent(
agent_id="agent",
tool_calls=[
llm.ToolInput(
tool_name="HassGetCurrentTime",
tool_args={},
id="mock-tool-call-id",
external=True,
)
],
)
)
chat_log.async_add_assistant_content_without_tools(
conversation.ToolResultContent(
agent_id="agent",
tool_call_id="mock-tool-call-id",
tool_name="HassGetCurrentTime",
tool_result={
"speech": {"plain": {"speech": "12:00 PM", "extra_data": None}},
"response_type": "action_done",
"speech_slots": {"time": datetime.time(12, 0)},
"data": {"targets": [], "success": [], "failed": []},
},
)
)
chat_log.async_add_assistant_content_without_tools(
conversation.AssistantContent(agent_id="agent", content="12:00 PM")
)
messages = _convert_content_to_param(chat_log.content)
response = await cloud_entity._prepare_chat_for_generation(chat_log, messages)

View File

@@ -367,6 +367,57 @@ async def test_agents_upload_network_failure(
assert "Upload failed for cloudflare_r2" in caplog.text
async def test_multipart_upload_consistent_part_sizes(
hass: HomeAssistant,
mock_client: MagicMock,
mock_config_entry: MockConfigEntry,
) -> None:
"""Test that multipart upload uses consistent part sizes.
S3/R2 requires all non-trailing parts to have the same size. This test
verifies that varying chunk sizes still result in consistent part sizes.
"""
agent = R2BackupAgent(hass, mock_config_entry)
# simulate varying chunk data sizes
# total data: 12 + 12 + 10 + 12 + 5 = 51 MiB
chunk_sizes = [12, 12, 10, 12, 5] # in units of 1 MiB
mib = 2**20
async def mock_stream():
for size in chunk_sizes:
yield b"x" * (size * mib)
async def open_stream():
return mock_stream()
# Record the sizes of each uploaded part
uploaded_part_sizes: list[int] = []
async def record_upload_part(**kwargs):
body = kwargs.get("Body", b"")
uploaded_part_sizes.append(len(body))
return {"ETag": f"etag-{len(uploaded_part_sizes)}"}
mock_client.upload_part.side_effect = record_upload_part
await agent._upload_multipart("test.tar", open_stream)
# Verify that all non-trailing parts have the same size
assert len(uploaded_part_sizes) >= 2, "Expected at least 2 parts"
non_trailing_parts = uploaded_part_sizes[:-1]
assert all(size == MULTIPART_MIN_PART_SIZE_BYTES for size in non_trailing_parts), (
f"All non-trailing parts should be {MULTIPART_MIN_PART_SIZE_BYTES} bytes, got {non_trailing_parts}"
)
# Verify the trailing part contains the remainder
total_data = sum(chunk_sizes) * mib
expected_trailing = total_data % MULTIPART_MIN_PART_SIZE_BYTES
if expected_trailing == 0:
expected_trailing = MULTIPART_MIN_PART_SIZE_BYTES
assert uploaded_part_sizes[-1] == expected_trailing
async def test_agents_download(
hass_client: ClientSessionGenerator,
mock_client: MagicMock,

View File
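The new R2 test encodes the constraint stated in its docstring: every non-trailing part of the multipart upload must have the same size, no matter how the incoming backup stream happens to be chunked. A hedged sketch of re-chunking an async stream into equal-size parts plus a smaller tail (the 5 MiB constant is illustrative, not the integration's MULTIPART_MIN_PART_SIZE_BYTES value):

from collections.abc import AsyncIterator

PART_SIZE = 5 * 2**20  # illustrative fixed part size (5 MiB)


async def iter_fixed_parts(stream: AsyncIterator[bytes]) -> AsyncIterator[bytes]:
    """Re-chunk arbitrarily sized chunks into equal PART_SIZE parts plus a tail."""
    buffer = bytearray()
    async for chunk in stream:
        buffer.extend(chunk)
        while len(buffer) >= PART_SIZE:
            # Emit only full-size parts mid-stream so every non-trailing part
            # has the same length, which is what the test above asserts.
            yield bytes(buffer[:PART_SIZE])
            del buffer[:PART_SIZE]
    if buffer:
        # The final (trailing) part may be smaller than PART_SIZE.
        yield bytes(buffer)

With the chunk sizes from the test (12 + 12 + 10 + 12 + 5 = 51 MiB) and a 5 MiB part size, this yields ten equal parts plus a 1 MiB tail, matching the total % part_size assertion above.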

@@ -168,7 +168,7 @@ async def setup_evohome(
"evohomeasync2.auth.CredentialsManagerBase._post_request",
mock_post_request(install),
),
patch("evohome.auth.AbstractAuth._make_request", mock_make_request(install)),
patch("_evohome.auth.AbstractAuth._make_request", mock_make_request(install)),
):
evo: EvohomeClient | None = None

View File

@@ -31,13 +31,9 @@ _MSG_USR = (
"special characters accepted via the vendor's website are not valid here."
)
LOG_HINT_429_CREDS = ("evohome.credentials", logging.ERROR, _MSG_429)
LOG_HINT_OTH_CREDS = ("evohome.credentials", logging.ERROR, _MSG_OTH)
LOG_HINT_USR_CREDS = ("evohome.credentials", logging.ERROR, _MSG_USR)
LOG_HINT_429_AUTH = ("evohome.auth", logging.ERROR, _MSG_429)
LOG_HINT_OTH_AUTH = ("evohome.auth", logging.ERROR, _MSG_OTH)
LOG_HINT_USR_AUTH = ("evohome.auth", logging.ERROR, _MSG_USR)
LOG_HINT_429_AUTH = ("evohomeasync2.auth", logging.ERROR, _MSG_429)
LOG_HINT_OTH_AUTH = ("evohomeasync2.auth", logging.ERROR, _MSG_OTH)
LOG_HINT_USR_AUTH = ("evohomeasync2.auth", logging.ERROR, _MSG_USR)
LOG_FAIL_CONNECTION = (
"homeassistant.components.evohome",
@@ -110,10 +106,10 @@ EXC_BAD_GATEWAY = aiohttp.ClientResponseError(
)
AUTHENTICATION_TESTS: dict[Exception, list] = {
EXC_BAD_CONNECTION: [LOG_HINT_OTH_CREDS, LOG_FAIL_CONNECTION, LOG_SETUP_FAILED],
EXC_BAD_CREDENTIALS: [LOG_HINT_USR_CREDS, LOG_FAIL_CREDENTIALS, LOG_SETUP_FAILED],
EXC_BAD_GATEWAY: [LOG_HINT_OTH_CREDS, LOG_FAIL_GATEWAY, LOG_SETUP_FAILED],
EXC_TOO_MANY_REQUESTS: [LOG_HINT_429_CREDS, LOG_FAIL_TOO_MANY, LOG_SETUP_FAILED],
EXC_BAD_CONNECTION: [LOG_HINT_OTH_AUTH, LOG_FAIL_CONNECTION, LOG_SETUP_FAILED],
EXC_BAD_CREDENTIALS: [LOG_HINT_USR_AUTH, LOG_FAIL_CREDENTIALS, LOG_SETUP_FAILED],
EXC_BAD_GATEWAY: [LOG_HINT_OTH_AUTH, LOG_FAIL_GATEWAY, LOG_SETUP_FAILED],
EXC_TOO_MANY_REQUESTS: [LOG_HINT_429_AUTH, LOG_FAIL_TOO_MANY, LOG_SETUP_FAILED],
}
CLIENT_REQUEST_TESTS: dict[Exception, list] = {
@@ -137,7 +133,8 @@ async def test_authentication_failure_v2(
with (
patch(
"evohome.credentials.CredentialsManagerBase._request", side_effect=exception
"_evohome.credentials.CredentialsManagerBase._request",
side_effect=exception,
),
caplog.at_level(logging.WARNING),
):
@@ -165,7 +162,7 @@ async def test_client_request_failure_v2(
"evohomeasync2.auth.CredentialsManagerBase._post_request",
mock_post_request("default"),
),
patch("evohome.auth.AbstractAuth._request", side_effect=exception),
patch("_evohome.auth.AbstractAuth._request", side_effect=exception),
caplog.at_level(logging.WARNING),
):
result = await async_setup_component(hass, DOMAIN, {DOMAIN: config})

View File

@@ -155,21 +155,21 @@ async def test_automatic_offset(hass: HomeAssistant, fritz: Mock) -> None:
async def test_update_error(hass: HomeAssistant, fritz: Mock) -> None:
"""Test update with error."""
device = FritzDeviceClimateMock()
fritz().update_devices.side_effect = HTTPError("Boom")
fritz().update_devices.side_effect = ["", HTTPError("Boom"), ""]
entry = await setup_config_entry(
hass, MOCK_CONFIG[DOMAIN][CONF_DEVICES][0], ENTITY_ID, device, fritz
)
assert entry.state is ConfigEntryState.SETUP_RETRY
assert entry.state is ConfigEntryState.LOADED
assert fritz().update_devices.call_count == 2
assert fritz().login.call_count == 2
assert fritz().update_devices.call_count == 1
assert fritz().login.call_count == 1
next_update = dt_util.utcnow() + timedelta(seconds=200)
next_update = dt_util.utcnow() + timedelta(seconds=35)
async_fire_time_changed(hass, next_update)
await hass.async_block_till_done(wait_background_tasks=True)
assert fritz().update_devices.call_count == 4
assert fritz().login.call_count == 4
assert fritz().update_devices.call_count == 3
assert fritz().login.call_count == 2
@pytest.mark.parametrize(

View File

@@ -40,14 +40,19 @@ async def test_coordinator_update_after_reboot(
unique_id="any",
)
entry.add_to_hass(hass)
fritz().update_devices.side_effect = [HTTPError(), ""]
fritz().update_devices.side_effect = ["", HTTPError()]
assert await hass.config_entries.async_setup(entry.entry_id)
assert fritz().update_devices.call_count == 2
assert fritz().update_devices.call_count == 1
assert fritz().update_templates.call_count == 1
assert fritz().get_devices.call_count == 1
assert fritz().get_templates.call_count == 1
assert fritz().login.call_count == 2
assert fritz().login.call_count == 1
async_fire_time_changed(hass, utcnow() + timedelta(seconds=35))
await hass.async_block_till_done(wait_background_tasks=True)
assert entry.state is ConfigEntryState.SETUP_RETRY
async def test_coordinator_update_after_password_change(
@@ -60,14 +65,10 @@ async def test_coordinator_update_after_password_change(
unique_id="any",
)
entry.add_to_hass(hass)
fritz().update_devices.side_effect = HTTPError()
fritz().login.side_effect = ["", LoginError("some_user")]
fritz().login.side_effect = [LoginError("some_user")]
assert not await hass.config_entries.async_setup(entry.entry_id)
assert fritz().update_devices.call_count == 1
assert fritz().get_devices.call_count == 0
assert fritz().get_templates.call_count == 0
assert fritz().login.call_count == 2
assert entry.state is ConfigEntryState.SETUP_ERROR
async def test_coordinator_update_when_unreachable(
@@ -80,9 +81,10 @@ async def test_coordinator_update_when_unreachable(
unique_id="any",
)
entry.add_to_hass(hass)
fritz().update_devices.side_effect = [ConnectionError(), ""]
fritz().update_devices.side_effect = [ConnectionError()]
assert not await hass.config_entries.async_setup(entry.entry_id)
await hass.async_block_till_done(wait_background_tasks=True)
assert entry.state is ConfigEntryState.SETUP_RETRY

View File

@@ -248,20 +248,21 @@ async def test_update_error(hass: HomeAssistant, fritz: Mock) -> None:
device.get_colors.return_value = {
"Red": [("100", "70", "10"), ("100", "50", "10"), ("100", "30", "10")]
}
fritz().update_devices.side_effect = HTTPError("Boom")
fritz().update_devices.side_effect = ["", HTTPError("Boom"), ""]
entry = await setup_config_entry(
hass, MOCK_CONFIG[DOMAIN][CONF_DEVICES][0], ENTITY_ID, device, fritz
hass, MOCK_CONFIG[DOMAIN][CONF_DEVICES][0], device=device, fritz=fritz
)
assert entry.state is ConfigEntryState.SETUP_RETRY
assert fritz().update_devices.call_count == 2
assert fritz().login.call_count == 2
assert entry.state is ConfigEntryState.LOADED
next_update = dt_util.utcnow() + timedelta(seconds=200)
assert fritz().update_devices.call_count == 1
assert fritz().login.call_count == 1
next_update = dt_util.utcnow() + timedelta(seconds=35)
async_fire_time_changed(hass, next_update)
await hass.async_block_till_done(wait_background_tasks=True)
assert fritz().update_devices.call_count == 4
assert fritz().login.call_count == 4
assert fritz().update_devices.call_count == 3
assert fritz().login.call_count == 2
async def test_discover_new_device(hass: HomeAssistant, fritz: Mock) -> None:

View File

@@ -80,20 +80,21 @@ async def test_update(hass: HomeAssistant, fritz: Mock) -> None:
async def test_update_error(hass: HomeAssistant, fritz: Mock) -> None:
"""Test update with error."""
device = FritzDeviceSensorMock()
fritz().update_devices.side_effect = HTTPError("Boom")
fritz().update_devices.side_effect = ["", HTTPError("Boom"), ""]
entry = await setup_config_entry(
hass, MOCK_CONFIG[DOMAIN][CONF_DEVICES][0], ENTITY_ID, device, fritz
hass, MOCK_CONFIG[DOMAIN][CONF_DEVICES][0], device=device, fritz=fritz
)
assert entry.state is ConfigEntryState.SETUP_RETRY
assert fritz().update_devices.call_count == 2
assert fritz().login.call_count == 2
assert entry.state is ConfigEntryState.LOADED
next_update = dt_util.utcnow() + timedelta(seconds=200)
assert fritz().update_devices.call_count == 1
assert fritz().login.call_count == 1
next_update = dt_util.utcnow() + timedelta(seconds=35)
async_fire_time_changed(hass, next_update)
await hass.async_block_till_done(wait_background_tasks=True)
assert fritz().update_devices.call_count == 4
assert fritz().login.call_count == 4
assert fritz().update_devices.call_count == 3
assert fritz().login.call_count == 2
async def test_discover_new_device(hass: HomeAssistant, fritz: Mock) -> None:

View File

@@ -136,20 +136,21 @@ async def test_update(hass: HomeAssistant, fritz: Mock) -> None:
async def test_update_error(hass: HomeAssistant, fritz: Mock) -> None:
"""Test update with error."""
device = FritzDeviceSwitchMock()
fritz().update_devices.side_effect = HTTPError("Boom")
fritz().update_devices.side_effect = ["", HTTPError("Boom"), ""]
entry = await setup_config_entry(
hass, MOCK_CONFIG[DOMAIN][CONF_DEVICES][0], device=device, fritz=fritz
)
assert entry.state is ConfigEntryState.SETUP_RETRY
assert fritz().update_devices.call_count == 2
assert fritz().login.call_count == 2
assert entry.state is ConfigEntryState.LOADED
next_update = dt_util.utcnow() + timedelta(seconds=200)
assert fritz().update_devices.call_count == 1
assert fritz().login.call_count == 1
next_update = dt_util.utcnow() + timedelta(seconds=35)
async_fire_time_changed(hass, next_update)
await hass.async_block_till_done(wait_background_tasks=True)
assert fritz().update_devices.call_count == 4
assert fritz().login.call_count == 4
assert fritz().update_devices.call_count == 3
assert fritz().login.call_count == 2
async def test_assume_device_unavailable(hass: HomeAssistant, fritz: Mock) -> None:

View File

@@ -1,6 +1,57 @@
# serializer version: 1
# name: test_function_call
list([
Content(
parts=[
Part(
text='What time is it?'
),
],
role='user'
),
Content(
parts=[
Part(
function_call=FunctionCall(
args={},
name='HassGetCurrentTime'
)
),
],
role='model'
),
Content(
parts=[
Part(
function_response=FunctionResponse(
name='HassGetCurrentTime',
response={
'data': {
'failed': [],
'success': [],
'targets': []
},
'response_type': 'action_done',
'speech': {
'plain': {<... 2 items at Max depth ...>}
},
'speech_slots': {
'time': '16:24:17.813343'
}
}
)
),
],
role='user'
),
Content(
parts=[
Part(
text='4:24 PM'
),
],
role='model'
),
Content(
parts=[
Part(

View File

@@ -1,5 +1,6 @@
"""Tests for the Google Generative AI Conversation integration conversation platform."""
import datetime
from unittest.mock import AsyncMock, patch
from freezegun import freeze_time
@@ -8,7 +9,11 @@ import pytest
from syrupy.assertion import SnapshotAssertion
from homeassistant.components import conversation
from homeassistant.components.conversation import UserContent
from homeassistant.components.conversation import (
AssistantContent,
ToolResultContent,
UserContent,
)
from homeassistant.components.google_generative_ai_conversation.entity import (
ERROR_GETTING_RESPONSE,
_escape_decode,
@@ -17,6 +22,7 @@ from homeassistant.components.google_generative_ai_conversation.entity import (
from homeassistant.const import CONF_LLM_HASS_API
from homeassistant.core import Context, HomeAssistant
from homeassistant.helpers import intent
from homeassistant.helpers.llm import ToolInput
from . import API_ERROR_500, CLIENT_ERROR_BAD_REQUEST
@@ -87,6 +93,41 @@ async def test_function_call(
agent_id = "conversation.google_ai_conversation"
context = Context()
# Add some pre-existing content from conversation.default_agent
mock_chat_log.async_add_user_content(UserContent(content="What time is it?"))
mock_chat_log.async_add_assistant_content_without_tools(
AssistantContent(
agent_id=agent_id,
tool_calls=[
ToolInput(
tool_name="HassGetCurrentTime",
tool_args={},
id="01KGW7TFC1VVVK7ANHVMDA4DJ6",
external=True,
)
],
)
)
mock_chat_log.async_add_assistant_content_without_tools(
ToolResultContent(
agent_id=agent_id,
tool_call_id="01KGW7TFC1VVVK7ANHVMDA4DJ6",
tool_name="HassGetCurrentTime",
tool_result={
"speech": {"plain": {"speech": "4:24 PM", "extra_data": None}},
"response_type": "action_done",
"speech_slots": {"time": datetime.time(16, 24, 17, 813343)},
"data": {"targets": [], "success": [], "failed": []},
},
)
)
mock_chat_log.async_add_assistant_content_without_tools(
AssistantContent(
agent_id=agent_id,
content="4:24 PM",
)
)
messages = [
# Function call stream
[

View File

@@ -48,13 +48,14 @@ def mock_api() -> Generator[MagicMock]:
mock_api_instance = MagicMock()
# Mock the API response data
# Today's prices: 0.20 + (hour * 0.01)
# API returns prices in Cent/kWh (e.g., 25.0 Cent/kWh = 0.25 €/kWh)
# Today's prices: 20 + (hour * 1) Cent/kWh
today_prices = {
f"gpe_price_{hour:02d}": 0.20 + (hour * 0.01) for hour in range(24)
f"gpe_price_{hour:02d}": 20.0 + (hour * 1.0) for hour in range(24)
}
# Tomorrow's prices: 0.25 + (hour * 0.01) (slightly different for testing)
# Tomorrow's prices: 25 + (hour * 1) Cent/kWh (slightly different for testing)
tomorrow_prices = {
f"gpe_price_{hour:02d}_tomorrow": 0.25 + (hour * 0.01) for hour in range(24)
f"gpe_price_{hour:02d}_tomorrow": 25.0 + (hour * 1.0) for hour in range(24)
}
# Combine all prices
@@ -63,24 +64,24 @@ def mock_api() -> Generator[MagicMock]:
# Make get_electricity_prices async since coordinator uses it
mock_api_instance.get_electricity_prices = AsyncMock(return_value=all_prices)
# Mock the calculation methods to return actual values (not coroutines)
# Highest price today: 0.20 + (23 * 0.01) = 0.43 at hour 23
mock_api_instance.get_highest_price_today.return_value = 0.43
mock_api_instance.get_highest_price_today_with_hour.return_value = (0.43, 23)
# Mock the calculation methods to return actual values in Cent/kWh (not coroutines)
# Highest price today: 20 + (23 * 1) = 43 Cent/kWh at hour 23
mock_api_instance.get_highest_price_today.return_value = 43.0
mock_api_instance.get_highest_price_today_with_hour.return_value = (43.0, 23)
# Lowest price day (6-22): 0.20 + (6 * 0.01) = 0.26 at hour 6
mock_api_instance.get_lowest_price_day.return_value = 0.26
mock_api_instance.get_lowest_price_day_with_hour.return_value = (0.26, 6)
# Lowest price day (6-22): 20 + (6 * 1) = 26 Cent/kWh at hour 6
mock_api_instance.get_lowest_price_day.return_value = 26.0
mock_api_instance.get_lowest_price_day_with_hour.return_value = (26.0, 6)
# Lowest price night (22-6): 0.20 + (0 * 0.01) = 0.20 at hour 0
mock_api_instance.get_lowest_price_night.return_value = 0.20
mock_api_instance.get_lowest_price_night_with_hour.return_value = (0.20, 0)
# Lowest price night (22-6): 20 + (0 * 1) = 20 Cent/kWh at hour 0
mock_api_instance.get_lowest_price_night.return_value = 20.0
mock_api_instance.get_lowest_price_night_with_hour.return_value = (20.0, 0)
# Current price depends on the hour passed to the method
# Mock get_current_price to return the price for the requested hour
# Mock get_current_price to return the price for the requested hour in Cent/kWh
def get_current_price_mock(data, hour):
"""Return price for a specific hour."""
return 0.20 + (hour * 0.01)
"""Return price for a specific hour in Cent/kWh."""
return 20.0 + (hour * 1.0)
mock_api_instance.get_current_price.side_effect = get_current_price_mock

View File
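The fixture rewrite above switches the mocked values from €/kWh to Cent/kWh because, per the new comment, that is the unit the real API returns (25.0 Cent/kWh = 0.25 €/kWh). The corresponding conversion is a simple divide-by-100:

def cent_per_kwh_to_eur_per_kwh(price_cent: float) -> float:
    """Convert the API's Cent/kWh figures to EUR/kWh (25.0 ct -> 0.25 EUR)."""
    return price_cent / 100


assert cent_per_kwh_to_eur_per_kwh(25.0) == 0.25
assert cent_per_kwh_to_eur_per_kwh(43.0) == 0.43  # highest price in the fixture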

@@ -439,7 +439,7 @@
'state': '5.030',
})
# ---
# name: test_sensors_are_created[sensor.gaming_pc_msi_mag_b650m_mortar_wifi_ms_7d76_cpu_fan_fan-entry]
# name: test_sensors_are_created[sensor.gaming_pc_msi_mag_b650m_mortar_wifi_ms_7d76_cpu_fan_speed-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
@@ -454,7 +454,7 @@
'disabled_by': None,
'domain': 'sensor',
'entity_category': None,
'entity_id': 'sensor.gaming_pc_msi_mag_b650m_mortar_wifi_ms_7d76_cpu_fan_fan',
'entity_id': 'sensor.gaming_pc_msi_mag_b650m_mortar_wifi_ms_7d76_cpu_fan_speed',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
@@ -462,12 +462,12 @@
'labels': set({
}),
'name': None,
'object_id_base': 'CPU Fan Fan',
'object_id_base': 'CPU Fan Speed',
'options': dict({
}),
'original_device_class': None,
'original_icon': None,
'original_name': 'CPU Fan Fan',
'original_name': 'CPU Fan Speed',
'platform': 'libre_hardware_monitor',
'previous_unique_id': None,
'suggested_object_id': None,
@@ -477,17 +477,17 @@
'unit_of_measurement': 'RPM',
})
# ---
# name: test_sensors_are_created[sensor.gaming_pc_msi_mag_b650m_mortar_wifi_ms_7d76_cpu_fan_fan-state]
# name: test_sensors_are_created[sensor.gaming_pc_msi_mag_b650m_mortar_wifi_ms_7d76_cpu_fan_speed-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'friendly_name': '[GAMING-PC] MSI MAG B650M MORTAR WIFI (MS-7D76) CPU Fan Fan',
'friendly_name': '[GAMING-PC] MSI MAG B650M MORTAR WIFI (MS-7D76) CPU Fan Speed',
'max_value': '0',
'min_value': '0',
'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
'unit_of_measurement': 'RPM',
}),
'context': <ANY>,
'entity_id': 'sensor.gaming_pc_msi_mag_b650m_mortar_wifi_ms_7d76_cpu_fan_fan',
'entity_id': 'sensor.gaming_pc_msi_mag_b650m_mortar_wifi_ms_7d76_cpu_fan_speed',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
@@ -549,7 +549,7 @@
'state': '55.0',
})
# ---
# name: test_sensors_are_created[sensor.gaming_pc_msi_mag_b650m_mortar_wifi_ms_7d76_pump_fan_fan-entry]
# name: test_sensors_are_created[sensor.gaming_pc_msi_mag_b650m_mortar_wifi_ms_7d76_pump_fan_speed-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
@@ -564,7 +564,7 @@
'disabled_by': None,
'domain': 'sensor',
'entity_category': None,
'entity_id': 'sensor.gaming_pc_msi_mag_b650m_mortar_wifi_ms_7d76_pump_fan_fan',
'entity_id': 'sensor.gaming_pc_msi_mag_b650m_mortar_wifi_ms_7d76_pump_fan_speed',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
@@ -572,12 +572,12 @@
'labels': set({
}),
'name': None,
'object_id_base': 'Pump Fan Fan',
'object_id_base': 'Pump Fan Speed',
'options': dict({
}),
'original_device_class': None,
'original_icon': None,
'original_name': 'Pump Fan Fan',
'original_name': 'Pump Fan Speed',
'platform': 'libre_hardware_monitor',
'previous_unique_id': None,
'suggested_object_id': None,
@@ -587,24 +587,24 @@
'unit_of_measurement': 'RPM',
})
# ---
# name: test_sensors_are_created[sensor.gaming_pc_msi_mag_b650m_mortar_wifi_ms_7d76_pump_fan_fan-state]
# name: test_sensors_are_created[sensor.gaming_pc_msi_mag_b650m_mortar_wifi_ms_7d76_pump_fan_speed-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'friendly_name': '[GAMING-PC] MSI MAG B650M MORTAR WIFI (MS-7D76) Pump Fan Fan',
'friendly_name': '[GAMING-PC] MSI MAG B650M MORTAR WIFI (MS-7D76) Pump Fan Speed',
'max_value': '0',
'min_value': '0',
'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
'unit_of_measurement': 'RPM',
}),
'context': <ANY>,
'entity_id': 'sensor.gaming_pc_msi_mag_b650m_mortar_wifi_ms_7d76_pump_fan_fan',
'entity_id': 'sensor.gaming_pc_msi_mag_b650m_mortar_wifi_ms_7d76_pump_fan_speed',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': '0',
})
# ---
# name: test_sensors_are_created[sensor.gaming_pc_msi_mag_b650m_mortar_wifi_ms_7d76_system_fan_1_fan-entry]
# name: test_sensors_are_created[sensor.gaming_pc_msi_mag_b650m_mortar_wifi_ms_7d76_system_fan_1_speed-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
@@ -619,7 +619,7 @@
'disabled_by': None,
'domain': 'sensor',
'entity_category': None,
'entity_id': 'sensor.gaming_pc_msi_mag_b650m_mortar_wifi_ms_7d76_system_fan_1_fan',
'entity_id': 'sensor.gaming_pc_msi_mag_b650m_mortar_wifi_ms_7d76_system_fan_1_speed',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
@@ -627,12 +627,12 @@
'labels': set({
}),
'name': None,
'object_id_base': 'System Fan #1 Fan',
'object_id_base': 'System Fan #1 Speed',
'options': dict({
}),
'original_device_class': None,
'original_icon': None,
'original_name': 'System Fan #1 Fan',
'original_name': 'System Fan #1 Speed',
'platform': 'libre_hardware_monitor',
'previous_unique_id': None,
'suggested_object_id': None,
@@ -642,16 +642,16 @@
'unit_of_measurement': None,
})
# ---
# name: test_sensors_are_created[sensor.gaming_pc_msi_mag_b650m_mortar_wifi_ms_7d76_system_fan_1_fan-state]
# name: test_sensors_are_created[sensor.gaming_pc_msi_mag_b650m_mortar_wifi_ms_7d76_system_fan_1_speed-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'friendly_name': '[GAMING-PC] MSI MAG B650M MORTAR WIFI (MS-7D76) System Fan #1 Fan',
'friendly_name': '[GAMING-PC] MSI MAG B650M MORTAR WIFI (MS-7D76) System Fan #1 Speed',
'max_value': None,
'min_value': None,
'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
}),
'context': <ANY>,
'entity_id': 'sensor.gaming_pc_msi_mag_b650m_mortar_wifi_ms_7d76_system_fan_1_fan',
'entity_id': 'sensor.gaming_pc_msi_mag_b650m_mortar_wifi_ms_7d76_system_fan_1_speed',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
@@ -933,7 +933,7 @@
'state': '36.0',
})
# ---
# name: test_sensors_are_created[sensor.gaming_pc_nvidia_geforce_rtx_4080_super_gpu_fan_1_fan-entry]
# name: test_sensors_are_created[sensor.gaming_pc_nvidia_geforce_rtx_4080_super_gpu_fan_1_speed-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
@@ -948,7 +948,7 @@
'disabled_by': None,
'domain': 'sensor',
'entity_category': None,
'entity_id': 'sensor.gaming_pc_nvidia_geforce_rtx_4080_super_gpu_fan_1_fan',
'entity_id': 'sensor.gaming_pc_nvidia_geforce_rtx_4080_super_gpu_fan_1_speed',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
@@ -956,12 +956,12 @@
'labels': set({
}),
'name': None,
'object_id_base': 'GPU Fan 1 Fan',
'object_id_base': 'GPU Fan 1 Speed',
'options': dict({
}),
'original_device_class': None,
'original_icon': None,
'original_name': 'GPU Fan 1 Fan',
'original_name': 'GPU Fan 1 Speed',
'platform': 'libre_hardware_monitor',
'previous_unique_id': None,
'suggested_object_id': None,
@@ -971,24 +971,24 @@
'unit_of_measurement': 'RPM',
})
# ---
# name: test_sensors_are_created[sensor.gaming_pc_nvidia_geforce_rtx_4080_super_gpu_fan_1_fan-state]
# name: test_sensors_are_created[sensor.gaming_pc_nvidia_geforce_rtx_4080_super_gpu_fan_1_speed-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'friendly_name': '[GAMING-PC] NVIDIA GeForce RTX 4080 SUPER GPU Fan 1 Fan',
'friendly_name': '[GAMING-PC] NVIDIA GeForce RTX 4080 SUPER GPU Fan 1 Speed',
'max_value': '0',
'min_value': '0',
'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
'unit_of_measurement': 'RPM',
}),
'context': <ANY>,
'entity_id': 'sensor.gaming_pc_nvidia_geforce_rtx_4080_super_gpu_fan_1_fan',
'entity_id': 'sensor.gaming_pc_nvidia_geforce_rtx_4080_super_gpu_fan_1_speed',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': '0',
})
# ---
# name: test_sensors_are_created[sensor.gaming_pc_nvidia_geforce_rtx_4080_super_gpu_fan_2_fan-entry]
# name: test_sensors_are_created[sensor.gaming_pc_nvidia_geforce_rtx_4080_super_gpu_fan_2_speed-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
@@ -1003,7 +1003,7 @@
'disabled_by': None,
'domain': 'sensor',
'entity_category': None,
'entity_id': 'sensor.gaming_pc_nvidia_geforce_rtx_4080_super_gpu_fan_2_fan',
'entity_id': 'sensor.gaming_pc_nvidia_geforce_rtx_4080_super_gpu_fan_2_speed',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
@@ -1011,12 +1011,12 @@
'labels': set({
}),
'name': None,
'object_id_base': 'GPU Fan 2 Fan',
'object_id_base': 'GPU Fan 2 Speed',
'options': dict({
}),
'original_device_class': None,
'original_icon': None,
'original_name': 'GPU Fan 2 Fan',
'original_name': 'GPU Fan 2 Speed',
'platform': 'libre_hardware_monitor',
'previous_unique_id': None,
'suggested_object_id': None,
@@ -1026,17 +1026,17 @@
'unit_of_measurement': 'RPM',
})
# ---
# name: test_sensors_are_created[sensor.gaming_pc_nvidia_geforce_rtx_4080_super_gpu_fan_2_fan-state]
# name: test_sensors_are_created[sensor.gaming_pc_nvidia_geforce_rtx_4080_super_gpu_fan_2_speed-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'friendly_name': '[GAMING-PC] NVIDIA GeForce RTX 4080 SUPER GPU Fan 2 Fan',
'friendly_name': '[GAMING-PC] NVIDIA GeForce RTX 4080 SUPER GPU Fan 2 Speed',
'max_value': '0',
'min_value': '0',
'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
'unit_of_measurement': 'RPM',
}),
'context': <ANY>,
'entity_id': 'sensor.gaming_pc_nvidia_geforce_rtx_4080_super_gpu_fan_2_fan',
'entity_id': 'sensor.gaming_pc_nvidia_geforce_rtx_4080_super_gpu_fan_2_speed',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
@@ -1452,14 +1452,14 @@
}),
StateSnapshot({
'attributes': ReadOnlyDict({
'friendly_name': '[GAMING-PC] MSI MAG B650M MORTAR WIFI (MS-7D76) CPU Fan Fan',
'friendly_name': '[GAMING-PC] MSI MAG B650M MORTAR WIFI (MS-7D76) CPU Fan Speed',
'max_value': '0',
'min_value': '0',
'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
'unit_of_measurement': 'RPM',
}),
'context': <ANY>,
'entity_id': 'sensor.gaming_pc_msi_mag_b650m_mortar_wifi_ms_7d76_cpu_fan_fan',
'entity_id': 'sensor.gaming_pc_msi_mag_b650m_mortar_wifi_ms_7d76_cpu_fan_speed',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
@@ -1467,14 +1467,14 @@
}),
StateSnapshot({
'attributes': ReadOnlyDict({
'friendly_name': '[GAMING-PC] MSI MAG B650M MORTAR WIFI (MS-7D76) Pump Fan Fan',
'friendly_name': '[GAMING-PC] MSI MAG B650M MORTAR WIFI (MS-7D76) Pump Fan Speed',
'max_value': '0',
'min_value': '0',
'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
'unit_of_measurement': 'RPM',
}),
'context': <ANY>,
'entity_id': 'sensor.gaming_pc_msi_mag_b650m_mortar_wifi_ms_7d76_pump_fan_fan',
'entity_id': 'sensor.gaming_pc_msi_mag_b650m_mortar_wifi_ms_7d76_pump_fan_speed',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
@@ -1482,13 +1482,13 @@
}),
StateSnapshot({
'attributes': ReadOnlyDict({
'friendly_name': '[GAMING-PC] MSI MAG B650M MORTAR WIFI (MS-7D76) System Fan #1 Fan',
'friendly_name': '[GAMING-PC] MSI MAG B650M MORTAR WIFI (MS-7D76) System Fan #1 Speed',
'max_value': None,
'min_value': None,
'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
}),
'context': <ANY>,
'entity_id': 'sensor.gaming_pc_msi_mag_b650m_mortar_wifi_ms_7d76_system_fan_1_fan',
'entity_id': 'sensor.gaming_pc_msi_mag_b650m_mortar_wifi_ms_7d76_system_fan_1_speed',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
@@ -1706,14 +1706,14 @@
}),
StateSnapshot({
'attributes': ReadOnlyDict({
'friendly_name': '[GAMING-PC] NVIDIA GeForce RTX 4080 SUPER GPU Fan 1 Fan',
'friendly_name': '[GAMING-PC] NVIDIA GeForce RTX 4080 SUPER GPU Fan 1 Speed',
'max_value': '0',
'min_value': '0',
'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
'unit_of_measurement': 'RPM',
}),
'context': <ANY>,
'entity_id': 'sensor.gaming_pc_nvidia_geforce_rtx_4080_super_gpu_fan_1_fan',
'entity_id': 'sensor.gaming_pc_nvidia_geforce_rtx_4080_super_gpu_fan_1_speed',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
@@ -1721,14 +1721,14 @@
}),
StateSnapshot({
'attributes': ReadOnlyDict({
'friendly_name': '[GAMING-PC] NVIDIA GeForce RTX 4080 SUPER GPU Fan 2 Fan',
'friendly_name': '[GAMING-PC] NVIDIA GeForce RTX 4080 SUPER GPU Fan 2 Speed',
'max_value': '0',
'min_value': '0',
'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
'unit_of_measurement': 'RPM',
}),
'context': <ANY>,
'entity_id': 'sensor.gaming_pc_nvidia_geforce_rtx_4080_super_gpu_fan_2_fan',
'entity_id': 'sensor.gaming_pc_nvidia_geforce_rtx_4080_super_gpu_fan_2_speed',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
@@ -1830,14 +1830,14 @@
}),
StateSnapshot({
'attributes': ReadOnlyDict({
'friendly_name': '[GAMING-PC] MSI MAG B650M MORTAR WIFI (MS-7D76) CPU Fan Fan',
'friendly_name': '[GAMING-PC] MSI MAG B650M MORTAR WIFI (MS-7D76) CPU Fan Speed',
'max_value': '0',
'min_value': '0',
'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
'unit_of_measurement': 'RPM',
}),
'context': <ANY>,
'entity_id': 'sensor.gaming_pc_msi_mag_b650m_mortar_wifi_ms_7d76_cpu_fan_fan',
'entity_id': 'sensor.gaming_pc_msi_mag_b650m_mortar_wifi_ms_7d76_cpu_fan_speed',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
@@ -1845,14 +1845,14 @@
}),
StateSnapshot({
'attributes': ReadOnlyDict({
'friendly_name': '[GAMING-PC] MSI MAG B650M MORTAR WIFI (MS-7D76) Pump Fan Fan',
'friendly_name': '[GAMING-PC] MSI MAG B650M MORTAR WIFI (MS-7D76) Pump Fan Speed',
'max_value': '0',
'min_value': '0',
'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
'unit_of_measurement': 'RPM',
}),
'context': <ANY>,
'entity_id': 'sensor.gaming_pc_msi_mag_b650m_mortar_wifi_ms_7d76_pump_fan_fan',
'entity_id': 'sensor.gaming_pc_msi_mag_b650m_mortar_wifi_ms_7d76_pump_fan_speed',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
@@ -1860,13 +1860,13 @@
}),
StateSnapshot({
'attributes': ReadOnlyDict({
'friendly_name': '[GAMING-PC] MSI MAG B650M MORTAR WIFI (MS-7D76) System Fan #1 Fan',
'friendly_name': '[GAMING-PC] MSI MAG B650M MORTAR WIFI (MS-7D76) System Fan #1 Speed',
'max_value': None,
'min_value': None,
'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
}),
'context': <ANY>,
'entity_id': 'sensor.gaming_pc_msi_mag_b650m_mortar_wifi_ms_7d76_system_fan_1_fan',
'entity_id': 'sensor.gaming_pc_msi_mag_b650m_mortar_wifi_ms_7d76_system_fan_1_speed',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
@@ -2084,14 +2084,14 @@
}),
StateSnapshot({
'attributes': ReadOnlyDict({
'friendly_name': '[GAMING-PC] NVIDIA GeForce RTX 4080 SUPER GPU Fan 1 Fan',
'friendly_name': '[GAMING-PC] NVIDIA GeForce RTX 4080 SUPER GPU Fan 1 Speed',
'max_value': '0',
'min_value': '0',
'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
'unit_of_measurement': 'RPM',
}),
'context': <ANY>,
'entity_id': 'sensor.gaming_pc_nvidia_geforce_rtx_4080_super_gpu_fan_1_fan',
'entity_id': 'sensor.gaming_pc_nvidia_geforce_rtx_4080_super_gpu_fan_1_speed',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
@@ -2099,14 +2099,14 @@
}),
StateSnapshot({
'attributes': ReadOnlyDict({
'friendly_name': '[GAMING-PC] NVIDIA GeForce RTX 4080 SUPER GPU Fan 2 Fan',
'friendly_name': '[GAMING-PC] NVIDIA GeForce RTX 4080 SUPER GPU Fan 2 Speed',
'max_value': '0',
'min_value': '0',
'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
'unit_of_measurement': 'RPM',
}),
'context': <ANY>,
'entity_id': 'sensor.gaming_pc_nvidia_geforce_rtx_4080_super_gpu_fan_2_fan',
'entity_id': 'sensor.gaming_pc_nvidia_geforce_rtx_4080_super_gpu_fan_2_speed',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,

View File

@@ -4,7 +4,8 @@ import re
from unittest.mock import AsyncMock, Mock, patch
import httpx
from mcp.types import CallToolResult, ListToolsResult, TextContent, Tool
from mcp import McpError
from mcp.types import CallToolResult, ErrorData, ListToolsResult, TextContent, Tool
import pytest
import voluptuous as vol
@@ -136,30 +137,44 @@ async def test_mcp_server_sse_transport_failure(
"Connection error", [httpx.ConnectError("Connection failed")]
)
await hass.config_entries.async_setup(config_entry.entry_id)
assert config_entry.state is ConfigEntryState.SETUP_RETRY
@pytest.mark.parametrize(
("side_effect"),
[
(
ExceptionGroup(
"Method not allowed",
[
httpx.HTTPStatusError(
"Method not allowed",
request=None,
response=httpx.Response(405),
)
],
),
),
(
ExceptionGroup(
"Some exception group",
[McpError(ErrorData(code=500, message="Session terminated"))],
)
),
],
)
async def test_mcp_client_fallback_to_sse_success(
hass: HomeAssistant,
config_entry: MockConfigEntry,
mock_http_streamable_client: AsyncMock,
mock_sse_client: AsyncMock,
mock_mcp_client: Mock,
side_effect: Exception,
) -> None:
"""Test mcp_client falls back to SSE on method not allowed error.
"""Test mcp_client falls back to SSE on some errors.
This exercises the backwards compatibility part of the MCP Transport
specification.
"""
http_405 = httpx.HTTPStatusError(
"Method not allowed",
request=None, # type: ignore[arg-type]
response=httpx.Response(405),
)
mock_http_streamable_client.side_effect = ExceptionGroup(
"Method not allowed", [http_405]
)
mock_http_streamable_client.side_effect = side_effect
# Setup mocks for SSE fallback
mock_sse_client.return_value.__aenter__.return_value = ("read", "write")

View File
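The parametrized test above widens the streamable-HTTP to SSE fallback: it now triggers not only when the ExceptionGroup wraps an HTTP 405 response, but also when it wraps an McpError such as "Session terminated". A hedged sketch of that decision, independent of the real client module (flat exception groups only; the connect logic itself is out of scope):

import httpx
from mcp import McpError


def should_fall_back_to_sse(exc: BaseException) -> bool:
    """Return True when a streamable-HTTP failure warrants retrying over SSE."""
    if not isinstance(exc, BaseExceptionGroup):
        return False
    for inner in exc.exceptions:
        if isinstance(inner, McpError):
            # e.g. "Session terminated", as in the second test parameter above.
            return True
        if (
            isinstance(inner, httpx.HTTPStatusError)
            and inner.response.status_code == httpx.codes.METHOD_NOT_ALLOWED
        ):
            # Classic "streamable HTTP not supported" signal from older servers.
            return True
    return False

The real integration would make this decision in its connect path (for example via except* clauses) and then retry over the SSE transport when it returns True.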

@@ -6,6 +6,7 @@ from http import HTTPStatus
import json
import logging
from typing import Any
from unittest.mock import AsyncMock, patch
import aiohttp
import mcp
@@ -478,3 +479,42 @@ async def test_get_unknown_prompt(
async with mcp_client(hass, mcp_url, hass_supervisor_access_token) as session:
with pytest.raises(McpError):
await session.get_prompt(name="Unknown")
@pytest.mark.parametrize("llm_hass_api", [llm.LLM_API_ASSIST])
async def test_mcp_tool_call_unicode(
hass: HomeAssistant,
setup_integration: None,
mcp_url: str,
mcp_client: Any,
hass_supervisor_access_token: str,
) -> None:
"""Test the tool call endpoint preserves unicode characters."""
# Mock the API instance
mock_api = AsyncMock()
mock_api.api.name = "Assist"
mock_api.tools = []
mock_api.custom_serializer = None
mock_api.async_call_tool.return_value = {"message": "这是一个测试"}
# We need to ensure when the server calls llm.async_get_api, it gets our mock
# async_get_api is awaited, so we need an AsyncMock
with patch(
"homeassistant.helpers.llm.async_get_api", new_callable=AsyncMock
) as mock_get_api:
mock_get_api.return_value = mock_api
async with mcp_client(hass, mcp_url, hass_supervisor_access_token) as session:
result = await session.call_tool(
name="AnyTool",
arguments={},
)
assert not result.isError
assert len(result.content) == 1
assert result.content[0].type == "text"
# Check that the text contains the raw unicode characters, NOT the escaped version
response_text = result.content[0].text
assert "这是一个测试" in response_text
assert "\\u" not in response_text

View File
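The new test asserts that tool results keep raw unicode (这是一个测试) instead of \uXXXX escapes. In Python that comes down to json.dumps: its default ensure_ascii=True escapes non-ASCII characters, while ensure_ascii=False keeps them as-is:

import json

payload = {"message": "这是一个测试"}

escaped = json.dumps(payload)                  # default ensure_ascii=True
raw = json.dumps(payload, ensure_ascii=False)  # keeps the characters as-is

assert "\\u" in escaped and "这是一个测试" not in escaped
assert "这是一个测试" in raw and "\\u" not in raw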

@@ -4782,6 +4782,8 @@
'pre_heating',
'process_finished',
'process_running',
'roasting',
'searing',
]),
}),
'config_entry_id': <ANY>,
@@ -4826,6 +4828,8 @@
'pre_heating',
'process_finished',
'process_running',
'roasting',
'searing',
]),
}),
'context': <ANY>,
@@ -7564,6 +7568,8 @@
'pre_heating',
'process_finished',
'process_running',
'roasting',
'searing',
]),
}),
'config_entry_id': <ANY>,
@@ -7608,6 +7614,8 @@
'pre_heating',
'process_finished',
'process_running',
'roasting',
'searing',
]),
}),
'context': <ANY>,

View File

@@ -1,6 +1,7 @@
"""Tests for the Ollama integration."""
from collections.abc import AsyncGenerator
import datetime
from typing import Any
from unittest.mock import AsyncMock, Mock, patch
@@ -23,6 +24,10 @@ from homeassistant.helpers import (
)
from tests.common import MockConfigEntry
from tests.components.conversation import (
MockChatLog,
mock_chat_log, # noqa: F401
)
@pytest.fixture(autouse=True)
@@ -458,6 +463,102 @@ async def test_function_exception(
)
async def test_history_conversion(
hass: HomeAssistant,
mock_config_entry: MockConfigEntry,
mock_init_component,
mock_chat_log: MockChatLog, # noqa: F811
) -> None:
"""Test that the pre-existing chat_log history is handled properly."""
agent_id = "conversation.ollama_conversation"
# Add some pre-existing content from conversation.default_agent
mock_chat_log.async_add_user_content(
conversation.UserContent(content="What time is it?")
)
mock_chat_log.async_add_assistant_content_without_tools(
conversation.AssistantContent(
agent_id=agent_id,
tool_calls=[
llm.ToolInput(
tool_name="HassGetCurrentTime",
tool_args={},
id="01KGW7TFC1VVVK7ANHVMDA4DJ6",
external=True,
)
],
)
)
mock_chat_log.async_add_assistant_content_without_tools(
conversation.ToolResultContent(
agent_id=agent_id,
tool_call_id="01KGW7TFC1VVVK7ANHVMDA4DJ6",
tool_name="HassGetCurrentTime",
tool_result={
"speech": {"plain": {"speech": "4:24 PM", "extra_data": None}},
"response_type": "action_done",
"speech_slots": {"time": datetime.time(16, 24, 17, 813343)},
"data": {"targets": [], "success": [], "failed": []},
},
)
)
mock_chat_log.async_add_assistant_content_without_tools(
conversation.AssistantContent(
agent_id=agent_id,
content="4:24 PM",
)
)
entry = MockConfigEntry()
entry.add_to_hass(hass)
with patch(
"ollama.AsyncClient.chat",
return_value=stream_generator(
{"message": {"role": "assistant", "content": "test response"}}
),
) as mock_chat:
result = await conversation.async_converse(
hass,
"test message",
mock_chat_log.conversation_id,
Context(),
agent_id=agent_id,
)
assert mock_chat.call_count == 1
args = mock_chat.call_args.kwargs
prompt = args["messages"][0]["content"]
assert args["model"] == "test_model:latest"
assert args["messages"] == [
Message(role="system", content=prompt),
Message(role="user", content="What time is it?"),
Message(
role="assistant",
tool_calls=[
Message.ToolCall(
function=Message.ToolCall.Function(
name="HassGetCurrentTime", arguments={}
)
)
],
),
Message(
role="tool",
content='{"speech":{"plain":{"speech":"4:24 PM","extra_data":null}},"response_type":"action_done","speech_slots":{"time":"16:24:17.813343"},"data":{"targets":[],"success":[],"failed":[]}}',
),
Message(role="assistant", content="4:24 PM"),
Message(role="user", content="test message"),
]
assert result.response.response_type == intent.IntentResponseType.ACTION_DONE, (
result
)
assert result.response.speech["plain"]["speech"] == "test response"
async def test_unknown_hass_api(
hass: HomeAssistant,
mock_config_entry: MockConfigEntry,
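
A hedged sketch (assumptions, not the integration's actual converter) of how the pre-existing chat_log entries above could be mapped to ollama.Message objects, mirroring the message list the test asserts. Home Assistant serializes tool results with its own JSON helper; json.dumps(..., default=str) stands in for it here.

import json

from ollama import Message


def convert_chat_log(content) -> list[Message]:
    """Map conversation chat_log content items to Ollama messages."""
    messages: list[Message] = []
    for item in content:
        if item.role == "user":
            messages.append(Message(role="user", content=item.content))
        elif item.role == "assistant":
            tool_calls = [
                Message.ToolCall(
                    function=Message.ToolCall.Function(
                        name=call.tool_name, arguments=call.tool_args
                    )
                )
                for call in (item.tool_calls or [])
            ]
            messages.append(
                Message(
                    role="assistant",
                    content=item.content,
                    tool_calls=tool_calls or None,
                )
            )
        elif item.role == "tool_result":
            # Tool results are passed back to the model as a JSON string.
            messages.append(
                Message(role="tool", content=json.dumps(item.tool_result, default=str))
            )
    return messages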

View File

@@ -126,6 +126,65 @@
# ---
# name: test_function_call[True]
list([
dict({
'attachments': None,
'content': 'What time is it?',
'created': HAFakeDatetime(2024, 5, 24, 12, 0, tzinfo=datetime.timezone.utc),
'role': 'user',
}),
dict({
'agent_id': 'conversation.gpt_3_5_turbo',
'content': None,
'created': HAFakeDatetime(2024, 5, 24, 12, 0, tzinfo=datetime.timezone.utc),
'native': None,
'role': 'assistant',
'thinking_content': None,
'tool_calls': list([
dict({
'external': True,
'id': 'mock_tool_call_id',
'tool_args': dict({
}),
'tool_name': 'HassGetCurrentTime',
}),
]),
}),
dict({
'agent_id': 'conversation.gpt_3_5_turbo',
'created': HAFakeDatetime(2024, 5, 24, 12, 0, tzinfo=datetime.timezone.utc),
'role': 'tool_result',
'tool_call_id': 'mock_tool_call_id',
'tool_name': 'HassGetCurrentTime',
'tool_result': dict({
'data': dict({
'failed': list([
]),
'success': list([
]),
'targets': list([
]),
}),
'response_type': 'action_done',
'speech': dict({
'plain': dict({
'extra_data': None,
'speech': '12:00 PM',
}),
}),
'speech_slots': dict({
'time': datetime.time(12, 0),
}),
}),
}),
dict({
'agent_id': 'conversation.gpt_3_5_turbo',
'content': '12:00 PM',
'created': HAFakeDatetime(2024, 5, 24, 12, 0, tzinfo=datetime.timezone.utc),
'native': None,
'role': 'assistant',
'thinking_content': None,
'tool_calls': None,
}),
dict({
'attachments': None,
'content': 'Please call the test function',
@@ -169,3 +228,68 @@
}),
])
# ---
# name: test_function_call[True].1
list([
dict({
'content': '''
You are a helpful assistant.
Only if the user wants to control a device, tell them to expose entities to their voice assistant in Home Assistant.
''',
'role': 'system',
}),
dict({
'content': 'What time is it?',
'role': 'user',
}),
dict({
'content': None,
'role': 'assistant',
'tool_calls': list([
dict({
'function': dict({
'arguments': '{}',
'name': 'HassGetCurrentTime',
}),
'id': 'mock_tool_call_id',
'type': 'function',
}),
]),
}),
dict({
'content': '{"speech":{"plain":{"speech":"12:00 PM","extra_data":null}},"response_type":"action_done","speech_slots":{"time":"12:00:00"},"data":{"targets":[],"success":[],"failed":[]}}',
'role': 'tool',
'tool_call_id': 'mock_tool_call_id',
}),
dict({
'content': '12:00 PM',
'role': 'assistant',
}),
dict({
'content': 'Please call the test function',
'role': 'user',
}),
dict({
'content': None,
'role': 'assistant',
'tool_calls': list([
dict({
'function': dict({
'arguments': '{"param1":"call1"}',
'name': 'test_tool',
}),
'id': 'call_call_1',
'type': 'function',
}),
]),
}),
dict({
'content': '"value1"',
'role': 'tool',
'tool_call_id': 'call_call_1',
}),
dict({
'content': 'I have successfully called the function',
'role': 'assistant',
}),
])
# ---

View File

@@ -1,5 +1,6 @@
"""Tests for the OpenRouter integration."""
import datetime
from unittest.mock import AsyncMock, patch
from freezegun import freeze_time
@@ -18,6 +19,7 @@ from homeassistant.components import conversation
from homeassistant.const import Platform
from homeassistant.core import Context, HomeAssistant
from homeassistant.helpers import entity_registry as er, intent
from homeassistant.helpers.llm import ToolInput
from . import setup_integration
@@ -88,6 +90,43 @@ async def test_function_call(
"""Test function call from the assistant."""
await setup_integration(hass, mock_config_entry)
# Add some pre-existing content from conversation.default_agent
mock_chat_log.async_add_user_content(
conversation.UserContent(content="What time is it?")
)
mock_chat_log.async_add_assistant_content_without_tools(
conversation.AssistantContent(
agent_id="conversation.gpt_3_5_turbo",
tool_calls=[
ToolInput(
tool_name="HassGetCurrentTime",
tool_args={},
id="mock_tool_call_id",
external=True,
)
],
)
)
mock_chat_log.async_add_assistant_content_without_tools(
conversation.ToolResultContent(
agent_id="conversation.gpt_3_5_turbo",
tool_call_id="mock_tool_call_id",
tool_name="HassGetCurrentTime",
tool_result={
"speech": {"plain": {"speech": "12:00 PM", "extra_data": None}},
"response_type": "action_done",
"speech_slots": {"time": datetime.time(12, 0)},
"data": {"targets": [], "success": [], "failed": []},
},
)
)
mock_chat_log.async_add_assistant_content_without_tools(
conversation.AssistantContent(
agent_id="conversation.gpt_3_5_turbo",
content="12:00 PM",
)
)
mock_chat_log.mock_tool_results(
{
"call_call_1": "value1",
@@ -95,34 +134,8 @@ async def test_function_call(
}
)
async def completion_result(*args, messages, **kwargs):
for message in messages:
role = message["role"] if isinstance(message, dict) else message.role
if role == "tool":
return ChatCompletion(
id="chatcmpl-1234567890ZYXWVUTSRQPONMLKJIH",
choices=[
Choice(
finish_reason="stop",
index=0,
message=ChatCompletionMessage(
content="I have successfully called the function",
role="assistant",
function_call=None,
tool_calls=None,
),
)
],
created=1700000000,
model="gpt-4-1106-preview",
object="chat.completion",
system_fingerprint=None,
usage=CompletionUsage(
completion_tokens=9, prompt_tokens=8, total_tokens=17
),
)
return ChatCompletion(
mock_openai_client.chat.completions.create.side_effect = (
ChatCompletion(
id="chatcmpl-1234567890ABCDEFGHIJKLMNOPQRS",
choices=[
Choice(
@@ -152,9 +165,30 @@ async def test_function_call(
usage=CompletionUsage(
completion_tokens=9, prompt_tokens=8, total_tokens=17
),
)
mock_openai_client.chat.completions.create = completion_result
),
ChatCompletion(
id="chatcmpl-1234567890ZYXWVUTSRQPONMLKJIH",
choices=[
Choice(
finish_reason="stop",
index=0,
message=ChatCompletionMessage(
content="I have successfully called the function",
role="assistant",
function_call=None,
tool_calls=None,
),
)
],
created=1700000000,
model="gpt-4-1106-preview",
object="chat.completion",
system_fingerprint=None,
usage=CompletionUsage(
completion_tokens=9, prompt_tokens=8, total_tokens=17
),
),
)
result = await conversation.async_converse(
hass,
@@ -167,3 +201,8 @@ async def test_function_call(
assert result.response.response_type == intent.IntentResponseType.ACTION_DONE
# Don't test the prompt, as it's not deterministic
assert mock_chat_log.content[1:] == snapshot
assert mock_openai_client.chat.completions.create.call_count == 2
assert (
mock_openai_client.chat.completions.create.call_args.kwargs["messages"]
== snapshot
)
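
A tiny, self-contained illustration of the mocking pattern adopted above: giving an AsyncMock a sequence as side_effect makes each await return the next item, which replaces the earlier role-inspecting completion_result() helper. The strings here stand in for the two ChatCompletion objects.

import asyncio
from unittest.mock import AsyncMock

mock_create = AsyncMock(side_effect=("first completion", "second completion"))


async def main() -> None:
    # Each call consumes the next item from the side_effect sequence.
    assert await mock_create(messages=[]) == "first completion"
    assert await mock_create(messages=[]) == "second completion"
    assert mock_create.call_count == 2


asyncio.run(main())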

View File

@@ -7,14 +7,14 @@
'type': 'message',
}),
dict({
'arguments': '{"code": "import math\\nmath.sqrt(55555)", "container": "cntr_A"}',
'arguments': '{"code":"import math\\nmath.sqrt(55555)","container":"cntr_A"}',
'call_id': 'ci_A',
'name': 'code_interpreter',
'type': 'function_call',
}),
dict({
'call_id': 'ci_A',
'output': '{"output": [{"logs": "235.70108188126758\\n", "type": "logs"}]}',
'output': '{"output":[{"logs":"235.70108188126758\\n","type":"logs"}]}',
'type': 'function_call_output',
}),
dict({
@@ -36,6 +36,65 @@
# ---
# name: test_function_call
list([
dict({
'attachments': None,
'content': 'What time is it?',
'created': HAFakeDatetime(2025, 10, 31, 12, 0, tzinfo=datetime.timezone.utc),
'role': 'user',
}),
dict({
'agent_id': 'conversation.openai_conversation',
'content': None,
'created': HAFakeDatetime(2025, 10, 31, 12, 0, tzinfo=datetime.timezone.utc),
'native': None,
'role': 'assistant',
'thinking_content': None,
'tool_calls': list([
dict({
'external': True,
'id': 'mock-tool-call-id',
'tool_args': dict({
}),
'tool_name': 'HassGetCurrentTime',
}),
]),
}),
dict({
'agent_id': 'conversation.openai_conversation',
'created': HAFakeDatetime(2025, 10, 31, 12, 0, tzinfo=datetime.timezone.utc),
'role': 'tool_result',
'tool_call_id': 'mock-tool-call-id',
'tool_name': 'HassGetCurrentTime',
'tool_result': dict({
'data': dict({
'failed': list([
]),
'success': list([
]),
'targets': list([
]),
}),
'response_type': 'action_done',
'speech': dict({
'plain': dict({
'extra_data': None,
'speech': '12:00 PM',
}),
}),
'speech_slots': dict({
'time': datetime.time(12, 0),
}),
}),
}),
dict({
'agent_id': 'conversation.openai_conversation',
'content': '12:00 PM',
'created': HAFakeDatetime(2025, 10, 31, 12, 0, tzinfo=datetime.timezone.utc),
'native': None,
'role': 'assistant',
'thinking_content': None,
'tool_calls': None,
}),
dict({
'attachments': None,
'content': 'Please call the test function',
@@ -125,6 +184,27 @@
# ---
# name: test_function_call.1
list([
dict({
'content': 'What time is it?',
'role': 'user',
'type': 'message',
}),
dict({
'arguments': '{}',
'call_id': 'mock-tool-call-id',
'name': 'HassGetCurrentTime',
'type': 'function_call',
}),
dict({
'call_id': 'mock-tool-call-id',
'output': '{"speech":{"plain":{"speech":"12:00 PM","extra_data":null}},"response_type":"action_done","speech_slots":{"time":"12:00:00"},"data":{"targets":[],"success":[],"failed":[]}}',
'type': 'function_call_output',
}),
dict({
'content': '12:00 PM',
'role': 'assistant',
'type': 'message',
}),
dict({
'content': 'Please call the test function',
'role': 'user',
@@ -146,7 +226,7 @@
'type': 'reasoning',
}),
dict({
'arguments': '{"param1": "call1"}',
'arguments': '{"param1":"call1"}',
'call_id': 'call_call_1',
'name': 'test_tool',
'type': 'function_call',
@@ -157,7 +237,7 @@
'type': 'function_call_output',
}),
dict({
'arguments': '{"param1": "call2"}',
'arguments': '{"param1":"call2"}',
'call_id': 'call_call_2',
'name': 'test_tool',
'type': 'function_call',
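
The remaining snapshot updates in this file drop the spaces inside serialized JSON arguments and outputs, which is what a compact serializer produces. A standard-library equivalent, for illustration only:

import json

assert json.dumps({"param1": "call1"}, separators=(",", ":")) == '{"param1":"call1"}'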

View File

@@ -1,5 +1,6 @@
"""Tests for the OpenAI integration."""
import datetime
from unittest.mock import AsyncMock, patch
from freezegun import freeze_time
@@ -30,6 +31,7 @@ from homeassistant.components.openai_conversation.const import (
from homeassistant.const import CONF_LLM_HASS_API
from homeassistant.core import Context, HomeAssistant
from homeassistant.helpers import intent
from homeassistant.helpers.llm import ToolInput
from homeassistant.setup import async_setup_component
from . import (
@@ -251,6 +253,44 @@ async def test_function_call(
snapshot: SnapshotAssertion,
) -> None:
"""Test function call from the assistant."""
# Add some pre-existing content from conversation.default_agent
mock_chat_log.async_add_user_content(
conversation.UserContent(content="What time is it?")
)
mock_chat_log.async_add_assistant_content_without_tools(
conversation.AssistantContent(
agent_id="conversation.openai_conversation",
tool_calls=[
ToolInput(
tool_name="HassGetCurrentTime",
tool_args={},
id="mock-tool-call-id",
external=True,
)
],
)
)
mock_chat_log.async_add_assistant_content_without_tools(
conversation.ToolResultContent(
agent_id="conversation.openai_conversation",
tool_call_id="mock-tool-call-id",
tool_name="HassGetCurrentTime",
tool_result={
"speech": {"plain": {"speech": "12:00 PM", "extra_data": None}},
"response_type": "action_done",
"speech_slots": {"time": datetime.time(12, 0, 0, 0)},
"data": {"targets": [], "success": [], "failed": []},
},
)
)
mock_chat_log.async_add_assistant_content_without_tools(
conversation.AssistantContent(
agent_id="conversation.openai_conversation",
content="12:00 PM",
)
)
mock_create_stream.return_value = [
# Initial conversation
(

View File

@@ -133,8 +133,8 @@ def _init_host_mock(host_mock: MagicMock) -> None:
host_mock.whiteled_mode_list.return_value = []
host_mock.post_recording_time_list.return_value = []
host_mock.zoom_range.return_value = {
"zoom": {"pos": {"min": 0, "max": 100}},
"focus": {"pos": {"min": 0, "max": 100}},
"zoom": {"min": 0, "max": 100},
"focus": {"min": 0, "max": 100},
}
host_mock.capabilities = {"Host": ["RTSP"], "0": ["motion_detection"]}
host_mock.checked_api_versions = {"GetEvents": 1}

View File

@@ -57,6 +57,7 @@ from homeassistant.util.unit_system import US_CUSTOMARY_SYSTEM
from . import (
MOCK_MAC,
init_integration,
mutate_rpc_device_status,
patch_platforms,
register_device,
register_entity,
@@ -1047,6 +1048,16 @@ async def test_rpc_linkedgo_st802_thermostat(
assert (state := hass.states.get(entity_id))
assert state.state == HVACMode.OFF
# Test current temperature update
assert (state := hass.states.get(entity_id))
assert state.attributes[ATTR_CURRENT_TEMPERATURE] == 25.1
mutate_rpc_device_status(monkeypatch, mock_rpc_device, "number:201", "value", 22.4)
mock_rpc_device.mock_update()
assert (state := hass.states.get(entity_id))
assert state.attributes[ATTR_CURRENT_TEMPERATURE] == 22.4
async def test_rpc_linkedgo_st1820_thermostat(
hass: HomeAssistant,
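
A hypothetical sketch of what a status-mutating helper such as mutate_rpc_device_status() typically does in these tests: replace one key inside the mocked device's nested status dict so the following mock_update() call exposes the new value to the coordinator. The name and signature below are assumptions, not the real helper.

def mutate_status(monkeypatch, mock_device, component: str, key: str, value) -> None:
    """Patch a single value in the mocked device status."""
    status = dict(mock_device.status)
    status[component] = {**status.get(component, {}), key: value}
    monkeypatch.setattr(mock_device, "status", status)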

View File

@@ -574,6 +574,7 @@ async def test_duplicate_entry(hass: HomeAssistant) -> None:
assert result["type"] is FlowResultType.CREATE_ENTRY
assert result["data"][CONF_PLATFORM] == PLATFORM_BROADCAST
assert result["data"][CONF_API_KEY] == "mock api key"
assert result["data"][CONF_API_ENDPOINT] == "http://mock_api_endpoint"
assert result["options"][ATTR_PARSER] == PARSER_MD
# test: import 2nd entry failed due to duplicate

View File

@@ -1163,7 +1163,7 @@ async def test_edit_message_media(
mock.assert_called_once()
assert mock.call_args[1]["media"].__class__.__name__ == expected_media_class
assert mock.call_args[1]["media"].caption == "mock caption"
assert mock.call_args[1]["parse_mode"] == PARSER_MD
assert mock.call_args[1]["media"].parse_mode == PARSER_MD
assert mock.call_args[1]["chat_id"] == 123456
assert mock.call_args[1]["message_id"] == 12345
assert mock.call_args[1]["reply_markup"] == InlineKeyboardMarkup(

View File

@@ -2448,7 +2448,7 @@
'object_id_base': 'VPP backup reserve',
'options': dict({
}),
'original_device_class': <SensorDeviceClass.BATTERY: 'battery'>,
'original_device_class': None,
'original_icon': None,
'original_name': 'VPP backup reserve',
'platform': 'tesla_fleet',
@@ -2463,7 +2463,6 @@
# name: test_sensors[sensor.energy_site_vpp_backup_reserve-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'device_class': 'battery',
'friendly_name': 'Energy Site VPP backup reserve',
'unit_of_measurement': '%',
}),
@@ -2478,7 +2477,6 @@
# name: test_sensors[sensor.energy_site_vpp_backup_reserve-statealt]
StateSnapshot({
'attributes': ReadOnlyDict({
'device_class': 'battery',
'friendly_name': 'Energy Site VPP backup reserve',
'unit_of_measurement': '%',
}),

Some files were not shown because too many files have changed in this diff.