Compare commits

...

55 Commits

Author SHA1 Message Date
Bram Kragten
62ffeeccb0 Bump version to 2026.3.0b2 2026-03-02 19:32:14 +01:00
Bram Kragten
1afe00670e Update frontend to 20260302.0 (#164612) 2026-03-02 19:32:00 +01:00
Artur Pragacz
500ffe8153 Raise on vacuum area mapping not configured (#164595) 2026-03-02 19:31:59 +01:00
Jan-Philipp Benecke
2cebb28a1b Bump aiotankerkoenig to 0.5.1 (#164590) 2026-03-02 19:31:58 +01:00
Robert Resch
80bfba0981 Bump aiogithubapi to 26.0.0 (#164579) 2026-03-02 19:31:57 +01:00
Norbert Rittel
882e499375 Change one remaining string from "Overseerr" to "Seerr" (#164569) 2026-03-02 19:31:56 +01:00
Jan-Philipp Benecke
e89aafc8e2 Fix large WebDAV backup metadata download (#164563)
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2026-03-02 19:31:56 +01:00
Jan-Philipp Benecke
66ae5ab543 Bump aiowebdav2 to 0.6.1 (#164560) 2026-03-02 19:31:54 +01:00
J. Nick Koston
75d39c0b02 Bump yalexs-ble to 3.2.7 (#164555) 2026-03-02 19:31:53 +01:00
Simone Chemelli
989133cb16 Bump aioamazondevices to 12.0.2 (#164518) 2026-03-02 19:31:52 +01:00
Allen Porter
f559f8e014 Update nest access token error handling to use specific OAuth2 token request exceptions (#164506) 2026-03-02 19:31:51 +01:00
willemstuursma
a95207f2ef Bump DSMR parser to 1.5.0 (#164484) 2026-03-02 19:31:50 +01:00
Tom Matheussen
2c28a93ea0 Require user code to be set when toggling Satel Integra switches (#164483) 2026-03-02 19:31:48 +01:00
Klaas Schoute
3ff97a0820 Update error handling messages for Powerfox Local integration (#164465) 2026-03-02 19:31:47 +01:00
Barry vd. Heuvel
f7a56447ae Bump weheat to 2026.2.28 (#164456) 2026-03-02 19:31:45 +01:00
Khole
dfd086f253 Hive - Bump pyhive-integration to v1.0.8 (#164453) 2026-03-02 19:31:44 +01:00
mettolen
b6a166ce48 Remove error translation placeholders from Airobot (#164436) 2026-03-02 19:31:43 +01:00
Stefan Agner
e93b724ce4 Fix Matter vacuum crash on nullable ServiceArea location info (#164411) 2026-03-02 19:31:42 +01:00
Franck Nijhof
d0b25ccc01 Reject relative paths in SFTP storage backup location config flow (#164408) 2026-03-02 19:31:41 +01:00
Joost Lekkerkerker
0a3ef64f28 Bump pySmartThings to 3.6.0 (#164397) 2026-03-02 19:31:40 +01:00
Joost Lekkerkerker
e9ce3ffff9 Fix SmartThings EHS power (#164395) 2026-03-02 19:31:39 +01:00
Joost Lekkerkerker
55415b1559 Add state for washing mop in SmartThings (#164348) 2026-03-02 19:31:37 +01:00
Paulus Schoutsen
0160dbf3a6 Add missing volume supported features to dunehd (#164343)
Co-authored-by: Claude Opus 4.6 <noreply@anthropic.com>
2026-03-02 19:31:36 +01:00
Franck Nijhof
7dd83b1e8f Mock firmware data during reauth flow init in airos tests (#164341) 2026-03-02 19:31:35 +01:00
Petro31
e502f5f249 Fix int vs float template sensor issue (#164339) 2026-03-02 19:31:34 +01:00
Johnny Willemsen
6e93ebc912 Update state labels to use common keys in indevolt (#164308) 2026-03-02 19:31:33 +01:00
Erwin Douna
9a4fdf7f80 Proxmox expand data descriptions (#164304) 2026-03-02 19:31:32 +01:00
TheJulianJES
76d69a5f53 Fix ZHA update entities not working after reload (#164290) 2026-03-02 19:31:30 +01:00
Raphael Hehl
ae40c0cf4b Bump uiprotect to version 10.2.2 (#164269)
Co-authored-by: RaHehl <rahehl@users.noreply.github.com>
2026-03-02 19:31:29 +01:00
Denis Shulyaka
078647d128 Create reauth flow for Anthropic for auth errors during conversation (#164267)
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2026-03-02 19:31:28 +01:00
Artur Pragacz
8a637c4e5b Remove vacuum area mapping not configured issue (#164259) 2026-03-02 19:31:25 +01:00
Willem-Jan van Rootselaar
9e9daff26d Set entity_registry_enabled_default to False for total energy sensor (#164197) 2026-03-02 19:31:24 +01:00
James
41aeedaa82 Handle missing Daikin zone temperature keys (#164170)
Co-authored-by: barneyonline <barneyonline@users.noreply.github.com>
2026-03-02 19:31:23 +01:00
Kamil Breguła
a8297ae65d Add diagnostics platform to AWS S3 (#164118)
Co-authored-by: mik-laj <12058428+mik-laj@users.noreply.github.com>
Co-authored-by: Erwin Douna <e.douna@gmail.com>
2026-03-02 19:31:22 +01:00
Joost Lekkerkerker
b7f1171c08 Rename Overseerr integration to Seerr (#164060) 2026-03-02 19:31:21 +01:00
Ye Zhiling
226f606cb9 Pass encoding to AtomicWriter in write_utf8_file_atomic (#164015) 2026-03-02 19:31:20 +01:00
HadiAyache
9472be39f2 Fix AccuWeather daily forecast crash when humidity average is missing (#163968)
Co-authored-by: Maciej Bieniek <bieniu@users.noreply.github.com>
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2026-03-02 19:31:19 +01:00
nopoz
67a9e42b19 Google Cast: detect state and attributes when device is doing active non-media casting (#160819)
Co-authored-by: Erik Montnemery <erik@montnemery.com>
2026-03-02 19:31:17 +01:00
Simone Chemelli
ba1837859f Fix RpcSensorDescription for Shelly (#150719) 2026-03-02 19:31:16 +01:00
Franck Nijhof
4a301eceac Bump version to 2026.3.0b1 2026-02-26 19:32:15 +00:00
Bram Kragten
d138a99e62 Update frontend to 20260226.0 (#164262) 2026-02-26 19:31:52 +00:00
Johnny Willemsen
a431f84dc9 Update state labels to use common keys in compit (#164261) 2026-02-26 19:31:50 +00:00
epenet
aa9534600e Simplify portainer entity initialisation (#164256) 2026-02-26 19:31:49 +00:00
Denis Shulyaka
54fa49e754 Disable code interpreter with minimal reasoning for OpenAI (#164254) 2026-02-26 19:31:47 +00:00
Joost Lekkerkerker
459b6152f4 Remove invalid color mode from philips_js (#164204) 2026-02-26 19:31:46 +00:00
Denis Shulyaka
60c8d997ca Update reasoning options for gpt-5.3-codex (#164179) 2026-02-26 19:31:45 +00:00
AlCalzone
a598368895 Rename "Z-Wave Supervisor app" to "Z-Wave JS app" (#164147) 2026-02-26 19:31:43 +00:00
Erwin Douna
2ff1499c48 Fix stack devices merging with container devices in Portainer (#164135)
Co-authored-by: epenet <6771947+epenet@users.noreply.github.com>
2026-02-26 19:31:42 +00:00
Norbert Rittel
348ddbe124 Replace "add-ons" with "apps" in backup issues (#164129) 2026-02-26 19:31:40 +00:00
Paulus Schoutsen
71ed43faf2 Simplify Anthropic integration name (#164124)
Co-authored-by: Claude <noreply@anthropic.com>
2026-02-26 19:31:39 +00:00
mettolen
dc69a90296 Remove error translation placeholders from Saunum (#164121) 2026-02-26 19:31:37 +00:00
Liquidmasl
f5db8e6ba4 Sonarr post merge changes (#164112) 2026-02-26 19:31:36 +00:00
Artur Pragacz
b82a26ef68 Fix Matter vacuum clean area status check (#164108) 2026-02-26 19:31:35 +00:00
Maciej Bieniek
0eaaeedf11 Bump accuweather to 5.1.0 (#164034) 2026-02-26 19:31:33 +00:00
Franck Nijhof
62e26e53ac Bump version to 2026.3.0b0 2026-02-25 19:36:43 +00:00
111 changed files with 2180 additions and 496 deletions

View File

@@ -7,5 +7,5 @@
"integration_type": "service",
"iot_class": "cloud_polling",
"loggers": ["accuweather"],
"requirements": ["accuweather==5.0.0"]
"requirements": ["accuweather==5.1.0"]
}

View File

@@ -30,6 +30,8 @@ async def system_health_info(hass: HomeAssistant) -> dict[str, Any]:
)
return {
"can_reach_server": system_health.async_check_can_reach_url(hass, ENDPOINT),
"can_reach_server": system_health.async_check_can_reach_url(
hass, str(ENDPOINT)
),
"remaining_requests": remaining_requests,
}

View File

@@ -191,7 +191,7 @@ class AccuWeatherEntity(
{
ATTR_FORECAST_TIME: utc_from_timestamp(item["EpochDate"]).isoformat(),
ATTR_FORECAST_CLOUD_COVERAGE: item["CloudCoverDay"],
ATTR_FORECAST_HUMIDITY: item["RelativeHumidityDay"]["Average"],
ATTR_FORECAST_HUMIDITY: item["RelativeHumidityDay"].get("Average"),
ATTR_FORECAST_NATIVE_TEMP: item["TemperatureMax"][ATTR_VALUE],
ATTR_FORECAST_NATIVE_TEMP_LOW: item["TemperatureMin"][ATTR_VALUE],
ATTR_FORECAST_NATIVE_APPARENT_TEMP: item["RealFeelTemperatureMax"][

View File

@@ -93,7 +93,6 @@ class AirobotNumber(AirobotEntity, NumberEntity):
raise ServiceValidationError(
translation_domain=DOMAIN,
translation_key="set_value_failed",
translation_placeholders={"error": str(err)},
) from err
else:
await self.coordinator.async_request_refresh()

View File

@@ -112,7 +112,7 @@
"message": "Failed to set temperature to {temperature}."
},
"set_value_failed": {
"message": "Failed to set value: {error}"
"message": "Failed to set value."
},
"switch_turn_off_failed": {
"message": "Failed to turn off {switch}."

View File

@@ -8,5 +8,5 @@
"iot_class": "cloud_polling",
"loggers": ["aioamazondevices"],
"quality_scale": "platinum",
"requirements": ["aioamazondevices==12.0.0"]
"requirements": ["aioamazondevices==12.0.2"]
}

View File

@@ -858,6 +858,11 @@ class AnthropicBaseLLMEntity(Entity):
]
)
messages.extend(new_messages)
except anthropic.AuthenticationError as err:
self.entry.async_start_reauth(self.hass)
raise HomeAssistantError(
"Authentication error with Anthropic API, reauthentication required"
) from err
except anthropic.AnthropicError as err:
raise HomeAssistantError(
f"Sorry, I had a problem talking to Anthropic: {err}"

View File

@@ -1,6 +1,6 @@
{
"domain": "anthropic",
"name": "Anthropic Conversation",
"name": "Anthropic",
"after_dependencies": ["assist_pipeline", "intent"],
"codeowners": ["@Shulyaka"],
"config_flow": true,

View File

@@ -30,5 +30,5 @@
"integration_type": "hub",
"iot_class": "cloud_push",
"loggers": ["pubnub", "yalexs"],
"requirements": ["yalexs==9.2.0", "yalexs-ble==3.2.4"]
"requirements": ["yalexs==9.2.0", "yalexs-ble==3.2.7"]
}

View File

@@ -0,0 +1,55 @@
"""Diagnostics support for AWS S3."""
from __future__ import annotations
import dataclasses
from typing import Any
from homeassistant.components.backup import (
DATA_MANAGER as BACKUP_DATA_MANAGER,
BackupManager,
)
from homeassistant.components.diagnostics import async_redact_data
from homeassistant.core import HomeAssistant
from .const import (
CONF_ACCESS_KEY_ID,
CONF_BUCKET,
CONF_PREFIX,
CONF_SECRET_ACCESS_KEY,
DOMAIN,
)
from .coordinator import S3ConfigEntry
from .helpers import async_list_backups_from_s3
TO_REDACT = (CONF_ACCESS_KEY_ID, CONF_SECRET_ACCESS_KEY)
async def async_get_config_entry_diagnostics(
hass: HomeAssistant,
entry: S3ConfigEntry,
) -> dict[str, Any]:
"""Return diagnostics for a config entry."""
coordinator = entry.runtime_data
backup_manager: BackupManager = hass.data[BACKUP_DATA_MANAGER]
backups = await async_list_backups_from_s3(
coordinator.client,
bucket=entry.data[CONF_BUCKET],
prefix=entry.data.get(CONF_PREFIX, ""),
)
data = {
"coordinator_data": dataclasses.asdict(coordinator.data),
"config": {
**entry.data,
**entry.options,
},
"backup_agents": [
{"name": agent.name}
for agent in backup_manager.backup_agents.values()
if agent.domain == DOMAIN
],
"backup": [backup.as_dict() for backup in backups],
}
return async_redact_data(data, TO_REDACT)

View File

@@ -43,7 +43,7 @@ rules:
# Gold
devices: done
diagnostics: todo
diagnostics: done
discovery-update-info:
status: exempt
comment: S3 is a cloud service that is not discovered on the network.

View File

@@ -43,11 +43,11 @@
"title": "The backup location {agent_id} is unavailable"
},
"automatic_backup_failed_addons": {
"description": "Add-ons {failed_addons} could not be included in automatic backup. Please check the Supervisor logs for more information. Another attempt will be made at the next scheduled time if a backup schedule is configured.",
"title": "Not all add-ons could be included in automatic backup"
"description": "Apps {failed_addons} could not be included in automatic backup. Please check the Supervisor logs for more information. Another attempt will be made at the next scheduled time if a backup schedule is configured.",
"title": "Not all apps could be included in automatic backup"
},
"automatic_backup_failed_agents_addons_folders": {
"description": "The automatic backup was created with errors:\n* Locations which the backup could not be uploaded to: {failed_agents}\n* Add-ons which could not be backed up: {failed_addons}\n* Folders which could not be backed up: {failed_folders}\n\nPlease check the Core and Supervisor logs for more information. Another attempt will be made at the next scheduled time if a backup schedule is configured.",
"description": "The automatic backup was created with errors:\n* Locations which the backup could not be uploaded to: {failed_agents}\n* Apps which could not be backed up: {failed_addons}\n* Folders which could not be backed up: {failed_folders}\n\nPlease check the Core and Supervisor logs for more information. Another attempt will be made at the next scheduled time if a backup schedule is configured.",
"title": "Automatic backup was created with errors"
},
"automatic_backup_failed_create": {

View File

@@ -64,6 +64,8 @@ SENSOR_TYPES: tuple[BSBLanSensorEntityDescription, ...] = (
device_class=SensorDeviceClass.ENERGY,
native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
state_class=SensorStateClass.TOTAL_INCREASING,
suggested_display_precision=0,
entity_registry_enabled_default=False,
value_fn=lambda data: (
data.sensor.total_energy.value
if data.sensor.total_energy is not None

View File

@@ -807,6 +807,7 @@ class CastMediaPlayerEntity(CastDevice, MediaPlayerEntity):
# The lovelace app loops media to prevent timing out, don't show that
if self.app_id == CAST_APP_ID_HOMEASSISTANT_LOVELACE:
return MediaPlayerState.PLAYING
if (media_status := self._media_status()[0]) is not None:
if media_status.player_state == MEDIA_PLAYER_STATE_PLAYING:
return MediaPlayerState.PLAYING
@@ -817,19 +818,19 @@ class CastMediaPlayerEntity(CastDevice, MediaPlayerEntity):
if media_status.player_is_idle:
return MediaPlayerState.IDLE
if self._chromecast is not None and self._chromecast.is_idle:
# If library consider us idle, that is our off state
# it takes HDMI status into account for cast devices.
return MediaPlayerState.OFF
if self.app_id in APP_IDS_UNRELIABLE_MEDIA_INFO:
# Some apps don't report media status, show the player as playing
return MediaPlayerState.PLAYING
if self.app_id is not None:
if self.app_id is not None and self.app_id != pychromecast.config.APP_BACKDROP:
# We have an active app
return MediaPlayerState.IDLE
if self._chromecast is not None and self._chromecast.is_idle:
# If library consider us idle, that is our off state
# it takes HDMI status into account for cast devices.
return MediaPlayerState.OFF
return None
@property

View File

@@ -324,8 +324,8 @@
"nano_nr_3": "Nano 3",
"nano_nr_4": "Nano 4",
"nano_nr_5": "Nano 5",
"off": "Off",
"on": "On",
"off": "[%key:common::state::off%]",
"on": "[%key:common::state::on%]",
"summer": "Summer",
"winter": "Winter"
}
@@ -363,8 +363,8 @@
"pump_status": {
"name": "Pump status",
"state": {
"off": "Off",
"on": "On"
"off": "[%key:common::state::off%]",
"on": "[%key:common::state::on%]"
}
},
"return_circuit_temperature": {

View File

@@ -115,7 +115,7 @@ def _zone_temperature_lists(device: Appliance) -> tuple[list[str], list[str]]:
try:
heating = device.represent(DAIKIN_ZONE_TEMP_HEAT)[1]
cooling = device.represent(DAIKIN_ZONE_TEMP_COOL)[1]
except AttributeError:
except (AttributeError, KeyError):
return ([], [])
return (list(heating or []), list(cooling or []))

View File

@@ -7,5 +7,5 @@
"integration_type": "hub",
"iot_class": "local_push",
"loggers": ["dsmr_parser"],
"requirements": ["dsmr-parser==1.4.3"]
"requirements": ["dsmr-parser==1.5.0"]
}

View File

@@ -33,6 +33,8 @@ DUNEHD_PLAYER_SUPPORT: Final[MediaPlayerEntityFeature] = (
| MediaPlayerEntityFeature.PLAY
| MediaPlayerEntityFeature.PLAY_MEDIA
| MediaPlayerEntityFeature.BROWSE_MEDIA
| MediaPlayerEntityFeature.VOLUME_STEP
| MediaPlayerEntityFeature.VOLUME_MUTE
)

View File

@@ -21,5 +21,5 @@
"integration_type": "system",
"preview_features": { "winter_mode": {} },
"quality_scale": "internal",
"requirements": ["home-assistant-frontend==20260225.0"]
"requirements": ["home-assistant-frontend==20260302.0"]
}

View File

@@ -7,5 +7,5 @@
"integration_type": "service",
"iot_class": "cloud_polling",
"loggers": ["aiogithubapi"],
"requirements": ["aiogithubapi==24.6.0"]
"requirements": ["aiogithubapi==26.0.0"]
}

View File

@@ -10,5 +10,5 @@
"integration_type": "hub",
"iot_class": "cloud_polling",
"loggers": ["apyhiveapi"],
"requirements": ["pyhive-integration==1.0.7"]
"requirements": ["pyhive-integration==1.0.8"]
}

View File

@@ -57,8 +57,8 @@
"battery_charge_discharge_state": {
"name": "Battery charge/discharge state",
"state": {
"charging": "Charging",
"discharging": "Discharging",
"charging": "[%key:common::state::charging%]",
"discharging": "[%key:common::state::discharging%]",
"static": "Static"
}
},

View File

@@ -168,8 +168,9 @@ class MatterVacuum(MatterEntity, StateVacuumEntity):
segments: dict[str, Segment] = {}
for area in supported_areas:
area_name = None
if area.areaInfo and area.areaInfo.locationInfo:
area_name = area.areaInfo.locationInfo.locationName
location_info = area.areaInfo.locationInfo
if location_info not in (None, clusters.NullValue):
area_name = location_info.locationName
if area_name:
segment_id = str(area.areaID)
@@ -206,10 +207,11 @@ class MatterVacuum(MatterEntity, StateVacuumEntity):
if (
response
and response.status != clusters.ServiceArea.Enums.SelectAreasStatus.kSuccess
and response["status"]
!= clusters.ServiceArea.Enums.SelectAreasStatus.kSuccess
):
raise HomeAssistantError(
f"Failed to select areas: {response.statusText or response.status.name}"
f"Failed to select areas: {response['statusText'] or response['status']}"
)
await self.send_device_command(

View File

@@ -7,7 +7,7 @@ import asyncio
from http import HTTPStatus
import logging
from aiohttp import ClientError, ClientResponseError, web
from aiohttp import ClientError, web
from google_nest_sdm.camera_traits import CameraClipPreviewTrait
from google_nest_sdm.device import Device
from google_nest_sdm.device_manager import DeviceManager
@@ -43,6 +43,8 @@ from homeassistant.exceptions import (
ConfigEntryAuthFailed,
ConfigEntryNotReady,
HomeAssistantError,
OAuth2TokenRequestError,
OAuth2TokenRequestReauthError,
Unauthorized,
)
from homeassistant.helpers import (
@@ -253,11 +255,11 @@ async def async_setup_entry(hass: HomeAssistant, entry: NestConfigEntry) -> bool
auth = await api.new_auth(hass, entry)
try:
await auth.async_get_access_token()
except ClientResponseError as err:
if 400 <= err.status < 500:
raise ConfigEntryAuthFailed(
translation_domain=DOMAIN, translation_key="reauth_required"
) from err
except OAuth2TokenRequestReauthError as err:
raise ConfigEntryAuthFailed(
translation_domain=DOMAIN, translation_key="reauth_required"
) from err
except OAuth2TokenRequestError as err:
raise ConfigEntryNotReady(
translation_domain=DOMAIN, translation_key="auth_server_error"
) from err

View File

@@ -512,6 +512,11 @@ class OpenAISubentryFlowHandler(ConfigSubentryFlow):
options.pop(CONF_WEB_SEARCH_REGION, None)
options.pop(CONF_WEB_SEARCH_COUNTRY, None)
options.pop(CONF_WEB_SEARCH_TIMEZONE, None)
if (
user_input.get(CONF_CODE_INTERPRETER)
and user_input.get(CONF_REASONING_EFFORT) == "minimal"
):
errors[CONF_CODE_INTERPRETER] = "code_interpreter_minimal_reasoning"
options.update(user_input)
if not errors:
@@ -539,15 +544,15 @@ class OpenAISubentryFlowHandler(ConfigSubentryFlow):
if not model.startswith(("o", "gpt-5")) or model.startswith("gpt-5-pro"):
return []
MODELS_REASONING_MAP = {
models_reasoning_map: dict[str | tuple[str, ...], list[str]] = {
"gpt-5.2-pro": ["medium", "high", "xhigh"],
"gpt-5.2": ["none", "low", "medium", "high", "xhigh"],
("gpt-5.2", "gpt-5.3"): ["none", "low", "medium", "high", "xhigh"],
"gpt-5.1": ["none", "low", "medium", "high"],
"gpt-5": ["minimal", "low", "medium", "high"],
"": ["low", "medium", "high"], # The default case
}
for prefix, options in MODELS_REASONING_MAP.items():
for prefix, options in models_reasoning_map.items():
if model.startswith(prefix):
return options
return [] # pragma: no cover

View File

@@ -38,6 +38,7 @@
},
"entry_type": "AI task",
"error": {
"code_interpreter_minimal_reasoning": "[%key:component::openai_conversation::config_subentries::conversation::error::code_interpreter_minimal_reasoning%]",
"model_not_supported": "[%key:component::openai_conversation::config_subentries::conversation::error::model_not_supported%]",
"web_search_minimal_reasoning": "[%key:component::openai_conversation::config_subentries::conversation::error::web_search_minimal_reasoning%]"
},
@@ -93,6 +94,7 @@
},
"entry_type": "Conversation agent",
"error": {
"code_interpreter_minimal_reasoning": "Code interpreter is not supported with minimal reasoning effort",
"model_not_supported": "This model is not supported, please select a different model",
"web_search_minimal_reasoning": "Web search is currently not supported with minimal reasoning effort"
},

View File

@@ -69,7 +69,7 @@ class OverseerrConfigFlow(ConfigFlow, domain=DOMAIN):
else:
if self.source == SOURCE_USER:
return self.async_create_entry(
title="Overseerr",
title="Seerr",
data={
CONF_HOST: host,
CONF_PORT: port,

View File

@@ -1,6 +1,6 @@
{
"domain": "overseerr",
"name": "Overseerr",
"name": "Seerr",
"after_dependencies": ["cloud"],
"codeowners": ["@joostlek", "@AmGarera"],
"config_flow": true,

View File

@@ -25,8 +25,8 @@
"url": "[%key:common::config_flow::data::url%]"
},
"data_description": {
"api_key": "The API key of the Overseerr instance.",
"url": "The URL of the Overseerr instance."
"api_key": "The API key of the Seerr instance.",
"url": "The URL of the Seerr instance."
}
}
}
@@ -114,7 +114,7 @@
"message": "[%key:common::config_flow::error::invalid_api_key%]"
},
"connection_error": {
"message": "Error connecting to the Overseerr instance: {error}"
"message": "Error connecting to the Seerr instance: {error}"
}
},
"selector": {
@@ -137,11 +137,11 @@
},
"services": {
"get_requests": {
"description": "Retrieves a list of media requests from Overseerr.",
"description": "Retrieves a list of media requests from Seerr.",
"fields": {
"config_entry_id": {
"description": "The Overseerr instance to get requests from.",
"name": "Overseerr instance"
"description": "The Seerr instance to get requests from.",
"name": "Seerr instance"
},
"requested_by": {
"description": "Filter the requests by the user ID that requested them.",

View File

@@ -137,11 +137,10 @@ class PhilipsTVLightEntity(PhilipsJsEntity, LightEntity):
_attr_effect: str
_attr_translation_key = "ambilight"
_attr_supported_color_modes = {ColorMode.HS}
_attr_supported_features = LightEntityFeature.EFFECT
def __init__(
self,
coordinator: PhilipsTVDataUpdateCoordinator,
) -> None:
def __init__(self, coordinator: PhilipsTVDataUpdateCoordinator) -> None:
"""Initialize light."""
self._tv = coordinator.api
self._hs = None
@@ -150,8 +149,6 @@ class PhilipsTVLightEntity(PhilipsJsEntity, LightEntity):
self._last_selected_effect: AmbilightEffect | None = None
super().__init__(coordinator)
self._attr_supported_color_modes = {ColorMode.HS, ColorMode.ONOFF}
self._attr_supported_features = LightEntityFeature.EFFECT
self._attr_unique_id = coordinator.unique_id
self._update_from_coordinator()

View File

@@ -16,7 +16,7 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from . import PortainerConfigEntry
from .const import CONTAINER_STATE_RUNNING, STACK_STATUS_ACTIVE
from .coordinator import PortainerContainerData, PortainerCoordinator
from .coordinator import PortainerContainerData
from .entity import (
PortainerContainerEntity,
PortainerCoordinatorData,
@@ -165,18 +165,6 @@ class PortainerEndpointSensor(PortainerEndpointEntity, BinarySensorEntity):
entity_description: PortainerEndpointBinarySensorEntityDescription
def __init__(
self,
coordinator: PortainerCoordinator,
entity_description: PortainerEndpointBinarySensorEntityDescription,
device_info: PortainerCoordinatorData,
) -> None:
"""Initialize Portainer endpoint binary sensor entity."""
self.entity_description = entity_description
super().__init__(device_info, coordinator)
self._attr_unique_id = f"{coordinator.config_entry.entry_id}_{device_info.id}_{entity_description.key}"
@property
def is_on(self) -> bool | None:
"""Return true if the binary sensor is on."""
@@ -188,19 +176,6 @@ class PortainerContainerSensor(PortainerContainerEntity, BinarySensorEntity):
entity_description: PortainerContainerBinarySensorEntityDescription
def __init__(
self,
coordinator: PortainerCoordinator,
entity_description: PortainerContainerBinarySensorEntityDescription,
device_info: PortainerContainerData,
via_device: PortainerCoordinatorData,
) -> None:
"""Initialize the Portainer container sensor."""
self.entity_description = entity_description
super().__init__(device_info, coordinator, via_device)
self._attr_unique_id = f"{coordinator.config_entry.entry_id}_{self.device_name}_{entity_description.key}"
@property
def is_on(self) -> bool | None:
"""Return true if the binary sensor is on."""
@@ -212,19 +187,6 @@ class PortainerStackSensor(PortainerStackEntity, BinarySensorEntity):
entity_description: PortainerStackBinarySensorEntityDescription
def __init__(
self,
coordinator: PortainerCoordinator,
entity_description: PortainerStackBinarySensorEntityDescription,
device_info: PortainerStackData,
via_device: PortainerCoordinatorData,
) -> None:
"""Initialize the Portainer stack sensor."""
self.entity_description = entity_description
super().__init__(device_info, coordinator, via_device)
self._attr_unique_id = f"{coordinator.config_entry.entry_id}_{device_info.stack.id}_{entity_description.key}"
@property
def is_on(self) -> bool | None:
"""Return true if the binary sensor is on."""

View File

@@ -167,18 +167,6 @@ class PortainerEndpointButton(PortainerEndpointEntity, PortainerBaseButton):
entity_description: PortainerButtonDescription
def __init__(
self,
coordinator: PortainerCoordinator,
entity_description: PortainerButtonDescription,
device_info: PortainerCoordinatorData,
) -> None:
"""Initialize the Portainer endpoint button entity."""
self.entity_description = entity_description
super().__init__(device_info, coordinator)
self._attr_unique_id = f"{coordinator.config_entry.entry_id}_{device_info.id}_{entity_description.key}"
async def _async_press_call(self) -> None:
"""Call the endpoint button press action."""
await self.entity_description.press_action(
@@ -191,19 +179,6 @@ class PortainerContainerButton(PortainerContainerEntity, PortainerBaseButton):
entity_description: PortainerButtonDescription
def __init__(
self,
coordinator: PortainerCoordinator,
entity_description: PortainerButtonDescription,
device_info: PortainerContainerData,
via_device: PortainerCoordinatorData,
) -> None:
"""Initialize the Portainer button entity."""
self.entity_description = entity_description
super().__init__(device_info, coordinator, via_device)
self._attr_unique_id = f"{coordinator.config_entry.entry_id}_{self.device_name}_{entity_description.key}"
async def _async_press_call(self) -> None:
"""Call the container button press action."""
await self.entity_description.press_action(

View File

@@ -4,6 +4,7 @@ from yarl import URL
from homeassistant.const import CONF_URL
from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo
from homeassistant.helpers.entity import EntityDescription
from homeassistant.helpers.update_coordinator import CoordinatorEntity
from .const import DEFAULT_NAME, DOMAIN
@@ -26,11 +27,13 @@ class PortainerEndpointEntity(PortainerCoordinatorEntity):
def __init__(
self,
device_info: PortainerCoordinatorData,
coordinator: PortainerCoordinator,
entity_description: EntityDescription,
device_info: PortainerCoordinatorData,
) -> None:
"""Initialize a Portainer endpoint."""
super().__init__(coordinator)
self.entity_description = entity_description
self._device_info = device_info
self.device_id = device_info.endpoint.id
self._attr_device_info = DeviceInfo(
@@ -45,6 +48,7 @@ class PortainerEndpointEntity(PortainerCoordinatorEntity):
name=device_info.endpoint.name,
entry_type=DeviceEntryType.SERVICE,
)
self._attr_unique_id = f"{coordinator.config_entry.entry_id}_{device_info.id}_{entity_description.key}"
@property
def available(self) -> bool:
@@ -57,12 +61,14 @@ class PortainerContainerEntity(PortainerCoordinatorEntity):
def __init__(
self,
device_info: PortainerContainerData,
coordinator: PortainerCoordinator,
entity_description: EntityDescription,
device_info: PortainerContainerData,
via_device: PortainerCoordinatorData,
) -> None:
"""Initialize a Portainer container."""
super().__init__(coordinator)
self.entity_description = entity_description
self._device_info = device_info
self.device_id = self._device_info.container.id
self.endpoint_id = via_device.endpoint.id
@@ -91,13 +97,14 @@ class PortainerContainerEntity(PortainerCoordinatorEntity):
# else it's the endpoint
via_device=(
DOMAIN,
f"{coordinator.config_entry.entry_id}_{self.endpoint_id}_{device_info.stack.name}"
f"{coordinator.config_entry.entry_id}_{self.endpoint_id}_stack_{device_info.stack.id}"
if device_info.stack
else f"{coordinator.config_entry.entry_id}_{self.endpoint_id}",
),
translation_key=None if self.device_name else "unknown_container",
entry_type=DeviceEntryType.SERVICE,
)
self._attr_unique_id = f"{coordinator.config_entry.entry_id}_{self.device_name}_{entity_description.key}"
@property
def available(self) -> bool:
@@ -119,12 +126,14 @@ class PortainerStackEntity(PortainerCoordinatorEntity):
def __init__(
self,
device_info: PortainerStackData,
coordinator: PortainerCoordinator,
entity_description: EntityDescription,
device_info: PortainerStackData,
via_device: PortainerCoordinatorData,
) -> None:
"""Initialize a Portainer stack."""
super().__init__(coordinator)
self.entity_description = entity_description
self._device_info = device_info
self.stack_id = device_info.stack.id
self.device_name = device_info.stack.name
@@ -135,7 +144,7 @@ class PortainerStackEntity(PortainerCoordinatorEntity):
identifiers={
(
DOMAIN,
f"{coordinator.config_entry.entry_id}_{self.endpoint_id}_{self.device_name}",
f"{coordinator.config_entry.entry_id}_{self.endpoint_id}_stack_{self.stack_id}",
)
},
manufacturer=DEFAULT_NAME,
@@ -149,6 +158,7 @@ class PortainerStackEntity(PortainerCoordinatorEntity):
f"{coordinator.config_entry.entry_id}_{self.endpoint_id}",
),
)
self._attr_unique_id = f"{coordinator.config_entry.entry_id}_{self.stack_id}_{entity_description.key}"
@property
def available(self) -> bool:

View File

@@ -21,7 +21,6 @@ from .const import STACK_TYPE_COMPOSE, STACK_TYPE_KUBERNETES, STACK_TYPE_SWARM
from .coordinator import (
PortainerConfigEntry,
PortainerContainerData,
PortainerCoordinator,
PortainerStackData,
)
from .entity import (
@@ -398,19 +397,6 @@ class PortainerContainerSensor(PortainerContainerEntity, SensorEntity):
entity_description: PortainerContainerSensorEntityDescription
def __init__(
self,
coordinator: PortainerCoordinator,
entity_description: PortainerContainerSensorEntityDescription,
device_info: PortainerContainerData,
via_device: PortainerCoordinatorData,
) -> None:
"""Initialize the Portainer container sensor."""
self.entity_description = entity_description
super().__init__(device_info, coordinator, via_device)
self._attr_unique_id = f"{coordinator.config_entry.entry_id}_{self.device_name}_{entity_description.key}"
@property
def native_value(self) -> StateType:
"""Return the state of the sensor."""
@@ -422,18 +408,6 @@ class PortainerEndpointSensor(PortainerEndpointEntity, SensorEntity):
entity_description: PortainerEndpointSensorEntityDescription
def __init__(
self,
coordinator: PortainerCoordinator,
entity_description: PortainerEndpointSensorEntityDescription,
device_info: PortainerCoordinatorData,
) -> None:
"""Initialize the Portainer endpoint sensor."""
self.entity_description = entity_description
super().__init__(device_info, coordinator)
self._attr_unique_id = f"{coordinator.config_entry.entry_id}_{device_info.id}_{entity_description.key}"
@property
def native_value(self) -> StateType:
"""Return the state of the sensor."""
@@ -446,19 +420,6 @@ class PortainerStackSensor(PortainerStackEntity, SensorEntity):
entity_description: PortainerStackSensorEntityDescription
def __init__(
self,
coordinator: PortainerCoordinator,
entity_description: PortainerStackSensorEntityDescription,
device_info: PortainerStackData,
via_device: PortainerCoordinatorData,
) -> None:
"""Initialize the Portainer stack sensor."""
self.entity_description = entity_description
super().__init__(device_info, coordinator, via_device)
self._attr_unique_id = f"{coordinator.config_entry.entry_id}_{device_info.stack.id}_{entity_description.key}"
@property
def native_value(self) -> StateType:
"""Return the state of the sensor."""

View File

@@ -167,19 +167,6 @@ class PortainerContainerSwitch(PortainerContainerEntity, SwitchEntity):
entity_description: PortainerSwitchEntityDescription
def __init__(
self,
coordinator: PortainerCoordinator,
entity_description: PortainerSwitchEntityDescription,
device_info: PortainerContainerData,
via_device: PortainerCoordinatorData,
) -> None:
"""Initialize the Portainer container switch."""
self.entity_description = entity_description
super().__init__(device_info, coordinator, via_device)
self._attr_unique_id = f"{coordinator.config_entry.entry_id}_{self.device_name}_{entity_description.key}"
@property
def is_on(self) -> bool | None:
"""Return the state of the device."""
@@ -209,19 +196,6 @@ class PortainerStackSwitch(PortainerStackEntity, SwitchEntity):
entity_description: PortainerStackSwitchEntityDescription
def __init__(
self,
coordinator: PortainerCoordinator,
entity_description: PortainerStackSwitchEntityDescription,
device_info: PortainerStackData,
via_device: PortainerCoordinatorData,
) -> None:
"""Initialize the Portainer stack switch."""
self.entity_description = entity_description
super().__init__(device_info, coordinator, via_device)
self._attr_unique_id = f"{coordinator.config_entry.entry_id}_{device_info.stack.id}_{entity_description.key}"
@property
def is_on(self) -> bool | None:
"""Return the state of the device."""

View File

@@ -49,12 +49,12 @@ class PowerfoxLocalDataUpdateCoordinator(DataUpdateCoordinator[LocalResponse]):
except PowerfoxAuthenticationError as err:
raise ConfigEntryAuthFailed(
translation_domain=DOMAIN,
translation_key="invalid_auth",
translation_placeholders={"error": str(err)},
translation_key="auth_failed",
translation_placeholders={"host": self.config_entry.data[CONF_HOST]},
) from err
except PowerfoxConnectionError as err:
raise UpdateFailed(
translation_domain=DOMAIN,
translation_key="update_failed",
translation_placeholders={"error": str(err)},
translation_key="connection_error",
translation_placeholders={"host": self.config_entry.data[CONF_HOST]},
) from err

View File

@@ -56,11 +56,11 @@
}
},
"exceptions": {
"invalid_auth": {
"message": "Error while authenticating with the device: {error}"
"auth_failed": {
"message": "Authentication with the Poweropti device at {host} failed. Please check your API key."
},
"update_failed": {
"message": "Error while updating the device: {error}"
"connection_error": {
"message": "Could not connect to the Poweropti device at {host}. Please check if the device is online and reachable."
}
}
}

View File

@@ -32,6 +32,14 @@
"username": "[%key:common::config_flow::data::username%]",
"verify_ssl": "[%key:common::config_flow::data::verify_ssl%]"
},
"data_description": {
"host": "[%key:component::proxmoxve::config::step::user::data_description::host%]",
"password": "[%key:component::proxmoxve::config::step::user::data_description::password%]",
"port": "[%key:component::proxmoxve::config::step::user::data_description::port%]",
"realm": "[%key:component::proxmoxve::config::step::user::data_description::realm%]",
"username": "[%key:component::proxmoxve::config::step::user::data_description::username%]",
"verify_ssl": "[%key:component::proxmoxve::config::step::user::data_description::verify_ssl%]"
},
"description": "Use the following form to reconfigure your Proxmox VE server connection.",
"title": "Reconfigure Proxmox VE integration"
},
@@ -44,6 +52,14 @@
"username": "[%key:common::config_flow::data::username%]",
"verify_ssl": "[%key:common::config_flow::data::verify_ssl%]"
},
"data_description": {
"host": "The hostname or IP address of your Proxmox VE server",
"password": "The password for the Proxmox VE server",
"port": "The port of your Proxmox VE server (default: 8006)",
"realm": "The authentication realm for the Proxmox VE server (default: 'pam')",
"username": "The username for the Proxmox VE server",
"verify_ssl": "Whether to verify SSL certificates. Disable only if you have a self-signed certificate"
},
"description": "Enter your Proxmox VE server details to set up the integration.",
"title": "Connect to Proxmox VE"
}

View File

@@ -162,6 +162,11 @@
}
}
},
"exceptions": {
"missing_output_access_code": {
"message": "Cannot control switchable outputs because no user code is configured for this Satel Integra entry. Configure a code in the integration options to enable output control."
}
},
"issues": {
"deprecated_yaml_import_issue_cannot_connect": {
"description": "Configuring {integration_title} using YAML is being removed but there was an connection error importing your existing configuration.\n\nEnsure connection to {integration_title} works and restart Home Assistant to try again or remove the `{domain}` YAML configuration from your configuration.yaml file and add the {integration_title} integration manually.",

View File

@@ -8,9 +8,14 @@ from homeassistant.components.switch import SwitchEntity
from homeassistant.config_entries import ConfigSubentry
from homeassistant.const import CONF_CODE
from homeassistant.core import HomeAssistant, callback
from homeassistant.exceptions import ServiceValidationError
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from .const import CONF_SWITCHABLE_OUTPUT_NUMBER, SUBENTRY_TYPE_SWITCHABLE_OUTPUT
from .const import (
CONF_SWITCHABLE_OUTPUT_NUMBER,
DOMAIN,
SUBENTRY_TYPE_SWITCHABLE_OUTPUT,
)
from .coordinator import SatelConfigEntry, SatelIntegraOutputsCoordinator
from .entity import SatelIntegraEntity
@@ -83,12 +88,24 @@ class SatelIntegraSwitch(
async def async_turn_on(self, **kwargs: Any) -> None:
"""Turn the device on."""
if self._code is None:
raise ServiceValidationError(
translation_domain=DOMAIN,
translation_key="missing_output_access_code",
)
await self._controller.set_output(self._code, self._device_number, True)
self._attr_is_on = True
self.async_write_ha_state()
async def async_turn_off(self, **kwargs: Any) -> None:
"""Turn the device off."""
if self._code is None:
raise ServiceValidationError(
translation_domain=DOMAIN,
translation_key="missing_output_access_code",
)
await self._controller.set_output(self._code, self._device_number, False)
self._attr_is_on = False
self.async_write_ha_state()

View File

@@ -269,7 +269,6 @@ class LeilSaunaClimate(LeilSaunaEntity, ClimateEntity):
raise HomeAssistantError(
translation_domain=DOMAIN,
translation_key="start_session_failed",
translation_placeholders={"error": str(err)},
) from err
await self.coordinator.async_request_refresh()

View File

@@ -47,5 +47,4 @@ class LeilSaunaCoordinator(DataUpdateCoordinator[SaunumData]):
raise UpdateFailed(
translation_domain=DOMAIN,
translation_key="communication_error",
translation_placeholders={"error": str(err)},
) from err

View File

@@ -88,7 +88,7 @@
},
"exceptions": {
"communication_error": {
"message": "Communication error: {error}"
"message": "Communication error with sauna control unit"
},
"door_open": {
"message": "Cannot start sauna session when sauna door is open"
@@ -130,7 +130,7 @@
"message": "Failed to set temperature to {temperature}"
},
"start_session_failed": {
"message": "Failed to start sauna session: {error}"
"message": "Failed to start sauna session"
}
},
"options": {

View File

@@ -124,6 +124,17 @@ class SFTPFlowHandler(ConfigFlow, domain=DOMAIN):
}
)
if not user_input[CONF_BACKUP_LOCATION].startswith("/"):
errors[CONF_BACKUP_LOCATION] = "backup_location_relative"
return self.async_show_form(
step_id=step_id,
data_schema=self.add_suggested_values_to_schema(
DATA_SCHEMA, user_input
),
description_placeholders=placeholders,
errors=errors,
)
try:
# Validate auth input and save uploaded key file if provided
user_input = await self._validate_auth_and_save_keyfile(user_input)

View File

@@ -4,6 +4,7 @@
"already_configured": "Integration already configured. Host with same address, port and backup location already exists."
},
"error": {
"backup_location_relative": "The remote path must be an absolute path (starting with `/`).",
"invalid_key": "Invalid key uploaded. Please make sure key corresponds to valid SSH key algorithm.",
"key_or_password_needed": "Please configure password or private key file location for SFTP Storage.",
"os_error": "{error_message}. Please check if host and/or port are correct.",

View File

@@ -66,6 +66,7 @@ from .repairs import (
from .services import async_setup_services
from .utils import (
async_create_issue_unsupported_firmware,
async_migrate_rpc_sensor_description_unique_ids,
async_migrate_rpc_virtual_components_unique_ids,
get_coap_context,
get_device_entry_gen,
@@ -296,6 +297,12 @@ async def _async_setup_rpc_entry(hass: HomeAssistant, entry: ShellyConfigEntry)
runtime_data = entry.runtime_data
runtime_data.platforms = RPC_SLEEPING_PLATFORMS
await er.async_migrate_entries(
hass,
entry.entry_id,
async_migrate_rpc_sensor_description_unique_ids,
)
if sleep_period == 0:
# Not a sleeping device, finish setup
LOGGER.debug("Setting up online RPC device %s", entry.title)

View File

@@ -1220,7 +1220,7 @@ RPC_SENSORS: Final = {
entity_category=EntityCategory.DIAGNOSTIC,
use_polling_coordinator=True,
),
"temperature_0": RpcSensorDescription(
"temperature_tc": RpcSensorDescription(
key="temperature",
sub_key="tC",
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
@@ -1249,7 +1249,7 @@ RPC_SENSORS: Final = {
entity_category=EntityCategory.DIAGNOSTIC,
use_polling_coordinator=True,
),
"humidity_0": RpcSensorDescription(
"humidity_rh": RpcSensorDescription(
key="humidity",
sub_key="rh",
native_unit_of_measurement=PERCENTAGE,

View File

@@ -969,6 +969,30 @@ def format_ble_addr(ble_addr: str) -> str:
return ble_addr.replace(":", "").upper()
@callback
def async_migrate_rpc_sensor_description_unique_ids(
entity_entry: er.RegistryEntry,
) -> dict[str, Any] | None:
"""Migrate RPC sensor unique_ids after sensor description key rename."""
unique_id_map = {
"-temperature_0": "-temperature_tc",
"-humidity_0": "-humidity_rh",
}
for old_suffix, new_suffix in unique_id_map.items():
if entity_entry.unique_id.endswith(old_suffix):
new_unique_id = entity_entry.unique_id.removesuffix(old_suffix) + new_suffix
LOGGER.debug(
"Migrating unique_id for %s entity from [%s] to [%s]",
entity_entry.entity_id,
entity_entry.unique_id,
new_unique_id,
)
return {"new_unique_id": new_unique_id}
return None
@callback
def async_migrate_rpc_virtual_components_unique_ids(
config: dict[str, Any], entity_entry: er.RegistryEntry

View File

@@ -34,5 +34,5 @@
"iot_class": "cloud_push",
"loggers": ["pysmartthings"],
"quality_scale": "bronze",
"requirements": ["pysmartthings==3.5.3"]
"requirements": ["pysmartthings==3.6.0"]
}

View File

@@ -95,6 +95,7 @@ ROBOT_CLEANER_TURBO_MODE_STATE_MAP = {
ROBOT_CLEANER_MOVEMENT_MAP = {
"powerOff": "off",
"washingMop": "washing_mop",
}
OVEN_MODE = {
@@ -161,6 +162,13 @@ class SmartThingsSensorEntityDescription(SensorEntityDescription):
use_temperature_unit: bool = False
deprecated: Callable[[ComponentStatus], tuple[str, str] | None] | None = None
component_translation_key: dict[str, str] | None = None
presentation_fn: (
Callable[
[str | None, str | float | int | datetime | None],
str | float | int | datetime | None,
]
| None
) = None
CAPABILITY_TO_SENSORS: dict[
@@ -762,6 +770,13 @@ CAPABILITY_TO_SENSORS: dict[
(value := cast(dict | None, status.value)) is not None
and "power" in value
),
presentation_fn=lambda presentation_id, value: (
value * 1000
if presentation_id is not None
and "EHS" in presentation_id
and isinstance(value, (int, float))
else value
),
),
SmartThingsSensorEntityDescription(
key="deltaEnergy_meter",
@@ -880,6 +895,7 @@ CAPABILITY_TO_SENSORS: dict[
"after",
"cleaning",
"pause",
"washing_mop",
],
device_class=SensorDeviceClass.ENUM,
value_fn=lambda value: ROBOT_CLEANER_MOVEMENT_MAP.get(value, value),
@@ -1345,7 +1361,12 @@ class SmartThingsSensor(SmartThingsEntity, SensorEntity):
res = self.get_attribute_value(self.capability, self._attribute)
if options_map := self.entity_description.options_map:
return options_map.get(res)
return self.entity_description.value_fn(res)
value = self.entity_description.value_fn(res)
if self.entity_description.presentation_fn:
value = self.entity_description.presentation_fn(
self.device.device.presentation_id, value
)
return value
@property
def native_unit_of_measurement(self) -> str | None:

View File

@@ -718,7 +718,8 @@
"off": "[%key:common::state::off%]",
"pause": "[%key:common::state::paused%]",
"point": "Point",
"reserve": "Reserve"
"reserve": "Reserve",
"washing_mop": "Washing mop"
}
},
"robot_cleaner_turbo_mode": {

View File

@@ -2,6 +2,8 @@
from __future__ import annotations
from dataclasses import fields
from aiopyarr.models.host_configuration import PyArrHostConfiguration
from aiopyarr.sonarr_client import SonarrClient
@@ -37,7 +39,6 @@ from .coordinator import (
SeriesDataUpdateCoordinator,
SonarrConfigEntry,
SonarrData,
SonarrDataUpdateCoordinator,
StatusDataUpdateCoordinator,
WantedDataUpdateCoordinator,
)
@@ -89,16 +90,8 @@ async def async_setup_entry(hass: HomeAssistant, entry: SonarrConfigEntry) -> bo
)
# Temporary, until we add diagnostic entities
_version = None
coordinators: list[SonarrDataUpdateCoordinator] = [
data.upcoming,
data.commands,
data.diskspace,
data.queue,
data.series,
data.status,
data.wanted,
]
for coordinator in coordinators:
for field in fields(data):
coordinator = getattr(data, field.name)
await coordinator.async_config_entry_first_refresh()
if isinstance(coordinator, StatusDataUpdateCoordinator):
_version = coordinator.data.version

View File

@@ -128,35 +128,6 @@ def format_queue(
return shows
def format_episode_item(
series: SonarrSeries, episode_data: dict[str, Any], base_url: str | None = None
) -> dict[str, Any]:
"""Format a single episode item."""
result: dict[str, Any] = {
"id": episode_data.get("id"),
"episode_number": episode_data.get("episodeNumber"),
"season_number": episode_data.get("seasonNumber"),
"title": episode_data.get("title"),
"air_date": str(episode_data.get("airDate", "")),
"overview": episode_data.get("overview"),
"has_file": episode_data.get("hasFile", False),
"monitored": episode_data.get("monitored", False),
}
# Add episode images if available
if images := episode_data.get("images"):
result["images"] = {}
for image in images:
cover_type = image.coverType
# Prefer remoteUrl (public TVDB URL) over local path
if remote_url := getattr(image, "remoteUrl", None):
result["images"][cover_type] = remote_url
elif base_url and (url := getattr(image, "url", None)):
result["images"][cover_type] = f"{base_url.rstrip('/')}{url}"
return result
def format_series(
series_list: list[SonarrSeries], base_url: str | None = None
) -> dict[str, dict[str, Any]]:

View File

@@ -46,7 +46,7 @@ CONF_SEASON_NUMBER = "season_number"
CONF_SPACE_UNIT = "space_unit"
# Valid space units
SPACE_UNITS = ["bytes", "kb", "kib", "mb", "mib", "gb", "gib", "tb", "tib", "pb", "pib"]
SPACE_UNITS = ["bytes", "KB", "KiB", "MB", "MiB", "GB", "GiB", "TB", "TiB", "PB", "PiB"]
DEFAULT_SPACE_UNIT = "bytes"
# Default values - 0 means no limit

View File

@@ -78,7 +78,7 @@
"name": "Sonarr entry"
},
"space_unit": {
"description": "Unit for space values. Use binary units (kib, mib, gib, tib, pib) for 1024-based values or decimal units (kb, mb, gb, tb, pb) for 1000-based values.",
"description": "Unit for space values. Use binary units (KiB, MiB, GiB, TiB, PiB) for 1024-based values or decimal units (KB, MB, GB, TB, PB) for 1000-based values. The default is bytes.",
"name": "Space unit"
}
},

View File

@@ -8,5 +8,5 @@
"iot_class": "cloud_polling",
"loggers": ["aiotankerkoenig"],
"quality_scale": "platinum",
"requirements": ["aiotankerkoenig==0.4.2"]
"requirements": ["aiotankerkoenig==0.5.1"]
}

View File

@@ -257,6 +257,9 @@ class AbstractTemplateSensor(AbstractTemplateEntity, RestoreSensor):
) -> StateType | date | datetime | Decimal | None:
"""Validate the state."""
if self._numeric_state_expected:
if not isinstance(result, bool) and isinstance(result, (int, float)):
return result
return template_validators.number(self, CONF_STATE)(result)
if result is None or self.device_class not in (

View File

@@ -41,7 +41,7 @@
"iot_class": "local_push",
"loggers": ["uiprotect", "unifi_discovery"],
"quality_scale": "platinum",
"requirements": ["uiprotect==10.2.1", "unifi-discovery==1.2.0"],
"requirements": ["uiprotect==10.2.2", "unifi-discovery==1.2.0"],
"ssdp": [
{
"manufacturer": "Ubiquiti Networks",

View File

@@ -23,6 +23,7 @@ from homeassistant.const import ( # noqa: F401 # STATE_PAUSED/IDLE are API
STATE_ON,
)
from homeassistant.core import HomeAssistant, callback
from homeassistant.exceptions import ServiceValidationError
from homeassistant.helpers import config_validation as cv, issue_registry as ir
from homeassistant.helpers.entity import Entity, EntityDescription
from homeassistant.helpers.entity_component import EntityComponent
@@ -63,7 +64,6 @@ SERVICE_STOP = "stop"
DEFAULT_NAME = "Vacuum cleaner robot"
ISSUE_SEGMENTS_CHANGED = "segments_changed"
ISSUE_SEGMENTS_MAPPING_NOT_CONFIGURED = "segments_mapping_not_configured"
_BATTERY_DEPRECATION_IGNORED_PLATFORMS = ("template",)
@@ -236,12 +236,6 @@ class StateVacuumEntity(
if self.__vacuum_legacy_battery_icon:
self._report_deprecated_battery_properties("battery_icon")
@callback
def async_write_ha_state(self) -> None:
"""Write the state to the state machine."""
super().async_write_ha_state()
self._async_check_segments_issues()
@callback
def async_registry_entry_updated(self) -> None:
"""Run when the entity registry entry has been updated."""
@@ -444,7 +438,14 @@ class StateVacuumEntity(
)
options: Mapping[str, Any] = self.registry_entry.options.get(DOMAIN, {})
area_mapping: dict[str, list[str]] = options.get("area_mapping", {})
area_mapping: dict[str, list[str]] | None = options.get("area_mapping")
if area_mapping is None:
raise ServiceValidationError(
translation_domain=DOMAIN,
translation_key="area_mapping_not_configured",
translation_placeholders={"entity_id": self.entity_id},
)
# We use a dict to preserve the order of segments.
segment_ids: dict[str, None] = {}
@@ -514,43 +515,6 @@ class StateVacuumEntity(
return
options: Mapping[str, Any] = self.registry_entry.options.get(DOMAIN, {})
should_have_not_configured_issue = (
VacuumEntityFeature.CLEAN_AREA in self.supported_features
and options.get("area_mapping") is None
)
if (
should_have_not_configured_issue
and not self._segments_not_configured_issue_created
):
issue_id = (
f"{ISSUE_SEGMENTS_MAPPING_NOT_CONFIGURED}_{self.registry_entry.id}"
)
ir.async_create_issue(
self.hass,
DOMAIN,
issue_id,
data={
"entry_id": self.registry_entry.id,
"entity_id": self.entity_id,
},
is_fixable=False,
severity=ir.IssueSeverity.WARNING,
translation_key=ISSUE_SEGMENTS_MAPPING_NOT_CONFIGURED,
translation_placeholders={
"entity_id": self.entity_id,
},
)
self._segments_not_configured_issue_created = True
elif (
not should_have_not_configured_issue
and self._segments_not_configured_issue_created
):
issue_id = (
f"{ISSUE_SEGMENTS_MAPPING_NOT_CONFIGURED}_{self.registry_entry.id}"
)
ir.async_delete_issue(self.hass, DOMAIN, issue_id)
self._segments_not_configured_issue_created = False
if self._segments_changed_last_seen is not None and (
VacuumEntityFeature.CLEAN_AREA not in self.supported_features

View File

@@ -89,14 +89,15 @@
}
}
},
"exceptions": {
"area_mapping_not_configured": {
"message": "Area mapping is not configured for `{entity_id}`. Configure the segment-to-area mapping before using this action."
}
},
"issues": {
"segments_changed": {
"description": "",
"title": "Vacuum segments have changed for {entity_id}"
},
"segments_mapping_not_configured": {
"description": "",
"title": "Vacuum segment mapping not configured for {entity_id}"
}
},
"selector": {

View File

@@ -222,8 +222,10 @@ class WebDavBackupAgent(BackupAgent):
async def _download_metadata(path: str) -> AgentBackup:
"""Download metadata file."""
iterator = await self._client.download_iter(path)
metadata = await anext(iterator)
return AgentBackup.from_dict(json_loads_object(metadata))
metadata_bytes = bytearray()
async for chunk in iterator:
metadata_bytes.extend(chunk)
return AgentBackup.from_dict(json_loads_object(metadata_bytes))
async def _list_metadata_files() -> dict[str, AgentBackup]:
"""List metadata files."""

View File

@@ -8,5 +8,5 @@
"iot_class": "cloud_polling",
"loggers": ["aiowebdav2"],
"quality_scale": "bronze",
"requirements": ["aiowebdav2==0.5.0"]
"requirements": ["aiowebdav2==0.6.1"]
}

View File

@@ -7,5 +7,5 @@
"documentation": "https://www.home-assistant.io/integrations/weheat",
"integration_type": "hub",
"iot_class": "cloud_polling",
"requirements": ["weheat==2026.1.25"]
"requirements": ["weheat==2026.2.28"]
}

View File

@@ -14,5 +14,5 @@
"integration_type": "hub",
"iot_class": "cloud_push",
"loggers": ["socketio", "engineio", "yalexs"],
"requirements": ["yalexs==9.2.0", "yalexs-ble==3.2.4"]
"requirements": ["yalexs==9.2.0", "yalexs-ble==3.2.7"]
}

View File

@@ -13,5 +13,5 @@
"documentation": "https://www.home-assistant.io/integrations/yalexs_ble",
"integration_type": "device",
"iot_class": "local_push",
"requirements": ["yalexs-ble==3.2.4"]
"requirements": ["yalexs-ble==3.2.7"]
}

View File

@@ -274,6 +274,9 @@ async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> b
async def async_unload_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool:
"""Unload ZHA config entry."""
if not await hass.config_entries.async_unload_platforms(config_entry, PLATFORMS):
return False
ha_zha_data = get_zha_data(hass)
ha_zha_data.config_entry = None
@@ -281,6 +284,8 @@ async def async_unload_entry(hass: HomeAssistant, config_entry: ConfigEntry) ->
await ha_zha_data.gateway_proxy.shutdown()
ha_zha_data.gateway_proxy = None
ha_zha_data.update_coordinator = None
# clean up any remaining entity metadata
# (entities that have been discovered but not yet added to HA)
# suppress KeyError because we don't know what state we may
@@ -291,7 +296,7 @@ async def async_unload_entry(hass: HomeAssistant, config_entry: ConfigEntry) ->
websocket_api.async_unload_api(hass)
return await hass.config_entries.async_unload_platforms(config_entry, PLATFORMS)
return True
async def async_migrate_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool:

View File

@@ -1,13 +1,13 @@
{
"config": {
"abort": {
"addon_get_discovery_info_failed": "Failed to get Z-Wave app discovery info.",
"addon_info_failed": "Failed to get Z-Wave app info.",
"addon_install_failed": "Failed to install the Z-Wave app.",
"addon_required": "The Z-Wave migration flow requires the integration to be configured using the Z-Wave Supervisor app. If you are using Z-Wave JS UI, please follow our [migration instructions]({zwave_js_ui_migration}).",
"addon_set_config_failed": "Failed to set Z-Wave configuration.",
"addon_start_failed": "Failed to start the Z-Wave app.",
"addon_stop_failed": "Failed to stop the Z-Wave app.",
"addon_get_discovery_info_failed": "Failed to get Z-Wave JS app discovery info.",
"addon_info_failed": "Failed to get Z-Wave JS app info.",
"addon_install_failed": "Failed to install the Z-Wave JS app.",
"addon_required": "The Z-Wave migration flow requires the integration to be configured using the Z-Wave JS app. If you are using Z-Wave JS UI, please follow our [migration instructions]({zwave_js_ui_migration}).",
"addon_set_config_failed": "Failed to set Z-Wave JS app configuration.",
"addon_start_failed": "Failed to start the Z-Wave JS app.",
"addon_stop_failed": "Failed to stop the Z-Wave JS app.",
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
"already_in_progress": "[%key:common::config_flow::abort::already_in_progress%]",
"backup_failed": "Failed to back up network.",
@@ -17,15 +17,15 @@
"discovery_requires_supervisor": "Discovery requires the Home Assistant Supervisor.",
"migration_low_sdk_version": "The SDK version of the old adapter is lower than {ok_sdk_version}. This means it's not possible to migrate the non-volatile memory (NVM) of the old adapter to another adapter.\n\nCheck the documentation on the manufacturer support pages of the old adapter, if it's possible to upgrade the firmware of the old adapter to a version that is built with SDK version {ok_sdk_version} or higher.",
"migration_successful": "Migration successful.",
"not_hassio": "ESPHome discovery requires Home Assistant to configure the Z-Wave app.",
"not_hassio": "ESPHome discovery requires Home Assistant to configure the Z-Wave JS app.",
"not_zwave_device": "Discovered device is not a Z-Wave device.",
"not_zwave_js_addon": "Discovered app is not the official Z-Wave app.",
"not_zwave_js_addon": "Discovered app is not the official Z-Wave JS app.",
"reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]",
"reset_failed": "Failed to reset adapter.",
"usb_ports_failed": "Failed to get USB devices."
},
"error": {
"addon_start_failed": "Failed to start the Z-Wave app. Check the configuration.",
"addon_start_failed": "Failed to start the Z-Wave JS app. Check the configuration.",
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
"invalid_ws_url": "Invalid websocket URL",
"unknown": "[%key:common::config_flow::error::unknown%]"
@@ -65,7 +65,7 @@
"usb_path": "[%key:common::config_flow::data::usb_path%]"
},
"description": "Select your Z-Wave adapter",
"title": "Enter the Z-Wave app configuration"
"title": "Enter the Z-Wave JS app configuration"
},
"configure_security_keys": {
"data": {
@@ -84,7 +84,7 @@
"title": "Migrate to a new adapter"
},
"hassio_confirm": {
"description": "Do you want to set up the Z-Wave integration with the Z-Wave app?"
"description": "Do you want to set up the Z-Wave integration with the Z-Wave JS app?"
},
"install_addon": {
"title": "Installing app"
@@ -127,9 +127,9 @@
},
"on_supervisor": {
"data": {
"use_addon": "Use the Z-Wave Supervisor app"
"use_addon": "Use the Z-Wave JS app"
},
"description": "Do you want to use the Z-Wave Supervisor app?",
"description": "Do you want to use the Z-Wave JS app?",
"title": "Select connection method"
},
"on_supervisor_reconfigure": {

View File

@@ -17,7 +17,7 @@ if TYPE_CHECKING:
APPLICATION_NAME: Final = "HomeAssistant"
MAJOR_VERSION: Final = 2026
MINOR_VERSION: Final = 3
PATCH_VERSION: Final = "0.dev0"
PATCH_VERSION: Final = "0b2"
__short_version__: Final = f"{MAJOR_VERSION}.{MINOR_VERSION}"
__version__: Final = f"{__short_version__}.{PATCH_VERSION}"
REQUIRED_PYTHON_VER: Final[tuple[int, int, int]] = (3, 14, 2)

View File

@@ -381,7 +381,7 @@
"iot_class": "local_push"
},
"anthropic": {
"name": "Anthropic Conversation",
"name": "Anthropic",
"integration_type": "service",
"config_flow": true,
"iot_class": "cloud_polling"
@@ -5042,7 +5042,7 @@
"iot_class": "local_polling"
},
"overseerr": {
"name": "Overseerr",
"name": "Seerr",
"integration_type": "service",
"config_flow": true,
"iot_class": "local_push"

View File

@@ -3,7 +3,7 @@
aiodhcpwatcher==1.2.1
aiodiscover==2.7.1
aiodns==4.0.0
aiogithubapi==24.6.0
aiogithubapi==26.0.0
aiohasupervisor==0.3.3
aiohttp-asyncmdnsresolver==0.1.1
aiohttp-fast-zlib==0.3.0
@@ -40,7 +40,7 @@ habluetooth==5.8.0
hass-nabucasa==1.15.0
hassil==3.5.0
home-assistant-bluetooth==1.13.1
home-assistant-frontend==20260225.0
home-assistant-frontend==20260302.0
home-assistant-intents==2026.2.13
httpx==0.28.1
ifaddr==0.2.0

View File

@@ -32,8 +32,11 @@ def write_utf8_file_atomic(
Using this function frequently will significantly
negatively impact performance.
"""
encoding = "utf-8" if "b" not in mode else None
try:
with AtomicWriter(filename, mode=mode, overwrite=True).open() as fdesc:
with AtomicWriter( # type: ignore[call-arg] # atomicwrites-stubs is outdated, encoding is a valid kwarg
filename, mode=mode, overwrite=True, encoding=encoding
).open() as fdesc:
if not private:
os.fchmod(fdesc.fileno(), 0o644)
fdesc.write(utf8_data)

View File

@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
[project]
name = "homeassistant"
version = "2026.3.0.dev0"
version = "2026.3.0b2"
license = "Apache-2.0"
license-files = ["LICENSE*", "homeassistant/backports/LICENSE*"]
description = "Open-source home automation platform running on Python 3."
@@ -27,7 +27,7 @@ dependencies = [
# aiogithubapi is needed by frontend; frontend is unconditionally imported at
# module level in `bootstrap.py` and its requirements thus need to be in
# requirements.txt to ensure they are always installed
"aiogithubapi==24.6.0",
"aiogithubapi==26.0.0",
# Integrations may depend on hassio integration without listing it to
# change behavior based on presence of supervisor. Deprecated with #127228
# Lib can be removed with 2025.11

2
requirements.txt generated
View File

@@ -4,7 +4,7 @@
# Home Assistant Core
aiodns==4.0.0
aiogithubapi==24.6.0
aiogithubapi==26.0.0
aiohasupervisor==0.3.3
aiohttp-asyncmdnsresolver==0.1.1
aiohttp-fast-zlib==0.3.0

24
requirements_all.txt generated
View File

@@ -130,7 +130,7 @@ TwitterAPI==2.7.12
WSDiscovery==2.1.2
# homeassistant.components.accuweather
accuweather==5.0.0
accuweather==5.1.0
# homeassistant.components.actron_air
actron-neo-api==0.4.1
@@ -190,7 +190,7 @@ aioairzone-cloud==0.7.2
aioairzone==1.0.5
# homeassistant.components.alexa_devices
aioamazondevices==12.0.0
aioamazondevices==12.0.2
# homeassistant.components.ambient_network
# homeassistant.components.ambient_station
@@ -270,7 +270,7 @@ aioftp==0.21.3
aioghost==0.4.0
# homeassistant.components.github
aiogithubapi==24.6.0
aiogithubapi==26.0.0
# homeassistant.components.guardian
aioguardian==2026.01.1
@@ -419,7 +419,7 @@ aioswitcher==6.1.0
aiosyncthing==0.7.1
# homeassistant.components.tankerkoenig
aiotankerkoenig==0.4.2
aiotankerkoenig==0.5.1
# homeassistant.components.tedee
aiotedee==0.2.25
@@ -446,7 +446,7 @@ aiowaqi==3.1.0
aiowatttime==0.1.1
# homeassistant.components.webdav
aiowebdav2==0.5.0
aiowebdav2==0.6.1
# homeassistant.components.webostv
aiowebostv==0.7.5
@@ -831,7 +831,7 @@ dremel3dpy==2.1.1
dropmqttapi==1.0.3
# homeassistant.components.dsmr
dsmr-parser==1.4.3
dsmr-parser==1.5.0
# homeassistant.components.dwd_weather_warnings
dwdwfsapi==1.0.7
@@ -1226,7 +1226,7 @@ hole==0.9.0
holidays==0.84
# homeassistant.components.frontend
home-assistant-frontend==20260225.0
home-assistant-frontend==20260302.0
# homeassistant.components.conversation
home-assistant-intents==2026.2.13
@@ -2128,7 +2128,7 @@ pyhaversion==22.8.0
pyheos==1.0.6
# homeassistant.components.hive
pyhive-integration==1.0.7
pyhive-integration==1.0.8
# homeassistant.components.homematic
pyhomematic==0.1.77
@@ -2473,7 +2473,7 @@ pysmappee==0.2.29
pysmarlaapi==1.0.1
# homeassistant.components.smartthings
pysmartthings==3.5.3
pysmartthings==3.6.0
# homeassistant.components.smarty
pysmarty2==0.10.3
@@ -3145,7 +3145,7 @@ uasiren==0.0.1
uhooapi==1.2.6
# homeassistant.components.unifiprotect
uiprotect==10.2.1
uiprotect==10.2.2
# homeassistant.components.landisgyr_heat_meter
ultraheat-api==0.5.7
@@ -3256,7 +3256,7 @@ webio-api==0.1.12
webmin-xmlrpc==0.0.2
# homeassistant.components.weheat
weheat==2026.1.25
weheat==2026.2.28
# homeassistant.components.whirlpool
whirlpool-sixth-sense==1.0.3
@@ -3307,7 +3307,7 @@ yalesmartalarmclient==0.4.3
# homeassistant.components.august
# homeassistant.components.yale
# homeassistant.components.yalexs_ble
yalexs-ble==3.2.4
yalexs-ble==3.2.7
# homeassistant.components.august
# homeassistant.components.yale

View File

@@ -121,7 +121,7 @@ Tami4EdgeAPI==3.0
WSDiscovery==2.1.2
# homeassistant.components.accuweather
accuweather==5.0.0
accuweather==5.1.0
# homeassistant.components.actron_air
actron-neo-api==0.4.1
@@ -181,7 +181,7 @@ aioairzone-cloud==0.7.2
aioairzone==1.0.5
# homeassistant.components.alexa_devices
aioamazondevices==12.0.0
aioamazondevices==12.0.2
# homeassistant.components.ambient_network
# homeassistant.components.ambient_station
@@ -258,7 +258,7 @@ aioflo==2021.11.0
aioghost==0.4.0
# homeassistant.components.github
aiogithubapi==24.6.0
aiogithubapi==26.0.0
# homeassistant.components.guardian
aioguardian==2026.01.1
@@ -404,7 +404,7 @@ aioswitcher==6.1.0
aiosyncthing==0.7.1
# homeassistant.components.tankerkoenig
aiotankerkoenig==0.4.2
aiotankerkoenig==0.5.1
# homeassistant.components.tedee
aiotedee==0.2.25
@@ -431,7 +431,7 @@ aiowaqi==3.1.0
aiowatttime==0.1.1
# homeassistant.components.webdav
aiowebdav2==0.5.0
aiowebdav2==0.6.1
# homeassistant.components.webostv
aiowebostv==0.7.5
@@ -737,7 +737,7 @@ dremel3dpy==2.1.1
dropmqttapi==1.0.3
# homeassistant.components.dsmr
dsmr-parser==1.4.3
dsmr-parser==1.5.0
# homeassistant.components.dwd_weather_warnings
dwdwfsapi==1.0.7
@@ -1087,7 +1087,7 @@ hole==0.9.0
holidays==0.84
# homeassistant.components.frontend
home-assistant-frontend==20260225.0
home-assistant-frontend==20260302.0
# homeassistant.components.conversation
home-assistant-intents==2026.2.13
@@ -1814,7 +1814,7 @@ pyhaversion==22.8.0
pyheos==1.0.6
# homeassistant.components.hive
pyhive-integration==1.0.7
pyhive-integration==1.0.8
# homeassistant.components.homematic
pyhomematic==0.1.77
@@ -2102,7 +2102,7 @@ pysmappee==0.2.29
pysmarlaapi==1.0.1
# homeassistant.components.smartthings
pysmartthings==3.5.3
pysmartthings==3.6.0
# homeassistant.components.smarty
pysmarty2==0.10.3
@@ -2645,7 +2645,7 @@ uasiren==0.0.1
uhooapi==1.2.6
# homeassistant.components.unifiprotect
uiprotect==10.2.1
uiprotect==10.2.2
# homeassistant.components.landisgyr_heat_meter
ultraheat-api==0.5.7
@@ -2738,7 +2738,7 @@ webio-api==0.1.12
webmin-xmlrpc==0.0.2
# homeassistant.components.weheat
weheat==2026.1.25
weheat==2026.2.28
# homeassistant.components.whirlpool
whirlpool-sixth-sense==1.0.3
@@ -2783,7 +2783,7 @@ yalesmartalarmclient==0.4.3
# homeassistant.components.august
# homeassistant.components.yale
# homeassistant.components.yalexs_ble
yalexs-ble==3.2.4
yalexs-ble==3.2.7
# homeassistant.components.august
# homeassistant.components.yale

View File

@@ -149,7 +149,6 @@ FORBIDDEN_PACKAGE_EXCEPTIONS: dict[str, dict[str, set[str]]] = {
},
"flux_led": {"flux-led": {"async-timeout"}},
"foobot": {"foobot-async": {"async-timeout"}},
"github": {"aiogithubapi": {"async-timeout"}},
"harmony": {"aioharmony": {"async-timeout"}},
"here_travel_time": {
"here-routing": {"async-timeout"},

View File

@@ -136,6 +136,31 @@ async def test_forecast_service(
assert response == snapshot
async def test_forecast_daily_missing_average_humidity(
hass: HomeAssistant,
mock_accuweather_client: AsyncMock,
) -> None:
"""Test daily forecast does not crash when average humidity is missing."""
mock_accuweather_client.async_get_daily_forecast.return_value[0][
"RelativeHumidityDay"
] = {}
await init_integration(hass)
response = await hass.services.async_call(
WEATHER_DOMAIN,
SERVICE_GET_FORECASTS,
{
"entity_id": "weather.home",
"type": "daily",
},
blocking=True,
return_response=True,
)
assert response["weather.home"]["forecast"][0].get("humidity") is None
async def test_forecast_subscription(
hass: HomeAssistant,
hass_ws_client: WebSocketGenerator,

View File

@@ -213,6 +213,7 @@ async def test_reauth_flow_scenario(
ap_fixture: AirOSData,
mock_airos_client: AsyncMock,
mock_config_entry: MockConfigEntry,
mock_setup_entry: AsyncMock,
) -> None:
"""Test successful reauthentication."""
mock_config_entry.add_to_hass(hass)
@@ -220,11 +221,15 @@ async def test_reauth_flow_scenario(
mock_airos_client.login.side_effect = AirOSConnectionAuthenticationError
await hass.config_entries.async_setup(mock_config_entry.entry_id)
flow = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_REAUTH, "entry_id": mock_config_entry.entry_id},
data=mock_config_entry.data,
)
with patch(
"homeassistant.components.airos.config_flow.async_get_firmware_data",
side_effect=AirOSConnectionAuthenticationError,
):
flow = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_REAUTH, "entry_id": mock_config_entry.entry_id},
data=mock_config_entry.data,
)
assert flow["type"] == FlowResultType.FORM
assert flow["step_id"] == REAUTH_STEP
@@ -236,20 +241,22 @@ async def test_reauth_flow_scenario(
hostname=ap_fixture.host.hostname,
)
mock_firmware = AsyncMock(return_value=valid_data)
with (
patch(
"homeassistant.components.airos.config_flow.async_get_firmware_data",
new=AsyncMock(return_value=valid_data),
new=mock_firmware,
),
patch(
"homeassistant.components.airos.async_get_firmware_data",
new=AsyncMock(return_value=valid_data),
new=mock_firmware,
),
):
result = await hass.config_entries.flow.async_configure(
flow["flow_id"],
user_input={CONF_PASSWORD: NEW_PASSWORD},
)
await hass.async_block_till_done(wait_background_tasks=True)
# Always test resolution
assert result["type"] is FlowResultType.ABORT

View File

@@ -4,7 +4,7 @@ import datetime
from typing import Any
from unittest.mock import AsyncMock, Mock, patch
from anthropic import RateLimitError
from anthropic import AuthenticationError, RateLimitError
from anthropic.types import (
CitationsWebSearchResultLocation,
CitationWebSearchResultLocationParam,
@@ -36,8 +36,10 @@ from homeassistant.components.anthropic.const import (
CONF_WEB_SEARCH_REGION,
CONF_WEB_SEARCH_TIMEZONE,
CONF_WEB_SEARCH_USER_LOCATION,
DOMAIN,
)
from homeassistant.components.anthropic.entity import CitationDetails, ContentDetails
from homeassistant.config_entries import SOURCE_REAUTH
from homeassistant.const import CONF_LLM_HASS_API
from homeassistant.core import Context, HomeAssistant
from homeassistant.exceptions import HomeAssistantError
@@ -107,7 +109,7 @@ async def test_error_handling(
mock_init_component,
mock_create_stream: AsyncMock,
) -> None:
"""Test that the default prompt works."""
"""Test error handling."""
mock_create_stream.side_effect = RateLimitError(
message=None,
response=Response(status_code=429, request=Request(method="POST", url=URL())),
@@ -122,6 +124,38 @@ async def test_error_handling(
assert result.response.error_code == "unknown", result
async def test_auth_error_handling(
hass: HomeAssistant,
mock_config_entry: MockConfigEntry,
mock_init_component,
mock_create_stream: AsyncMock,
) -> None:
"""Test reauth after authentication error during conversation."""
mock_create_stream.side_effect = AuthenticationError(
message="Invalid API key",
response=Response(status_code=403, request=Request(method="POST", url=URL())),
body=None,
)
result = await conversation.async_converse(
hass, "hello", None, Context(), agent_id="conversation.claude_conversation"
)
assert result.response.response_type == intent.IntentResponseType.ERROR
assert result.response.error_code == "unknown", result
await hass.async_block_till_done()
flows = hass.config_entries.flow.async_progress()
assert len(flows) == 1
flow = flows[0]
assert flow["step_id"] == "reauth_confirm"
assert flow["handler"] == DOMAIN
assert "context" in flow
assert flow["context"]["source"] == SOURCE_REAUTH
assert flow["context"]["entry_id"] == mock_config_entry.entry_id
async def test_template_error(
hass: HomeAssistant,
mock_config_entry: MockConfigEntry,

View File

@@ -0,0 +1,73 @@
# serializer version: 1
# name: test_entry_diagnostics[large]
dict({
'backup': list([
dict({
'addons': list([
]),
'backup_id': '23e64aec',
'database_included': True,
'date': '2024-11-22T11:48:48.727189+01:00',
'extra_metadata': dict({
}),
'folders': list([
]),
'homeassistant_included': True,
'homeassistant_version': '2024.12.0.dev0',
'name': 'Core 2024.12.0.dev0',
'protected': False,
'size': 20971520,
}),
]),
'backup_agents': list([
dict({
'name': 'test',
}),
]),
'config': dict({
'access_key_id': '**REDACTED**',
'bucket': 'test',
'endpoint_url': 'https://s3.eu-south-1.amazonaws.com',
'secret_access_key': '**REDACTED**',
}),
'coordinator_data': dict({
'all_backups_size': 20971520,
}),
})
# ---
# name: test_entry_diagnostics[small]
dict({
'backup': list([
dict({
'addons': list([
]),
'backup_id': '23e64aec',
'database_included': True,
'date': '2024-11-22T11:48:48.727189+01:00',
'extra_metadata': dict({
}),
'folders': list([
]),
'homeassistant_included': True,
'homeassistant_version': '2024.12.0.dev0',
'name': 'Core 2024.12.0.dev0',
'protected': False,
'size': 1048576,
}),
]),
'backup_agents': list([
dict({
'name': 'test',
}),
]),
'config': dict({
'access_key_id': '**REDACTED**',
'bucket': 'test',
'endpoint_url': 'https://s3.eu-south-1.amazonaws.com',
'secret_access_key': '**REDACTED**',
}),
'coordinator_data': dict({
'all_backups_size': 1048576,
}),
})
# ---

View File

@@ -0,0 +1,29 @@
"""Tests for AWS S3 diagnostics."""
from syrupy.assertion import SnapshotAssertion
from homeassistant.components.backup import DOMAIN as BACKUP_DOMAIN
from homeassistant.core import HomeAssistant
from homeassistant.setup import async_setup_component
from tests.common import MockConfigEntry
from tests.components.diagnostics import get_diagnostics_for_config_entry
from tests.typing import ClientSessionGenerator
async def test_entry_diagnostics(
hass: HomeAssistant,
hass_client: ClientSessionGenerator,
mock_config_entry: MockConfigEntry,
snapshot: SnapshotAssertion,
) -> None:
"""Test config entry diagnostics."""
mock_config_entry.add_to_hass(hass)
assert await async_setup_component(hass, BACKUP_DOMAIN, {BACKUP_DOMAIN: {}})
await hass.config_entries.async_setup(mock_config_entry.entry_id)
await hass.async_block_till_done()
assert (
await get_diagnostics_for_config_entry(hass, hass_client, mock_config_entry)
== snapshot
)

View File

@@ -139,7 +139,7 @@
'object_id_base': 'Total energy',
'options': dict({
'sensor': dict({
'suggested_display_precision': 2,
'suggested_display_precision': 0,
}),
}),
'original_device_class': <SensorDeviceClass.ENERGY: 'energy'>,

View File

@@ -3,6 +3,7 @@
from unittest.mock import AsyncMock
from freezegun.api import FrozenDateTimeFactory
import pytest
from syrupy.assertion import SnapshotAssertion
from homeassistant.const import Platform
@@ -18,6 +19,7 @@ ENTITY_OUTSIDE_TEMP = "sensor.bsb_lan_outside_temperature"
ENTITY_TOTAL_ENERGY = "sensor.bsb_lan_total_energy"
@pytest.mark.usefixtures("entity_registry_enabled_by_default")
async def test_sensor_entity_properties(
hass: HomeAssistant,
mock_bsblan: AsyncMock,

View File

@@ -2385,3 +2385,42 @@ async def test_ha_cast(hass: HomeAssistant, ha_controller_mock) -> None:
chromecast.unregister_handler.reset_mock()
unregister_cb()
chromecast.unregister_handler.assert_not_called()
async def test_entity_media_states_active_app_reported_idle(
hass: HomeAssistant, entity_registry: er.EntityRegistry
) -> None:
"""Test entity state when app is active but device reports idle (fixes #160814)."""
entity_id = "media_player.speaker"
info = get_fake_chromecast_info()
chromecast, _ = await async_setup_media_player_cast(hass, info)
cast_status_cb, conn_status_cb, _ = get_status_callbacks(chromecast)
# Connect the device
connection_status = MagicMock()
connection_status.status = "CONNECTED"
conn_status_cb(connection_status)
await hass.async_block_till_done()
# Scenario: Custom App is running (e.g. DashCast), but device reports is_idle=True
chromecast.app_id = "84912283" # Example Custom App ID
chromecast.is_idle = True # Device thinks it's idle/standby
# Trigger a status update
cast_status = MagicMock()
cast_status_cb(cast_status)
await hass.async_block_till_done()
state = hass.states.get(entity_id)
assert state is not None
assert state.state == "idle"
# Scenario: Backdrop (Screensaver) is running. Should still be OFF.
chromecast.app_id = pychromecast.config.APP_BACKDROP
chromecast.is_idle = True
cast_status_cb(cast_status)
await hass.async_block_till_done()
state = hass.states.get(entity_id)
assert state.state == "off"

View File

@@ -112,6 +112,27 @@ async def test_setup_entry_skips_zone_climates_without_support(
assert _zone_entity_id(entity_registry, zone_device, 0) is None
async def test_setup_entry_handles_missing_zone_temperature_key(
hass: HomeAssistant,
entity_registry: er.EntityRegistry,
zone_device: ZoneDevice,
) -> None:
"""Missing zone temperature keys do not break climate setup."""
configure_zone_device(zone_device, zones=[["Living", "1", 22]])
zone_device.values.pop("lztemp_h")
await _async_setup_daikin(hass, zone_device)
assert _zone_entity_id(entity_registry, zone_device, 0) is None
main_entity_id = entity_registry.async_get_entity_id(
CLIMATE_DOMAIN,
DOMAIN,
zone_device.mac,
)
assert main_entity_id is not None
assert hass.states.get(main_entity_id) is not None
@pytest.mark.parametrize(
("mode", "expected_zone_key"),
[("hot", "lztemp_h"), ("cool", "lztemp_c")],

View File

@@ -92,6 +92,7 @@ FIXTURES = [
"mock_window_covering_tilt",
"onoff_light_with_levelcontrol_present",
"resideo_x2s_thermostat",
"roborock_saros_10",
"secuyou_smart_lock",
"silabs_dishwasher",
"silabs_evse_charging",

View File

@@ -0,0 +1,540 @@
{
"node_id": 202,
"date_commissioned": "2025-01-01T00:00:00",
"last_interview": "2026-01-01T00:00:00",
"interview_version": 6,
"available": true,
"is_bridge": false,
"attributes": {
"0/29/0": [
{
"0": 22,
"1": 1
}
],
"0/29/1": [29, 31, 40, 48, 49, 50, 51, 60, 62, 63],
"0/29/2": [],
"0/29/3": [1],
"0/29/65533": 2,
"0/29/65532": 0,
"0/29/65531": [0, 1, 2, 3, 65528, 65529, 65531, 65532, 65533],
"0/29/65529": [],
"0/29/65528": [],
"0/31/0": [
{
"1": 5,
"2": 2,
"3": [112233],
"4": null,
"254": 2
}
],
"0/31/2": 4,
"0/31/3": 3,
"0/31/4": 4,
"0/31/65533": 2,
"0/31/65532": 0,
"0/31/65531": [0, 2, 3, 4, 65528, 65529, 65531, 65532, 65533],
"0/31/65529": [],
"0/31/65528": [],
"0/40/0": 18,
"0/40/1": "Roborock",
"0/40/2": 5248,
"0/40/3": "Robotic Vacuum Cleaner",
"0/40/4": 5,
"0/40/5": "",
"0/40/6": "**REDACTED**",
"0/40/7": 2,
"0/40/8": "1.4",
"0/40/9": 2,
"0/40/10": "1.4",
"0/40/13": "https://www.roborock.com",
"0/40/14": "Robotic Vacuum Cleaner",
"0/40/15": "RAPEED12345678",
"0/40/18": "12AB12AB12AB12AB",
"0/40/19": {
"0": 3,
"1": 65535
},
"0/40/21": 17039360,
"0/40/22": 1,
"0/40/24": 1,
"0/40/65533": 4,
"0/40/65532": 0,
"0/40/65531": [
0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 13, 14, 15, 18, 19, 21, 22, 65528,
65529, 65531, 65532, 65533
],
"0/40/65529": [],
"0/40/65528": [],
"0/48/0": 0,
"0/48/1": {
"0": 60,
"1": 900
},
"0/48/2": 0,
"0/48/3": 2,
"0/48/4": true,
"0/48/65533": 2,
"0/48/65532": 0,
"0/48/65531": [0, 1, 2, 3, 4, 65528, 65529, 65531, 65532, 65533],
"0/48/65529": [0, 2, 4],
"0/48/65528": [1, 3, 5],
"0/49/0": 1,
"0/49/1": [],
"0/49/4": true,
"0/49/5": 0,
"0/49/6": null,
"0/49/7": null,
"0/49/2": 30,
"0/49/3": 60,
"0/49/8": [0],
"0/49/65533": 2,
"0/49/65532": 1,
"0/49/65531": [
0, 1, 2, 3, 4, 5, 6, 7, 8, 65528, 65529, 65531, 65532, 65533
],
"0/49/65529": [0, 2, 4, 6, 8],
"0/49/65528": [1, 5, 7],
"0/50/65533": 1,
"0/50/65532": 0,
"0/50/65531": [65528, 65529, 65531, 65532, 65533],
"0/50/65529": [0],
"0/50/65528": [1],
"0/51/0": [
{
"0": "ap0",
"1": false,
"2": null,
"3": null,
"4": "sko58laD",
"5": [],
"6": [],
"7": 0
},
{
"0": "wlan0",
"1": true,
"2": null,
"3": null,
"4": "sEo58laD",
"5": ["wKhQuQ=="],
"6": [
"/XqKrJXsABCySjn//vJWgw==",
"KgIBaTwJABCySjn//vJWgw==",
"/oAAAAAAAACySjn//vJWgw=="
],
"7": 0
},
{
"0": "sit0",
"1": false,
"2": null,
"3": null,
"4": "AAAAAAAA",
"5": [],
"6": [],
"7": 0
},
{
"0": "lo",
"1": true,
"2": null,
"3": null,
"4": "AAAAAAAA",
"5": ["fwAAAQ=="],
"6": ["AAAAAAAAAAAAAAAAAAAAAQ=="],
"7": 0
}
],
"0/51/1": 296,
"0/51/2": 8,
"0/51/3": 6328,
"0/51/8": false,
"0/51/65533": 2,
"0/51/65532": 0,
"0/51/65531": [0, 1, 2, 3, 8, 65528, 65529, 65531, 65532, 65533],
"0/51/65529": [0, 1],
"0/51/65528": [2],
"0/60/0": 0,
"0/60/1": null,
"0/60/2": null,
"0/60/65533": 1,
"0/60/65532": 0,
"0/60/65531": [0, 1, 2, 65528, 65529, 65531, 65532, 65533],
"0/60/65529": [0, 2],
"0/60/65528": [],
"0/62/0": [
{
"1": "FTABAQEkAgE3AyQTAhgmBIAigScmBYAlTTo3BiQVAiQRyhgkBwEkCAEwCUEEFn0vNfCOD0dTxJ+/vIAsLHsPottGgAzLEYjD0IZda+wcLI6otwL3l70MZK44UQact9g+kLna4RHtR2DtJjzi3DcKNQEoARgkAgE2AwQCBAEYMAQUfe7BMayXJA5FAhU93iHoPeGaicwwBRS9bdraaL8JLSNzrDNJcbicl5ghHRgwC0DAfR8r1sKukiqQw8dPHxQBsDVYjQ2jyerfvkYRSMQGIr9Pr594PCSUazATbDgxf9kvIT7cpAnWVjA1YaYLXSlVGA==",
"2": "FTABAQEkAgE3AyQUARgmBIAigScmBYAlTTo3BiQTAhgkBwEkCAEwCUEEYKwzNQoI9xg/J/BXjm//XmufngPSiphrXcf/ZbJxf7K3k8Xo7I77pwece9Uj8QnKrMMUdloy0sNyxbIPkTGpyjcKNQEpARgkAmAwBBS9bdraaL8JLSNzrDNJcbicl5ghHTAFFBfVqc98NGU0Xt+pmyNVXJvnhDlkGDALQKoRuyZfkC/AbH9qIIxjOhkfJB2ZS8sovhbN1fo+cvSfZXdBw255Ytf9nag0yY2maE5thqhIE4MgGV9jwQ2EPysY",
"254": 2
}
],
"0/62/1": [
{
"1": "BFhpm8fVgw4hzcuwFGwSe59XhvdUHtMntaUUbgCX0jqoaA1fjjcRYrZCA0PDImdLtZSkrUdug3S/euAVf4gvaKo=",
"2": 4939,
"3": 2,
"4": 202,
"5": "Home Assistant",
"254": 2
}
],
"0/62/2": 5,
"0/62/3": 2,
"0/62/4": [
"FTABAQEkAgE3AyQUARgmBIAigScmBYAlTTo3BiQUARgkBwEkCAEwCUEEZGswP7Cx5r/rggyFyL5F/W2s7jQv9jdnF/BtORJ5CJLHyNrJouomrpNPkewkATT25URTzakxfZ/BC2RRof3LQjcKNQEpARgkAmAwBBSwDB1/C2jgnr2LPAd9KH/07G7HSjAFFLAMHX8LaOCevYs8B30of/TsbsdKGDALQGEJod+l+O0QOa/rnbYaghE4QgquJyT9pviD3sP2+MbUXJj1br+dZLQ7CfeCKfbM8EO9iPAe1ULLveIFfHakCpAY",
"FTABAQEkAgE3AyQUARgmBIAigScmBYAlTTo3BiQUARgkBwEkCAEwCUEEDheXhz87ejqXrjJrfcRfXbv1Co84yVLcfxYr3Q4VM5Fx0JCbQDNTmqeZ/BC67MDnaqXhrPHz6tPXjC7kar6RLDcKNQEpARgkAmAwBBQX1anPfDRlNF7fqZsjVVyb54Q5ZDAFFBfVqc98NGU0Xt+pmyNVXJvnhDlkGDALQFQj3btpuzZU/TNTTTh2Q/bUE8TTOP7U4kV4J8VNyl/phUUHSfnTAnaTR/YcUehZcgPJqnW6433HWTjsa8lopVMY"
],
"0/62/5": 2,
"0/62/65533": 1,
"0/62/65532": 0,
"0/62/65531": [0, 1, 2, 3, 4, 5, 65528, 65529, 65531, 65532, 65533],
"0/62/65529": [0, 2, 4, 6, 7, 9, 10, 11],
"0/62/65528": [1, 3, 5, 8],
"0/63/0": [],
"0/63/1": [],
"0/63/2": 4,
"0/63/3": 3,
"0/63/65533": 2,
"0/63/65532": 0,
"0/63/65531": [0, 1, 2, 3, 65528, 65529, 65531, 65532, 65533],
"0/63/65529": [0, 1, 3, 4],
"0/63/65528": [2, 5],
"1/29/0": [
{
"0": 17,
"1": 1
},
{
"0": 116,
"1": 1
}
],
"1/29/1": [3, 29, 47, 84, 85, 97, 336],
"1/29/2": [],
"1/29/3": [],
"1/29/65533": 2,
"1/29/65532": 0,
"1/29/65531": [0, 1, 2, 3, 65528, 65529, 65531, 65532, 65533],
"1/29/65529": [],
"1/29/65528": [],
"1/3/0": 0,
"1/3/1": 3,
"1/3/65533": 5,
"1/3/65532": 0,
"1/3/65531": [0, 1, 65528, 65529, 65531, 65532, 65533],
"1/3/65529": [0],
"1/3/65528": [],
"1/336/0": [
{
"0": 1,
"1": 0,
"2": {
"0": {
"0": "Living room",
"1": null,
"2": 52
},
"1": null
}
},
{
"0": 2,
"1": 0,
"2": {
"0": {
"0": "Bathroom",
"1": null,
"2": 6
},
"1": null
}
},
{
"0": 3,
"1": 0,
"2": {
"0": {
"0": "Bedroom",
"1": null,
"2": 7
},
"1": null
}
},
{
"0": 4,
"1": 0,
"2": {
"0": {
"0": "Office",
"1": null,
"2": 88
},
"1": null
}
},
{
"0": 5,
"1": 0,
"2": {
"0": {
"0": "Corridor",
"1": null,
"2": 16
},
"1": null
}
},
{
"0": 6,
"1": 0,
"2": {
"0": null,
"1": {
"0": 17,
"1": 2
}
}
},
{
"0": 7,
"1": 0,
"2": {
"0": null,
"1": {
"0": 43,
"1": 2
}
}
}
],
"1/336/2": [],
"1/336/65533": 1,
"1/336/65532": 4,
"1/336/65531": [0, 1, 2, 65528, 65529, 65531, 65532, 65533],
"1/336/65529": [0],
"1/336/65528": [1],
"1/336/1": [
{
"0": 0,
"1": "Map-0"
}
],
"1/47/0": 1,
"1/47/1": 0,
"1/47/2": "Primary Battery",
"1/47/31": [],
"1/47/12": 200,
"1/47/14": 0,
"1/47/15": false,
"1/47/16": 3,
"1/47/17": true,
"1/47/26": 2,
"1/47/28": true,
"1/47/65533": 3,
"1/47/65532": 6,
"1/47/65531": [
0, 1, 2, 12, 14, 15, 16, 17, 26, 28, 31, 65528, 65529, 65531, 65532, 65533
],
"1/47/65529": [],
"1/47/65528": [],
"1/84/0": [
{
"label": "Idle",
"mode": 0,
"modeTags": [
{
"value": 16384
}
]
},
{
"label": "Cleaning",
"mode": 1,
"modeTags": [
{
"value": 16385
}
]
},
{
"label": "Mapping",
"mode": 2,
"modeTags": [
{
"value": 16386
}
]
}
],
"1/84/1": 0,
"1/84/65533": 3,
"1/84/65532": 0,
"1/84/65531": [0, 1, 65528, 65529, 65531, 65532, 65533],
"1/84/65529": [0],
"1/84/65528": [1],
"1/85/0": [
{
"label": "Quiet, Vacuum Only",
"mode": 1,
"modeTags": [
{
"value": 2
},
{
"value": 16385
}
]
},
{
"label": "Auto, Vacuum Only",
"mode": 2,
"modeTags": [
{
"value": 0
},
{
"value": 16385
}
]
},
{
"label": "Deep Clean, Vacuum Only",
"mode": 3,
"modeTags": [
{
"value": 16384
},
{
"value": 16385
}
]
},
{
"label": "Quiet, Mop Only",
"mode": 4,
"modeTags": [
{
"value": 2
},
{
"value": 16386
}
]
},
{
"label": "Auto, Mop Only",
"mode": 5,
"modeTags": [
{
"value": 0
},
{
"value": 16386
}
]
},
{
"label": "Deep Clean, Mop Only",
"mode": 6,
"modeTags": [
{
"value": 16384
},
{
"value": 16386
}
]
},
{
"label": "Quiet, Vacuum and Mop",
"mode": 7,
"modeTags": [
{
"value": 2
},
{
"value": 16385
},
{
"value": 16386
}
]
},
{
"label": "Auto, Vacuum and Mop",
"mode": 8,
"modeTags": [
{
"value": 0
},
{
"value": 16385
},
{
"value": 16386
}
]
},
{
"label": "Deep Clean, Vacuum and Mop",
"mode": 9,
"modeTags": [
{
"value": 16384
},
{
"value": 16385
},
{
"value": 16386
}
]
}
],
"1/85/1": 8,
"1/85/65533": 3,
"1/85/65532": 0,
"1/85/65531": [0, 1, 65528, 65529, 65531, 65532, 65533],
"1/85/65529": [0],
"1/85/65528": [1],
"1/97/0": null,
"1/97/1": null,
"1/97/3": [
{
"0": 0
},
{
"0": 1
},
{
"0": 2
},
{
"0": 3
},
{
"0": 64
},
{
"0": 65
},
{
"0": 66
}
],
"1/97/4": 66,
"1/97/5": {
"0": 0
},
"1/97/65533": 2,
"1/97/65532": 0,
"1/97/65531": [0, 1, 3, 4, 5, 65528, 65529, 65531, 65532, 65533],
"1/97/65529": [0, 3, 128],
"1/97/65528": [4]
},
"attribute_subscriptions": []
}

View File

@@ -3485,6 +3485,56 @@
'state': 'unknown',
})
# ---
# name: test_buttons[roborock_saros_10][button.robotic_vacuum_cleaner_identify-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
'area_id': None,
'capabilities': None,
'config_entry_id': <ANY>,
'config_subentry_id': <ANY>,
'device_class': None,
'device_id': <ANY>,
'disabled_by': None,
'domain': 'button',
'entity_category': <EntityCategory.DIAGNOSTIC: 'diagnostic'>,
'entity_id': 'button.robotic_vacuum_cleaner_identify',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
'labels': set({
}),
'name': None,
'object_id_base': 'Identify',
'options': dict({
}),
'original_device_class': <ButtonDeviceClass.IDENTIFY: 'identify'>,
'original_icon': None,
'original_name': 'Identify',
'platform': 'matter',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': None,
'unique_id': '00000000000004D2-00000000000000CA-MatterNodeDevice-1-IdentifyButton-3-1',
'unit_of_measurement': None,
})
# ---
# name: test_buttons[roborock_saros_10][button.robotic_vacuum_cleaner_identify-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'device_class': 'identify',
'friendly_name': 'Robotic Vacuum Cleaner Identify',
}),
'context': <ANY>,
'entity_id': 'button.robotic_vacuum_cleaner_identify',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': 'unknown',
})
# ---
# name: test_buttons[secuyou_smart_lock][button.secuyou_smart_lock_identify-entry]
EntityRegistryEntrySnapshot({
'aliases': set({

View File

@@ -3995,6 +3995,78 @@
'state': 'previous',
})
# ---
# name: test_selects[roborock_saros_10][select.robotic_vacuum_cleaner_clean_mode-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
'area_id': None,
'capabilities': dict({
'options': list([
'Quiet, Vacuum Only',
'Auto, Vacuum Only',
'Deep Clean, Vacuum Only',
'Quiet, Mop Only',
'Auto, Mop Only',
'Deep Clean, Mop Only',
'Quiet, Vacuum and Mop',
'Auto, Vacuum and Mop',
'Deep Clean, Vacuum and Mop',
]),
}),
'config_entry_id': <ANY>,
'config_subentry_id': <ANY>,
'device_class': None,
'device_id': <ANY>,
'disabled_by': None,
'domain': 'select',
'entity_category': None,
'entity_id': 'select.robotic_vacuum_cleaner_clean_mode',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
'labels': set({
}),
'name': None,
'object_id_base': 'Clean mode',
'options': dict({
}),
'original_device_class': None,
'original_icon': None,
'original_name': 'Clean mode',
'platform': 'matter',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': 'clean_mode',
'unique_id': '00000000000004D2-00000000000000CA-MatterNodeDevice-1-MatterRvcCleanMode-85-1',
'unit_of_measurement': None,
})
# ---
# name: test_selects[roborock_saros_10][select.robotic_vacuum_cleaner_clean_mode-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'friendly_name': 'Robotic Vacuum Cleaner Clean mode',
'options': list([
'Quiet, Vacuum Only',
'Auto, Vacuum Only',
'Deep Clean, Vacuum Only',
'Quiet, Mop Only',
'Auto, Mop Only',
'Deep Clean, Mop Only',
'Quiet, Vacuum and Mop',
'Auto, Vacuum and Mop',
'Deep Clean, Vacuum and Mop',
]),
}),
'context': <ANY>,
'entity_id': 'select.robotic_vacuum_cleaner_clean_mode',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': 'Auto, Vacuum and Mop',
})
# ---
# name: test_selects[secuyou_smart_lock][select.secuyou_smart_lock_operating_mode-entry]
EntityRegistryEntrySnapshot({
'aliases': set({

View File

@@ -11473,6 +11473,283 @@
'state': '20.55',
})
# ---
# name: test_sensors[roborock_saros_10][sensor.robotic_vacuum_cleaner_battery-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
'area_id': None,
'capabilities': dict({
'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
}),
'config_entry_id': <ANY>,
'config_subentry_id': <ANY>,
'device_class': None,
'device_id': <ANY>,
'disabled_by': None,
'domain': 'sensor',
'entity_category': <EntityCategory.DIAGNOSTIC: 'diagnostic'>,
'entity_id': 'sensor.robotic_vacuum_cleaner_battery',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
'labels': set({
}),
'name': None,
'object_id_base': 'Battery',
'options': dict({
}),
'original_device_class': <SensorDeviceClass.BATTERY: 'battery'>,
'original_icon': None,
'original_name': 'Battery',
'platform': 'matter',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': None,
'unique_id': '00000000000004D2-00000000000000CA-MatterNodeDevice-1-PowerSource-47-12',
'unit_of_measurement': '%',
})
# ---
# name: test_sensors[roborock_saros_10][sensor.robotic_vacuum_cleaner_battery-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'device_class': 'battery',
'friendly_name': 'Robotic Vacuum Cleaner Battery',
'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
'unit_of_measurement': '%',
}),
'context': <ANY>,
'entity_id': 'sensor.robotic_vacuum_cleaner_battery',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': '100',
})
# ---
# name: test_sensors[roborock_saros_10][sensor.robotic_vacuum_cleaner_battery_charge_state-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
'area_id': None,
'capabilities': dict({
'options': list([
'not_charging',
'charging',
'full_charge',
]),
}),
'config_entry_id': <ANY>,
'config_subentry_id': <ANY>,
'device_class': None,
'device_id': <ANY>,
'disabled_by': None,
'domain': 'sensor',
'entity_category': <EntityCategory.DIAGNOSTIC: 'diagnostic'>,
'entity_id': 'sensor.robotic_vacuum_cleaner_battery_charge_state',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
'labels': set({
}),
'name': None,
'object_id_base': 'Battery charge state',
'options': dict({
}),
'original_device_class': <SensorDeviceClass.ENUM: 'enum'>,
'original_icon': None,
'original_name': 'Battery charge state',
'platform': 'matter',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': 'battery_charge_state',
'unique_id': '00000000000004D2-00000000000000CA-MatterNodeDevice-1-PowerSourceBatChargeState-47-26',
'unit_of_measurement': None,
})
# ---
# name: test_sensors[roborock_saros_10][sensor.robotic_vacuum_cleaner_battery_charge_state-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'device_class': 'enum',
'friendly_name': 'Robotic Vacuum Cleaner Battery charge state',
'options': list([
'not_charging',
'charging',
'full_charge',
]),
}),
'context': <ANY>,
'entity_id': 'sensor.robotic_vacuum_cleaner_battery_charge_state',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': 'full_charge',
})
# ---
# name: test_sensors[roborock_saros_10][sensor.robotic_vacuum_cleaner_operational_error-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
'area_id': None,
'capabilities': dict({
'options': list([
'no_error',
'unable_to_start_or_resume',
'unable_to_complete_operation',
'command_invalid_in_state',
'failed_to_find_charging_dock',
'stuck',
'dust_bin_missing',
'dust_bin_full',
'water_tank_empty',
'water_tank_missing',
'water_tank_lid_open',
'mop_cleaning_pad_missing',
'low_battery',
'cannot_reach_target_area',
'dirty_water_tank_full',
'dirty_water_tank_missing',
'wheels_jammed',
'brush_jammed',
'navigation_sensor_obscured',
]),
}),
'config_entry_id': <ANY>,
'config_subentry_id': <ANY>,
'device_class': None,
'device_id': <ANY>,
'disabled_by': None,
'domain': 'sensor',
'entity_category': <EntityCategory.DIAGNOSTIC: 'diagnostic'>,
'entity_id': 'sensor.robotic_vacuum_cleaner_operational_error',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
'labels': set({
}),
'name': None,
'object_id_base': 'Operational error',
'options': dict({
}),
'original_device_class': <SensorDeviceClass.ENUM: 'enum'>,
'original_icon': None,
'original_name': 'Operational error',
'platform': 'matter',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': 'operational_error',
'unique_id': '00000000000004D2-00000000000000CA-MatterNodeDevice-1-RvcOperationalStateOperationalError-97-5',
'unit_of_measurement': None,
})
# ---
# name: test_sensors[roborock_saros_10][sensor.robotic_vacuum_cleaner_operational_error-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'device_class': 'enum',
'friendly_name': 'Robotic Vacuum Cleaner Operational error',
'options': list([
'no_error',
'unable_to_start_or_resume',
'unable_to_complete_operation',
'command_invalid_in_state',
'failed_to_find_charging_dock',
'stuck',
'dust_bin_missing',
'dust_bin_full',
'water_tank_empty',
'water_tank_missing',
'water_tank_lid_open',
'mop_cleaning_pad_missing',
'low_battery',
'cannot_reach_target_area',
'dirty_water_tank_full',
'dirty_water_tank_missing',
'wheels_jammed',
'brush_jammed',
'navigation_sensor_obscured',
]),
}),
'context': <ANY>,
'entity_id': 'sensor.robotic_vacuum_cleaner_operational_error',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': 'no_error',
})
# ---
# name: test_sensors[roborock_saros_10][sensor.robotic_vacuum_cleaner_operational_state-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
'area_id': None,
'capabilities': dict({
'options': list([
'stopped',
'running',
'paused',
'error',
'seeking_charger',
'charging',
'docked',
]),
}),
'config_entry_id': <ANY>,
'config_subentry_id': <ANY>,
'device_class': None,
'device_id': <ANY>,
'disabled_by': None,
'domain': 'sensor',
'entity_category': None,
'entity_id': 'sensor.robotic_vacuum_cleaner_operational_state',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
'labels': set({
}),
'name': None,
'object_id_base': 'Operational state',
'options': dict({
}),
'original_device_class': <SensorDeviceClass.ENUM: 'enum'>,
'original_icon': None,
'original_name': 'Operational state',
'platform': 'matter',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': 'operational_state',
'unique_id': '00000000000004D2-00000000000000CA-MatterNodeDevice-1-RvcOperationalState-97-4',
'unit_of_measurement': None,
})
# ---
# name: test_sensors[roborock_saros_10][sensor.robotic_vacuum_cleaner_operational_state-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'device_class': 'enum',
'friendly_name': 'Robotic Vacuum Cleaner Operational state',
'options': list([
'stopped',
'running',
'paused',
'error',
'seeking_charger',
'charging',
'docked',
]),
}),
'context': <ANY>,
'entity_id': 'sensor.robotic_vacuum_cleaner_operational_state',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': 'docked',
})
# ---
# name: test_sensors[silabs_dishwasher][sensor.dishwasher_effective_current-entry]
EntityRegistryEntrySnapshot({
'aliases': set({

View File

@@ -149,6 +149,56 @@
'state': 'idle',
})
# ---
# name: test_vacuum[roborock_saros_10][vacuum.robotic_vacuum_cleaner-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
'area_id': None,
'capabilities': None,
'config_entry_id': <ANY>,
'config_subentry_id': <ANY>,
'device_class': None,
'device_id': <ANY>,
'disabled_by': None,
'domain': 'vacuum',
'entity_category': None,
'entity_id': 'vacuum.robotic_vacuum_cleaner',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
'labels': set({
}),
'name': None,
'object_id_base': None,
'options': dict({
}),
'original_device_class': None,
'original_icon': None,
'original_name': None,
'platform': 'matter',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': <VacuumEntityFeature: 29212>,
'translation_key': 'vacuum',
'unique_id': '00000000000004D2-00000000000000CA-MatterNodeDevice-1-MatterVacuumCleaner-84-1',
'unit_of_measurement': None,
})
# ---
# name: test_vacuum[roborock_saros_10][vacuum.robotic_vacuum_cleaner-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'friendly_name': 'Robotic Vacuum Cleaner',
'supported_features': <VacuumEntityFeature: 29212>,
}),
'context': <ANY>,
'entity_id': 'vacuum.robotic_vacuum_cleaner',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': 'docked',
})
# ---
# name: test_vacuum[switchbot_k11_plus][vacuum.k11-entry]
EntityRegistryEntrySnapshot({
'aliases': set({

View File

@@ -338,6 +338,38 @@ async def test_vacuum_get_segments(
assert segments[2] == {"id": "2290649224", "name": "My Location C", "group": None}
@pytest.mark.parametrize("node_fixture", ["roborock_saros_10"])
async def test_vacuum_get_segments_nullable_location_info(
hass: HomeAssistant,
matter_node: MatterNode,
hass_ws_client: WebSocketGenerator,
) -> None:
"""Test vacuum get_segments handles nullable ServiceArea location info."""
await async_setup_component(hass, "homeassistant", {})
assert matter_node
entity_ids = [state.entity_id for state in hass.states.async_all("vacuum")]
assert len(entity_ids) == 1
entity_id = entity_ids[0]
state = hass.states.get(entity_id)
assert state
client = await hass_ws_client(hass)
await client.send_json_auto_id(
{"type": "vacuum/get_segments", "entity_id": entity_id}
)
msg = await client.receive_json()
assert msg["success"]
assert msg["result"]["segments"] == [
{"id": "1", "name": "Living room", "group": None},
{"id": "2", "name": "Bathroom", "group": None},
{"id": "3", "name": "Bedroom", "group": None},
{"id": "4", "name": "Office", "group": None},
{"id": "5", "name": "Corridor", "group": None},
]
@pytest.mark.parametrize("node_fixture", ["mock_vacuum_cleaner"])
async def test_vacuum_clean_area(
hass: HomeAssistant,
@@ -365,12 +397,11 @@ async def test_vacuum_clean_area(
},
)
# Mock a successful SelectAreasResponse
matter_client.send_device_command.return_value = (
clusters.ServiceArea.Commands.SelectAreasResponse(
status=clusters.ServiceArea.Enums.SelectAreasStatus.kSuccess,
)
)
# Mock a successful SelectAreasResponse (returns as dict over websocket)
matter_client.send_device_command.return_value = {
"status": clusters.ServiceArea.Enums.SelectAreasStatus.kSuccess,
"statusText": "",
}
await hass.services.async_call(
VACUUM_DOMAIN,
@@ -420,13 +451,11 @@ async def test_vacuum_clean_area_select_areas_failure(
},
)
# Mock a failed SelectAreasResponse
matter_client.send_device_command.return_value = (
clusters.ServiceArea.Commands.SelectAreasResponse(
status=clusters.ServiceArea.Enums.SelectAreasStatus.kUnsupportedArea,
statusText="Area 7 not supported",
)
)
# Mock a failed SelectAreasResponse (returns as dict over websocket)
matter_client.send_device_command.return_value = {
"status": clusters.ServiceArea.Enums.SelectAreasStatus.kUnsupportedArea,
"statusText": "Area 7 not supported",
}
with pytest.raises(HomeAssistantError, match="Failed to select areas"):
await hass.services.async_call(

View File

@@ -267,6 +267,7 @@ async def test_subentry_unsupported_model(
("gpt-5.1", ["none", "low", "medium", "high"]),
("gpt-5.2", ["none", "low", "medium", "high", "xhigh"]),
("gpt-5.2-pro", ["medium", "high", "xhigh"]),
("gpt-5.3-codex", ["none", "low", "medium", "high", "xhigh"]),
],
)
async def test_subentry_reasoning_effort_list(
@@ -311,8 +312,15 @@ async def test_subentry_reasoning_effort_list(
)
async def test_subentry_websearch_unsupported_reasoning_effort(
hass: HomeAssistant, mock_config_entry, mock_init_component
@pytest.mark.parametrize(
("parameter", "error"),
[
(CONF_WEB_SEARCH, "web_search_minimal_reasoning"),
(CONF_CODE_INTERPRETER, "code_interpreter_minimal_reasoning"),
],
)
async def test_subentry_unsupported_reasoning_effort(
hass: HomeAssistant, mock_config_entry, mock_init_component, parameter, error
) -> None:
"""Test the subentry form giving error about unsupported minimal reasoning effort."""
subentry = next(iter(mock_config_entry.subentries.values()))
@@ -349,18 +357,18 @@ async def test_subentry_websearch_unsupported_reasoning_effort(
subentry_flow["flow_id"],
{
CONF_REASONING_EFFORT: "minimal",
CONF_WEB_SEARCH: True,
parameter: True,
},
)
assert subentry_flow["type"] is FlowResultType.FORM
assert subentry_flow["errors"] == {"web_search": "web_search_minimal_reasoning"}
assert subentry_flow["errors"] == {parameter: error}
# Reconfigure model step
subentry_flow = await hass.config_entries.subentries.async_configure(
subentry_flow["flow_id"],
{
CONF_REASONING_EFFORT: "low",
CONF_WEB_SEARCH: True,
parameter: True,
},
)
assert subentry_flow["type"] is FlowResultType.ABORT

View File

@@ -54,7 +54,7 @@ async def test_full_flow(
{CONF_URL: "http://overseerr.test", CONF_API_KEY: "test-key"},
)
assert result["type"] is FlowResultType.CREATE_ENTRY
assert result["title"] == "Overseerr"
assert result["title"] == "Seerr"
assert result["data"] == {
CONF_HOST: "overseerr.test",
CONF_PORT: 80,

View File

@@ -84,7 +84,7 @@ async def test_service_get_requests_no_meta(
"get_requests",
OverseerrConnectionError("Timeout"),
HomeAssistantError,
"Error connecting to the Overseerr instance: Timeout",
"Error connecting to the Seerr instance: Timeout",
)
],
)

View File

@@ -0,0 +1,208 @@
# serializer version: 1
# name: test_device_registry
list([
DeviceRegistryEntrySnapshot({
'area_id': None,
'config_entries': <ANY>,
'config_entries_subentries': <ANY>,
'configuration_url': 'https://127.0.0.1:9000/#!/1/docker/dashboard',
'connections': set({
}),
'disabled_by': None,
'entry_type': <DeviceEntryType.SERVICE: 'service'>,
'hw_version': None,
'id': <ANY>,
'identifiers': set({
tuple(
'portainer',
'portainer_test_entry_123_1',
),
}),
'labels': set({
}),
'manufacturer': 'Portainer',
'model': 'Endpoint',
'model_id': None,
'name': 'my-environment',
'name_by_user': None,
'primary_config_entry': <ANY>,
'serial_number': None,
'sw_version': None,
'via_device_id': None,
}),
DeviceRegistryEntrySnapshot({
'area_id': None,
'config_entries': <ANY>,
'config_entries_subentries': <ANY>,
'configuration_url': 'https://127.0.0.1:9000/#!/1/docker/containers/aa86eacfb3b3ed4cd362c1e88fc89a53908ad05fb3a4103bca3f9b28292d14bf',
'connections': set({
}),
'disabled_by': None,
'entry_type': <DeviceEntryType.SERVICE: 'service'>,
'hw_version': None,
'id': <ANY>,
'identifiers': set({
tuple(
'portainer',
'portainer_test_entry_123_1_funny_chatelet',
),
}),
'labels': set({
}),
'manufacturer': 'Portainer',
'model': 'Container',
'model_id': None,
'name': 'funny_chatelet',
'name_by_user': None,
'primary_config_entry': <ANY>,
'serial_number': None,
'sw_version': None,
'via_device_id': <ANY>,
}),
DeviceRegistryEntrySnapshot({
'area_id': None,
'config_entries': <ANY>,
'config_entries_subentries': <ANY>,
'configuration_url': 'https://127.0.0.1:9000/#!/1/docker/containers/dd19facfb3b3ed4cd362c1e88fc89a53908ad05fb3a4103bca3f9b28292d14bf',
'connections': set({
}),
'disabled_by': None,
'entry_type': <DeviceEntryType.SERVICE: 'service'>,
'hw_version': None,
'id': <ANY>,
'identifiers': set({
tuple(
'portainer',
'portainer_test_entry_123_1_focused_einstein',
),
}),
'labels': set({
}),
'manufacturer': 'Portainer',
'model': 'Container',
'model_id': None,
'name': 'focused_einstein',
'name_by_user': None,
'primary_config_entry': <ANY>,
'serial_number': None,
'sw_version': None,
'via_device_id': <ANY>,
}),
DeviceRegistryEntrySnapshot({
'area_id': None,
'config_entries': <ANY>,
'config_entries_subentries': <ANY>,
'configuration_url': 'https://127.0.0.1:9000/#!/1/docker/containers/ee20facfb3b3ed4cd362c1e88fc89a53908ad05fb3a4103bca3f9b28292d14bf',
'connections': set({
}),
'disabled_by': None,
'entry_type': <DeviceEntryType.SERVICE: 'service'>,
'hw_version': None,
'id': <ANY>,
'identifiers': set({
tuple(
'portainer',
'portainer_test_entry_123_1_practical_morse',
),
}),
'labels': set({
}),
'manufacturer': 'Portainer',
'model': 'Container',
'model_id': None,
'name': 'practical_morse',
'name_by_user': None,
'primary_config_entry': <ANY>,
'serial_number': None,
'sw_version': None,
'via_device_id': <ANY>,
}),
DeviceRegistryEntrySnapshot({
'area_id': None,
'config_entries': <ANY>,
'config_entries_subentries': <ANY>,
'configuration_url': 'https://127.0.0.1:9000/#!/1/docker/stacks/webstack',
'connections': set({
}),
'disabled_by': None,
'entry_type': None,
'hw_version': None,
'id': <ANY>,
'identifiers': set({
tuple(
'portainer',
'portainer_test_entry_123_1_stack_1',
),
}),
'labels': set({
}),
'manufacturer': 'Portainer',
'model': 'Stack',
'model_id': None,
'name': 'webstack',
'name_by_user': None,
'primary_config_entry': <ANY>,
'serial_number': None,
'sw_version': None,
'via_device_id': <ANY>,
}),
DeviceRegistryEntrySnapshot({
'area_id': None,
'config_entries': <ANY>,
'config_entries_subentries': <ANY>,
'configuration_url': 'https://127.0.0.1:9000/#!/1/docker/containers/bb97facfb3b3ed4cd362c1e88fc89a53908ad05fb3a4103bca3f9b28292d14bf',
'connections': set({
}),
'disabled_by': None,
'entry_type': <DeviceEntryType.SERVICE: 'service'>,
'hw_version': None,
'id': <ANY>,
'identifiers': set({
tuple(
'portainer',
'portainer_test_entry_123_1_serene_banach',
),
}),
'labels': set({
}),
'manufacturer': 'Portainer',
'model': 'Container',
'model_id': None,
'name': 'serene_banach',
'name_by_user': None,
'primary_config_entry': <ANY>,
'serial_number': None,
'sw_version': None,
'via_device_id': <ANY>,
}),
DeviceRegistryEntrySnapshot({
'area_id': None,
'config_entries': <ANY>,
'config_entries_subentries': <ANY>,
'configuration_url': 'https://127.0.0.1:9000/#!/1/docker/containers/cc08facfb3b3ed4cd362c1e88fc89a53908ad05fb3a4103bca3f9b28292d14bf',
'connections': set({
}),
'disabled_by': None,
'entry_type': <DeviceEntryType.SERVICE: 'service'>,
'hw_version': None,
'id': <ANY>,
'identifiers': set({
tuple(
'portainer',
'portainer_test_entry_123_1_stoic_turing',
),
}),
'labels': set({
}),
'manufacturer': 'Portainer',
'model': 'Container',
'model_id': None,
'name': 'stoic_turing',
'name_by_user': None,
'primary_config_entry': <ANY>,
'serial_number': None,
'sw_version': None,
'via_device_id': <ANY>,
}),
])
# ---

View File

@@ -8,6 +8,7 @@ from pyportainer.exceptions import (
PortainerTimeoutError,
)
import pytest
from syrupy.assertion import SnapshotAssertion
from homeassistant.components.portainer.const import DOMAIN
from homeassistant.config_entries import ConfigEntryState
@@ -166,3 +167,19 @@ async def test_migration_v3_to_v4(
(DOMAIN, f"{entry.entry_id}_1_adguard"),
}
assert entity_after.unique_id == f"{entry.entry_id}_1_adguard_container"
async def test_device_registry(
hass: HomeAssistant,
mock_portainer_client: AsyncMock,
mock_config_entry: MockConfigEntry,
device_registry: dr.DeviceRegistry,
snapshot: SnapshotAssertion,
) -> None:
"""Test devices are correctly registered."""
await setup_integration(hass, mock_config_entry)
device_entries = dr.async_entries_for_config_entry(
device_registry, mock_config_entry.entry_id
)
assert device_entries == snapshot

View File

@@ -15,12 +15,14 @@ from homeassistant.components.switch import (
)
from homeassistant.const import (
ATTR_ENTITY_ID,
CONF_CODE,
STATE_OFF,
STATE_ON,
STATE_UNKNOWN,
Platform,
)
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ServiceValidationError
from homeassistant.helpers.device_registry import DeviceRegistry
from homeassistant.helpers.entity_registry import EntityRegistry
@@ -176,3 +178,35 @@ async def test_switch_last_reported(
assert first_reported != hass.states.get("switch.switchable_output").last_reported
assert len(events) == 1 # last_reported shall not fire state_changed
async def test_switch_actions_require_code(
hass: HomeAssistant,
mock_satel: AsyncMock,
mock_config_entry_with_subentries: MockConfigEntry,
) -> None:
"""Test switch actions fail when access code is missing."""
await setup_integration(hass, mock_config_entry_with_subentries)
hass.config_entries.async_update_entry(
mock_config_entry_with_subentries, options={CONF_CODE: None}
)
await hass.async_block_till_done()
# Turning the device on or off should raise ServiceValidationError.
with pytest.raises(ServiceValidationError):
await hass.services.async_call(
SWITCH_DOMAIN,
SERVICE_TURN_ON,
{ATTR_ENTITY_ID: "switch.switchable_output"},
blocking=True,
)
with pytest.raises(ServiceValidationError):
await hass.services.async_call(
SWITCH_DOMAIN,
SERVICE_TURN_OFF,
{ATTR_ENTITY_ID: "switch.switchable_output"},
blocking=True,
)

View File

@@ -31,7 +31,7 @@ from tests.common import MockConfigEntry
type ComponentSetup = Callable[[], Awaitable[None]]
BACKUP_METADATA = {
"file_path": "backup_location/backup.tar",
"file_path": "/backup_location/backup.tar",
"metadata": {
"addons": [{"name": "Test", "slug": "test", "version": "1.0.0"}],
"backup_id": "test-backup",
@@ -60,7 +60,7 @@ USER_INPUT = {
CONF_USERNAME: "username",
CONF_PASSWORD: "password",
CONF_PRIVATE_KEY_FILE: PRIVATE_KEY_FILE_UUID,
CONF_BACKUP_LOCATION: "backup_location",
CONF_BACKUP_LOCATION: "/backup_location",
}
TEST_AGENT_ID = ulid()
@@ -118,7 +118,7 @@ def mock_config_entry(hass: HomeAssistant) -> MockConfigEntry:
CONF_USERNAME: "username",
CONF_PASSWORD: "password",
CONF_PRIVATE_KEY_FILE: str(private_key),
CONF_BACKUP_LOCATION: "backup_location",
CONF_BACKUP_LOCATION: "/backup_location",
},
)

View File

@@ -151,7 +151,7 @@ async def test_agents_list_backups_include_bad_metadata(
# Called two times, one for bad backup metadata and once for good
assert mock_ssh_connection._sftp._mock_open._mock_read.call_count == 2
assert (
"Failed to load backup metadata from file: backup_location/invalid.metadata.json. Expecting value: line 1 column 1 (char 0)"
"Failed to load backup metadata from file: /backup_location/invalid.metadata.json. Expecting value: line 1 column 1 (char 0)"
in caplog.messages
)

Some files were not shown because too many files have changed in this diff Show More