forked from home-assistant/core

Compare commits: 2025.3.0b1 ... 2025.3.0b2

22 Commits

| SHA1 |
|---|
| 178d509d56 |
| 09c129de40 |
| 07128ba063 |
| a786ff53ff |
| d2e19c829d |
| 94b342f26a |
| 9e3e6b3f43 |
| 4300900322 |
| 342e04974d |
| fdb4c0a81f |
| 6de878ffe4 |
| c63aaec09e |
| d8bf47c101 |
| 736ff8828d |
| b501999a4c |
| 3985f1c6c8 |
| 46ec3987a8 |
| df4e5a54e3 |
| d8a259044f |
| 0891669aee |
| 83c0351338 |
| c5e5fe555d |

40 .github/workflows/wheels.yml (vendored)
@@ -218,15 +218,7 @@ jobs:
           sed -i "/uv/d" requirements.txt
           sed -i "/uv/d" requirements_diff.txt
 
-      - name: Split requirements all
-        run: |
-          # We split requirements all into multiple files.
-          # This is to prevent the build from running out of memory when
-          # resolving packages on 32-bits systems (like armhf, armv7).
-
-          split -l $(expr $(expr $(cat requirements_all.txt | wc -l) + 1) / 3) requirements_all_wheels_${{ matrix.arch }}.txt requirements_all.txt
-
-      - name: Build wheels (part 1)
+      - name: Build wheels
         uses: home-assistant/wheels@2024.11.0
         with:
           abi: ${{ matrix.abi }}
@@ -238,32 +230,4 @@ jobs:
           skip-binary: aiohttp;charset-normalizer;grpcio;multidict;SQLAlchemy;propcache;protobuf;pymicro-vad;yarl
           constraints: "homeassistant/package_constraints.txt"
           requirements-diff: "requirements_diff.txt"
-          requirements: "requirements_all.txtaa"
-
-      - name: Build wheels (part 2)
-        uses: home-assistant/wheels@2024.11.0
-        with:
-          abi: ${{ matrix.abi }}
-          tag: musllinux_1_2
-          arch: ${{ matrix.arch }}
-          wheels-key: ${{ secrets.WHEELS_KEY }}
-          env-file: true
-          apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm;zlib-ng-dev"
-          skip-binary: aiohttp;charset-normalizer;grpcio;multidict;SQLAlchemy;propcache;protobuf;pymicro-vad;yarl
-          constraints: "homeassistant/package_constraints.txt"
-          requirements-diff: "requirements_diff.txt"
-          requirements: "requirements_all.txtab"
-
-      - name: Build wheels (part 3)
-        uses: home-assistant/wheels@2024.11.0
-        with:
-          abi: ${{ matrix.abi }}
-          tag: musllinux_1_2
-          arch: ${{ matrix.arch }}
-          wheels-key: ${{ secrets.WHEELS_KEY }}
-          env-file: true
-          apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm;zlib-ng-dev"
-          skip-binary: aiohttp;charset-normalizer;grpcio;multidict;SQLAlchemy;propcache;protobuf;pymicro-vad;yarl
-          constraints: "homeassistant/package_constraints.txt"
-          requirements-diff: "requirements_diff.txt"
-          requirements: "requirements_all.txtac"
+          requirements: "requirements_all.txt"
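For reference, the removed "Split requirements all" step chunked requirements_all.txt into three roughly equal files (requirements_all.txtaa, .txtab, .txtac) that the three removed build steps then consumed. A minimal Python sketch of the equivalent chunking, illustrative only and not part of the workflow:

```python
from pathlib import Path


def split_requirements(path: str = "requirements_all.txt", parts: int = 3) -> list[Path]:
    """Split a requirements file into chunks, mirroring the removed `split -l` step."""
    lines = Path(path).read_text().splitlines(keepends=True)
    # Same arithmetic as the removed shell step: (line count + 1) / 3.
    chunk = max((len(lines) + 1) // parts, 1)
    suffixes = ["aa", "ab", "ac", "ad"]  # split(1)-style suffixes; "ad" catches a remainder
    out: list[Path] = []
    for i in range(0, len(lines), chunk):
        target = Path(f"{path}{suffixes[len(out)]}")
        target.write_text("".join(lines[i : i + chunk]))
        out.append(target)
    return out
```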
@@ -14,6 +14,7 @@ from itertools import chain
 import json
 from pathlib import Path, PurePath
 import shutil
+import sys
 import tarfile
 import time
 from typing import IO, TYPE_CHECKING, Any, Protocol, TypedDict, cast
@@ -308,6 +309,12 @@ class DecryptOnDowloadNotSupported(BackupManagerError):
     _message = "On-the-fly decryption is not supported for this backup."
 
 
+class BackupManagerExceptionGroup(BackupManagerError, ExceptionGroup):
+    """Raised when multiple exceptions occur."""
+
+    error_code = "multiple_errors"
+
+
 class BackupManager:
     """Define the format that backup managers can have."""
 
@@ -1605,10 +1612,24 @@ class CoreBackupReaderWriter(BackupReaderWriter):
             )
         finally:
             # Inform integrations the backup is done
-            try:
-                await manager.async_post_backup_actions()
-            except BackupManagerError as err:
-                raise BackupReaderWriterError(str(err)) from err
+            # If there's an unhandled exception, we keep it so we can rethrow it in case
+            # the post backup actions also fail.
+            unhandled_exc = sys.exception()
+            try:
+                await manager.async_post_backup_actions()
+            except BackupManagerError as err:
+                raise BackupReaderWriterError(str(err)) from err
+            except Exception as err:
+                if not unhandled_exc:
+                    raise
+                # If there's an unhandled exception, we wrap both that and the exception
+                # from the post backup actions in an ExceptionGroup so the caller is
+                # aware of both exceptions.
+                raise BackupManagerExceptionGroup(
+                    f"Multiple errors when creating backup: {unhandled_exc}, {err}",
+                    [unhandled_exc, err],
+                ) from None
 
     def _mkdir_and_generate_backup_contents(
         self,
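The new BackupManagerExceptionGroup uses Python 3.11's ExceptionGroup so that a failure in the backup itself and a failure in the post-backup actions both reach the caller instead of the second error masking the first. A minimal standalone sketch of the same pattern (the names other than ExceptionGroup and sys.exception are illustrative, not Home Assistant's API):

```python
import sys
from collections.abc import Awaitable, Callable


class MultiStepError(ExceptionGroup):
    """Raised when both the main step and the cleanup step fail."""


async def run_with_cleanup(
    main: Callable[[], Awaitable[None]],
    cleanup: Callable[[], Awaitable[None]],
) -> None:
    try:
        await main()
    finally:
        # sys.exception() (Python 3.11+) returns the exception currently being
        # handled, or None if the finally block was reached without an error.
        unhandled = sys.exception()
        try:
            await cleanup()
        except Exception as err:
            if not unhandled:
                raise
            # Surface both failures instead of letting the cleanup error
            # mask the original one.
            raise MultiStepError(
                f"Multiple errors: {unhandled}, {err}", [unhandled, err]
            ) from None
```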
@@ -68,7 +68,6 @@ from .const import ( # noqa: F401
     FAN_ON,
     FAN_TOP,
     HVAC_MODES,
-    INTENT_GET_TEMPERATURE,
     INTENT_SET_TEMPERATURE,
     PRESET_ACTIVITY,
     PRESET_AWAY,
@@ -126,7 +126,6 @@ DEFAULT_MAX_HUMIDITY = 99
 
 DOMAIN = "climate"
 
-INTENT_GET_TEMPERATURE = "HassClimateGetTemperature"
 INTENT_SET_TEMPERATURE = "HassClimateSetTemperature"
 
 SERVICE_SET_AUX_HEAT = "set_aux_heat"
@@ -1,4 +1,4 @@
-"""Intents for the client integration."""
+"""Intents for the climate integration."""
 
 from __future__ import annotations
 
@@ -11,7 +11,6 @@ from homeassistant.helpers import config_validation as cv, intent
 from . import (
     ATTR_TEMPERATURE,
     DOMAIN,
-    INTENT_GET_TEMPERATURE,
     INTENT_SET_TEMPERATURE,
     SERVICE_SET_TEMPERATURE,
     ClimateEntityFeature,
@@ -20,49 +19,9 @@ from . import (
|
||||
|
||||
async def async_setup_intents(hass: HomeAssistant) -> None:
|
||||
"""Set up the climate intents."""
|
||||
intent.async_register(hass, GetTemperatureIntent())
|
||||
intent.async_register(hass, SetTemperatureIntent())
|
||||
|
||||
|
||||
class GetTemperatureIntent(intent.IntentHandler):
|
||||
"""Handle GetTemperature intents."""
|
||||
|
||||
intent_type = INTENT_GET_TEMPERATURE
|
||||
description = "Gets the current temperature of a climate device or entity"
|
||||
slot_schema = {
|
||||
vol.Optional("area"): intent.non_empty_string,
|
||||
vol.Optional("name"): intent.non_empty_string,
|
||||
}
|
||||
platforms = {DOMAIN}
|
||||
|
||||
async def async_handle(self, intent_obj: intent.Intent) -> intent.IntentResponse:
|
||||
"""Handle the intent."""
|
||||
hass = intent_obj.hass
|
||||
slots = self.async_validate_slots(intent_obj.slots)
|
||||
|
||||
name: str | None = None
|
||||
if "name" in slots:
|
||||
name = slots["name"]["value"]
|
||||
|
||||
area: str | None = None
|
||||
if "area" in slots:
|
||||
area = slots["area"]["value"]
|
||||
|
||||
match_constraints = intent.MatchTargetsConstraints(
|
||||
name=name, area_name=area, domains=[DOMAIN], assistant=intent_obj.assistant
|
||||
)
|
||||
match_result = intent.async_match_targets(hass, match_constraints)
|
||||
if not match_result.is_match:
|
||||
raise intent.MatchFailedError(
|
||||
result=match_result, constraints=match_constraints
|
||||
)
|
||||
|
||||
response = intent_obj.create_response()
|
||||
response.response_type = intent.IntentResponseType.QUERY_ANSWER
|
||||
response.async_set_states(matched_states=match_result.states)
|
||||
return response
|
||||
|
||||
|
||||
class SetTemperatureIntent(intent.IntentHandler):
|
||||
"""Handle SetTemperature intents."""
|
||||
|
||||
|
||||
@@ -20,5 +20,5 @@
   "documentation": "https://www.home-assistant.io/integrations/frontend",
   "integration_type": "system",
   "quality_scale": "internal",
-  "requirements": ["home-assistant-frontend==20250227.0"]
+  "requirements": ["home-assistant-frontend==20250228.0"]
 }
@@ -111,9 +111,20 @@ def _format_schema(schema: dict[str, Any]) -> Schema:
             continue
         if key == "any_of":
             val = [_format_schema(subschema) for subschema in val]
-        if key == "type":
+        elif key == "type":
             val = val.upper()
-        if key == "items":
+        elif key == "format":
+            # Gemini API does not support all formats, see: https://ai.google.dev/api/caching#Schema
+            # formats that are not supported are ignored
+            if schema.get("type") == "string" and val not in ("enum", "date-time"):
+                continue
+            if schema.get("type") == "number" and val not in ("float", "double"):
+                continue
+            if schema.get("type") == "integer" and val not in ("int32", "int64"):
+                continue
+            if schema.get("type") not in ("string", "number", "integer"):
+                continue
+        elif key == "items":
             val = _format_schema(val)
         elif key == "properties":
             val = {k: _format_schema(v) for k, v in val.items()}
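The new `elif key == "format"` branch drops JSON-schema `format` values the Gemini API does not accept: only `enum`/`date-time` survive for strings, `float`/`double` for numbers, and `int32`/`int64` for integers. A standalone sketch of that filtering rule (the helper name and schema shape are illustrative, not the integration's actual code):

```python
from typing import Any

_ALLOWED_FORMATS = {
    "string": {"enum", "date-time"},
    "number": {"float", "double"},
    "integer": {"int32", "int64"},
}


def strip_unsupported_formats(schema: dict[str, Any]) -> dict[str, Any]:
    """Return a copy of the schema without `format` values Gemini rejects."""
    cleaned = dict(schema)
    fmt = cleaned.get("format")
    allowed = _ALLOWED_FORMATS.get(cleaned.get("type", ""), set())
    if fmt is not None and fmt not in allowed:
        del cleaned["format"]
    return cleaned


# Example: a "lower" format on a string field is not supported and gets dropped.
assert strip_unsupported_formats({"type": "string", "format": "lower"}) == {"type": "string"}
```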
@@ -7,6 +7,6 @@
   "documentation": "https://www.home-assistant.io/integrations/home_connect",
   "iot_class": "cloud_push",
   "loggers": ["aiohomeconnect"],
-  "requirements": ["aiohomeconnect==0.15.0"],
+  "requirements": ["aiohomeconnect==0.15.1"],
   "single_config_entry": true
 }
@@ -9,6 +9,7 @@ from aiohttp import web
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components import http
|
||||
from homeassistant.components.climate import DOMAIN as CLIMATE_DOMAIN
|
||||
from homeassistant.components.cover import (
|
||||
ATTR_POSITION,
|
||||
DOMAIN as COVER_DOMAIN,
|
||||
@@ -140,6 +141,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
intent.async_register(hass, GetCurrentDateIntentHandler())
|
||||
intent.async_register(hass, GetCurrentTimeIntentHandler())
|
||||
intent.async_register(hass, RespondIntentHandler())
|
||||
intent.async_register(hass, GetTemperatureIntent())
|
||||
|
||||
return True
|
||||
|
||||
@@ -444,6 +446,48 @@ class RespondIntentHandler(intent.IntentHandler):
|
||||
return response
|
||||
|
||||
|
||||
class GetTemperatureIntent(intent.IntentHandler):
|
||||
"""Handle GetTemperature intents."""
|
||||
|
||||
intent_type = intent.INTENT_GET_TEMPERATURE
|
||||
description = "Gets the current temperature of a climate device or entity"
|
||||
slot_schema = {
|
||||
vol.Optional("area"): intent.non_empty_string,
|
||||
vol.Optional("name"): intent.non_empty_string,
|
||||
}
|
||||
platforms = {CLIMATE_DOMAIN}
|
||||
|
||||
async def async_handle(self, intent_obj: intent.Intent) -> intent.IntentResponse:
|
||||
"""Handle the intent."""
|
||||
hass = intent_obj.hass
|
||||
slots = self.async_validate_slots(intent_obj.slots)
|
||||
|
||||
name: str | None = None
|
||||
if "name" in slots:
|
||||
name = slots["name"]["value"]
|
||||
|
||||
area: str | None = None
|
||||
if "area" in slots:
|
||||
area = slots["area"]["value"]
|
||||
|
||||
match_constraints = intent.MatchTargetsConstraints(
|
||||
name=name,
|
||||
area_name=area,
|
||||
domains=[CLIMATE_DOMAIN],
|
||||
assistant=intent_obj.assistant,
|
||||
)
|
||||
match_result = intent.async_match_targets(hass, match_constraints)
|
||||
if not match_result.is_match:
|
||||
raise intent.MatchFailedError(
|
||||
result=match_result, constraints=match_constraints
|
||||
)
|
||||
|
||||
response = intent_obj.create_response()
|
||||
response.response_type = intent.IntentResponseType.QUERY_ANSWER
|
||||
response.async_set_states(matched_states=match_result.states)
|
||||
return response
|
||||
|
||||
|
||||
async def _async_process_intent(
|
||||
hass: HomeAssistant, domain: str, platform: IntentPlatformProtocol
|
||||
) -> None:
|
||||
|
||||
@@ -8,6 +8,6 @@
   "iot_class": "calculated",
   "loggers": ["yt_dlp"],
   "quality_scale": "internal",
-  "requirements": ["yt-dlp[default]==2025.01.26"],
+  "requirements": ["yt-dlp[default]==2025.02.19"],
   "single_config_entry": true
 }
@@ -23,6 +23,7 @@ from .const import (
|
||||
ATTR_ALBUM_TYPE,
|
||||
ATTR_ALBUMS,
|
||||
ATTR_ARTISTS,
|
||||
ATTR_AUDIOBOOKS,
|
||||
ATTR_CONFIG_ENTRY_ID,
|
||||
ATTR_FAVORITE,
|
||||
ATTR_ITEMS,
|
||||
@@ -32,6 +33,7 @@ from .const import (
|
||||
ATTR_OFFSET,
|
||||
ATTR_ORDER_BY,
|
||||
ATTR_PLAYLISTS,
|
||||
ATTR_PODCASTS,
|
||||
ATTR_RADIO,
|
||||
ATTR_SEARCH,
|
||||
ATTR_SEARCH_ALBUM,
|
||||
@@ -48,7 +50,15 @@ from .schemas import (
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from music_assistant_client import MusicAssistantClient
|
||||
from music_assistant_models.media_items import Album, Artist, Playlist, Radio, Track
|
||||
from music_assistant_models.media_items import (
|
||||
Album,
|
||||
Artist,
|
||||
Audiobook,
|
||||
Playlist,
|
||||
Podcast,
|
||||
Radio,
|
||||
Track,
|
||||
)
|
||||
|
||||
from . import MusicAssistantConfigEntry
|
||||
|
||||
@@ -155,6 +165,14 @@ async def handle_search(call: ServiceCall) -> ServiceResponse:
|
||||
media_item_dict_from_mass_item(mass, item)
|
||||
for item in search_results.radio
|
||||
],
|
||||
ATTR_AUDIOBOOKS: [
|
||||
media_item_dict_from_mass_item(mass, item)
|
||||
for item in search_results.audiobooks
|
||||
],
|
||||
ATTR_PODCASTS: [
|
||||
media_item_dict_from_mass_item(mass, item)
|
||||
for item in search_results.podcasts
|
||||
],
|
||||
}
|
||||
)
|
||||
return response
|
||||
@@ -175,7 +193,13 @@ async def handle_get_library(call: ServiceCall) -> ServiceResponse:
|
||||
"order_by": order_by,
|
||||
}
|
||||
library_result: (
|
||||
list[Album] | list[Artist] | list[Track] | list[Radio] | list[Playlist]
|
||||
list[Album]
|
||||
| list[Artist]
|
||||
| list[Track]
|
||||
| list[Radio]
|
||||
| list[Playlist]
|
||||
| list[Audiobook]
|
||||
| list[Podcast]
|
||||
)
|
||||
if media_type == MediaType.ALBUM:
|
||||
library_result = await mass.music.get_library_albums(
|
||||
@@ -199,6 +223,14 @@ async def handle_get_library(call: ServiceCall) -> ServiceResponse:
|
||||
library_result = await mass.music.get_library_playlists(
|
||||
**base_params,
|
||||
)
|
||||
elif media_type == MediaType.AUDIOBOOK:
|
||||
library_result = await mass.music.get_library_audiobooks(
|
||||
**base_params,
|
||||
)
|
||||
elif media_type == MediaType.PODCAST:
|
||||
library_result = await mass.music.get_library_podcasts(
|
||||
**base_params,
|
||||
)
|
||||
else:
|
||||
raise ServiceValidationError(f"Unsupported media type {media_type}")
|
||||
|
||||
|
||||
@@ -34,6 +34,8 @@ ATTR_ARTISTS = "artists"
 ATTR_ALBUMS = "albums"
 ATTR_TRACKS = "tracks"
 ATTR_PLAYLISTS = "playlists"
+ATTR_AUDIOBOOKS = "audiobooks"
+ATTR_PODCASTS = "podcasts"
 ATTR_RADIO = "radio"
 ATTR_ITEMS = "items"
 ATTR_RADIO_MODE = "radio_mode"
@@ -15,6 +15,7 @@ from .const import (
|
||||
ATTR_ALBUM,
|
||||
ATTR_ALBUMS,
|
||||
ATTR_ARTISTS,
|
||||
ATTR_AUDIOBOOKS,
|
||||
ATTR_BIT_DEPTH,
|
||||
ATTR_CONTENT_TYPE,
|
||||
ATTR_CURRENT_INDEX,
|
||||
@@ -31,6 +32,7 @@ from .const import (
|
||||
ATTR_OFFSET,
|
||||
ATTR_ORDER_BY,
|
||||
ATTR_PLAYLISTS,
|
||||
ATTR_PODCASTS,
|
||||
ATTR_PROVIDER,
|
||||
ATTR_QUEUE_ID,
|
||||
ATTR_QUEUE_ITEM_ID,
|
||||
@@ -101,6 +103,12 @@ SEARCH_RESULT_SCHEMA = vol.Schema(
|
||||
vol.Required(ATTR_RADIO): vol.All(
|
||||
cv.ensure_list, [vol.Schema(MEDIA_ITEM_SCHEMA)]
|
||||
),
|
||||
vol.Required(ATTR_AUDIOBOOKS): vol.All(
|
||||
cv.ensure_list, [vol.Schema(MEDIA_ITEM_SCHEMA)]
|
||||
),
|
||||
vol.Required(ATTR_PODCASTS): vol.All(
|
||||
cv.ensure_list, [vol.Schema(MEDIA_ITEM_SCHEMA)]
|
||||
),
|
||||
},
|
||||
)
|
||||
|
||||
|
||||
@@ -21,7 +21,10 @@ play_media:
|
||||
options:
|
||||
- artist
|
||||
- album
|
||||
- audiobook
|
||||
- folder
|
||||
- playlist
|
||||
- podcast
|
||||
- track
|
||||
- radio
|
||||
artist:
|
||||
@@ -118,7 +121,9 @@ search:
|
||||
options:
|
||||
- artist
|
||||
- album
|
||||
- audiobook
|
||||
- playlist
|
||||
- podcast
|
||||
- track
|
||||
- radio
|
||||
artist:
|
||||
@@ -160,7 +165,9 @@ get_library:
|
||||
options:
|
||||
- artist
|
||||
- album
|
||||
- audiobook
|
||||
- playlist
|
||||
- podcast
|
||||
- track
|
||||
- radio
|
||||
favorite:
|
||||
|
||||
@@ -195,8 +195,11 @@
         "options": {
           "artist": "Artist",
           "album": "Album",
+          "audiobook": "Audiobook",
           "folder": "Folder",
           "track": "Track",
           "playlist": "Playlist",
+          "podcast": "Podcast",
           "radio": "Radio"
         }
       },
@@ -41,14 +41,7 @@ _LOGGER = logging.getLogger(__name__)
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: OneDriveConfigEntry) -> bool:
|
||||
"""Set up OneDrive from a config entry."""
|
||||
implementation = await async_get_config_entry_implementation(hass, entry)
|
||||
session = OAuth2Session(hass, entry, implementation)
|
||||
|
||||
async def get_access_token() -> str:
|
||||
await session.async_ensure_token_valid()
|
||||
return cast(str, session.token[CONF_ACCESS_TOKEN])
|
||||
|
||||
client = OneDriveClient(get_access_token, async_get_clientsession(hass))
|
||||
client, get_access_token = await _get_onedrive_client(hass, entry)
|
||||
|
||||
# get approot, will be created automatically if it does not exist
|
||||
approot = await _handle_item_operation(client.get_approot, "approot")
|
||||
@@ -164,20 +157,47 @@ async def async_migrate_entry(hass: HomeAssistant, entry: OneDriveConfigEntry) -
|
||||
_LOGGER.debug(
|
||||
"Migrating OneDrive config entry from version %s.%s", version, minor_version
|
||||
)
|
||||
|
||||
client, _ = await _get_onedrive_client(hass, entry)
|
||||
instance_id = await async_get_instance_id(hass)
|
||||
try:
|
||||
approot = await client.get_approot()
|
||||
folder = await client.get_drive_item(
|
||||
f"{approot.id}:/backups_{instance_id[:8]}:"
|
||||
)
|
||||
except OneDriveException:
|
||||
_LOGGER.exception("Migration to version 1.2 failed")
|
||||
return False
|
||||
|
||||
hass.config_entries.async_update_entry(
|
||||
entry,
|
||||
data={
|
||||
**entry.data,
|
||||
CONF_FOLDER_ID: "id", # will be updated during setup_entry
|
||||
CONF_FOLDER_ID: folder.id,
|
||||
CONF_FOLDER_NAME: f"backups_{instance_id[:8]}",
|
||||
},
|
||||
minor_version=2,
|
||||
)
|
||||
_LOGGER.debug("Migration to version 1.2 successful")
|
||||
return True
|
||||
|
||||
|
||||
async def _get_onedrive_client(
|
||||
hass: HomeAssistant, entry: OneDriveConfigEntry
|
||||
) -> tuple[OneDriveClient, Callable[[], Awaitable[str]]]:
|
||||
"""Get OneDrive client."""
|
||||
implementation = await async_get_config_entry_implementation(hass, entry)
|
||||
session = OAuth2Session(hass, entry, implementation)
|
||||
|
||||
async def get_access_token() -> str:
|
||||
await session.async_ensure_token_valid()
|
||||
return cast(str, session.token[CONF_ACCESS_TOKEN])
|
||||
|
||||
return (
|
||||
OneDriveClient(get_access_token, async_get_clientsession(hass)),
|
||||
get_access_token,
|
||||
)
|
||||
|
||||
|
||||
async def _handle_item_operation(
|
||||
func: Callable[[], Awaitable[Item]], folder: str
|
||||
) -> Item:
|
||||
|
||||
33 homeassistant/components/onedrive/diagnostics.py (Normal file)
@@ -0,0 +1,33 @@
+"""Diagnostics support for OneDrive."""
+
+from __future__ import annotations
+
+from dataclasses import asdict
+from typing import Any
+
+from homeassistant.components.diagnostics import async_redact_data
+from homeassistant.const import CONF_ACCESS_TOKEN, CONF_TOKEN
+from homeassistant.core import HomeAssistant
+
+from .coordinator import OneDriveConfigEntry
+
+TO_REDACT = {"display_name", "email", CONF_ACCESS_TOKEN, CONF_TOKEN}
+
+
+async def async_get_config_entry_diagnostics(
+    hass: HomeAssistant,
+    entry: OneDriveConfigEntry,
+) -> dict[str, Any]:
+    """Return diagnostics for a config entry."""
+
+    coordinator = entry.runtime_data.coordinator
+
+    data = {
+        "drive": asdict(coordinator.data),
+        "config": {
+            **entry.data,
+            **entry.options,
+        },
+    }
+
+    return async_redact_data(data, TO_REDACT)
@@ -41,10 +41,7 @@ rules:
 
   # Gold
   devices: done
-  diagnostics:
-    status: exempt
-    comment: |
-      There is no data to diagnose.
+  diagnostics: done
   discovery-update-info:
     status: exempt
     comment: |
@@ -2,7 +2,7 @@
 
 from logging import getLogger
 
-from homeassistant.core import HomeAssistant
+from homeassistant.core import CoreState, HomeAssistant
 from homeassistant.exceptions import HomeAssistantError
 
 from .util import async_migration_in_progress, get_instance
@@ -14,6 +14,8 @@ async def async_pre_backup(hass: HomeAssistant) -> None:
     """Perform operations before a backup starts."""
     _LOGGER.info("Backup start notification, locking database for writes")
     instance = get_instance(hass)
+    if hass.state is not CoreState.running:
+        raise HomeAssistantError("Home Assistant is not running")
     if async_migration_in_progress(hass):
         raise HomeAssistantError("Database migration in progress")
     await instance.lock_database()
@@ -21,25 +21,24 @@ async def async_get_device_diagnostics(
     hass: HomeAssistant, entry: SmartThingsConfigEntry, device: DeviceEntry
 ) -> dict[str, Any]:
     """Return diagnostics for a device entry."""
+    client = entry.runtime_data.client
     device_id = next(
         identifier for identifier in device.identifiers if identifier[0] == DOMAIN
-    )[0]
-
-    device_status = await client.get_device_status(device_id)
+    )[1]
 
     events: list[DeviceEvent] = []
 
     def register_event(event: DeviceEvent) -> None:
         events.append(event)
 
-    client = entry.runtime_data.client
-
     listener = client.add_device_event_listener(device_id, register_event)
 
     await asyncio.sleep(EVENT_WAIT_TIME)
 
     listener()
 
+    device_status = await client.get_device_status(device_id)
+
     status: dict[str, Any] = {}
     for component, capabilities in device_status.items():
         status[component] = {}
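The `)[0]` → `)[1]` fix matters because Home Assistant device identifiers are `(domain, unique_id)` tuples: index 0 is the integration domain, index 1 is the device id the API needs. A small illustrative sketch (the sample data is made up):

```python
# A device's identifiers are a set of (domain, unique_id) tuples.
identifiers = {("smartthings", "abc-123-device-id"), ("other_domain", "xyz")}

DOMAIN = "smartthings"

# Pick the tuple registered by this integration, then take the id (index 1).
device_id = next(
    identifier for identifier in identifiers if identifier[0] == DOMAIN
)[1]

assert device_id == "abc-123-device-id"  # index 0 would have returned "smartthings"
```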
@@ -29,5 +29,5 @@
   "documentation": "https://www.home-assistant.io/integrations/smartthings",
   "iot_class": "cloud_push",
   "loggers": ["pysmartthings"],
-  "requirements": ["pysmartthings==2.0.0"]
+  "requirements": ["pysmartthings==2.1.0"]
 }
@@ -461,7 +461,7 @@ CAPABILITY_TO_SENSORS: dict[
|
||||
translation_key="media_input_source",
|
||||
device_class=SensorDeviceClass.ENUM,
|
||||
options_attribute=Attribute.SUPPORTED_INPUT_SOURCES,
|
||||
value_fn=lambda value: value.lower(),
|
||||
value_fn=lambda value: value.lower() if value else None,
|
||||
)
|
||||
]
|
||||
},
|
||||
@@ -580,6 +580,7 @@ CAPABILITY_TO_SENSORS: dict[
|
||||
device_class=SensorDeviceClass.ENERGY,
|
||||
native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
|
||||
value_fn=lambda value: value["energy"] / 1000,
|
||||
suggested_display_precision=2,
|
||||
except_if_state_none=True,
|
||||
),
|
||||
SmartThingsSensorEntityDescription(
|
||||
@@ -589,6 +590,7 @@ CAPABILITY_TO_SENSORS: dict[
|
||||
native_unit_of_measurement=UnitOfPower.WATT,
|
||||
value_fn=lambda value: value["power"],
|
||||
extra_state_attributes_fn=power_attributes,
|
||||
suggested_display_precision=2,
|
||||
except_if_state_none=True,
|
||||
),
|
||||
SmartThingsSensorEntityDescription(
|
||||
@@ -598,6 +600,7 @@ CAPABILITY_TO_SENSORS: dict[
|
||||
device_class=SensorDeviceClass.ENERGY,
|
||||
native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
|
||||
value_fn=lambda value: value["deltaEnergy"] / 1000,
|
||||
suggested_display_precision=2,
|
||||
except_if_state_none=True,
|
||||
),
|
||||
SmartThingsSensorEntityDescription(
|
||||
@@ -607,6 +610,7 @@ CAPABILITY_TO_SENSORS: dict[
|
||||
device_class=SensorDeviceClass.ENERGY,
|
||||
native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
|
||||
value_fn=lambda value: value["powerEnergy"] / 1000,
|
||||
suggested_display_precision=2,
|
||||
except_if_state_none=True,
|
||||
),
|
||||
SmartThingsSensorEntityDescription(
|
||||
@@ -616,6 +620,7 @@ CAPABILITY_TO_SENSORS: dict[
|
||||
device_class=SensorDeviceClass.ENERGY,
|
||||
native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
|
||||
value_fn=lambda value: value["energySaved"] / 1000,
|
||||
suggested_display_precision=2,
|
||||
except_if_state_none=True,
|
||||
),
|
||||
]
|
||||
|
||||
@@ -8,6 +8,7 @@ from datetime import datetime, timedelta
 
 from propcache.api import cached_property
 from teslemetry_stream import Signal
+from teslemetry_stream.const import ShiftState
 
 from homeassistant.components.sensor import (
     RestoreSensor,
@@ -69,7 +70,7 @@ class TeslemetryVehicleSensorEntityDescription(SensorEntityDescription):
     polling_value_fn: Callable[[StateType], StateType] = lambda x: x
     polling_available_fn: Callable[[StateType], bool] = lambda x: x is not None
     streaming_key: Signal | None = None
-    streaming_value_fn: Callable[[StateType], StateType] = lambda x: x
+    streaming_value_fn: Callable[[str | int | float], StateType] = lambda x: x
     streaming_firmware: str = "2024.26"
 
 
@@ -212,7 +213,7 @@ VEHICLE_DESCRIPTIONS: tuple[TeslemetryVehicleSensorEntityDescription, ...] = (
         polling_available_fn=lambda x: True,
         polling_value_fn=lambda x: SHIFT_STATES.get(str(x), "p"),
         streaming_key=Signal.GEAR,
-        streaming_value_fn=lambda x: SHIFT_STATES.get(str(x)),
+        streaming_value_fn=lambda x: str(ShiftState.get(x, "P")).lower(),
         options=list(SHIFT_STATES.values()),
         device_class=SensorDeviceClass.ENUM,
         entity_registry_enabled_default=False,
@@ -3,7 +3,8 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
from typing import Any, NamedTuple
|
||||
from typing import TYPE_CHECKING, Any, NamedTuple
|
||||
from urllib.parse import urlsplit
|
||||
|
||||
from tuya_sharing import (
|
||||
CustomerDevice,
|
||||
@@ -11,6 +12,7 @@ from tuya_sharing import (
|
||||
SharingDeviceListener,
|
||||
SharingTokenListener,
|
||||
)
|
||||
from tuya_sharing.mq import SharingMQ, SharingMQConfig
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
@@ -45,13 +47,81 @@ class HomeAssistantTuyaData(NamedTuple):
|
||||
listener: SharingDeviceListener
|
||||
|
||||
|
||||
if TYPE_CHECKING:
|
||||
import paho.mqtt.client as mqtt
|
||||
|
||||
|
||||
class ManagerCompat(Manager):
|
||||
"""Extended Manager class from the Tuya device sharing SDK.
|
||||
|
||||
The extension ensures compatibility a paho-mqtt client version >= 2.1.0.
|
||||
It overrides extend refresh_mq method to ensure correct paho.mqtt client calls.
|
||||
|
||||
This code can be removed when a version of tuya-device-sharing with
|
||||
https://github.com/tuya/tuya-device-sharing-sdk/pull/25 is available.
|
||||
"""
|
||||
|
||||
def refresh_mq(self):
|
||||
"""Refresh the MQTT connection."""
|
||||
if self.mq is not None:
|
||||
self.mq.stop()
|
||||
self.mq = None
|
||||
|
||||
home_ids = [home.id for home in self.user_homes]
|
||||
device = [
|
||||
device
|
||||
for device in self.device_map.values()
|
||||
if hasattr(device, "id") and getattr(device, "set_up", False)
|
||||
]
|
||||
|
||||
sharing_mq = SharingMQCompat(self.customer_api, home_ids, device)
|
||||
sharing_mq.start()
|
||||
sharing_mq.add_message_listener(self.on_message)
|
||||
self.mq = sharing_mq
|
||||
|
||||
|
||||
class SharingMQCompat(SharingMQ):
|
||||
"""Extended SharingMQ class from the Tuya device sharing SDK.
|
||||
|
||||
The extension ensures compatibility a paho-mqtt client version >= 2.1.0.
|
||||
It overrides _start method to ensure correct paho.mqtt client calls.
|
||||
|
||||
This code can be removed when a version of tuya-device-sharing with
|
||||
https://github.com/tuya/tuya-device-sharing-sdk/pull/25 is available.
|
||||
"""
|
||||
|
||||
def _start(self, mq_config: SharingMQConfig) -> mqtt.Client:
|
||||
"""Start the MQTT client."""
|
||||
# We don't import on the top because some integrations
|
||||
# should be able to optionally rely on MQTT.
|
||||
import paho.mqtt.client as mqtt # pylint: disable=import-outside-toplevel
|
||||
|
||||
mqttc = mqtt.Client(client_id=mq_config.client_id)
|
||||
mqttc.username_pw_set(mq_config.username, mq_config.password)
|
||||
mqttc.user_data_set({"mqConfig": mq_config})
|
||||
mqttc.on_connect = self._on_connect
|
||||
mqttc.on_message = self._on_message
|
||||
mqttc.on_subscribe = self._on_subscribe
|
||||
mqttc.on_log = self._on_log
|
||||
mqttc.on_disconnect = self._on_disconnect
|
||||
|
||||
url = urlsplit(mq_config.url)
|
||||
if url.scheme == "ssl":
|
||||
mqttc.tls_set()
|
||||
|
||||
mqttc.connect(url.hostname, url.port)
|
||||
|
||||
mqttc.loop_start()
|
||||
return mqttc
|
||||
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: TuyaConfigEntry) -> bool:
|
||||
"""Async setup hass config entry."""
|
||||
if CONF_APP_TYPE in entry.data:
|
||||
raise ConfigEntryAuthFailed("Authentication failed. Please re-authenticate.")
|
||||
|
||||
token_listener = TokenListener(hass, entry)
|
||||
manager = Manager(
|
||||
manager = ManagerCompat(
|
||||
TUYA_CLIENT_ID,
|
||||
entry.data[CONF_USER_CODE],
|
||||
entry.data[CONF_TERMINAL_ID],
|
||||
|
||||
@@ -7,7 +7,7 @@
   "integration_type": "hub",
   "iot_class": "local_push",
   "loggers": ["aiounifi"],
-  "requirements": ["aiounifi==82"],
+  "requirements": ["aiounifi==83"],
   "ssdp": [
     {
       "manufacturer": "Ubiquiti Networks",
@@ -6,5 +6,5 @@
   "documentation": "https://www.home-assistant.io/integrations/weatherflow_cloud",
   "iot_class": "cloud_polling",
   "loggers": ["weatherflow4py"],
-  "requirements": ["weatherflow4py==1.0.6"]
+  "requirements": ["weatherflow4py==1.3.1"]
 }
@@ -30,6 +30,7 @@ _LOGGER = logging.getLogger(__name__)
|
||||
|
||||
METADATA_VERSION = "1"
|
||||
BACKUP_TIMEOUT = ClientTimeout(connect=10, total=43200)
|
||||
NAMESPACE = "https://home-assistant.io"
|
||||
|
||||
|
||||
async def async_get_backup_agents(
|
||||
@@ -100,14 +101,14 @@ def _is_current_metadata_version(properties: list[Property]) -> bool:
|
||||
return any(
|
||||
prop.value == METADATA_VERSION
|
||||
for prop in properties
|
||||
if prop.namespace == "homeassistant" and prop.name == "metadata_version"
|
||||
if prop.namespace == NAMESPACE and prop.name == "metadata_version"
|
||||
)
|
||||
|
||||
|
||||
def _backup_id_from_properties(properties: list[Property]) -> str | None:
|
||||
"""Return the backup ID from properties."""
|
||||
for prop in properties:
|
||||
if prop.namespace == "homeassistant" and prop.name == "backup_id":
|
||||
if prop.namespace == NAMESPACE and prop.name == "backup_id":
|
||||
return prop.value
|
||||
return None
|
||||
|
||||
@@ -186,12 +187,12 @@ class WebDavBackupAgent(BackupAgent):
|
||||
f"{self._backup_path}/{filename_meta}",
|
||||
[
|
||||
Property(
|
||||
namespace="homeassistant",
|
||||
namespace=NAMESPACE,
|
||||
name="backup_id",
|
||||
value=backup.backup_id,
|
||||
),
|
||||
Property(
|
||||
namespace="homeassistant",
|
||||
namespace=NAMESPACE,
|
||||
name="metadata_version",
|
||||
value=METADATA_VERSION,
|
||||
),
|
||||
@@ -252,11 +253,11 @@ class WebDavBackupAgent(BackupAgent):
|
||||
self._backup_path,
|
||||
[
|
||||
PropertyRequest(
|
||||
namespace="homeassistant",
|
||||
namespace=NAMESPACE,
|
||||
name="metadata_version",
|
||||
),
|
||||
PropertyRequest(
|
||||
namespace="homeassistant",
|
||||
namespace=NAMESPACE,
|
||||
name="backup_id",
|
||||
),
|
||||
],
|
||||
|
||||
@@ -25,7 +25,7 @@ if TYPE_CHECKING:
 APPLICATION_NAME: Final = "HomeAssistant"
 MAJOR_VERSION: Final = 2025
 MINOR_VERSION: Final = 3
-PATCH_VERSION: Final = "0b1"
+PATCH_VERSION: Final = "0b2"
 __short_version__: Final = f"{MAJOR_VERSION}.{MINOR_VERSION}"
 __version__: Final = f"{__short_version__}.{PATCH_VERSION}"
 REQUIRED_PYTHON_VER: Final[tuple[int, int, int]] = (3, 13, 0)
@@ -59,6 +59,7 @@ INTENT_GET_CURRENT_DATE = "HassGetCurrentDate"
 INTENT_GET_CURRENT_TIME = "HassGetCurrentTime"
 INTENT_RESPOND = "HassRespond"
 INTENT_BROADCAST = "HassBroadcast"
+INTENT_GET_TEMPERATURE = "HassClimateGetTemperature"
 
 SLOT_SCHEMA = vol.Schema({}, extra=vol.ALLOW_EXTRA)
 
@@ -19,7 +19,6 @@ from homeassistant.components.calendar import (
     DOMAIN as CALENDAR_DOMAIN,
     SERVICE_GET_EVENTS,
 )
-from homeassistant.components.climate import INTENT_GET_TEMPERATURE
 from homeassistant.components.cover import INTENT_CLOSE_COVER, INTENT_OPEN_COVER
 from homeassistant.components.homeassistant import async_should_expose
 from homeassistant.components.intent import async_device_supports_timers
@@ -285,7 +284,7 @@ class AssistAPI(API):
     """API exposing Assist API to LLMs."""
 
     IGNORE_INTENTS = {
-        INTENT_GET_TEMPERATURE,
+        intent.INTENT_GET_TEMPERATURE,
         INTENT_GET_WEATHER,
         INTENT_OPEN_COVER,  # deprecated
         INTENT_CLOSE_COVER,  # deprecated
@@ -530,9 +529,11 @@ def _get_exposed_entities(
             info["areas"] = ", ".join(area_names)
 
         if attributes := {
-            attr_name: str(attr_value)
-            if isinstance(attr_value, (Enum, Decimal, int))
-            else attr_value
+            attr_name: (
+                str(attr_value)
+                if isinstance(attr_value, (Enum, Decimal, int))
+                else attr_value
+            )
             for attr_name, attr_value in state.attributes.items()
             if attr_name in interesting_attributes
         }:
@@ -37,7 +37,7 @@ habluetooth==3.24.1
 hass-nabucasa==0.92.0
 hassil==2.2.3
 home-assistant-bluetooth==1.13.1
-home-assistant-frontend==20250227.0
+home-assistant-frontend==20250228.0
 home-assistant-intents==2025.2.26
 httpx==0.28.1
 ifaddr==0.2.0
@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
 
 [project]
 name = "homeassistant"
-version = "2025.3.0b1"
+version = "2025.3.0b2"
 license = {text = "Apache-2.0"}
 description = "Open-source home automation platform running on Python 3."
 readme = "README.rst"
12 requirements_all.txt (generated)
@@ -264,7 +264,7 @@ aioharmony==0.4.1
 aiohasupervisor==0.3.0
 
 # homeassistant.components.home_connect
-aiohomeconnect==0.15.0
+aiohomeconnect==0.15.1
 
 # homeassistant.components.homekit_controller
 aiohomekit==3.2.7
@@ -404,7 +404,7 @@ aiotedee==0.2.20
 aiotractive==0.6.0
 
 # homeassistant.components.unifi
-aiounifi==82
+aiounifi==83
 
 # homeassistant.components.usb
 aiousbwatcher==1.1.1
@@ -1152,7 +1152,7 @@ hole==0.8.0
 holidays==0.67
 
 # homeassistant.components.frontend
-home-assistant-frontend==20250227.0
+home-assistant-frontend==20250228.0
 
 # homeassistant.components.conversation
 home-assistant-intents==2025.2.26
@@ -2310,7 +2310,7 @@ pysma==0.7.5
 pysmappee==0.2.29
 
 # homeassistant.components.smartthings
-pysmartthings==2.0.0
+pysmartthings==2.1.0
 
 # homeassistant.components.smarty
 pysmarty2==0.10.2
@@ -3046,7 +3046,7 @@ waterfurnace==1.1.0
 watergate-local-api==2024.4.1
 
 # homeassistant.components.weatherflow_cloud
-weatherflow4py==1.0.6
+weatherflow4py==1.3.1
 
 # homeassistant.components.cisco_webex_teams
 webexpythonsdk==2.0.1
@@ -3131,7 +3131,7 @@ youless-api==2.2.0
 youtubeaio==1.1.5
 
 # homeassistant.components.media_extractor
-yt-dlp[default]==2025.01.26
+yt-dlp[default]==2025.02.19
 
 # homeassistant.components.zabbix
 zabbix-utils==2.0.2
12 requirements_test_all.txt (generated)
@@ -249,7 +249,7 @@ aioharmony==0.4.1
 aiohasupervisor==0.3.0
 
 # homeassistant.components.home_connect
-aiohomeconnect==0.15.0
+aiohomeconnect==0.15.1
 
 # homeassistant.components.homekit_controller
 aiohomekit==3.2.7
@@ -386,7 +386,7 @@ aiotedee==0.2.20
 aiotractive==0.6.0
 
 # homeassistant.components.unifi
-aiounifi==82
+aiounifi==83
 
 # homeassistant.components.usb
 aiousbwatcher==1.1.1
@@ -981,7 +981,7 @@ hole==0.8.0
 holidays==0.67
 
 # homeassistant.components.frontend
-home-assistant-frontend==20250227.0
+home-assistant-frontend==20250228.0
 
 # homeassistant.components.conversation
 home-assistant-intents==2025.2.26
@@ -1882,7 +1882,7 @@ pysma==0.7.5
 pysmappee==0.2.29
 
 # homeassistant.components.smartthings
-pysmartthings==2.0.0
+pysmartthings==2.1.0
 
 # homeassistant.components.smarty
 pysmarty2==0.10.2
@@ -2453,7 +2453,7 @@ watchdog==6.0.0
 watergate-local-api==2024.4.1
 
 # homeassistant.components.weatherflow_cloud
-weatherflow4py==1.0.6
+weatherflow4py==1.3.1
 
 # homeassistant.components.nasweb
 webio-api==0.1.11
@@ -2526,7 +2526,7 @@ youless-api==2.2.0
 youtubeaio==1.1.5
 
 # homeassistant.components.media_extractor
-yt-dlp[default]==2025.01.26
+yt-dlp[default]==2025.02.19
 
 # homeassistant.components.zamg
 zamg==0.3.6
@@ -8,6 +8,7 @@ from dataclasses import replace
|
||||
from io import StringIO
|
||||
import json
|
||||
from pathlib import Path
|
||||
import re
|
||||
import tarfile
|
||||
from typing import Any
|
||||
from unittest.mock import (
|
||||
@@ -35,6 +36,7 @@ from homeassistant.components.backup.agent import BackupAgentError
|
||||
from homeassistant.components.backup.const import DATA_MANAGER
|
||||
from homeassistant.components.backup.manager import (
|
||||
BackupManagerError,
|
||||
BackupManagerExceptionGroup,
|
||||
BackupManagerState,
|
||||
CreateBackupStage,
|
||||
CreateBackupState,
|
||||
@@ -1646,34 +1648,60 @@ async def test_exception_platform_pre(hass: HomeAssistant) -> None:
|
||||
assert str(err.value) == "Error during pre-backup: Test exception"
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
("unhandled_error", "expected_exception", "expected_msg"),
|
||||
[
|
||||
(None, BackupManagerError, "Error during post-backup: Test exception"),
|
||||
(
|
||||
HomeAssistantError("Boom"),
|
||||
BackupManagerExceptionGroup,
|
||||
(
|
||||
"Multiple errors when creating backup: Error during pre-backup: Boom, "
|
||||
"Error during post-backup: Test exception (2 sub-exceptions)"
|
||||
),
|
||||
),
|
||||
(
|
||||
Exception("Boom"),
|
||||
BackupManagerExceptionGroup,
|
||||
(
|
||||
"Multiple errors when creating backup: Error during pre-backup: Boom, "
|
||||
"Error during post-backup: Test exception (2 sub-exceptions)"
|
||||
),
|
||||
),
|
||||
],
|
||||
)
|
||||
@pytest.mark.usefixtures("mock_backup_generation")
|
||||
async def test_exception_platform_post(hass: HomeAssistant) -> None:
|
||||
async def test_exception_platform_post(
|
||||
hass: HomeAssistant,
|
||||
unhandled_error: Exception | None,
|
||||
expected_exception: type[Exception],
|
||||
expected_msg: str,
|
||||
) -> None:
|
||||
"""Test exception in post step."""
|
||||
|
||||
async def _mock_step(hass: HomeAssistant) -> None:
|
||||
raise HomeAssistantError("Test exception")
|
||||
|
||||
remote_agent = mock_backup_agent("remote")
|
||||
await setup_backup_platform(
|
||||
hass,
|
||||
domain="test",
|
||||
platform=Mock(
|
||||
async_pre_backup=AsyncMock(),
|
||||
async_post_backup=_mock_step,
|
||||
# We let the pre_backup fail to test that unhandled errors are not discarded
|
||||
# when post backup fails
|
||||
async_pre_backup=AsyncMock(side_effect=unhandled_error),
|
||||
async_post_backup=AsyncMock(
|
||||
side_effect=HomeAssistantError("Test exception")
|
||||
),
|
||||
async_get_backup_agents=AsyncMock(return_value=[remote_agent]),
|
||||
),
|
||||
)
|
||||
await setup_backup_integration(hass)
|
||||
|
||||
with pytest.raises(BackupManagerError) as err:
|
||||
with pytest.raises(expected_exception, match=re.escape(expected_msg)):
|
||||
await hass.services.async_call(
|
||||
DOMAIN,
|
||||
"create",
|
||||
blocking=True,
|
||||
)
|
||||
|
||||
assert str(err.value) == "Error during post-backup: Test exception"
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
(
|
||||
|
||||
@@ -14,7 +14,6 @@ from homeassistant.components.climate import (
|
||||
HVACMode,
|
||||
intent as climate_intent,
|
||||
)
|
||||
from homeassistant.components.homeassistant.exposed_entities import async_expose_entity
|
||||
from homeassistant.config_entries import ConfigEntry, ConfigFlow
|
||||
from homeassistant.const import Platform, UnitOfTemperature
|
||||
from homeassistant.core import HomeAssistant
|
||||
@@ -131,335 +130,6 @@ class MockClimateEntityNoSetTemperature(ClimateEntity):
|
||||
_attr_hvac_modes = [HVACMode.OFF, HVACMode.HEAT]
|
||||
|
||||
|
||||
async def test_get_temperature(
|
||||
hass: HomeAssistant,
|
||||
area_registry: ar.AreaRegistry,
|
||||
entity_registry: er.EntityRegistry,
|
||||
) -> None:
|
||||
"""Test HassClimateGetTemperature intent."""
|
||||
assert await async_setup_component(hass, "homeassistant", {})
|
||||
await climate_intent.async_setup_intents(hass)
|
||||
|
||||
climate_1 = MockClimateEntity()
|
||||
climate_1._attr_name = "Climate 1"
|
||||
climate_1._attr_unique_id = "1234"
|
||||
climate_1._attr_current_temperature = 10.0
|
||||
entity_registry.async_get_or_create(
|
||||
DOMAIN, "test", "1234", suggested_object_id="climate_1"
|
||||
)
|
||||
|
||||
climate_2 = MockClimateEntity()
|
||||
climate_2._attr_name = "Climate 2"
|
||||
climate_2._attr_unique_id = "5678"
|
||||
climate_2._attr_current_temperature = 22.0
|
||||
entity_registry.async_get_or_create(
|
||||
DOMAIN, "test", "5678", suggested_object_id="climate_2"
|
||||
)
|
||||
|
||||
await create_mock_platform(hass, [climate_1, climate_2])
|
||||
|
||||
# Add climate entities to different areas:
|
||||
# climate_1 => living room
|
||||
# climate_2 => bedroom
|
||||
# nothing in office
|
||||
living_room_area = area_registry.async_create(name="Living Room")
|
||||
bedroom_area = area_registry.async_create(name="Bedroom")
|
||||
office_area = area_registry.async_create(name="Office")
|
||||
|
||||
entity_registry.async_update_entity(
|
||||
climate_1.entity_id, area_id=living_room_area.id
|
||||
)
|
||||
entity_registry.async_update_entity(climate_2.entity_id, area_id=bedroom_area.id)
|
||||
|
||||
# First climate entity will be selected (no area)
|
||||
response = await intent.async_handle(
|
||||
hass,
|
||||
"test",
|
||||
climate_intent.INTENT_GET_TEMPERATURE,
|
||||
{},
|
||||
assistant=conversation.DOMAIN,
|
||||
)
|
||||
assert response.response_type == intent.IntentResponseType.QUERY_ANSWER
|
||||
assert response.matched_states
|
||||
assert response.matched_states[0].entity_id == climate_1.entity_id
|
||||
state = response.matched_states[0]
|
||||
assert state.attributes["current_temperature"] == 10.0
|
||||
|
||||
# Select by area (climate_2)
|
||||
response = await intent.async_handle(
|
||||
hass,
|
||||
"test",
|
||||
climate_intent.INTENT_GET_TEMPERATURE,
|
||||
{"area": {"value": bedroom_area.name}},
|
||||
assistant=conversation.DOMAIN,
|
||||
)
|
||||
assert response.response_type == intent.IntentResponseType.QUERY_ANSWER
|
||||
assert len(response.matched_states) == 1
|
||||
assert response.matched_states[0].entity_id == climate_2.entity_id
|
||||
state = response.matched_states[0]
|
||||
assert state.attributes["current_temperature"] == 22.0
|
||||
|
||||
# Select by name (climate_2)
|
||||
response = await intent.async_handle(
|
||||
hass,
|
||||
"test",
|
||||
climate_intent.INTENT_GET_TEMPERATURE,
|
||||
{"name": {"value": "Climate 2"}},
|
||||
assistant=conversation.DOMAIN,
|
||||
)
|
||||
assert response.response_type == intent.IntentResponseType.QUERY_ANSWER
|
||||
assert len(response.matched_states) == 1
|
||||
assert response.matched_states[0].entity_id == climate_2.entity_id
|
||||
state = response.matched_states[0]
|
||||
assert state.attributes["current_temperature"] == 22.0
|
||||
|
||||
# Check area with no climate entities
|
||||
with pytest.raises(intent.MatchFailedError) as error:
|
||||
response = await intent.async_handle(
|
||||
hass,
|
||||
"test",
|
||||
climate_intent.INTENT_GET_TEMPERATURE,
|
||||
{"area": {"value": office_area.name}},
|
||||
assistant=conversation.DOMAIN,
|
||||
)
|
||||
|
||||
# Exception should contain details of what we tried to match
|
||||
assert isinstance(error.value, intent.MatchFailedError)
|
||||
assert error.value.result.no_match_reason == intent.MatchFailedReason.AREA
|
||||
constraints = error.value.constraints
|
||||
assert constraints.name is None
|
||||
assert constraints.area_name == office_area.name
|
||||
assert constraints.domains and (set(constraints.domains) == {DOMAIN})
|
||||
assert constraints.device_classes is None
|
||||
|
||||
# Check wrong name
|
||||
with pytest.raises(intent.MatchFailedError) as error:
|
||||
response = await intent.async_handle(
|
||||
hass,
|
||||
"test",
|
||||
climate_intent.INTENT_GET_TEMPERATURE,
|
||||
{"name": {"value": "Does not exist"}},
|
||||
)
|
||||
|
||||
assert isinstance(error.value, intent.MatchFailedError)
|
||||
assert error.value.result.no_match_reason == intent.MatchFailedReason.NAME
|
||||
constraints = error.value.constraints
|
||||
assert constraints.name == "Does not exist"
|
||||
assert constraints.area_name is None
|
||||
assert constraints.domains and (set(constraints.domains) == {DOMAIN})
|
||||
assert constraints.device_classes is None
|
||||
|
||||
# Check wrong name with area
|
||||
with pytest.raises(intent.MatchFailedError) as error:
|
||||
response = await intent.async_handle(
|
||||
hass,
|
||||
"test",
|
||||
climate_intent.INTENT_GET_TEMPERATURE,
|
||||
{"name": {"value": "Climate 1"}, "area": {"value": bedroom_area.name}},
|
||||
)
|
||||
|
||||
assert isinstance(error.value, intent.MatchFailedError)
|
||||
assert error.value.result.no_match_reason == intent.MatchFailedReason.AREA
|
||||
constraints = error.value.constraints
|
||||
assert constraints.name == "Climate 1"
|
||||
assert constraints.area_name == bedroom_area.name
|
||||
assert constraints.domains and (set(constraints.domains) == {DOMAIN})
|
||||
assert constraints.device_classes is None
|
||||
|
||||
|
||||
async def test_get_temperature_no_entities(
|
||||
hass: HomeAssistant,
|
||||
) -> None:
|
||||
"""Test HassClimateGetTemperature intent with no climate entities."""
|
||||
assert await async_setup_component(hass, "homeassistant", {})
|
||||
await climate_intent.async_setup_intents(hass)
|
||||
|
||||
await create_mock_platform(hass, [])
|
||||
|
||||
with pytest.raises(intent.MatchFailedError) as err:
|
||||
await intent.async_handle(
|
||||
hass,
|
||||
"test",
|
||||
climate_intent.INTENT_GET_TEMPERATURE,
|
||||
{},
|
||||
assistant=conversation.DOMAIN,
|
||||
)
|
||||
assert err.value.result.no_match_reason == intent.MatchFailedReason.DOMAIN
|
||||
|
||||
|
||||
async def test_not_exposed(
|
||||
hass: HomeAssistant,
|
||||
area_registry: ar.AreaRegistry,
|
||||
entity_registry: er.EntityRegistry,
|
||||
) -> None:
|
||||
"""Test HassClimateGetTemperature intent when entities aren't exposed."""
|
||||
assert await async_setup_component(hass, "homeassistant", {})
|
||||
await climate_intent.async_setup_intents(hass)
|
||||
|
||||
climate_1 = MockClimateEntity()
|
||||
climate_1._attr_name = "Climate 1"
|
||||
climate_1._attr_unique_id = "1234"
|
||||
climate_1._attr_current_temperature = 10.0
|
||||
entity_registry.async_get_or_create(
|
||||
DOMAIN, "test", "1234", suggested_object_id="climate_1"
|
||||
)
|
||||
|
||||
climate_2 = MockClimateEntity()
|
||||
climate_2._attr_name = "Climate 2"
|
||||
climate_2._attr_unique_id = "5678"
|
||||
climate_2._attr_current_temperature = 22.0
|
||||
entity_registry.async_get_or_create(
|
||||
DOMAIN, "test", "5678", suggested_object_id="climate_2"
|
||||
)
|
||||
|
||||
await create_mock_platform(hass, [climate_1, climate_2])
|
||||
|
||||
# Add climate entities to same area
|
||||
living_room_area = area_registry.async_create(name="Living Room")
|
||||
bedroom_area = area_registry.async_create(name="Bedroom")
|
||||
entity_registry.async_update_entity(
|
||||
climate_1.entity_id, area_id=living_room_area.id
|
||||
)
|
||||
entity_registry.async_update_entity(
|
||||
climate_2.entity_id, area_id=living_room_area.id
|
||||
)
|
||||
|
||||
# Should fail with empty name
|
||||
with pytest.raises(intent.InvalidSlotInfo):
|
||||
await intent.async_handle(
|
||||
hass,
|
||||
"test",
|
||||
climate_intent.INTENT_GET_TEMPERATURE,
|
||||
{"name": {"value": ""}},
|
||||
assistant=conversation.DOMAIN,
|
||||
)
|
||||
|
||||
# Should fail with empty area
|
||||
with pytest.raises(intent.InvalidSlotInfo):
|
||||
await intent.async_handle(
|
||||
hass,
|
||||
"test",
|
||||
climate_intent.INTENT_GET_TEMPERATURE,
|
||||
{"area": {"value": ""}},
|
||||
assistant=conversation.DOMAIN,
|
||||
)
|
||||
|
||||
# Expose second, hide first
|
||||
async_expose_entity(hass, conversation.DOMAIN, climate_1.entity_id, False)
|
||||
async_expose_entity(hass, conversation.DOMAIN, climate_2.entity_id, True)
|
||||
|
||||
# Second climate entity is exposed
|
||||
response = await intent.async_handle(
|
||||
hass,
|
||||
"test",
|
||||
climate_intent.INTENT_GET_TEMPERATURE,
|
||||
{},
|
||||
assistant=conversation.DOMAIN,
|
||||
)
|
||||
assert response.response_type == intent.IntentResponseType.QUERY_ANSWER
|
||||
assert len(response.matched_states) == 1
|
||||
assert response.matched_states[0].entity_id == climate_2.entity_id
|
||||
|
||||
# Using the area should work
|
||||
response = await intent.async_handle(
|
||||
hass,
|
||||
"test",
|
||||
climate_intent.INTENT_GET_TEMPERATURE,
|
||||
{"area": {"value": living_room_area.name}},
|
||||
assistant=conversation.DOMAIN,
|
||||
)
|
||||
assert response.response_type == intent.IntentResponseType.QUERY_ANSWER
|
||||
assert len(response.matched_states) == 1
|
||||
assert response.matched_states[0].entity_id == climate_2.entity_id
|
||||
|
||||
# Using the name of the exposed entity should work
|
||||
response = await intent.async_handle(
|
||||
hass,
|
||||
"test",
|
||||
climate_intent.INTENT_GET_TEMPERATURE,
|
||||
{"name": {"value": climate_2.name}},
|
||||
assistant=conversation.DOMAIN,
|
||||
)
|
||||
assert response.response_type == intent.IntentResponseType.QUERY_ANSWER
|
||||
assert len(response.matched_states) == 1
|
||||
assert response.matched_states[0].entity_id == climate_2.entity_id
|
||||
|
||||
# Using the name of the *unexposed* entity should fail
|
||||
with pytest.raises(intent.MatchFailedError) as err:
|
||||
await intent.async_handle(
|
||||
hass,
|
||||
"test",
|
||||
climate_intent.INTENT_GET_TEMPERATURE,
|
||||
{"name": {"value": climate_1.name}},
|
||||
assistant=conversation.DOMAIN,
|
||||
)
|
||||
assert err.value.result.no_match_reason == intent.MatchFailedReason.ASSISTANT
|
||||
|
||||
# Expose first, hide second
|
||||
async_expose_entity(hass, conversation.DOMAIN, climate_1.entity_id, True)
|
||||
async_expose_entity(hass, conversation.DOMAIN, climate_2.entity_id, False)
|
||||
|
||||
# Second climate entity is exposed
|
||||
response = await intent.async_handle(
|
||||
hass,
|
||||
"test",
|
||||
climate_intent.INTENT_GET_TEMPERATURE,
|
||||
{},
|
||||
assistant=conversation.DOMAIN,
|
||||
)
|
||||
assert response.response_type == intent.IntentResponseType.QUERY_ANSWER
|
||||
assert len(response.matched_states) == 1
|
||||
assert response.matched_states[0].entity_id == climate_1.entity_id
|
||||
|
||||
# Wrong area name
|
||||
with pytest.raises(intent.MatchFailedError) as err:
|
||||
await intent.async_handle(
|
||||
hass,
|
||||
"test",
|
||||
climate_intent.INTENT_GET_TEMPERATURE,
|
||||
{"area": {"value": bedroom_area.name}},
|
||||
assistant=conversation.DOMAIN,
|
||||
)
|
||||
assert err.value.result.no_match_reason == intent.MatchFailedReason.AREA
|
||||
|
||||
# Neither are exposed
|
||||
async_expose_entity(hass, conversation.DOMAIN, climate_1.entity_id, False)
|
||||
async_expose_entity(hass, conversation.DOMAIN, climate_2.entity_id, False)
|
||||
|
||||
with pytest.raises(intent.MatchFailedError) as err:
|
||||
await intent.async_handle(
|
||||
hass,
|
||||
"test",
|
||||
climate_intent.INTENT_GET_TEMPERATURE,
|
||||
{},
|
||||
assistant=conversation.DOMAIN,
|
||||
)
|
||||
assert err.value.result.no_match_reason == intent.MatchFailedReason.ASSISTANT
|
||||
|
||||
# Should fail with area
|
||||
with pytest.raises(intent.MatchFailedError) as err:
|
||||
await intent.async_handle(
|
||||
hass,
|
||||
"test",
|
||||
climate_intent.INTENT_GET_TEMPERATURE,
|
||||
{"area": {"value": living_room_area.name}},
|
||||
assistant=conversation.DOMAIN,
|
||||
)
|
||||
assert err.value.result.no_match_reason == intent.MatchFailedReason.ASSISTANT
|
||||
|
||||
# Should fail with both names
|
||||
for name in (climate_1.name, climate_2.name):
|
||||
with pytest.raises(intent.MatchFailedError) as err:
|
||||
await intent.async_handle(
|
||||
hass,
|
||||
"test",
|
||||
climate_intent.INTENT_GET_TEMPERATURE,
|
||||
{"name": {"value": name}},
|
||||
assistant=conversation.DOMAIN,
|
||||
)
|
||||
assert err.value.result.no_match_reason == intent.MatchFailedReason.ASSISTANT
|
||||
|
||||
|
||||
async def test_set_temperature(
|
||||
hass: HomeAssistant,
|
||||
area_registry: ar.AreaRegistry,
|
||||
|
||||
@@ -6,7 +6,7 @@
|
||||
tuple(
|
||||
),
|
||||
dict({
|
||||
'config': GenerateContentConfig(http_options=None, system_instruction="Current time is 05:00:00. Today's date is 2024-05-24.\nYou are a voice assistant for Home Assistant.\nAnswer questions about the world truthfully.\nAnswer in plain text. Keep it simple and to the point.\nOnly if the user wants to control a device, tell them to expose entities to their voice assistant in Home Assistant.", temperature=1.0, top_p=0.95, top_k=64.0, candidate_count=None, max_output_tokens=150, stop_sequences=None, response_logprobs=None, logprobs=None, presence_penalty=None, frequency_penalty=None, seed=None, response_mime_type=None, response_schema=None, routing_config=None, safety_settings=[SafetySetting(method=None, category=<HarmCategory.HARM_CATEGORY_HATE_SPEECH: 'HARM_CATEGORY_HATE_SPEECH'>, threshold=<HarmBlockThreshold.BLOCK_MEDIUM_AND_ABOVE: 'BLOCK_MEDIUM_AND_ABOVE'>), SafetySetting(method=None, category=<HarmCategory.HARM_CATEGORY_HARASSMENT: 'HARM_CATEGORY_HARASSMENT'>, threshold=<HarmBlockThreshold.BLOCK_MEDIUM_AND_ABOVE: 'BLOCK_MEDIUM_AND_ABOVE'>), SafetySetting(method=None, category=<HarmCategory.HARM_CATEGORY_DANGEROUS_CONTENT: 'HARM_CATEGORY_DANGEROUS_CONTENT'>, threshold=<HarmBlockThreshold.BLOCK_MEDIUM_AND_ABOVE: 'BLOCK_MEDIUM_AND_ABOVE'>), SafetySetting(method=None, category=<HarmCategory.HARM_CATEGORY_SEXUALLY_EXPLICIT: 'HARM_CATEGORY_SEXUALLY_EXPLICIT'>, threshold=<HarmBlockThreshold.BLOCK_MEDIUM_AND_ABOVE: 'BLOCK_MEDIUM_AND_ABOVE'>)], tools=[Tool(function_declarations=[FunctionDeclaration(response=None, description='Test function', name='test_tool', parameters=Schema(min_items=None, example=None, property_ordering=None, pattern=None, minimum=None, default=None, any_of=None, max_length=None, title=None, min_length=None, min_properties=None, max_items=None, maximum=None, nullable=None, max_properties=None, type=<Type.OBJECT: 'OBJECT'>, description=None, enum=None, format=None, items=None, properties={'param1': Schema(min_items=None, example=None, property_ordering=None, pattern=None, minimum=None, default=None, any_of=None, max_length=None, title=None, min_length=None, min_properties=None, max_items=None, maximum=None, nullable=None, max_properties=None, type=<Type.ARRAY: 'ARRAY'>, description='Test parameters', enum=None, format=None, items=Schema(min_items=None, example=None, property_ordering=None, pattern=None, minimum=None, default=None, any_of=None, max_length=None, title=None, min_length=None, min_properties=None, max_items=None, maximum=None, nullable=None, max_properties=None, type=<Type.STRING: 'STRING'>, description=None, enum=None, format='lower', items=None, properties=None, required=None), properties=None, required=None), 'param2': Schema(min_items=None, example=None, property_ordering=None, pattern=None, minimum=None, default=None, any_of=[Schema(min_items=None, example=None, property_ordering=None, pattern=None, minimum=None, default=None, any_of=None, max_length=None, title=None, min_length=None, min_properties=None, max_items=None, maximum=None, nullable=None, max_properties=None, type=<Type.NUMBER: 'NUMBER'>, description=None, enum=None, format=None, items=None, properties=None, required=None), Schema(min_items=None, example=None, property_ordering=None, pattern=None, minimum=None, default=None, any_of=None, max_length=None, title=None, min_length=None, min_properties=None, max_items=None, maximum=None, nullable=None, max_properties=None, type=<Type.INTEGER: 'INTEGER'>, description=None, enum=None, format=None, items=None, properties=None, required=None)], 
max_length=None, title=None, min_length=None, min_properties=None, max_items=None, maximum=None, nullable=None, max_properties=None, type=None, description=None, enum=None, format=None, items=None, properties=None, required=None), 'param3': Schema(min_items=None, example=None, property_ordering=None, pattern=None, minimum=None, default=None, any_of=None, max_length=None, title=None, min_length=None, min_properties=None, max_items=None, maximum=None, nullable=None, max_properties=None, type=<Type.OBJECT: 'OBJECT'>, description=None, enum=None, format=None, items=None, properties={'json': Schema(min_items=None, example=None, property_ordering=None, pattern=None, minimum=None, default=None, any_of=None, max_length=None, title=None, min_length=None, min_properties=None, max_items=None, maximum=None, nullable=None, max_properties=None, type=<Type.STRING: 'STRING'>, description=None, enum=None, format=None, items=None, properties=None, required=None)}, required=[])}, required=[]))], retrieval=None, google_search=None, google_search_retrieval=None, code_execution=None)], tool_config=None, labels=None, cached_content=None, response_modalities=None, media_resolution=None, speech_config=None, audio_timestamp=None, automatic_function_calling=AutomaticFunctionCallingConfig(disable=True, maximum_remote_calls=None, ignore_call_history=None), thinking_config=None),
|
||||
'config': GenerateContentConfig(http_options=None, system_instruction="Current time is 05:00:00. Today's date is 2024-05-24.\nYou are a voice assistant for Home Assistant.\nAnswer questions about the world truthfully.\nAnswer in plain text. Keep it simple and to the point.\nOnly if the user wants to control a device, tell them to expose entities to their voice assistant in Home Assistant.", temperature=1.0, top_p=0.95, top_k=64.0, candidate_count=None, max_output_tokens=150, stop_sequences=None, response_logprobs=None, logprobs=None, presence_penalty=None, frequency_penalty=None, seed=None, response_mime_type=None, response_schema=None, routing_config=None, safety_settings=[SafetySetting(method=None, category=<HarmCategory.HARM_CATEGORY_HATE_SPEECH: 'HARM_CATEGORY_HATE_SPEECH'>, threshold=<HarmBlockThreshold.BLOCK_MEDIUM_AND_ABOVE: 'BLOCK_MEDIUM_AND_ABOVE'>), SafetySetting(method=None, category=<HarmCategory.HARM_CATEGORY_HARASSMENT: 'HARM_CATEGORY_HARASSMENT'>, threshold=<HarmBlockThreshold.BLOCK_MEDIUM_AND_ABOVE: 'BLOCK_MEDIUM_AND_ABOVE'>), SafetySetting(method=None, category=<HarmCategory.HARM_CATEGORY_DANGEROUS_CONTENT: 'HARM_CATEGORY_DANGEROUS_CONTENT'>, threshold=<HarmBlockThreshold.BLOCK_MEDIUM_AND_ABOVE: 'BLOCK_MEDIUM_AND_ABOVE'>), SafetySetting(method=None, category=<HarmCategory.HARM_CATEGORY_SEXUALLY_EXPLICIT: 'HARM_CATEGORY_SEXUALLY_EXPLICIT'>, threshold=<HarmBlockThreshold.BLOCK_MEDIUM_AND_ABOVE: 'BLOCK_MEDIUM_AND_ABOVE'>)], tools=[Tool(function_declarations=[FunctionDeclaration(response=None, description='Test function', name='test_tool', parameters=Schema(min_items=None, example=None, property_ordering=None, pattern=None, minimum=None, default=None, any_of=None, max_length=None, title=None, min_length=None, min_properties=None, max_items=None, maximum=None, nullable=None, max_properties=None, type=<Type.OBJECT: 'OBJECT'>, description=None, enum=None, format=None, items=None, properties={'param1': Schema(min_items=None, example=None, property_ordering=None, pattern=None, minimum=None, default=None, any_of=None, max_length=None, title=None, min_length=None, min_properties=None, max_items=None, maximum=None, nullable=None, max_properties=None, type=<Type.ARRAY: 'ARRAY'>, description='Test parameters', enum=None, format=None, items=Schema(min_items=None, example=None, property_ordering=None, pattern=None, minimum=None, default=None, any_of=None, max_length=None, title=None, min_length=None, min_properties=None, max_items=None, maximum=None, nullable=None, max_properties=None, type=<Type.STRING: 'STRING'>, description=None, enum=None, format=None, items=None, properties=None, required=None), properties=None, required=None), 'param2': Schema(min_items=None, example=None, property_ordering=None, pattern=None, minimum=None, default=None, any_of=[Schema(min_items=None, example=None, property_ordering=None, pattern=None, minimum=None, default=None, any_of=None, max_length=None, title=None, min_length=None, min_properties=None, max_items=None, maximum=None, nullable=None, max_properties=None, type=<Type.NUMBER: 'NUMBER'>, description=None, enum=None, format=None, items=None, properties=None, required=None), Schema(min_items=None, example=None, property_ordering=None, pattern=None, minimum=None, default=None, any_of=None, max_length=None, title=None, min_length=None, min_properties=None, max_items=None, maximum=None, nullable=None, max_properties=None, type=<Type.INTEGER: 'INTEGER'>, description=None, enum=None, format=None, items=None, properties=None, required=None)], max_length=None, 
title=None, min_length=None, min_properties=None, max_items=None, maximum=None, nullable=None, max_properties=None, type=None, description=None, enum=None, format=None, items=None, properties=None, required=None), 'param3': Schema(min_items=None, example=None, property_ordering=None, pattern=None, minimum=None, default=None, any_of=None, max_length=None, title=None, min_length=None, min_properties=None, max_items=None, maximum=None, nullable=None, max_properties=None, type=<Type.OBJECT: 'OBJECT'>, description=None, enum=None, format=None, items=None, properties={'json': Schema(min_items=None, example=None, property_ordering=None, pattern=None, minimum=None, default=None, any_of=None, max_length=None, title=None, min_length=None, min_properties=None, max_items=None, maximum=None, nullable=None, max_properties=None, type=<Type.STRING: 'STRING'>, description=None, enum=None, format=None, items=None, properties=None, required=None)}, required=[])}, required=[]))], retrieval=None, google_search=None, google_search_retrieval=None, code_execution=None)], tool_config=None, labels=None, cached_content=None, response_modalities=None, media_resolution=None, speech_config=None, audio_timestamp=None, automatic_function_calling=AutomaticFunctionCallingConfig(disable=True, maximum_remote_calls=None, ignore_call_history=None), thinking_config=None),
|
||||
'history': list([
|
||||
]),
|
||||
'model': 'models/gemini-2.0-flash',
|
||||
|
||||
@@ -493,6 +493,42 @@ async def test_escape_decode() -> None:
|
||||
{"type": "string", "enum": ["a", "b", "c"]},
|
||||
{"type": "STRING", "enum": ["a", "b", "c"]},
|
||||
),
|
||||
(
|
||||
{"type": "string", "format": "enum", "enum": ["a", "b", "c"]},
|
||||
{"type": "STRING", "format": "enum", "enum": ["a", "b", "c"]},
|
||||
),
|
||||
(
|
||||
{"type": "string", "format": "date-time"},
|
||||
{"type": "STRING", "format": "date-time"},
|
||||
),
|
||||
(
|
||||
{"type": "string", "format": "byte"},
|
||||
{"type": "STRING"},
|
||||
),
|
||||
(
|
||||
{"type": "number", "format": "float"},
|
||||
{"type": "NUMBER", "format": "float"},
|
||||
),
|
||||
(
|
||||
{"type": "number", "format": "double"},
|
||||
{"type": "NUMBER", "format": "double"},
|
||||
),
|
||||
(
|
||||
{"type": "number", "format": "hex"},
|
||||
{"type": "NUMBER"},
|
||||
),
|
||||
(
|
||||
{"type": "integer", "format": "int32"},
|
||||
{"type": "INTEGER", "format": "int32"},
|
||||
),
|
||||
(
|
||||
{"type": "integer", "format": "int64"},
|
||||
{"type": "INTEGER", "format": "int64"},
|
||||
),
|
||||
(
|
||||
{"type": "integer", "format": "int8"},
|
||||
{"type": "INTEGER"},
|
||||
),
|
||||
(
|
||||
{"type": "integer", "enum": [1, 2, 3]},
|
||||
{"type": "STRING", "enum": ["1", "2", "3"]},
|
||||
@@ -515,11 +551,11 @@ async def test_escape_decode() -> None:
|
||||
]
|
||||
},
|
||||
),
|
||||
({"type": "string", "format": "lower"}, {"format": "lower", "type": "STRING"}),
|
||||
({"type": "boolean", "format": "bool"}, {"format": "bool", "type": "BOOLEAN"}),
|
||||
({"type": "string", "format": "lower"}, {"type": "STRING"}),
|
||||
({"type": "boolean", "format": "bool"}, {"type": "BOOLEAN"}),
|
||||
(
|
||||
{"type": "number", "format": "percent"},
|
||||
{"type": "NUMBER", "format": "percent"},
|
||||
{"type": "NUMBER"},
|
||||
),
|
||||
(
|
||||
{
|
||||
|
||||
456
tests/components/intent/test_temperature.py
Normal file
@@ -0,0 +1,456 @@
|
||||
"""Test temperature intents."""
|
||||
|
||||
from collections.abc import Generator
|
||||
from typing import Any
|
||||
|
||||
import pytest
|
||||
|
||||
from homeassistant.components import conversation
|
||||
from homeassistant.components.climate import (
|
||||
ATTR_TEMPERATURE,
|
||||
DOMAIN as CLIMATE_DOMAIN,
|
||||
ClimateEntity,
|
||||
ClimateEntityFeature,
|
||||
HVACMode,
|
||||
)
|
||||
from homeassistant.components.homeassistant.exposed_entities import async_expose_entity
|
||||
from homeassistant.config_entries import ConfigEntry, ConfigFlow
|
||||
from homeassistant.const import Platform, UnitOfTemperature
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers import area_registry as ar, entity_registry as er, intent
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
from homeassistant.setup import async_setup_component
|
||||
|
||||
from tests.common import (
|
||||
MockConfigEntry,
|
||||
MockModule,
|
||||
MockPlatform,
|
||||
mock_config_flow,
|
||||
mock_integration,
|
||||
mock_platform,
|
||||
)
|
||||
|
||||
TEST_DOMAIN = "test"
|
||||
|
||||
|
||||
class MockFlow(ConfigFlow):
|
||||
"""Test flow."""
|
||||
|
||||
|
||||
@pytest.fixture(autouse=True)
|
||||
def config_flow_fixture(hass: HomeAssistant) -> Generator[None]:
|
||||
"""Mock config flow."""
|
||||
mock_platform(hass, f"{TEST_DOMAIN}.config_flow")
|
||||
|
||||
with mock_config_flow(TEST_DOMAIN, MockFlow):
|
||||
yield
|
||||
|
||||
|
||||
@pytest.fixture(autouse=True)
|
||||
def mock_setup_integration(hass: HomeAssistant) -> None:
|
||||
"""Fixture to set up a mock integration."""
|
||||
|
||||
async def async_setup_entry_init(
|
||||
hass: HomeAssistant, config_entry: ConfigEntry
|
||||
) -> bool:
|
||||
"""Set up test config entry."""
|
||||
await hass.config_entries.async_forward_entry_setups(
|
||||
config_entry, [CLIMATE_DOMAIN]
|
||||
)
|
||||
return True
|
||||
|
||||
async def async_unload_entry_init(
|
||||
hass: HomeAssistant,
|
||||
config_entry: ConfigEntry,
|
||||
) -> bool:
|
||||
await hass.config_entries.async_unload_platforms(config_entry, [Platform.TODO])
|
||||
return True
|
||||
|
||||
mock_platform(hass, f"{TEST_DOMAIN}.config_flow")
|
||||
mock_integration(
|
||||
hass,
|
||||
MockModule(
|
||||
TEST_DOMAIN,
|
||||
async_setup_entry=async_setup_entry_init,
|
||||
async_unload_entry=async_unload_entry_init,
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
async def create_mock_platform(
|
||||
hass: HomeAssistant,
|
||||
entities: list[ClimateEntity],
|
||||
) -> MockConfigEntry:
|
||||
"""Create a todo platform with the specified entities."""
|
||||
|
||||
async def async_setup_entry_platform(
|
||||
hass: HomeAssistant,
|
||||
config_entry: ConfigEntry,
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up test event platform via config entry."""
|
||||
async_add_entities(entities)
|
||||
|
||||
mock_platform(
|
||||
hass,
|
||||
f"{TEST_DOMAIN}.{CLIMATE_DOMAIN}",
|
||||
MockPlatform(async_setup_entry=async_setup_entry_platform),
|
||||
)
|
||||
|
||||
config_entry = MockConfigEntry(domain=TEST_DOMAIN)
|
||||
config_entry.add_to_hass(hass)
|
||||
assert await hass.config_entries.async_setup(config_entry.entry_id)
|
||||
await hass.async_block_till_done()
|
||||
|
||||
return config_entry
|
||||
|
||||
|
||||
class MockClimateEntity(ClimateEntity):
|
||||
"""Mock Climate device to use in tests."""
|
||||
|
||||
_attr_temperature_unit = UnitOfTemperature.CELSIUS
|
||||
_attr_hvac_mode = HVACMode.OFF
|
||||
_attr_hvac_modes = [HVACMode.OFF, HVACMode.HEAT]
|
||||
_attr_supported_features = ClimateEntityFeature.TARGET_TEMPERATURE
|
||||
|
||||
async def async_set_temperature(self, **kwargs: Any) -> None:
|
||||
"""Set the thermostat temperature."""
|
||||
value = kwargs[ATTR_TEMPERATURE]
|
||||
self._attr_target_temperature = value
|
||||
|
||||
|
||||
class MockClimateEntityNoSetTemperature(ClimateEntity):
|
||||
"""Mock Climate device to use in tests."""
|
||||
|
||||
_attr_temperature_unit = UnitOfTemperature.CELSIUS
|
||||
_attr_hvac_mode = HVACMode.OFF
|
||||
_attr_hvac_modes = [HVACMode.OFF, HVACMode.HEAT]
|
||||
|
||||
|
||||
async def test_get_temperature(
|
||||
hass: HomeAssistant,
|
||||
area_registry: ar.AreaRegistry,
|
||||
entity_registry: er.EntityRegistry,
|
||||
) -> None:
|
||||
"""Test HassClimateGetTemperature intent."""
|
||||
assert await async_setup_component(hass, "homeassistant", {})
|
||||
assert await async_setup_component(hass, "intent", {})
|
||||
|
||||
climate_1 = MockClimateEntity()
|
||||
climate_1._attr_name = "Climate 1"
|
||||
climate_1._attr_unique_id = "1234"
|
||||
climate_1._attr_current_temperature = 10.0
|
||||
entity_registry.async_get_or_create(
|
||||
CLIMATE_DOMAIN, "test", "1234", suggested_object_id="climate_1"
|
||||
)
|
||||
|
||||
climate_2 = MockClimateEntity()
|
||||
climate_2._attr_name = "Climate 2"
|
||||
climate_2._attr_unique_id = "5678"
|
||||
climate_2._attr_current_temperature = 22.0
|
||||
entity_registry.async_get_or_create(
|
||||
CLIMATE_DOMAIN, "test", "5678", suggested_object_id="climate_2"
|
||||
)
|
||||
|
||||
await create_mock_platform(hass, [climate_1, climate_2])
|
||||
|
||||
# Add climate entities to different areas:
|
||||
# climate_1 => living room
|
||||
# climate_2 => bedroom
|
||||
# nothing in office
|
||||
living_room_area = area_registry.async_create(name="Living Room")
|
||||
bedroom_area = area_registry.async_create(name="Bedroom")
|
||||
office_area = area_registry.async_create(name="Office")
|
||||
|
||||
entity_registry.async_update_entity(
|
||||
climate_1.entity_id, area_id=living_room_area.id
|
||||
)
|
||||
entity_registry.async_update_entity(climate_2.entity_id, area_id=bedroom_area.id)
|
||||
|
||||
# First climate entity will be selected (no area)
|
||||
response = await intent.async_handle(
|
||||
hass,
|
||||
"test",
|
||||
intent.INTENT_GET_TEMPERATURE,
|
||||
{},
|
||||
assistant=conversation.DOMAIN,
|
||||
)
|
||||
assert response.response_type == intent.IntentResponseType.QUERY_ANSWER
|
||||
assert response.matched_states
|
||||
assert response.matched_states[0].entity_id == climate_1.entity_id
|
||||
state = response.matched_states[0]
|
||||
assert state.attributes["current_temperature"] == 10.0
|
||||
|
||||
# Select by area (climate_2)
|
||||
response = await intent.async_handle(
|
||||
hass,
|
||||
"test",
|
||||
intent.INTENT_GET_TEMPERATURE,
|
||||
{"area": {"value": bedroom_area.name}},
|
||||
assistant=conversation.DOMAIN,
|
||||
)
|
||||
assert response.response_type == intent.IntentResponseType.QUERY_ANSWER
|
||||
assert len(response.matched_states) == 1
|
||||
assert response.matched_states[0].entity_id == climate_2.entity_id
|
||||
state = response.matched_states[0]
|
||||
assert state.attributes["current_temperature"] == 22.0
|
||||
|
||||
# Select by name (climate_2)
|
||||
response = await intent.async_handle(
|
||||
hass,
|
||||
"test",
|
||||
intent.INTENT_GET_TEMPERATURE,
|
||||
{"name": {"value": "Climate 2"}},
|
||||
assistant=conversation.DOMAIN,
|
||||
)
|
||||
assert response.response_type == intent.IntentResponseType.QUERY_ANSWER
|
||||
assert len(response.matched_states) == 1
|
||||
assert response.matched_states[0].entity_id == climate_2.entity_id
|
||||
state = response.matched_states[0]
|
||||
assert state.attributes["current_temperature"] == 22.0
|
||||
|
||||
# Check area with no climate entities
|
||||
with pytest.raises(intent.MatchFailedError) as error:
|
||||
response = await intent.async_handle(
|
||||
hass,
|
||||
"test",
|
||||
intent.INTENT_GET_TEMPERATURE,
|
||||
{"area": {"value": office_area.name}},
|
||||
assistant=conversation.DOMAIN,
|
||||
)
|
||||
|
||||
# Exception should contain details of what we tried to match
|
||||
assert isinstance(error.value, intent.MatchFailedError)
|
||||
assert error.value.result.no_match_reason == intent.MatchFailedReason.AREA
|
||||
constraints = error.value.constraints
|
||||
assert constraints.name is None
|
||||
assert constraints.area_name == office_area.name
|
||||
assert constraints.domains and (set(constraints.domains) == {CLIMATE_DOMAIN})
|
||||
assert constraints.device_classes is None
|
||||
|
||||
# Check wrong name
|
||||
with pytest.raises(intent.MatchFailedError) as error:
|
||||
response = await intent.async_handle(
|
||||
hass,
|
||||
"test",
|
||||
intent.INTENT_GET_TEMPERATURE,
|
||||
{"name": {"value": "Does not exist"}},
|
||||
)
|
||||
|
||||
assert isinstance(error.value, intent.MatchFailedError)
|
||||
assert error.value.result.no_match_reason == intent.MatchFailedReason.NAME
|
||||
constraints = error.value.constraints
|
||||
assert constraints.name == "Does not exist"
|
||||
assert constraints.area_name is None
|
||||
assert constraints.domains and (set(constraints.domains) == {CLIMATE_DOMAIN})
|
||||
assert constraints.device_classes is None
|
||||
|
||||
# Check wrong name with area
|
||||
with pytest.raises(intent.MatchFailedError) as error:
|
||||
response = await intent.async_handle(
|
||||
hass,
|
||||
"test",
|
||||
intent.INTENT_GET_TEMPERATURE,
|
||||
{"name": {"value": "Climate 1"}, "area": {"value": bedroom_area.name}},
|
||||
)
|
||||
|
||||
assert isinstance(error.value, intent.MatchFailedError)
|
||||
assert error.value.result.no_match_reason == intent.MatchFailedReason.AREA
|
||||
constraints = error.value.constraints
|
||||
assert constraints.name == "Climate 1"
|
||||
assert constraints.area_name == bedroom_area.name
|
||||
assert constraints.domains and (set(constraints.domains) == {CLIMATE_DOMAIN})
|
||||
assert constraints.device_classes is None
|
||||
|
||||
|
||||
async def test_get_temperature_no_entities(
|
||||
hass: HomeAssistant,
|
||||
) -> None:
|
||||
"""Test HassClimateGetTemperature intent with no climate entities."""
|
||||
assert await async_setup_component(hass, "homeassistant", {})
|
||||
assert await async_setup_component(hass, "intent", {})
|
||||
|
||||
await create_mock_platform(hass, [])
|
||||
|
||||
with pytest.raises(intent.MatchFailedError) as err:
|
||||
await intent.async_handle(
|
||||
hass,
|
||||
"test",
|
||||
intent.INTENT_GET_TEMPERATURE,
|
||||
{},
|
||||
assistant=conversation.DOMAIN,
|
||||
)
|
||||
assert err.value.result.no_match_reason == intent.MatchFailedReason.DOMAIN
|
||||
|
||||
|
||||
async def test_not_exposed(
|
||||
hass: HomeAssistant,
|
||||
area_registry: ar.AreaRegistry,
|
||||
entity_registry: er.EntityRegistry,
|
||||
) -> None:
|
||||
"""Test HassClimateGetTemperature intent when entities aren't exposed."""
|
||||
assert await async_setup_component(hass, "homeassistant", {})
|
||||
assert await async_setup_component(hass, "intent", {})
|
||||
|
||||
climate_1 = MockClimateEntity()
|
||||
climate_1._attr_name = "Climate 1"
|
||||
climate_1._attr_unique_id = "1234"
|
||||
climate_1._attr_current_temperature = 10.0
|
||||
entity_registry.async_get_or_create(
|
||||
CLIMATE_DOMAIN, "test", "1234", suggested_object_id="climate_1"
|
||||
)
|
||||
|
||||
climate_2 = MockClimateEntity()
|
||||
climate_2._attr_name = "Climate 2"
|
||||
climate_2._attr_unique_id = "5678"
|
||||
climate_2._attr_current_temperature = 22.0
|
||||
entity_registry.async_get_or_create(
|
||||
CLIMATE_DOMAIN, "test", "5678", suggested_object_id="climate_2"
|
||||
)
|
||||
|
||||
await create_mock_platform(hass, [climate_1, climate_2])
|
||||
|
||||
# Add climate entities to same area
|
||||
living_room_area = area_registry.async_create(name="Living Room")
|
||||
bedroom_area = area_registry.async_create(name="Bedroom")
|
||||
entity_registry.async_update_entity(
|
||||
climate_1.entity_id, area_id=living_room_area.id
|
||||
)
|
||||
entity_registry.async_update_entity(
|
||||
climate_2.entity_id, area_id=living_room_area.id
|
||||
)
|
||||
|
||||
# Should fail with empty name
|
||||
with pytest.raises(intent.InvalidSlotInfo):
|
||||
await intent.async_handle(
|
||||
hass,
|
||||
"test",
|
||||
intent.INTENT_GET_TEMPERATURE,
|
||||
{"name": {"value": ""}},
|
||||
assistant=conversation.DOMAIN,
|
||||
)
|
||||
|
||||
# Should fail with empty area
|
||||
with pytest.raises(intent.InvalidSlotInfo):
|
||||
await intent.async_handle(
|
||||
hass,
|
||||
"test",
|
||||
intent.INTENT_GET_TEMPERATURE,
|
||||
{"area": {"value": ""}},
|
||||
assistant=conversation.DOMAIN,
|
||||
)
|
||||
|
||||
# Expose second, hide first
|
||||
async_expose_entity(hass, conversation.DOMAIN, climate_1.entity_id, False)
|
||||
async_expose_entity(hass, conversation.DOMAIN, climate_2.entity_id, True)
|
||||
|
||||
# Second climate entity is exposed
|
||||
response = await intent.async_handle(
|
||||
hass,
|
||||
"test",
|
||||
intent.INTENT_GET_TEMPERATURE,
|
||||
{},
|
||||
assistant=conversation.DOMAIN,
|
||||
)
|
||||
assert response.response_type == intent.IntentResponseType.QUERY_ANSWER
|
||||
assert len(response.matched_states) == 1
|
||||
assert response.matched_states[0].entity_id == climate_2.entity_id
|
||||
|
||||
# Using the area should work
|
||||
response = await intent.async_handle(
|
||||
hass,
|
||||
"test",
|
||||
intent.INTENT_GET_TEMPERATURE,
|
||||
{"area": {"value": living_room_area.name}},
|
||||
assistant=conversation.DOMAIN,
|
||||
)
|
||||
assert response.response_type == intent.IntentResponseType.QUERY_ANSWER
|
||||
assert len(response.matched_states) == 1
|
||||
assert response.matched_states[0].entity_id == climate_2.entity_id
|
||||
|
||||
# Using the name of the exposed entity should work
|
||||
response = await intent.async_handle(
|
||||
hass,
|
||||
"test",
|
||||
intent.INTENT_GET_TEMPERATURE,
|
||||
{"name": {"value": climate_2.name}},
|
||||
assistant=conversation.DOMAIN,
|
||||
)
|
||||
assert response.response_type == intent.IntentResponseType.QUERY_ANSWER
|
||||
assert len(response.matched_states) == 1
|
||||
assert response.matched_states[0].entity_id == climate_2.entity_id
|
||||
|
||||
# Using the name of the *unexposed* entity should fail
|
||||
with pytest.raises(intent.MatchFailedError) as err:
|
||||
await intent.async_handle(
|
||||
hass,
|
||||
"test",
|
||||
intent.INTENT_GET_TEMPERATURE,
|
||||
{"name": {"value": climate_1.name}},
|
||||
assistant=conversation.DOMAIN,
|
||||
)
|
||||
assert err.value.result.no_match_reason == intent.MatchFailedReason.ASSISTANT
|
||||
|
||||
# Expose first, hide second
|
||||
async_expose_entity(hass, conversation.DOMAIN, climate_1.entity_id, True)
|
||||
async_expose_entity(hass, conversation.DOMAIN, climate_2.entity_id, False)
|
||||
|
||||
# First climate entity is exposed
|
||||
response = await intent.async_handle(
|
||||
hass,
|
||||
"test",
|
||||
intent.INTENT_GET_TEMPERATURE,
|
||||
{},
|
||||
assistant=conversation.DOMAIN,
|
||||
)
|
||||
assert response.response_type == intent.IntentResponseType.QUERY_ANSWER
|
||||
assert len(response.matched_states) == 1
|
||||
assert response.matched_states[0].entity_id == climate_1.entity_id
|
||||
|
||||
# Wrong area name
|
||||
with pytest.raises(intent.MatchFailedError) as err:
|
||||
await intent.async_handle(
|
||||
hass,
|
||||
"test",
|
||||
intent.INTENT_GET_TEMPERATURE,
|
||||
{"area": {"value": bedroom_area.name}},
|
||||
assistant=conversation.DOMAIN,
|
||||
)
|
||||
assert err.value.result.no_match_reason == intent.MatchFailedReason.AREA
|
||||
|
||||
# Neither are exposed
|
||||
async_expose_entity(hass, conversation.DOMAIN, climate_1.entity_id, False)
|
||||
async_expose_entity(hass, conversation.DOMAIN, climate_2.entity_id, False)
|
||||
|
||||
with pytest.raises(intent.MatchFailedError) as err:
|
||||
await intent.async_handle(
|
||||
hass,
|
||||
"test",
|
||||
intent.INTENT_GET_TEMPERATURE,
|
||||
{},
|
||||
assistant=conversation.DOMAIN,
|
||||
)
|
||||
assert err.value.result.no_match_reason == intent.MatchFailedReason.ASSISTANT
|
||||
|
||||
# Should fail with area
|
||||
with pytest.raises(intent.MatchFailedError) as err:
|
||||
await intent.async_handle(
|
||||
hass,
|
||||
"test",
|
||||
intent.INTENT_GET_TEMPERATURE,
|
||||
{"area": {"value": living_room_area.name}},
|
||||
assistant=conversation.DOMAIN,
|
||||
)
|
||||
assert err.value.result.no_match_reason == intent.MatchFailedReason.ASSISTANT
|
||||
|
||||
# Should fail with both names
|
||||
for name in (climate_1.name, climate_2.name):
|
||||
with pytest.raises(intent.MatchFailedError) as err:
|
||||
await intent.async_handle(
|
||||
hass,
|
||||
"test",
|
||||
intent.INTENT_GET_TEMPERATURE,
|
||||
{"name": {"value": name}},
|
||||
assistant=conversation.DOMAIN,
|
||||
)
|
||||
assert err.value.result.no_match_reason == intent.MatchFailedReason.ASSISTANT
|
||||
@@ -8,7 +8,15 @@ from unittest.mock import AsyncMock, MagicMock
|
||||
|
||||
from music_assistant_models.api import MassEvent
|
||||
from music_assistant_models.enums import EventType
|
||||
from music_assistant_models.media_items import Album, Artist, Playlist, Radio, Track
|
||||
from music_assistant_models.media_items import (
|
||||
Album,
|
||||
Artist,
|
||||
Audiobook,
|
||||
Playlist,
|
||||
Podcast,
|
||||
Radio,
|
||||
Track,
|
||||
)
|
||||
from music_assistant_models.player import Player
|
||||
from music_assistant_models.player_queue import PlayerQueue
|
||||
from syrupy import SnapshotAssertion
|
||||
@@ -62,6 +70,10 @@ async def setup_integration_from_fixtures(
|
||||
music.get_playlist_tracks = AsyncMock(return_value=library_playlist_tracks)
|
||||
library_radios = create_library_radios_from_fixture()
|
||||
music.get_library_radios = AsyncMock(return_value=library_radios)
|
||||
library_audiobooks = create_library_audiobooks_from_fixture()
|
||||
music.get_library_audiobooks = AsyncMock(return_value=library_audiobooks)
|
||||
library_podcasts = create_library_podcasts_from_fixture()
|
||||
music.get_library_podcasts = AsyncMock(return_value=library_podcasts)
|
||||
music.get_item_by_uri = AsyncMock()
|
||||
|
||||
config_entry.add_to_hass(hass)
|
||||
@@ -132,6 +144,18 @@ def create_library_radios_from_fixture() -> list[Radio]:
|
||||
return [Radio.from_dict(radio_data) for radio_data in fixture_data]
|
||||
|
||||
|
||||
def create_library_audiobooks_from_fixture() -> list[Audiobook]:
|
||||
"""Create MA Audiobooks from fixture."""
|
||||
fixture_data = load_and_parse_fixture("library_audiobooks")
|
||||
return [Audiobook.from_dict(radio_data) for radio_data in fixture_data]
|
||||
|
||||
|
||||
def create_library_podcasts_from_fixture() -> list[Podcast]:
|
||||
"""Create MA Podcasts from fixture."""
|
||||
fixture_data = load_and_parse_fixture("library_podcasts")
|
||||
return [Podcast.from_dict(podcast_data) for podcast_data in fixture_data]
|
||||
|
||||
|
||||
async def trigger_subscription_callback(
|
||||
hass: HomeAssistant,
|
||||
client: MagicMock,
|
||||
|
||||
489
tests/components/music_assistant/fixtures/library_audiobooks.json
Normal file
@@ -0,0 +1,489 @@
|
||||
{
|
||||
"library_audiobooks": [
|
||||
{
|
||||
"item_id": "1",
|
||||
"provider": "library",
|
||||
"name": "Test Audiobook",
|
||||
"version": "",
|
||||
"sort_name": "test audiobook",
|
||||
"uri": "library://audiobook/1",
|
||||
"external_ids": [],
|
||||
"is_playable": true,
|
||||
"media_type": "audiobook",
|
||||
"provider_mappings": [
|
||||
{
|
||||
"item_id": "test-audiobook.mp3",
|
||||
"provider_domain": "filesystem_smb",
|
||||
"provider_instance": "filesystem_smb--7Kf8QySu",
|
||||
"available": true,
|
||||
"audio_format": {
|
||||
"content_type": "mp3",
|
||||
"codec_type": "?",
|
||||
"sample_rate": 48000,
|
||||
"bit_depth": 16,
|
||||
"channels": 1,
|
||||
"output_format_str": "mp3",
|
||||
"bit_rate": 90304
|
||||
},
|
||||
"url": null,
|
||||
"details": "1738502411"
|
||||
}
|
||||
],
|
||||
"metadata": {
|
||||
"description": "Cover (front)",
|
||||
"review": null,
|
||||
"explicit": null,
|
||||
"images": [
|
||||
{
|
||||
"type": "thumb",
|
||||
"path": "test-audiobook.mp3",
|
||||
"provider": "filesystem_smb--7Kf8QySu",
|
||||
"remotely_accessible": false
|
||||
}
|
||||
],
|
||||
"genres": [],
|
||||
"mood": null,
|
||||
"style": null,
|
||||
"copyright": null,
|
||||
"lyrics": "",
|
||||
"label": null,
|
||||
"links": null,
|
||||
"performers": null,
|
||||
"preview": null,
|
||||
"popularity": null,
|
||||
"release_date": null,
|
||||
"languages": null,
|
||||
"chapters": [],
|
||||
"last_refresh": null
|
||||
},
|
||||
"favorite": false,
|
||||
"position": null,
|
||||
"publisher": null,
|
||||
"authors": ["TestWriter"],
|
||||
"narrators": [],
|
||||
"duration": 9,
|
||||
"fully_played": true,
|
||||
"resume_position_ms": 9000
|
||||
},
|
||||
{
|
||||
"item_id": "11",
|
||||
"provider": "library",
|
||||
"name": "Test Audiobook 0",
|
||||
"version": "",
|
||||
"sort_name": "test audiobook 0",
|
||||
"uri": "library://audiobook/11",
|
||||
"external_ids": [],
|
||||
"is_playable": true,
|
||||
"media_type": "audiobook",
|
||||
"provider_mappings": [
|
||||
{
|
||||
"item_id": "0",
|
||||
"provider_domain": "test",
|
||||
"provider_instance": "test",
|
||||
"available": true,
|
||||
"audio_format": {
|
||||
"content_type": "?",
|
||||
"codec_type": "?",
|
||||
"sample_rate": 44100,
|
||||
"bit_depth": 16,
|
||||
"channels": 2,
|
||||
"output_format_str": "?",
|
||||
"bit_rate": 0
|
||||
},
|
||||
"url": null,
|
||||
"details": null
|
||||
}
|
||||
],
|
||||
"metadata": {
|
||||
"description": "This is a description for Test Audiobook",
|
||||
"review": null,
|
||||
"explicit": null,
|
||||
"images": [
|
||||
{
|
||||
"type": "thumb",
|
||||
"path": "logo.png",
|
||||
"provider": "builtin",
|
||||
"remotely_accessible": false
|
||||
}
|
||||
],
|
||||
"genres": null,
|
||||
"mood": null,
|
||||
"style": null,
|
||||
"copyright": null,
|
||||
"lyrics": null,
|
||||
"label": null,
|
||||
"links": null,
|
||||
"performers": null,
|
||||
"preview": null,
|
||||
"popularity": null,
|
||||
"release_date": null,
|
||||
"languages": null,
|
||||
"chapters": [
|
||||
{
|
||||
"position": 1,
|
||||
"name": "Chapter 1",
|
||||
"start": 10.0,
|
||||
"end": 20.0
|
||||
},
|
||||
{
|
||||
"position": 2,
|
||||
"name": "Chapter 2",
|
||||
"start": 20.0,
|
||||
"end": 40.0
|
||||
},
|
||||
{
|
||||
"position": 2,
|
||||
"name": "Chapter 3",
|
||||
"start": 40.0,
|
||||
"end": null
|
||||
}
|
||||
],
|
||||
"last_refresh": null
|
||||
},
|
||||
"favorite": false,
|
||||
"position": null,
|
||||
"publisher": "Test Publisher",
|
||||
"authors": ["AudioBook Author"],
|
||||
"narrators": ["AudioBook Narrator"],
|
||||
"duration": 60,
|
||||
"fully_played": null,
|
||||
"resume_position_ms": null
|
||||
},
|
||||
{
|
||||
"item_id": "12",
|
||||
"provider": "library",
|
||||
"name": "Test Audiobook 1",
|
||||
"version": "",
|
||||
"sort_name": "test audiobook 1",
|
||||
"uri": "library://audiobook/12",
|
||||
"external_ids": [],
|
||||
"is_playable": true,
|
||||
"media_type": "audiobook",
|
||||
"provider_mappings": [
|
||||
{
|
||||
"item_id": "1",
|
||||
"provider_domain": "test",
|
||||
"provider_instance": "test",
|
||||
"available": true,
|
||||
"audio_format": {
|
||||
"content_type": "?",
|
||||
"codec_type": "?",
|
||||
"sample_rate": 44100,
|
||||
"bit_depth": 16,
|
||||
"channels": 2,
|
||||
"output_format_str": "?",
|
||||
"bit_rate": 0
|
||||
},
|
||||
"url": null,
|
||||
"details": null
|
||||
}
|
||||
],
|
||||
"metadata": {
|
||||
"description": "This is a description for Test Audiobook",
|
||||
"review": null,
|
||||
"explicit": null,
|
||||
"images": [
|
||||
{
|
||||
"type": "thumb",
|
||||
"path": "logo.png",
|
||||
"provider": "builtin",
|
||||
"remotely_accessible": false
|
||||
}
|
||||
],
|
||||
"genres": null,
|
||||
"mood": null,
|
||||
"style": null,
|
||||
"copyright": null,
|
||||
"lyrics": null,
|
||||
"label": null,
|
||||
"links": null,
|
||||
"performers": null,
|
||||
"preview": null,
|
||||
"popularity": null,
|
||||
"release_date": null,
|
||||
"languages": null,
|
||||
"chapters": [
|
||||
{
|
||||
"position": 1,
|
||||
"name": "Chapter 1",
|
||||
"start": 10.0,
|
||||
"end": 20.0
|
||||
},
|
||||
{
|
||||
"position": 2,
|
||||
"name": "Chapter 2",
|
||||
"start": 20.0,
|
||||
"end": 40.0
|
||||
},
|
||||
{
|
||||
"position": 2,
|
||||
"name": "Chapter 3",
|
||||
"start": 40.0,
|
||||
"end": null
|
||||
}
|
||||
],
|
||||
"last_refresh": null
|
||||
},
|
||||
"favorite": false,
|
||||
"position": null,
|
||||
"publisher": "Test Publisher",
|
||||
"authors": ["AudioBook Author"],
|
||||
"narrators": ["AudioBook Narrator"],
|
||||
"duration": 60,
|
||||
"fully_played": null,
|
||||
"resume_position_ms": null
|
||||
},
|
||||
{
|
||||
"item_id": "13",
|
||||
"provider": "library",
|
||||
"name": "Test Audiobook 2",
|
||||
"version": "",
|
||||
"sort_name": "test audiobook 2",
|
||||
"uri": "library://audiobook/13",
|
||||
"external_ids": [],
|
||||
"is_playable": true,
|
||||
"media_type": "audiobook",
|
||||
"provider_mappings": [
|
||||
{
|
||||
"item_id": "2",
|
||||
"provider_domain": "test",
|
||||
"provider_instance": "test",
|
||||
"available": true,
|
||||
"audio_format": {
|
||||
"content_type": "?",
|
||||
"codec_type": "?",
|
||||
"sample_rate": 44100,
|
||||
"bit_depth": 16,
|
||||
"channels": 2,
|
||||
"output_format_str": "?",
|
||||
"bit_rate": 0
|
||||
},
|
||||
"url": null,
|
||||
"details": null
|
||||
}
|
||||
],
|
||||
"metadata": {
|
||||
"description": "This is a description for Test Audiobook",
|
||||
"review": null,
|
||||
"explicit": null,
|
||||
"images": [
|
||||
{
|
||||
"type": "thumb",
|
||||
"path": "logo.png",
|
||||
"provider": "builtin",
|
||||
"remotely_accessible": false
|
||||
}
|
||||
],
|
||||
"genres": null,
|
||||
"mood": null,
|
||||
"style": null,
|
||||
"copyright": null,
|
||||
"lyrics": null,
|
||||
"label": null,
|
||||
"links": null,
|
||||
"performers": null,
|
||||
"preview": null,
|
||||
"popularity": null,
|
||||
"release_date": null,
|
||||
"languages": null,
|
||||
"chapters": [
|
||||
{
|
||||
"position": 1,
|
||||
"name": "Chapter 1",
|
||||
"start": 10.0,
|
||||
"end": 20.0
|
||||
},
|
||||
{
|
||||
"position": 2,
|
||||
"name": "Chapter 2",
|
||||
"start": 20.0,
|
||||
"end": 40.0
|
||||
},
|
||||
{
|
||||
"position": 2,
|
||||
"name": "Chapter 3",
|
||||
"start": 40.0,
|
||||
"end": null
|
||||
}
|
||||
],
|
||||
"last_refresh": null
|
||||
},
|
||||
"favorite": false,
|
||||
"position": null,
|
||||
"publisher": "Test Publisher",
|
||||
"authors": ["AudioBook Author"],
|
||||
"narrators": ["AudioBook Narrator"],
|
||||
"duration": 60,
|
||||
"fully_played": null,
|
||||
"resume_position_ms": null
|
||||
},
|
||||
{
|
||||
"item_id": "14",
|
||||
"provider": "library",
|
||||
"name": "Test Audiobook 3",
|
||||
"version": "",
|
||||
"sort_name": "test audiobook 3",
|
||||
"uri": "library://audiobook/14",
|
||||
"external_ids": [],
|
||||
"is_playable": true,
|
||||
"media_type": "audiobook",
|
||||
"provider_mappings": [
|
||||
{
|
||||
"item_id": "3",
|
||||
"provider_domain": "test",
|
||||
"provider_instance": "test",
|
||||
"available": true,
|
||||
"audio_format": {
|
||||
"content_type": "?",
|
||||
"codec_type": "?",
|
||||
"sample_rate": 44100,
|
||||
"bit_depth": 16,
|
||||
"channels": 2,
|
||||
"output_format_str": "?",
|
||||
"bit_rate": 0
|
||||
},
|
||||
"url": null,
|
||||
"details": null
|
||||
}
|
||||
],
|
||||
"metadata": {
|
||||
"description": "This is a description for Test Audiobook",
|
||||
"review": null,
|
||||
"explicit": null,
|
||||
"images": [
|
||||
{
|
||||
"type": "thumb",
|
||||
"path": "logo.png",
|
||||
"provider": "builtin",
|
||||
"remotely_accessible": false
|
||||
}
|
||||
],
|
||||
"genres": null,
|
||||
"mood": null,
|
||||
"style": null,
|
||||
"copyright": null,
|
||||
"lyrics": null,
|
||||
"label": null,
|
||||
"links": null,
|
||||
"performers": null,
|
||||
"preview": null,
|
||||
"popularity": null,
|
||||
"release_date": null,
|
||||
"languages": null,
|
||||
"chapters": [
|
||||
{
|
||||
"position": 1,
|
||||
"name": "Chapter 1",
|
||||
"start": 10.0,
|
||||
"end": 20.0
|
||||
},
|
||||
{
|
||||
"position": 2,
|
||||
"name": "Chapter 2",
|
||||
"start": 20.0,
|
||||
"end": 40.0
|
||||
},
|
||||
{
|
||||
"position": 2,
|
||||
"name": "Chapter 3",
|
||||
"start": 40.0,
|
||||
"end": null
|
||||
}
|
||||
],
|
||||
"last_refresh": null
|
||||
},
|
||||
"favorite": false,
|
||||
"position": null,
|
||||
"publisher": "Test Publisher",
|
||||
"authors": ["AudioBook Author"],
|
||||
"narrators": ["AudioBook Narrator"],
|
||||
"duration": 60,
|
||||
"fully_played": null,
|
||||
"resume_position_ms": null
|
||||
},
|
||||
{
|
||||
"item_id": "15",
|
||||
"provider": "library",
|
||||
"name": "Test Audiobook 4",
|
||||
"version": "",
|
||||
"sort_name": "test audiobook 4",
|
||||
"uri": "library://audiobook/15",
|
||||
"external_ids": [],
|
||||
"is_playable": true,
|
||||
"media_type": "audiobook",
|
||||
"provider_mappings": [
|
||||
{
|
||||
"item_id": "4",
|
||||
"provider_domain": "test",
|
||||
"provider_instance": "test",
|
||||
"available": true,
|
||||
"audio_format": {
|
||||
"content_type": "?",
|
||||
"codec_type": "?",
|
||||
"sample_rate": 44100,
|
||||
"bit_depth": 16,
|
||||
"channels": 2,
|
||||
"output_format_str": "?",
|
||||
"bit_rate": 0
|
||||
},
|
||||
"url": null,
|
||||
"details": null
|
||||
}
|
||||
],
|
||||
"metadata": {
|
||||
"description": "This is a description for Test Audiobook",
|
||||
"review": null,
|
||||
"explicit": null,
|
||||
"images": [
|
||||
{
|
||||
"type": "thumb",
|
||||
"path": "logo.png",
|
||||
"provider": "builtin",
|
||||
"remotely_accessible": false
|
||||
}
|
||||
],
|
||||
"genres": null,
|
||||
"mood": null,
|
||||
"style": null,
|
||||
"copyright": null,
|
||||
"lyrics": null,
|
||||
"label": null,
|
||||
"links": null,
|
||||
"performers": null,
|
||||
"preview": null,
|
||||
"popularity": null,
|
||||
"release_date": null,
|
||||
"languages": null,
|
||||
"chapters": [
|
||||
{
|
||||
"position": 1,
|
||||
"name": "Chapter 1",
|
||||
"start": 10.0,
|
||||
"end": 20.0
|
||||
},
|
||||
{
|
||||
"position": 2,
|
||||
"name": "Chapter 2",
|
||||
"start": 20.0,
|
||||
"end": 40.0
|
||||
},
|
||||
{
|
||||
"position": 2,
|
||||
"name": "Chapter 3",
|
||||
"start": 40.0,
|
||||
"end": null
|
||||
}
|
||||
],
|
||||
"last_refresh": null
|
||||
},
|
||||
"favorite": false,
|
||||
"position": null,
|
||||
"publisher": "Test Publisher",
|
||||
"authors": ["AudioBook Author"],
|
||||
"narrators": ["AudioBook Narrator"],
|
||||
"duration": 60,
|
||||
"fully_played": null,
|
||||
"resume_position_ms": null
|
||||
}
|
||||
]
|
||||
}
|
||||
309
tests/components/music_assistant/fixtures/library_podcasts.json
Normal file
@@ -0,0 +1,309 @@
|
||||
{
|
||||
"library_podcasts": [
|
||||
{
|
||||
"item_id": "6",
|
||||
"provider": "library",
|
||||
"name": "Test Podcast 0",
|
||||
"version": "",
|
||||
"sort_name": "test podcast 0",
|
||||
"uri": "library://podcast/6",
|
||||
"external_ids": [],
|
||||
"is_playable": true,
|
||||
"media_type": "podcast",
|
||||
"provider_mappings": [
|
||||
{
|
||||
"item_id": "0",
|
||||
"provider_domain": "test",
|
||||
"provider_instance": "test",
|
||||
"available": true,
|
||||
"audio_format": {
|
||||
"content_type": "?",
|
||||
"codec_type": "?",
|
||||
"sample_rate": 44100,
|
||||
"bit_depth": 16,
|
||||
"channels": 2,
|
||||
"output_format_str": "?",
|
||||
"bit_rate": 0
|
||||
},
|
||||
"url": null,
|
||||
"details": null
|
||||
}
|
||||
],
|
||||
"metadata": {
|
||||
"description": null,
|
||||
"review": null,
|
||||
"explicit": null,
|
||||
"images": [
|
||||
{
|
||||
"type": "thumb",
|
||||
"path": "logo.png",
|
||||
"provider": "builtin",
|
||||
"remotely_accessible": false
|
||||
}
|
||||
],
|
||||
"genres": null,
|
||||
"mood": null,
|
||||
"style": null,
|
||||
"copyright": null,
|
||||
"lyrics": null,
|
||||
"label": null,
|
||||
"links": null,
|
||||
"performers": null,
|
||||
"preview": null,
|
||||
"popularity": null,
|
||||
"release_date": null,
|
||||
"languages": null,
|
||||
"chapters": null,
|
||||
"last_refresh": null
|
||||
},
|
||||
"favorite": false,
|
||||
"position": null,
|
||||
"publisher": "Test Publisher",
|
||||
"total_episodes": null
|
||||
},
|
||||
{
|
||||
"item_id": "7",
|
||||
"provider": "library",
|
||||
"name": "Test Podcast 1",
|
||||
"version": "",
|
||||
"sort_name": "test podcast 1",
|
||||
"uri": "library://podcast/7",
|
||||
"external_ids": [],
|
||||
"is_playable": true,
|
||||
"media_type": "podcast",
|
||||
"provider_mappings": [
|
||||
{
|
||||
"item_id": "1",
|
||||
"provider_domain": "test",
|
||||
"provider_instance": "test",
|
||||
"available": true,
|
||||
"audio_format": {
|
||||
"content_type": "?",
|
||||
"codec_type": "?",
|
||||
"sample_rate": 44100,
|
||||
"bit_depth": 16,
|
||||
"channels": 2,
|
||||
"output_format_str": "?",
|
||||
"bit_rate": 0
|
||||
},
|
||||
"url": null,
|
||||
"details": null
|
||||
}
|
||||
],
|
||||
"metadata": {
|
||||
"description": null,
|
||||
"review": null,
|
||||
"explicit": null,
|
||||
"images": [
|
||||
{
|
||||
"type": "thumb",
|
||||
"path": "logo.png",
|
||||
"provider": "builtin",
|
||||
"remotely_accessible": false
|
||||
}
|
||||
],
|
||||
"genres": null,
|
||||
"mood": null,
|
||||
"style": null,
|
||||
"copyright": null,
|
||||
"lyrics": null,
|
||||
"label": null,
|
||||
"links": null,
|
||||
"performers": null,
|
||||
"preview": null,
|
||||
"popularity": null,
|
||||
"release_date": null,
|
||||
"languages": null,
|
||||
"chapters": null,
|
||||
"last_refresh": null
|
||||
},
|
||||
"favorite": false,
|
||||
"position": null,
|
||||
"publisher": "Test Publisher",
|
||||
"total_episodes": null
|
||||
},
|
||||
{
|
||||
"item_id": "8",
|
||||
"provider": "library",
|
||||
"name": "Test Podcast 2",
|
||||
"version": "",
|
||||
"sort_name": "test podcast 2",
|
||||
"uri": "library://podcast/8",
|
||||
"external_ids": [],
|
||||
"is_playable": true,
|
||||
"media_type": "podcast",
|
||||
"provider_mappings": [
|
||||
{
|
||||
"item_id": "2",
|
||||
"provider_domain": "test",
|
||||
"provider_instance": "test",
|
||||
"available": true,
|
||||
"audio_format": {
|
||||
"content_type": "?",
|
||||
"codec_type": "?",
|
||||
"sample_rate": 44100,
|
||||
"bit_depth": 16,
|
||||
"channels": 2,
|
||||
"output_format_str": "?",
|
||||
"bit_rate": 0
|
||||
},
|
||||
"url": null,
|
||||
"details": null
|
||||
}
|
||||
],
|
||||
"metadata": {
|
||||
"description": null,
|
||||
"review": null,
|
||||
"explicit": null,
|
||||
"images": [
|
||||
{
|
||||
"type": "thumb",
|
||||
"path": "logo.png",
|
||||
"provider": "builtin",
|
||||
"remotely_accessible": false
|
||||
}
|
||||
],
|
||||
"genres": null,
|
||||
"mood": null,
|
||||
"style": null,
|
||||
"copyright": null,
|
||||
"lyrics": null,
|
||||
"label": null,
|
||||
"links": null,
|
||||
"performers": null,
|
||||
"preview": null,
|
||||
"popularity": null,
|
||||
"release_date": null,
|
||||
"languages": null,
|
||||
"chapters": null,
|
||||
"last_refresh": null
|
||||
},
|
||||
"favorite": false,
|
||||
"position": null,
|
||||
"publisher": "Test Publisher",
|
||||
"total_episodes": null
|
||||
},
|
||||
{
|
||||
"item_id": "9",
|
||||
"provider": "library",
|
||||
"name": "Test Podcast 3",
|
||||
"version": "",
|
||||
"sort_name": "test podcast 3",
|
||||
"uri": "library://podcast/9",
|
||||
"external_ids": [],
|
||||
"is_playable": true,
|
||||
"media_type": "podcast",
|
||||
"provider_mappings": [
|
||||
{
|
||||
"item_id": "3",
|
||||
"provider_domain": "test",
|
||||
"provider_instance": "test",
|
||||
"available": true,
|
||||
"audio_format": {
|
||||
"content_type": "?",
|
||||
"codec_type": "?",
|
||||
"sample_rate": 44100,
|
||||
"bit_depth": 16,
|
||||
"channels": 2,
|
||||
"output_format_str": "?",
|
||||
"bit_rate": 0
|
||||
},
|
||||
"url": null,
|
||||
"details": null
|
||||
}
|
||||
],
|
||||
"metadata": {
|
||||
"description": null,
|
||||
"review": null,
|
||||
"explicit": null,
|
||||
"images": [
|
||||
{
|
||||
"type": "thumb",
|
||||
"path": "logo.png",
|
||||
"provider": "builtin",
|
||||
"remotely_accessible": false
|
||||
}
|
||||
],
|
||||
"genres": null,
|
||||
"mood": null,
|
||||
"style": null,
|
||||
"copyright": null,
|
||||
"lyrics": null,
|
||||
"label": null,
|
||||
"links": null,
|
||||
"performers": null,
|
||||
"preview": null,
|
||||
"popularity": null,
|
||||
"release_date": null,
|
||||
"languages": null,
|
||||
"chapters": null,
|
||||
"last_refresh": null
|
||||
},
|
||||
"favorite": false,
|
||||
"position": null,
|
||||
"publisher": "Test Publisher",
|
||||
"total_episodes": null
|
||||
},
|
||||
{
|
||||
"item_id": "10",
|
||||
"provider": "library",
|
||||
"name": "Test Podcast 4",
|
||||
"version": "",
|
||||
"sort_name": "test podcast 4",
|
||||
"uri": "library://podcast/10",
|
||||
"external_ids": [],
|
||||
"is_playable": true,
|
||||
"media_type": "podcast",
|
||||
"provider_mappings": [
|
||||
{
|
||||
"item_id": "4",
|
||||
"provider_domain": "test",
|
||||
"provider_instance": "test",
|
||||
"available": true,
|
||||
"audio_format": {
|
||||
"content_type": "?",
|
||||
"codec_type": "?",
|
||||
"sample_rate": 44100,
|
||||
"bit_depth": 16,
|
||||
"channels": 2,
|
||||
"output_format_str": "?",
|
||||
"bit_rate": 0
|
||||
},
|
||||
"url": null,
|
||||
"details": null
|
||||
}
|
||||
],
|
||||
"metadata": {
|
||||
"description": null,
|
||||
"review": null,
|
||||
"explicit": null,
|
||||
"images": [
|
||||
{
|
||||
"type": "thumb",
|
||||
"path": "logo.png",
|
||||
"provider": "builtin",
|
||||
"remotely_accessible": false
|
||||
}
|
||||
],
|
||||
"genres": null,
|
||||
"mood": null,
|
||||
"style": null,
|
||||
"copyright": null,
|
||||
"lyrics": null,
|
||||
"label": null,
|
||||
"links": null,
|
||||
"performers": null,
|
||||
"preview": null,
|
||||
"popularity": null,
|
||||
"release_date": null,
|
||||
"languages": null,
|
||||
"chapters": null,
|
||||
"last_refresh": null
|
||||
},
|
||||
"favorite": false,
|
||||
"position": null,
|
||||
"publisher": "Test Publisher",
|
||||
"total_episodes": null
|
||||
}
|
||||
]
|
||||
}
|
||||
@@ -1,5 +1,195 @@
|
||||
# serializer version: 1
|
||||
# name: test_get_library_action
|
||||
# name: test_get_library_action[album]
|
||||
dict({
|
||||
'items': list([
|
||||
dict({
|
||||
'artists': list([
|
||||
dict({
|
||||
'image': None,
|
||||
'media_type': <MediaType.ARTIST: 'artist'>,
|
||||
'name': 'A Space Love Adventure',
|
||||
'uri': 'library://artist/289',
|
||||
'version': '',
|
||||
}),
|
||||
]),
|
||||
'image': None,
|
||||
'media_type': <MediaType.ALBUM: 'album'>,
|
||||
'name': 'Synth Punk EP',
|
||||
'uri': 'library://album/396',
|
||||
'version': '',
|
||||
}),
|
||||
dict({
|
||||
'artists': list([
|
||||
dict({
|
||||
'image': None,
|
||||
'media_type': <MediaType.ARTIST: 'artist'>,
|
||||
'name': 'Various Artists',
|
||||
'uri': 'library://artist/96',
|
||||
'version': '',
|
||||
}),
|
||||
]),
|
||||
'image': None,
|
||||
'media_type': <MediaType.ALBUM: 'album'>,
|
||||
'name': 'Synthwave (The 80S Revival)',
|
||||
'uri': 'library://album/95',
|
||||
'version': 'The 80S Revival',
|
||||
}),
|
||||
]),
|
||||
'limit': 25,
|
||||
'media_type': <MediaType.ALBUM: 'album'>,
|
||||
'offset': 0,
|
||||
'order_by': 'name',
|
||||
})
|
||||
# ---
|
||||
# name: test_get_library_action[artist]
|
||||
dict({
|
||||
'items': list([
|
||||
dict({
|
||||
'image': None,
|
||||
'media_type': <MediaType.ARTIST: 'artist'>,
|
||||
'name': 'W O L F C L U B',
|
||||
'uri': 'library://artist/127',
|
||||
'version': '',
|
||||
}),
|
||||
]),
|
||||
'limit': 25,
|
||||
'media_type': <MediaType.ARTIST: 'artist'>,
|
||||
'offset': 0,
|
||||
'order_by': 'name',
|
||||
})
|
||||
# ---
|
||||
# name: test_get_library_action[audiobook]
|
||||
dict({
|
||||
'items': list([
|
||||
dict({
|
||||
'image': None,
|
||||
'media_type': <MediaType.AUDIOBOOK: 'audiobook'>,
|
||||
'name': 'Test Audiobook',
|
||||
'uri': 'library://audiobook/1',
|
||||
'version': '',
|
||||
}),
|
||||
dict({
|
||||
'image': None,
|
||||
'media_type': <MediaType.AUDIOBOOK: 'audiobook'>,
|
||||
'name': 'Test Audiobook 0',
|
||||
'uri': 'library://audiobook/11',
|
||||
'version': '',
|
||||
}),
|
||||
dict({
|
||||
'image': None,
|
||||
'media_type': <MediaType.AUDIOBOOK: 'audiobook'>,
|
||||
'name': 'Test Audiobook 1',
|
||||
'uri': 'library://audiobook/12',
|
||||
'version': '',
|
||||
}),
|
||||
dict({
|
||||
'image': None,
|
||||
'media_type': <MediaType.AUDIOBOOK: 'audiobook'>,
|
||||
'name': 'Test Audiobook 2',
|
||||
'uri': 'library://audiobook/13',
|
||||
'version': '',
|
||||
}),
|
||||
dict({
|
||||
'image': None,
|
||||
'media_type': <MediaType.AUDIOBOOK: 'audiobook'>,
|
||||
'name': 'Test Audiobook 3',
|
||||
'uri': 'library://audiobook/14',
|
||||
'version': '',
|
||||
}),
|
||||
dict({
|
||||
'image': None,
|
||||
'media_type': <MediaType.AUDIOBOOK: 'audiobook'>,
|
||||
'name': 'Test Audiobook 4',
|
||||
'uri': 'library://audiobook/15',
|
||||
'version': '',
|
||||
}),
|
||||
]),
|
||||
'limit': 25,
|
||||
'media_type': <MediaType.AUDIOBOOK: 'audiobook'>,
|
||||
'offset': 0,
|
||||
'order_by': 'name',
|
||||
})
|
||||
# ---
|
||||
# name: test_get_library_action[playlist]
|
||||
dict({
|
||||
'items': list([
|
||||
dict({
|
||||
'image': None,
|
||||
'media_type': <MediaType.PLAYLIST: 'playlist'>,
|
||||
'name': '1970s Rock Hits',
|
||||
'uri': 'library://playlist/40',
|
||||
'version': '',
|
||||
}),
|
||||
]),
|
||||
'limit': 25,
|
||||
'media_type': <MediaType.PLAYLIST: 'playlist'>,
|
||||
'offset': 0,
|
||||
'order_by': 'name',
|
||||
})
|
||||
# ---
|
||||
# name: test_get_library_action[podcast]
|
||||
dict({
|
||||
'items': list([
|
||||
dict({
|
||||
'image': None,
|
||||
'media_type': <MediaType.PODCAST: 'podcast'>,
|
||||
'name': 'Test Podcast 0',
|
||||
'uri': 'library://podcast/6',
|
||||
'version': '',
|
||||
}),
|
||||
dict({
|
||||
'image': None,
|
||||
'media_type': <MediaType.PODCAST: 'podcast'>,
|
||||
'name': 'Test Podcast 1',
|
||||
'uri': 'library://podcast/7',
|
||||
'version': '',
|
||||
}),
|
||||
dict({
|
||||
'image': None,
|
||||
'media_type': <MediaType.PODCAST: 'podcast'>,
|
||||
'name': 'Test Podcast 2',
|
||||
'uri': 'library://podcast/8',
|
||||
'version': '',
|
||||
}),
|
||||
dict({
|
||||
'image': None,
|
||||
'media_type': <MediaType.PODCAST: 'podcast'>,
|
||||
'name': 'Test Podcast 3',
|
||||
'uri': 'library://podcast/9',
|
||||
'version': '',
|
||||
}),
|
||||
dict({
|
||||
'image': None,
|
||||
'media_type': <MediaType.PODCAST: 'podcast'>,
|
||||
'name': 'Test Podcast 4',
|
||||
'uri': 'library://podcast/10',
|
||||
'version': '',
|
||||
}),
|
||||
]),
|
||||
'limit': 25,
|
||||
'media_type': <MediaType.PODCAST: 'podcast'>,
|
||||
'offset': 0,
|
||||
'order_by': 'name',
|
||||
})
|
||||
# ---
|
||||
# name: test_get_library_action[radio]
|
||||
dict({
|
||||
'items': list([
|
||||
dict({
|
||||
'image': None,
|
||||
'media_type': <MediaType.RADIO: 'radio'>,
|
||||
'name': 'fm4 | ORF | HQ',
|
||||
'uri': 'library://radio/1',
|
||||
'version': '',
|
||||
}),
|
||||
]),
|
||||
'limit': 25,
|
||||
'media_type': <MediaType.RADIO: 'radio'>,
|
||||
'offset': 0,
|
||||
'order_by': 'name',
|
||||
})
|
||||
# ---
|
||||
# name: test_get_library_action[track]
|
||||
dict({
|
||||
'items': list([
|
||||
dict({
|
||||
@@ -192,8 +382,12 @@
|
||||
]),
|
||||
'artists': list([
|
||||
]),
|
||||
'audiobooks': list([
|
||||
]),
|
||||
'playlists': list([
|
||||
]),
|
||||
'podcasts': list([
|
||||
]),
|
||||
'radio': list([
|
||||
]),
|
||||
'tracks': list([
|
||||
|
||||
@@ -3,6 +3,7 @@
|
||||
from unittest.mock import AsyncMock, MagicMock
|
||||
|
||||
from music_assistant_models.media_items import SearchResults
|
||||
import pytest
|
||||
from syrupy import SnapshotAssertion
|
||||
|
||||
from homeassistant.components.music_assistant.actions import (
|
||||
@@ -47,9 +48,22 @@ async def test_search_action(
|
||||
assert response == snapshot
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"media_type",
|
||||
[
|
||||
"artist",
|
||||
"album",
|
||||
"track",
|
||||
"playlist",
|
||||
"audiobook",
|
||||
"podcast",
|
||||
"radio",
|
||||
],
|
||||
)
|
||||
async def test_get_library_action(
|
||||
hass: HomeAssistant,
|
||||
music_assistant_client: MagicMock,
|
||||
media_type: str,
|
||||
snapshot: SnapshotAssertion,
|
||||
) -> None:
|
||||
"""Test music assistant get_library action."""
|
||||
@@ -60,7 +74,7 @@ async def test_get_library_action(
|
||||
{
|
||||
ATTR_CONFIG_ENTRY_ID: entry.entry_id,
|
||||
ATTR_FAVORITE: False,
|
||||
ATTR_MEDIA_TYPE: "track",
|
||||
ATTR_MEDIA_TYPE: media_type,
|
||||
},
|
||||
blocking=True,
|
||||
return_response=True,
|
||||
|
||||
31
tests/components/onedrive/snapshots/test_diagnostics.ambr
Normal file
@@ -0,0 +1,31 @@
|
||||
# serializer version: 1
|
||||
# name: test_diagnostics
|
||||
dict({
|
||||
'config': dict({
|
||||
'auth_implementation': 'onedrive',
|
||||
'folder_id': 'my_folder_id',
|
||||
'folder_name': 'name',
|
||||
'token': '**REDACTED**',
|
||||
}),
|
||||
'drive': dict({
|
||||
'drive_type': 'personal',
|
||||
'id': 'mock_drive_id',
|
||||
'name': 'My Drive',
|
||||
'owner': dict({
|
||||
'application': None,
|
||||
'user': dict({
|
||||
'display_name': '**REDACTED**',
|
||||
'email': '**REDACTED**',
|
||||
'id': 'id',
|
||||
}),
|
||||
}),
|
||||
'quota': dict({
|
||||
'deleted': 5,
|
||||
'remaining': 805306368,
|
||||
'state': 'nearing',
|
||||
'total': 5368709120,
|
||||
'used': 4250000000,
|
||||
}),
|
||||
}),
|
||||
})
|
||||
# ---
|
||||
26
tests/components/onedrive/test_diagnostics.py
Normal file
@@ -0,0 +1,26 @@
|
||||
"""Tests for the diagnostics data provided by the OneDrive integration."""
|
||||
|
||||
from syrupy import SnapshotAssertion
|
||||
|
||||
from homeassistant.core import HomeAssistant
|
||||
|
||||
from . import setup_integration
|
||||
|
||||
from tests.common import MockConfigEntry
|
||||
from tests.components.diagnostics import get_diagnostics_for_config_entry
|
||||
from tests.typing import ClientSessionGenerator
|
||||
|
||||
|
||||
async def test_diagnostics(
|
||||
hass: HomeAssistant,
|
||||
hass_client: ClientSessionGenerator,
|
||||
mock_config_entry: MockConfigEntry,
|
||||
snapshot: SnapshotAssertion,
|
||||
) -> None:
|
||||
"""Test diagnostics."""
|
||||
|
||||
await setup_integration(hass, mock_config_entry)
|
||||
assert (
|
||||
await get_diagnostics_for_config_entry(hass, hass_client, mock_config_entry)
|
||||
== snapshot
|
||||
)
|
||||
@@ -236,7 +236,6 @@ async def test_data_cap_issues(
|
||||
|
||||
async def test_1_1_to_1_2_migration(
|
||||
hass: HomeAssistant,
|
||||
mock_onedrive_client: MagicMock,
|
||||
mock_config_entry: MockConfigEntry,
|
||||
mock_folder: Folder,
|
||||
) -> None:
|
||||
@@ -251,12 +250,34 @@ async def test_1_1_to_1_2_migration(
|
||||
},
|
||||
)
|
||||
|
||||
await setup_integration(hass, old_config_entry)
|
||||
assert old_config_entry.data[CONF_FOLDER_ID] == mock_folder.id
|
||||
assert old_config_entry.data[CONF_FOLDER_NAME] == mock_folder.name
|
||||
assert old_config_entry.minor_version == 2
|
||||
|
||||
|
||||
async def test_1_1_to_1_2_migration_failure(
|
||||
hass: HomeAssistant,
|
||||
mock_onedrive_client: MagicMock,
|
||||
mock_config_entry: MockConfigEntry,
|
||||
) -> None:
|
||||
"""Test migration from 1.1 to 1.2 failure."""
|
||||
old_config_entry = MockConfigEntry(
|
||||
unique_id="mock_drive_id",
|
||||
title="John Doe's OneDrive",
|
||||
domain=DOMAIN,
|
||||
data={
|
||||
"auth_implementation": mock_config_entry.data["auth_implementation"],
|
||||
"token": mock_config_entry.data["token"],
|
||||
},
|
||||
)
|
||||
|
||||
# will always 404 after migration, because of dummy id
|
||||
mock_onedrive_client.get_drive_item.side_effect = NotFoundError(404, "Not found")
|
||||
|
||||
await setup_integration(hass, old_config_entry)
|
||||
assert old_config_entry.data[CONF_FOLDER_ID] == mock_folder.id
|
||||
assert old_config_entry.data[CONF_FOLDER_NAME] == mock_folder.name
|
||||
assert old_config_entry.state is ConfigEntryState.MIGRATION_ERROR
|
||||
assert old_config_entry.minor_version == 1
|
||||
|
||||
|
||||
async def test_migration_guard_against_major_downgrade(
|
||||
|
||||
@@ -1,12 +1,13 @@
"""Test backup platform for the Recorder integration."""

from contextlib import AbstractContextManager, nullcontext as does_not_raise
from unittest.mock import patch

import pytest

from homeassistant.components.recorder import Recorder
from homeassistant.components.recorder.backup import async_post_backup, async_pre_backup
from homeassistant.core import HomeAssistant
from homeassistant.core import CoreState, HomeAssistant
from homeassistant.exceptions import HomeAssistantError


@@ -19,6 +20,41 @@ async def test_async_pre_backup(recorder_mock: Recorder, hass: HomeAssistant) ->
        assert lock_mock.called


RAISES_HASS_NOT_RUNNING = pytest.raises(
    HomeAssistantError, match="Home Assistant is not running"
)


@pytest.mark.parametrize(
    ("core_state", "expected_result", "lock_calls"),
    [
        (CoreState.final_write, RAISES_HASS_NOT_RUNNING, 0),
        (CoreState.not_running, RAISES_HASS_NOT_RUNNING, 0),
        (CoreState.running, does_not_raise(), 1),
        (CoreState.starting, RAISES_HASS_NOT_RUNNING, 0),
        (CoreState.stopped, RAISES_HASS_NOT_RUNNING, 0),
        (CoreState.stopping, RAISES_HASS_NOT_RUNNING, 0),
    ],
)
async def test_async_pre_backup_core_state(
    recorder_mock: Recorder,
    hass: HomeAssistant,
    core_state: CoreState,
    expected_result: AbstractContextManager,
    lock_calls: int,
) -> None:
    """Test pre backup in different core states."""
    hass.set_state(core_state)
    with (  # pylint: disable=confusing-with-statement
        patch(
            "homeassistant.components.recorder.core.Recorder.lock_database"
        ) as lock_mock,
        expected_result,
    ):
        await async_pre_backup(hass)
    assert len(lock_mock.mock_calls) == lock_calls


async def test_async_pre_backup_with_timeout(
    recorder_mock: Recorder, hass: HomeAssistant
) -> None:
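The new parametrized test pins down how async_pre_backup behaves for each core state. A minimal sketch of the guard it implies is shown below; this is an illustration of the tested behaviour, not the Recorder's exact implementation (lock_database is the Recorder method the test patches, and get_instance is the standard way to reach the running Recorder instance):

from homeassistant.components.recorder import get_instance
from homeassistant.core import CoreState, HomeAssistant
from homeassistant.exceptions import HomeAssistantError


async def async_pre_backup(hass: HomeAssistant) -> None:
    """Lock the recorder database before a backup is taken."""
    if hass.state is not CoreState.running:
        # Every non-running state in the parametrization expects this error
        # and zero calls to lock_database.
        raise HomeAssistantError("Home Assistant is not running")
    if not await get_instance(hass).lock_database():
        raise HomeAssistantError("Could not lock database")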
@@ -545,6 +545,9 @@
    }),
    'name': None,
    'options': dict({
      'sensor': dict({
        'suggested_display_precision': 2,
      }),
    }),
    'original_device_class': <SensorDeviceClass.ENERGY: 'energy'>,
    'original_icon': None,
@@ -597,6 +600,9 @@
    }),
    'name': None,
    'options': dict({
      'sensor': dict({
        'suggested_display_precision': 2,
      }),
    }),
    'original_device_class': <SensorDeviceClass.ENERGY: 'energy'>,
    'original_icon': None,
@@ -649,6 +655,9 @@
    }),
    'name': None,
    'options': dict({
      'sensor': dict({
        'suggested_display_precision': 2,
      }),
    }),
    'original_device_class': <SensorDeviceClass.ENERGY: 'energy'>,
    'original_icon': None,
@@ -753,6 +762,9 @@
    }),
    'name': None,
    'options': dict({
      'sensor': dict({
        'suggested_display_precision': 2,
      }),
    }),
    'original_device_class': <SensorDeviceClass.POWER: 'power'>,
    'original_icon': None,
@@ -807,6 +819,9 @@
    }),
    'name': None,
    'options': dict({
      'sensor': dict({
        'suggested_display_precision': 2,
      }),
    }),
    'original_device_class': <SensorDeviceClass.ENERGY: 'energy'>,
    'original_icon': None,
@@ -959,6 +974,9 @@
    }),
    'name': None,
    'options': dict({
      'sensor': dict({
        'suggested_display_precision': 2,
      }),
    }),
    'original_device_class': <SensorDeviceClass.ENERGY: 'energy'>,
    'original_icon': None,
@@ -1011,6 +1029,9 @@
    }),
    'name': None,
    'options': dict({
      'sensor': dict({
        'suggested_display_precision': 2,
      }),
    }),
    'original_device_class': <SensorDeviceClass.ENERGY: 'energy'>,
    'original_icon': None,
@@ -1063,6 +1084,9 @@
    }),
    'name': None,
    'options': dict({
      'sensor': dict({
        'suggested_display_precision': 2,
      }),
    }),
    'original_device_class': <SensorDeviceClass.ENERGY: 'energy'>,
    'original_icon': None,
@@ -1167,6 +1191,9 @@
    }),
    'name': None,
    'options': dict({
      'sensor': dict({
        'suggested_display_precision': 2,
      }),
    }),
    'original_device_class': <SensorDeviceClass.POWER: 'power'>,
    'original_icon': None,
@@ -1221,6 +1248,9 @@
    }),
    'name': None,
    'options': dict({
      'sensor': dict({
        'suggested_display_precision': 2,
      }),
    }),
    'original_device_class': <SensorDeviceClass.ENERGY: 'energy'>,
    'original_icon': None,
@@ -1768,6 +1798,9 @@
    }),
    'name': None,
    'options': dict({
      'sensor': dict({
        'suggested_display_precision': 2,
      }),
    }),
    'original_device_class': <SensorDeviceClass.ENERGY: 'energy'>,
    'original_icon': None,
@@ -1820,6 +1853,9 @@
    }),
    'name': None,
    'options': dict({
      'sensor': dict({
        'suggested_display_precision': 2,
      }),
    }),
    'original_device_class': <SensorDeviceClass.ENERGY: 'energy'>,
    'original_icon': None,
@@ -1872,6 +1908,9 @@
    }),
    'name': None,
    'options': dict({
      'sensor': dict({
        'suggested_display_precision': 2,
      }),
    }),
    'original_device_class': <SensorDeviceClass.ENERGY: 'energy'>,
    'original_icon': None,
@@ -1924,6 +1963,9 @@
    }),
    'name': None,
    'options': dict({
      'sensor': dict({
        'suggested_display_precision': 2,
      }),
    }),
    'original_device_class': <SensorDeviceClass.POWER: 'power'>,
    'original_icon': None,
@@ -1978,6 +2020,9 @@
    }),
    'name': None,
    'options': dict({
      'sensor': dict({
        'suggested_display_precision': 2,
      }),
    }),
    'original_device_class': <SensorDeviceClass.ENERGY: 'energy'>,
    'original_icon': None,
@@ -2326,6 +2371,9 @@
    }),
    'name': None,
    'options': dict({
      'sensor': dict({
        'suggested_display_precision': 2,
      }),
    }),
    'original_device_class': <SensorDeviceClass.ENERGY: 'energy'>,
    'original_icon': None,
@@ -2378,6 +2426,9 @@
    }),
    'name': None,
    'options': dict({
      'sensor': dict({
        'suggested_display_precision': 2,
      }),
    }),
    'original_device_class': <SensorDeviceClass.ENERGY: 'energy'>,
    'original_icon': None,
@@ -2430,6 +2481,9 @@
    }),
    'name': None,
    'options': dict({
      'sensor': dict({
        'suggested_display_precision': 2,
      }),
    }),
    'original_device_class': <SensorDeviceClass.ENERGY: 'energy'>,
    'original_icon': None,
@@ -2614,6 +2668,9 @@
    }),
    'name': None,
    'options': dict({
      'sensor': dict({
        'suggested_display_precision': 2,
      }),
    }),
    'original_device_class': <SensorDeviceClass.POWER: 'power'>,
    'original_icon': None,
@@ -2668,6 +2725,9 @@
    }),
    'name': None,
    'options': dict({
      'sensor': dict({
        'suggested_display_precision': 2,
      }),
    }),
    'original_device_class': <SensorDeviceClass.ENERGY: 'energy'>,
    'original_icon': None,
@@ -2768,6 +2828,9 @@
    }),
    'name': None,
    'options': dict({
      'sensor': dict({
        'suggested_display_precision': 2,
      }),
    }),
    'original_device_class': <SensorDeviceClass.ENERGY: 'energy'>,
    'original_icon': None,
@@ -2820,6 +2883,9 @@
    }),
    'name': None,
    'options': dict({
      'sensor': dict({
        'suggested_display_precision': 2,
      }),
    }),
    'original_device_class': <SensorDeviceClass.ENERGY: 'energy'>,
    'original_icon': None,
@@ -2872,6 +2938,9 @@
    }),
    'name': None,
    'options': dict({
      'sensor': dict({
        'suggested_display_precision': 2,
      }),
    }),
    'original_device_class': <SensorDeviceClass.ENERGY: 'energy'>,
    'original_icon': None,
@@ -3066,6 +3135,9 @@
    }),
    'name': None,
    'options': dict({
      'sensor': dict({
        'suggested_display_precision': 2,
      }),
    }),
    'original_device_class': <SensorDeviceClass.POWER: 'power'>,
    'original_icon': None,
@@ -3120,6 +3192,9 @@
    }),
    'name': None,
    'options': dict({
      'sensor': dict({
        'suggested_display_precision': 2,
      }),
    }),
    'original_device_class': <SensorDeviceClass.ENERGY: 'energy'>,
    'original_icon': None,
@@ -3220,6 +3295,9 @@
    }),
    'name': None,
    'options': dict({
      'sensor': dict({
        'suggested_display_precision': 2,
      }),
    }),
    'original_device_class': <SensorDeviceClass.ENERGY: 'energy'>,
    'original_icon': None,
@@ -3272,6 +3350,9 @@
    }),
    'name': None,
    'options': dict({
      'sensor': dict({
        'suggested_display_precision': 2,
      }),
    }),
    'original_device_class': <SensorDeviceClass.ENERGY: 'energy'>,
    'original_icon': None,
@@ -3324,6 +3405,9 @@
    }),
    'name': None,
    'options': dict({
      'sensor': dict({
        'suggested_display_precision': 2,
      }),
    }),
    'original_device_class': <SensorDeviceClass.ENERGY: 'energy'>,
    'original_icon': None,
@@ -3520,6 +3604,9 @@
    }),
    'name': None,
    'options': dict({
      'sensor': dict({
        'suggested_display_precision': 2,
      }),
    }),
    'original_device_class': <SensorDeviceClass.POWER: 'power'>,
    'original_icon': None,
@@ -3574,6 +3661,9 @@
    }),
    'name': None,
    'options': dict({
      'sensor': dict({
        'suggested_display_precision': 2,
      }),
    }),
    'original_device_class': <SensorDeviceClass.ENERGY: 'energy'>,
    'original_icon': None,
@@ -34,6 +34,8 @@ async def test_device(
        identifiers={(DOMAIN, "96a5ef74-5832-a84b-f1f7-ca799957065d")}
    )

    mock_smartthings.get_device_status.reset_mock()

    with patch("homeassistant.components.smartthings.diagnostics.EVENT_WAIT_TIME", 0.1):
        diag = await get_diagnostics_for_device(
            hass, hass_client, mock_config_entry, device
@@ -42,3 +44,6 @@ async def test_device(
    assert diag == snapshot(
        exclude=props("last_changed", "last_reported", "last_updated")
    )
    mock_smartthings.get_device_status.assert_called_once_with(
        "96a5ef74-5832-a84b-f1f7-ca799957065d"
    )
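EVENT_WAIT_TIME is patched to 0.1 s so the test does not sit through the full window during which the SmartThings diagnostics handler collects pushed device events. A rough sketch of that waiting pattern is shown below; every name other than EVENT_WAIT_TIME is an assumption, and the default value is only a guess:

import asyncio

EVENT_WAIT_TIME = 5  # default wait window in seconds (value assumed); the test patches it to 0.1


async def collect_device_events(subscribe) -> list:
    """Collect pushed device events for EVENT_WAIT_TIME seconds.

    `subscribe` stands in for however the integration registers an event
    listener; it must return an unsubscribe callable.
    """
    events: list = []
    unsubscribe = subscribe(events.append)
    try:
        await asyncio.sleep(EVENT_WAIT_TIME)
    finally:
        unsubscribe()
    return events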
@@ -20,12 +20,12 @@ MOCK_LIST_WITH_PROPERTIES = {
    "/Automatic_backup_2025.2.1_2025-02-10_18.31_30202686.tar": [],
    "/Automatic_backup_2025.2.1_2025-02-10_18.31_30202686.metadata.json": [
        Property(
            namespace="homeassistant",
            namespace="https://home-assistant.io",
            name="backup_id",
            value="23e64aec",
        ),
        Property(
            namespace="homeassistant",
            namespace="https://home-assistant.io",
            name="metadata_version",
            value="1",
        ),
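This hunk only changes the custom WebDAV property namespace from the bare string "homeassistant" to the URI form "https://home-assistant.io". As a rough illustration (not the integration's actual helper), reading a backup id back out of such properties could look like this; the property objects are expected to expose .namespace, .name and .value like the Property instances mocked above:

METADATA_NAMESPACE = "https://home-assistant.io"


def backup_id_from_properties(properties) -> str | None:
    """Return the value of the backup_id property, if present."""
    for prop in properties:
        if prop.namespace == METADATA_NAMESPACE and prop.name == "backup_id":
            return prop.value
    return None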