Compare commits

..

2 Commits

Author SHA1 Message Date
G Johansson
66897a247a Fix 2026-04-07 15:36:54 +00:00
G Johansson
5ae4bed868 Fix double reloading in unifi 2026-04-05 15:38:25 +00:00
24 changed files with 102 additions and 624 deletions

View File

@@ -48,7 +48,6 @@ from .const import (
CONF_CODE_EXECUTION,
CONF_MAX_TOKENS,
CONF_PROMPT,
CONF_PROMPT_CACHING,
CONF_RECOMMENDED,
CONF_TEMPERATURE,
CONF_THINKING_BUDGET,
@@ -67,7 +66,6 @@ from .const import (
NON_ADAPTIVE_THINKING_MODELS,
NON_THINKING_MODELS,
WEB_SEARCH_UNSUPPORTED_MODELS,
PromptCaching,
)
if TYPE_CHECKING:
@@ -358,16 +356,6 @@ class ConversationSubentryFlowHandler(ConfigSubentryFlow):
CONF_TEMPERATURE,
default=DEFAULT[CONF_TEMPERATURE],
): NumberSelector(NumberSelectorConfig(min=0, max=1, step=0.05)),
vol.Optional(
CONF_PROMPT_CACHING,
default=DEFAULT[CONF_PROMPT_CACHING],
): SelectSelector(
SelectSelectorConfig(
options=[x.value for x in PromptCaching],
translation_key=CONF_PROMPT_CACHING,
mode=SelectSelectorMode.DROPDOWN,
)
),
}
if user_input is not None:

View File

@@ -1,6 +1,5 @@
"""Constants for the Anthropic integration."""
from enum import StrEnum
import logging
DOMAIN = "anthropic"
@@ -14,7 +13,6 @@ CONF_PROMPT = "prompt"
CONF_CHAT_MODEL = "chat_model"
CONF_CODE_EXECUTION = "code_execution"
CONF_MAX_TOKENS = "max_tokens"
CONF_PROMPT_CACHING = "prompt_caching"
CONF_TEMPERATURE = "temperature"
CONF_THINKING_BUDGET = "thinking_budget"
CONF_THINKING_EFFORT = "thinking_effort"
@@ -26,20 +24,10 @@ CONF_WEB_SEARCH_REGION = "region"
CONF_WEB_SEARCH_COUNTRY = "country"
CONF_WEB_SEARCH_TIMEZONE = "timezone"
class PromptCaching(StrEnum):
    """Prompt caching options.

    Stored in the subentry options under CONF_PROMPT_CACHING; values must
    match the translation keys in strings.json.
    """

    # Prompt caching disabled.
    OFF = "off"
    # Cache only the system prompt (the configured default).
    PROMPT = "prompt"
    # Request-level caching: cache_control set on the whole request.
    AUTOMATIC = "automatic"
DEFAULT = {
CONF_CHAT_MODEL: "claude-haiku-4-5",
CONF_CODE_EXECUTION: False,
CONF_MAX_TOKENS: 3000,
CONF_PROMPT_CACHING: PromptCaching.PROMPT.value,
CONF_TEMPERATURE: 1.0,
CONF_THINKING_BUDGET: 0,
CONF_THINKING_EFFORT: "low",

View File

@@ -91,7 +91,6 @@ from .const import (
CONF_CHAT_MODEL,
CONF_CODE_EXECUTION,
CONF_MAX_TOKENS,
CONF_PROMPT_CACHING,
CONF_TEMPERATURE,
CONF_THINKING_BUDGET,
CONF_THINKING_EFFORT,
@@ -110,7 +109,6 @@ from .const import (
NON_THINKING_MODELS,
PROGRAMMATIC_TOOL_CALLING_UNSUPPORTED_MODELS,
UNSUPPORTED_STRUCTURED_OUTPUT_MODELS,
PromptCaching,
)
from .coordinator import AnthropicConfigEntry, AnthropicCoordinator
@@ -680,7 +678,7 @@ class AnthropicBaseLLMEntity(CoordinatorEntity[AnthropicCoordinator]):
entry_type=dr.DeviceEntryType.SERVICE,
)
async def _async_handle_chat_log( # noqa: C901
async def _async_handle_chat_log(
self,
chat_log: conversation.ChatLog,
structure_name: str | None = None,
@@ -696,6 +694,15 @@ class AnthropicBaseLLMEntity(CoordinatorEntity[AnthropicCoordinator]):
translation_domain=DOMAIN, translation_key="system_message_not_found"
)
# System prompt with caching enabled
system_prompt: list[TextBlockParam] = [
TextBlockParam(
type="text",
text=system.content,
cache_control={"type": "ephemeral"},
)
]
messages, container_id = _convert_content(chat_log.content[1:])
model = options.get(CONF_CHAT_MODEL, DEFAULT[CONF_CHAT_MODEL])
@@ -704,28 +711,11 @@ class AnthropicBaseLLMEntity(CoordinatorEntity[AnthropicCoordinator]):
model=model,
messages=messages,
max_tokens=options.get(CONF_MAX_TOKENS, DEFAULT[CONF_MAX_TOKENS]),
system=system.content,
system=system_prompt,
stream=True,
container=container_id,
)
if (
options.get(CONF_PROMPT_CACHING, DEFAULT[CONF_PROMPT_CACHING])
== PromptCaching.PROMPT
):
model_args["system"] = [
{
"type": "text",
"text": system.content,
"cache_control": {"type": "ephemeral"},
}
]
elif (
options.get(CONF_PROMPT_CACHING, DEFAULT[CONF_PROMPT_CACHING])
== PromptCaching.AUTOMATIC
):
model_args["cache_control"] = {"type": "ephemeral"}
if not model.startswith(tuple(NON_ADAPTIVE_THINKING_MODELS)):
thinking_effort = options.get(
CONF_THINKING_EFFORT, DEFAULT[CONF_THINKING_EFFORT]

View File

@@ -47,13 +47,11 @@
"data": {
"chat_model": "[%key:common::generic::model%]",
"max_tokens": "[%key:component::anthropic::config_subentries::conversation::step::advanced::data::max_tokens%]",
"prompt_caching": "[%key:component::anthropic::config_subentries::conversation::step::advanced::data::prompt_caching%]",
"temperature": "[%key:component::anthropic::config_subentries::conversation::step::advanced::data::temperature%]"
},
"data_description": {
"chat_model": "[%key:component::anthropic::config_subentries::conversation::step::advanced::data_description::chat_model%]",
"max_tokens": "[%key:component::anthropic::config_subentries::conversation::step::advanced::data_description::max_tokens%]",
"prompt_caching": "[%key:component::anthropic::config_subentries::conversation::step::advanced::data_description::prompt_caching%]",
"temperature": "[%key:component::anthropic::config_subentries::conversation::step::advanced::data_description::temperature%]"
},
"title": "[%key:component::anthropic::config_subentries::conversation::step::advanced::title%]"
@@ -105,13 +103,11 @@
"data": {
"chat_model": "[%key:common::generic::model%]",
"max_tokens": "Maximum tokens to return in response",
"prompt_caching": "Caching strategy",
"temperature": "Temperature"
},
"data_description": {
"chat_model": "The model to serve the responses.",
"max_tokens": "Limit the number of response tokens.",
"prompt_caching": "Optimize your API cost and response times based on your usage.",
"temperature": "Control the randomness of the response, trading off between creativity and coherence."
},
"title": "Advanced settings"
@@ -214,13 +210,6 @@
}
},
"selector": {
"prompt_caching": {
"options": {
"automatic": "Full",
"off": "Disabled",
"prompt": "System prompt"
}
},
"thinking_effort": {
"options": {
"high": "[%key:common::state::high%]",

View File

@@ -23,16 +23,3 @@ ATTR_MEDIA_DESCRIPTION = "media_description"
ATTR_LANGUAGE = "language"
ATTR_DURATION = "duration"
ATTR_HIDE_NOTIFICATIONS = "hide_notifications"
ATTR_DISPLAY_NAME = "display_name"
ATTR_NOTE = "note"
ATTR_AVATAR = "avatar"
ATTR_AVATAR_MIME_TYPE = "avatar_mime_type"
ATTR_HEADER = "header"
ATTR_HEADER_MIME_TYPE = "header_mime_type"
ATTR_LOCKED = "locked"
ATTR_BOT = "bot"
ATTR_DISCOVERABLE = "discoverable"
ATTR_FIELDS = "fields"
ATTR_ATTRIBUTION_DOMAINS = "attribution_domains"
ATTR_VALUE = "value"

View File

@@ -43,9 +43,6 @@
},
"unmute_account": {
"service": "mdi:account-voice"
},
"update_profile": {
"service": "mdi:account-edit"
}
}
}

View File

@@ -4,7 +4,6 @@ from datetime import timedelta
from enum import StrEnum
from functools import partial
from math import isfinite
from pathlib import Path
from typing import Any
from mastodon import Mastodon
@@ -12,14 +11,11 @@ from mastodon.Mastodon import (
Account,
MastodonAPIError,
MastodonNotFoundError,
MastodonUnauthorizedError,
MediaAttachment,
)
import voluptuous as vol
from homeassistant.components import camera, image
from homeassistant.components.media_source import async_resolve_media
from homeassistant.const import ATTR_CONFIG_ENTRY_ID, ATTR_NAME
from homeassistant.const import ATTR_CONFIG_ENTRY_ID
from homeassistant.core import (
HomeAssistant,
ServiceCall,
@@ -29,34 +25,20 @@ from homeassistant.core import (
)
from homeassistant.exceptions import HomeAssistantError, ServiceValidationError
from homeassistant.helpers import config_validation as cv, service
from homeassistant.helpers.selector import MediaSelector
from .const import (
ATTR_ACCOUNT_NAME,
ATTR_ATTRIBUTION_DOMAINS,
ATTR_AVATAR,
ATTR_AVATAR_MIME_TYPE,
ATTR_BOT,
ATTR_CONTENT_WARNING,
ATTR_DISCOVERABLE,
ATTR_DISPLAY_NAME,
ATTR_DURATION,
ATTR_FIELDS,
ATTR_HEADER,
ATTR_HEADER_MIME_TYPE,
ATTR_HIDE_NOTIFICATIONS,
ATTR_IDEMPOTENCY_KEY,
ATTR_LANGUAGE,
ATTR_LOCKED,
ATTR_MEDIA,
ATTR_MEDIA_DESCRIPTION,
ATTR_MEDIA_WARNING,
ATTR_NOTE,
ATTR_STATUS,
ATTR_VALUE,
ATTR_VISIBILITY,
DOMAIN,
LOGGER,
)
from .coordinator import MastodonConfigEntry
from .utils import get_media_type
@@ -116,24 +98,6 @@ SERVICE_POST_SCHEMA = vol.Schema(
}
)
SERVICE_UPDATE_PROFILE = "update_profile"
SERVICE_UPDATE_PROFILE_SCHEMA = vol.Schema(
{
vol.Required(ATTR_CONFIG_ENTRY_ID): str,
vol.Optional(ATTR_DISPLAY_NAME): str,
vol.Optional(ATTR_NOTE): str,
vol.Optional(ATTR_AVATAR): MediaSelector({"accept": ["image/*"]}),
vol.Optional(ATTR_HEADER): MediaSelector({"accept": ["image/*"]}),
vol.Optional(ATTR_LOCKED): bool,
vol.Optional(ATTR_BOT): bool,
vol.Optional(ATTR_DISCOVERABLE): bool,
vol.Optional(ATTR_FIELDS): vol.All(
cv.ensure_list, vol.Length(max=4), [dict[str, str]]
),
vol.Optional(ATTR_ATTRIBUTION_DOMAINS): vol.All(cv.ensure_list, [str]),
}
)
@callback
def async_setup_services(hass: HomeAssistant) -> None:
@@ -160,13 +124,6 @@ def async_setup_services(hass: HomeAssistant) -> None:
hass.services.async_register(
DOMAIN, SERVICE_POST, _async_post, schema=SERVICE_POST_SCHEMA
)
hass.services.async_register(
DOMAIN,
SERVICE_UPDATE_PROFILE,
_async_update_profile,
schema=SERVICE_UPDATE_PROFILE_SCHEMA,
supports_response=SupportsResponse.ONLY,
)
async def _async_account_lookup(
@@ -362,71 +319,3 @@ def _post(hass: HomeAssistant, client: Mastodon, **kwargs: Any) -> None:
translation_domain=DOMAIN,
translation_key="unable_to_send_message",
) from err
async def _async_update_profile(call: ServiceCall) -> ServiceResponse:
    """Update profile information on the selected Mastodon account.

    Resolves avatar/header media selector values to raw content, normalizes
    extra profile fields into (name, value) tuples, then forwards everything
    to ``account_update_credentials`` and returns its response.

    Raises:
        HomeAssistantError: if the API call fails; an authorization failure
            additionally starts a reauth flow for the config entry.
    """
    # call.data is read-only; one dict() call makes a mutable copy to pop
    # keys from (the previous dict(call.data.copy()) copied it twice).
    params = dict(call.data)
    entry: MastodonConfigEntry = service.async_get_config_entry(
        call.hass, DOMAIN, params.pop(ATTR_CONFIG_ENTRY_ID)
    )
    client = entry.runtime_data.client

    # Media selectors yield media-source references; resolve each to
    # content bytes/path plus a MIME type before passing to the API.
    if avatar := params.pop(ATTR_AVATAR, None):
        params[ATTR_AVATAR], params[ATTR_AVATAR_MIME_TYPE] = await _resolve_media(
            call.hass, avatar
        )
    if header := params.pop(ATTR_HEADER, None):
        params[ATTR_HEADER], params[ATTR_HEADER_MIME_TYPE] = await _resolve_media(
            call.hass, header
        )

    # The client expects fields as (name, value) tuples; strip whitespace
    # and drop entries whose name is empty after stripping.
    if fields := params.get(ATTR_FIELDS):
        params[ATTR_FIELDS] = [
            (field[ATTR_NAME].strip(), field[ATTR_VALUE].strip())
            for field in fields
            if field[ATTR_NAME].strip()
        ]

    try:
        # The Mastodon client is synchronous; run it in the executor.
        return await call.hass.async_add_executor_job(
            lambda: client.account_update_credentials(**params)
        )
    except MastodonUnauthorizedError as error:
        # Credentials rejected: prompt the user to re-authenticate.
        entry.async_start_reauth(call.hass)
        raise HomeAssistantError(
            translation_domain=DOMAIN,
            translation_key="auth_failed",
        ) from error
    except MastodonAPIError as err:
        LOGGER.debug("Full exception:", exc_info=err)
        raise HomeAssistantError(
            translation_domain=DOMAIN,
            translation_key="unable_to_update_profile",
        ) from err
async def _resolve_media(
    hass: HomeAssistant, media_source: dict[str, str]
) -> tuple[bytes | Path, str | None]:
    """Resolve a media selector value to content and a MIME type.

    Camera and image entities are snapshotted directly; anything else is
    resolved through the media source integration and must yield a local
    file path.

    Returns:
        A ``(content, mime_type)`` tuple; content is raw bytes for
        camera/image snapshots, otherwise a local ``Path``.

    Raises:
        ServiceValidationError: if the media source resolves to something
            without a local path (e.g. a remote stream/URL).
    """
    media_content_id: str = media_source["media_content_id"]

    # Camera entities: grab a fresh snapshot.
    if media_content_id.startswith("media-source://camera/"):
        entity_id = media_content_id.removeprefix("media-source://camera/")
        snapshot = await camera.async_get_image(hass, entity_id)
        return snapshot.content, snapshot.content_type

    # Image entities: fetch the current image content.
    if media_content_id.startswith("media-source://image/"):
        entity_id = media_content_id.removeprefix("media-source://image/")
        img = await image.async_get_image(hass, entity_id)
        return img.content, img.content_type

    # Everything else goes through the media source resolver; reuse the
    # already-extracted id rather than re-reading the dict.
    media = await async_resolve_media(hass, media_content_id, None)
    if media.path is None:
        raise ServiceValidationError(
            translation_domain=DOMAIN,
            translation_key="media_source_not_supported",
            translation_placeholders={"media_content_id": media_content_id},
        )
    return media.path, media.mime_type

View File

@@ -1,6 +1,6 @@
get_account:
fields:
config_entry_id: &config_entry_id
config_entry_id:
required: true
selector:
config_entry:
@@ -11,7 +11,11 @@ get_account:
text:
mute_account:
fields:
config_entry_id: *config_entry_id
config_entry_id:
required: true
selector:
config_entry:
integration: mastodon
account_name:
required: true
selector:
@@ -28,14 +32,22 @@ mute_account:
boolean:
unmute_account:
fields:
config_entry_id: *config_entry_id
config_entry_id:
required: true
selector:
config_entry:
integration: mastodon
account_name:
required: true
selector:
text:
post:
fields:
config_entry_id: *config_entry_id
config_entry_id:
required: true
selector:
config_entry:
integration: mastodon
status:
required: true
selector:
@@ -270,55 +282,3 @@ post:
required: true
selector:
boolean:
update_profile:
fields:
config_entry_id: *config_entry_id
display_name:
selector:
text:
note:
selector:
text:
multiline: true
avatar:
required: false
selector:
media:
accept:
- "image/*"
header:
required: false
selector:
media:
accept:
- "image/*"
locked:
selector:
boolean:
bot:
selector:
boolean:
discoverable:
selector:
boolean:
fields:
selector:
object:
label_field: "value"
description_field: "name"
multiple: true
translation_key: fields
fields:
name:
required: true
selector:
text:
value:
required: true
selector:
text:
attribution_domains:
selector:
text:
multiple: true
type: url

View File

@@ -104,9 +104,6 @@
"idempotency_key_too_short": {
"message": "Idempotency key must be at least 4 characters long."
},
"media_source_not_supported": {
"message": "Media source {media_content_id} is not supported."
},
"mute_duration_too_long": {
"message": "Mute duration is too long."
},
@@ -125,26 +122,11 @@
"unable_to_unmute_account": {
"message": "Unable to unmute account \"{account_name}\""
},
"unable_to_update_profile": {
"message": "Unable to update profile."
},
"unable_to_upload_image": {
"message": "Unable to upload image {media_path}."
}
},
"selector": {
"fields": {
"fields": {
"name": {
"description": "The label for this field.",
"name": "Label"
},
"value": {
"description": "The value for this field.",
"name": "Value"
}
}
},
"post_visibility": {
"options": {
"direct": "Direct - Mentioned accounts only",
@@ -246,52 +228,6 @@
}
},
"name": "Unmute account"
},
"update_profile": {
"description": "Updates your Mastodon profile information and pictures.",
"fields": {
"attribution_domains": {
"description": "Websites allowed to credit you. Protects from false attributions. Note that setting attribution domains will replace all existing attribution domains, not just the ones specified here.",
"name": "Attribution domains"
},
"avatar": {
"description": "An image to set as your profile picture. WEBP, PNG, or JPG. At most 8 MB. Will be downscaled to 400x400px.",
"name": "Profile picture"
},
"bot": {
"description": "Signal to others that the account mainly performs automated actions.",
"name": "Automated account"
},
"config_entry_id": {
"description": "Select the Mastodon account to update the profile of.",
"name": "[%key:component::mastodon::services::post::fields::config_entry_id::name%]"
},
"discoverable": {
"description": "Whether your profile should be discoverable. Public posts and the profile may be featured or recommended across Mastodon.",
"name": "Discoverable"
},
"display_name": {
"description": "The display name to set on your profile.",
"name": "Display name"
},
"fields": {
"description": "Additional profile fields as key-value pairs. Your homepage, pronouns, age, anything you want. Note that updating fields will replace all existing fields, not just the ones specified here.",
"name": "Extra fields"
},
"header": {
"description": "An image to set as your profile header. WEBP, PNG, or JPG. At most 8 MB. Will be downscaled to 1500x500px.",
"name": "Header picture"
},
"locked": {
"description": "Whether to lock your profile. A locked profile requires you to approve followers and hides your posts from non-followers.",
"name": "Lock profile"
},
"note": {
"description": "The bio to set on your profile. You can @mention other people or #hashtags.",
"name": "Bio"
}
},
"name": "Update profile"
}
}
}

View File

@@ -7,5 +7,5 @@
"integration_type": "hub",
"iot_class": "local_polling",
"loggers": ["pyrainbird"],
"requirements": ["pyrainbird==6.3.0"]
"requirements": ["pyrainbird==6.1.1"]
}

View File

@@ -176,7 +176,7 @@ class UnifiFlowHandler(ConfigFlow, domain=DOMAIN):
):
return self.async_abort(reason="already_configured")
return self.async_update_reload_and_abort(
return self.async_update_and_abort(
config_entry, data=self.config, reason=abort_reason
)
@@ -230,7 +230,7 @@ class UnifiFlowHandler(ConfigFlow, domain=DOMAIN):
self._async_abort_entries_match({CONF_HOST: self.config[CONF_HOST]})
await self.async_set_unique_id(mac_address)
self._abort_if_unique_id_configured(updates=self.config)
self._abort_if_unique_id_configured(updates=self.config, reload_on_update=False)
self.context["title_placeholders"] = {
CONF_HOST: self.config[CONF_HOST],

View File

@@ -7,6 +7,13 @@ from typing import TYPE_CHECKING
import aiounifi
from homeassistant.const import (
CONF_HOST,
CONF_PASSWORD,
CONF_PORT,
CONF_USERNAME,
CONF_VERIFY_SSL,
)
from homeassistant.core import Event, HomeAssistant, callback
from homeassistant.helpers import device_registry as dr
from homeassistant.helpers.device_registry import (
@@ -131,6 +138,26 @@ class UnifiHub:
the entry might already have been reset and thus is not available.
"""
hub = config_entry.runtime_data
check_keys = {
CONF_HOST: "host",
CONF_PORT: "port",
CONF_USERNAME: "username",
CONF_PASSWORD: "password",
CONF_SITE_ID: "site",
CONF_VERIFY_SSL: "ssl_context",
}
for key, value in check_keys.items():
if key == CONF_VERIFY_SSL:
# ssl_context is either False or an SSLContext object, so we need to compare it differently
if config_entry.data[CONF_VERIFY_SSL] != bool(
getattr(hub.config, value)
):
hass.config_entries.async_schedule_reload(config_entry.entry_id)
return
if config_entry.data[key] != getattr(hub.config, value):
hass.config_entries.async_schedule_reload(config_entry.entry_id)
return
hub.config = UnifiConfig.from_config_entry(config_entry)
async_dispatcher_send(hass, hub.signal_options_update)

View File

@@ -29,7 +29,7 @@ cached-ipaddress==1.0.1
certifi>=2021.5.30
ciso8601==2.3.3
cronsim==2.7
cryptography==46.0.6
cryptography==46.0.5
dbus-fast==4.0.4
file-read-backwards==2.0.0
fnv-hash-fast==2.0.0

View File

@@ -57,7 +57,7 @@ dependencies = [
"lru-dict==1.3.0",
"PyJWT==2.10.1",
# PyJWT has loose dependency. We want the latest one.
"cryptography==46.0.6",
"cryptography==46.0.5",
"Pillow==12.1.1",
"propcache==0.4.1",
"pyOpenSSL==26.0.0",

2
requirements.txt generated
View File

@@ -21,7 +21,7 @@ bcrypt==5.0.0
certifi>=2021.5.30
ciso8601==2.3.3
cronsim==2.7
cryptography==46.0.6
cryptography==46.0.5
fnv-hash-fast==2.0.0
ha-ffmpeg==3.2.2
hass-nabucasa==2.2.0

View File

@@ -25,7 +25,6 @@ from homeassistant.components.anthropic.const import (
CONF_CODE_EXECUTION,
CONF_MAX_TOKENS,
CONF_PROMPT,
CONF_PROMPT_CACHING,
CONF_RECOMMENDED,
CONF_TEMPERATURE,
CONF_THINKING_BUDGET,
@@ -325,7 +324,6 @@ async def test_subentry_web_search_user_location(
"country": "US",
"max_tokens": 8192,
"prompt": "You are a helpful assistant",
"prompt_caching": "prompt",
"recommended": False,
"region": "California",
"temperature": 1.0,
@@ -433,7 +431,6 @@ async def test_model_list_error(
{
CONF_CHAT_MODEL: "claude-3-haiku-20240307",
CONF_TEMPERATURE: 1.0,
CONF_PROMPT_CACHING: "prompt",
},
),
{
@@ -442,7 +439,6 @@ async def test_model_list_error(
CONF_TEMPERATURE: 1.0,
CONF_CHAT_MODEL: "claude-3-haiku-20240307",
CONF_MAX_TOKENS: DEFAULT[CONF_MAX_TOKENS],
CONF_PROMPT_CACHING: "prompt",
},
),
( # Model with web search options
@@ -450,7 +446,6 @@ async def test_model_list_error(
CONF_RECOMMENDED: False,
CONF_CHAT_MODEL: "claude-sonnet-4-5",
CONF_PROMPT: "bla",
CONF_PROMPT_CACHING: "prompt",
CONF_WEB_SEARCH: True,
CONF_WEB_SEARCH_MAX_USES: 4,
CONF_WEB_SEARCH_USER_LOCATION: True,
@@ -468,7 +463,6 @@ async def test_model_list_error(
{
CONF_CHAT_MODEL: "claude-haiku-4-5",
CONF_TEMPERATURE: 1.0,
CONF_PROMPT_CACHING: "off",
},
{
CONF_WEB_SEARCH: False,
@@ -480,7 +474,6 @@ async def test_model_list_error(
{
CONF_RECOMMENDED: False,
CONF_PROMPT: "Speak like a pirate",
CONF_PROMPT_CACHING: "off",
CONF_TEMPERATURE: 1.0,
CONF_CHAT_MODEL: "claude-haiku-4-5",
CONF_MAX_TOKENS: DEFAULT[CONF_MAX_TOKENS],
@@ -496,7 +489,6 @@ async def test_model_list_error(
CONF_RECOMMENDED: False,
CONF_CHAT_MODEL: "claude-sonnet-4-5",
CONF_PROMPT: "bla",
CONF_PROMPT_CACHING: "off",
CONF_WEB_SEARCH: False,
CONF_WEB_SEARCH_MAX_USES: 5,
CONF_WEB_SEARCH_USER_LOCATION: False,
@@ -512,7 +504,6 @@ async def test_model_list_error(
{
CONF_CHAT_MODEL: "claude-sonnet-4-5",
CONF_TEMPERATURE: 1.0,
CONF_PROMPT_CACHING: "automatic",
},
{
CONF_WEB_SEARCH: False,
@@ -525,7 +516,6 @@ async def test_model_list_error(
{
CONF_RECOMMENDED: False,
CONF_PROMPT: "Speak like a pirate",
CONF_PROMPT_CACHING: "automatic",
CONF_TEMPERATURE: 1.0,
CONF_CHAT_MODEL: "claude-sonnet-4-5",
CONF_MAX_TOKENS: DEFAULT[CONF_MAX_TOKENS],
@@ -541,7 +531,6 @@ async def test_model_list_error(
CONF_RECOMMENDED: False,
CONF_CHAT_MODEL: "claude-opus-4-6",
CONF_PROMPT: "bla",
CONF_PROMPT_CACHING: "automatic",
CONF_WEB_SEARCH: False,
CONF_WEB_SEARCH_MAX_USES: 5,
CONF_WEB_SEARCH_USER_LOCATION: False,
@@ -557,7 +546,6 @@ async def test_model_list_error(
{
CONF_CHAT_MODEL: "claude-opus-4-6",
CONF_TEMPERATURE: 1.0,
CONF_PROMPT_CACHING: "prompt",
},
{
CONF_WEB_SEARCH: False,
@@ -570,7 +558,6 @@ async def test_model_list_error(
{
CONF_RECOMMENDED: False,
CONF_PROMPT: "Speak like a pirate",
CONF_PROMPT_CACHING: "prompt",
CONF_TEMPERATURE: 1.0,
CONF_CHAT_MODEL: "claude-opus-4-6",
CONF_MAX_TOKENS: DEFAULT[CONF_MAX_TOKENS],
@@ -594,14 +581,12 @@ async def test_model_list_error(
},
{
CONF_TEMPERATURE: 0.3,
CONF_PROMPT_CACHING: "automatic",
},
{},
),
{
CONF_RECOMMENDED: False,
CONF_PROMPT: "Speak like a pirate",
CONF_PROMPT_CACHING: "automatic",
CONF_TEMPERATURE: 0.3,
CONF_CHAT_MODEL: DEFAULT[CONF_CHAT_MODEL],
CONF_MAX_TOKENS: DEFAULT[CONF_MAX_TOKENS],
@@ -616,7 +601,6 @@ async def test_model_list_error(
{
CONF_RECOMMENDED: False,
CONF_PROMPT: "Speak like a pirate",
CONF_PROMPT_CACHING: "off",
CONF_TEMPERATURE: 0.3,
CONF_CHAT_MODEL: DEFAULT[CONF_CHAT_MODEL],
CONF_MAX_TOKENS: DEFAULT[CONF_MAX_TOKENS],
@@ -806,7 +790,6 @@ async def test_creating_ai_task_subentry_advanced(
CONF_WEB_SEARCH_USER_LOCATION: False,
CONF_THINKING_BUDGET: 0,
CONF_CODE_EXECUTION: False,
CONF_PROMPT_CACHING: "prompt",
}

View File

@@ -33,7 +33,6 @@ from homeassistant.components import conversation
from homeassistant.components.anthropic.const import (
CONF_CHAT_MODEL,
CONF_CODE_EXECUTION,
CONF_PROMPT_CACHING,
CONF_THINKING_BUDGET,
CONF_THINKING_EFFORT,
CONF_WEB_SEARCH,
@@ -169,7 +168,6 @@ async def test_template_variables(
mock_config_entry,
subentry,
data={
"prompt_caching": "off",
"prompt": (
"The user name is {{ user_name }}. "
"The user id is {{ llm_context.context.user_id }}."
@@ -196,10 +194,12 @@ async def test_template_variables(
== "Okay, let me take care of that for you."
)
assert (
"The user name is Test User." in mock_create_stream.call_args.kwargs["system"]
)
assert "The user id is 12345." in mock_create_stream.call_args.kwargs["system"]
system = mock_create_stream.call_args.kwargs["system"]
assert isinstance(system, list)
system_text = " ".join(block["text"] for block in system if "text" in block)
assert "The user name is Test User." in system_text
assert "The user id is 12345." in system_text
async def test_conversation_agent(
@@ -212,10 +212,9 @@ async def test_conversation_agent(
assert agent.supported_languages == "*"
async def test_prompt_caching_system_prompt(
async def test_system_prompt_uses_text_block_with_cache_control(
hass: HomeAssistant,
mock_config_entry: MockConfigEntry,
mock_init_component: None,
mock_create_stream: AsyncMock,
) -> None:
"""Ensure system prompt is sent as TextBlockParam with cache_control."""
@@ -225,13 +224,16 @@ async def test_prompt_caching_system_prompt(
create_content_block(0, ["ok"]),
]
await conversation.async_converse(
hass,
"hello",
None,
context,
agent_id="conversation.claude_conversation",
)
with patch("anthropic.resources.models.AsyncModels.list", new_callable=AsyncMock):
await hass.config_entries.async_setup(mock_config_entry.entry_id)
await hass.async_block_till_done()
await conversation.async_converse(
hass,
"hello",
None,
context,
agent_id="conversation.claude_conversation",
)
system = mock_create_stream.call_args.kwargs["system"]
assert isinstance(system, list)
@@ -240,41 +242,6 @@ async def test_prompt_caching_system_prompt(
assert block["type"] == "text"
assert "Home Assistant" in block["text"]
assert block["cache_control"] == {"type": "ephemeral"}
assert "cache_control" not in mock_create_stream.call_args.kwargs
async def test_prompt_caching_automatic(
hass: HomeAssistant,
mock_config_entry: MockConfigEntry,
mock_init_component: None,
mock_create_stream: AsyncMock,
) -> None:
"""Ensure model args include cache_control."""
hass.config_entries.async_update_subentry(
mock_config_entry,
next(iter(mock_config_entry.subentries.values())),
data={
CONF_PROMPT_CACHING: "automatic",
},
)
context = Context()
mock_create_stream.return_value = [
create_content_block(0, ["ok"]),
]
await conversation.async_converse(
hass,
"hello",
None,
context,
agent_id="conversation.claude_conversation",
)
assert mock_create_stream.call_args.kwargs["cache_control"] == {"type": "ephemeral"}
system = mock_create_stream.call_args.kwargs["system"]
assert isinstance(system, str)
@patch("homeassistant.components.anthropic.entity.llm.AssistAPI._async_get_tools")

View File

@@ -5,6 +5,7 @@ from __future__ import annotations
from datetime import timedelta
from unittest.mock import patch
from evohomeasync2 import EvohomeClient
from freezegun.api import FrozenDateTimeFactory
import pytest
@@ -18,9 +19,10 @@ from tests.common import async_fire_time_changed
@pytest.mark.parametrize("install", ["minimal"])
@pytest.mark.usefixtures("evohome")
async def test_setup_platform(
hass: HomeAssistant,
config: dict[str, str],
evohome: EvohomeClient,
freezer: FrozenDateTimeFactory,
) -> None:
"""Test entities and their states after setup of evohome."""

View File

@@ -7,7 +7,7 @@ import logging
from unittest.mock import Mock, patch
import aiohttp
from evohomeasync2 import exceptions as evo_exc
from evohomeasync2 import EvohomeClient, exceptions as evo_exc
import pytest
from syrupy.assertion import SnapshotAssertion
@@ -172,8 +172,11 @@ async def test_client_request_failure_v2(
@pytest.mark.parametrize("install", ["default"])
@pytest.mark.usefixtures("evohome")
async def test_setup(hass: HomeAssistant, snapshot: SnapshotAssertion) -> None:
async def test_setup(
hass: HomeAssistant,
evohome: EvohomeClient,
snapshot: SnapshotAssertion,
) -> None:
"""Test services after setup of evohome.
Registered services vary by the type of system.

View File

@@ -6,6 +6,7 @@ from datetime import UTC, datetime
from typing import Any
from unittest.mock import patch
from evohomeasync2 import EvohomeClient
from freezegun.api import FrozenDateTimeFactory
import pytest
@@ -24,8 +25,10 @@ from .const import TEST_INSTALLS
@pytest.mark.parametrize("install", ["default"])
@pytest.mark.usefixtures("evohome")
async def test_refresh_system(hass: HomeAssistant) -> None:
async def test_refresh_system(
hass: HomeAssistant,
evohome: EvohomeClient,
) -> None:
"""Test Evohome's refresh_system service (for all temperature control systems)."""
# EvoService.REFRESH_SYSTEM
@@ -60,9 +63,9 @@ async def test_reset_system(
@pytest.mark.parametrize("install", ["default"])
@pytest.mark.usefixtures("ctl_id")
async def test_set_system_mode(
hass: HomeAssistant,
ctl_id: str,
freezer: FrozenDateTimeFactory,
) -> None:
"""Test Evohome's set_system_mode service (for a temperature control system)."""
@@ -290,7 +293,6 @@ _SET_SYSTEM_MODE_VALIDATOR_PARAMS = [
@pytest.mark.parametrize("install", ["default"])
@pytest.mark.usefixtures("evohome")
@pytest.mark.parametrize(
("service_data", "expected_translation_key"),
_SET_SYSTEM_MODE_VALIDATOR_PARAMS,
@@ -298,6 +300,7 @@ _SET_SYSTEM_MODE_VALIDATOR_PARAMS = [
)
async def test_set_system_mode_validator(
hass: HomeAssistant,
evohome: EvohomeClient,
service_data: dict[str, Any],
expected_translation_key: str,
) -> None:

View File

@@ -7,6 +7,7 @@ from __future__ import annotations
from unittest.mock import patch
from evohomeasync2 import EvohomeClient
from freezegun.api import FrozenDateTimeFactory
import pytest
from syrupy.assertion import SnapshotAssertion
@@ -49,9 +50,9 @@ async def test_setup_platform(
@pytest.mark.parametrize("install", TEST_INSTALLS_WITH_DHW)
@pytest.mark.usefixtures("evohome")
async def test_set_operation_mode(
hass: HomeAssistant,
evohome: EvohomeClient,
freezer: FrozenDateTimeFactory,
snapshot: SnapshotAssertion,
) -> None:
@@ -118,8 +119,7 @@ async def test_set_operation_mode(
@pytest.mark.parametrize("install", TEST_INSTALLS_WITH_DHW)
@pytest.mark.usefixtures("evohome")
async def test_set_away_mode(hass: HomeAssistant) -> None:
async def test_set_away_mode(hass: HomeAssistant, evohome: EvohomeClient) -> None:
"""Test SERVICE_SET_AWAY_MODE of an evohome DHW zone."""
# set_away_mode: off
@@ -152,8 +152,7 @@ async def test_set_away_mode(hass: HomeAssistant) -> None:
@pytest.mark.parametrize("install", TEST_INSTALLS_WITH_DHW)
@pytest.mark.usefixtures("evohome")
async def test_turn_off(hass: HomeAssistant) -> None:
async def test_turn_off(hass: HomeAssistant, evohome: EvohomeClient) -> None:
"""Test SERVICE_TURN_OFF of an evohome DHW zone."""
# turn_off
@@ -171,8 +170,7 @@ async def test_turn_off(hass: HomeAssistant) -> None:
@pytest.mark.parametrize("install", TEST_INSTALLS_WITH_DHW)
@pytest.mark.usefixtures("evohome")
async def test_turn_on(hass: HomeAssistant) -> None:
async def test_turn_on(hass: HomeAssistant, evohome: EvohomeClient) -> None:
"""Test SERVICE_TURN_ON of an evohome DHW zone."""
# turn_on

View File

@@ -46,11 +46,6 @@ def mock_mastodon_client() -> Generator[AsyncMock]:
)
client.mastodon_api_version = 2
client.status_post.return_value = None
client.account_update_credentials.return_value = Account.from_json(
load_fixture("account.json", DOMAIN)
)
yield client

View File

@@ -1,39 +1,21 @@
"""Tests for the Mastodon services."""
from datetime import timedelta
from pathlib import Path
from unittest.mock import AsyncMock, Mock, patch
from mastodon.Mastodon import (
MastodonAPIError,
MastodonNotFoundError,
MastodonUnauthorizedError,
MediaAttachment,
)
from mastodon.Mastodon import MastodonAPIError, MastodonNotFoundError, MediaAttachment
import pytest
from syrupy.assertion import SnapshotAssertion
from homeassistant.components import camera, image, media_source
from homeassistant.components.mastodon.const import (
ATTR_ACCOUNT_NAME,
ATTR_ATTRIBUTION_DOMAINS,
ATTR_AVATAR,
ATTR_AVATAR_MIME_TYPE,
ATTR_BOT,
ATTR_CONTENT_WARNING,
ATTR_DISCOVERABLE,
ATTR_DISPLAY_NAME,
ATTR_DURATION,
ATTR_FIELDS,
ATTR_HEADER,
ATTR_HEADER_MIME_TYPE,
ATTR_HIDE_NOTIFICATIONS,
ATTR_IDEMPOTENCY_KEY,
ATTR_LANGUAGE,
ATTR_LOCKED,
ATTR_MEDIA,
ATTR_MEDIA_DESCRIPTION,
ATTR_NOTE,
ATTR_STATUS,
ATTR_VISIBILITY,
DOMAIN,
@@ -43,13 +25,10 @@ from homeassistant.components.mastodon.services import (
SERVICE_MUTE_ACCOUNT,
SERVICE_POST,
SERVICE_UNMUTE_ACCOUNT,
SERVICE_UPDATE_PROFILE,
)
from homeassistant.config_entries import ConfigEntryState
from homeassistant.const import ATTR_CONFIG_ENTRY_ID
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError, ServiceValidationError
from homeassistant.setup import async_setup_component
from . import setup_integration
@@ -672,207 +651,3 @@ async def test_service_entry_availability(
return_response=False,
)
assert err.value.translation_key == "service_config_entry_not_found"
@pytest.mark.parametrize(
    # payload: the service-call data (minus the config entry id);
    # kwargs: the exact keyword arguments expected on the Mastodon client's
    # account_update_credentials call after the service translates the payload.
    ("payload", "kwargs"),
    [
        # Plain text fields pass through unchanged.
        (
            {ATTR_DISPLAY_NAME: "Test User"},
            {ATTR_DISPLAY_NAME: "Test User"},
        ),
        (
            {ATTR_NOTE: "bio"},
            {ATTR_NOTE: "bio"},
        ),
        # Boolean account flags pass through unchanged.
        (
            {ATTR_LOCKED: True},
            {ATTR_LOCKED: True},
        ),
        (
            {ATTR_BOT: False},
            {ATTR_BOT: False},
        ),
        (
            {ATTR_DISCOVERABLE: True},
            {ATTR_DISCOVERABLE: True},
        ),
        # Profile fields are converted from name/value dicts to tuples,
        # which is the shape Mastodon.py expects.
        (
            {ATTR_FIELDS: [{"name": "Pronouns", "value": "He/Him, They/Them"}]},
            {ATTR_FIELDS: [("Pronouns", "He/Him, They/Them")]},
        ),
        (
            {ATTR_ATTRIBUTION_DOMAINS: ["example.com", "test.com"]},
            {ATTR_ATTRIBUTION_DOMAINS: ["example.com", "test.com"]},
        ),
        # Avatar from a camera media source: resolved to raw bytes, and the
        # mime type comes from the camera image (jpeg), not the payload.
        (
            {
                ATTR_AVATAR: {
                    "media_content_id": "media-source://camera/camera.demo_camera",
                    "media_content_type": "image/jpeg",
                }
            },
            {ATTR_AVATAR: b"I play the sax\n", ATTR_AVATAR_MIME_TYPE: "image/jpeg"},
        ),
        # Avatar from a local media-source file: resolved to a filesystem
        # path; the jpeg mime type is detected from the file, overriding the
        # payload's "image/png".
        (
            {
                ATTR_AVATAR: {
                    "media_content_id": "media-source://media_source/local/screenshot.jpg",
                    "media_content_type": "image/png",
                }
            },
            {
                ATTR_AVATAR: Path(
                    "tests/testing_config/media/screenshot.jpg"
                ).resolve(),
                ATTR_AVATAR_MIME_TYPE: "image/jpeg",
            },
        ),
        # Avatar from an image entity: resolved to the entity's raw bytes
        # and content type.
        (
            {
                ATTR_AVATAR: {
                    "media_content_id": "media-source://image/image.test",
                    "media_content_type": "image/png",
                }
            },
            {ATTR_AVATAR: b"\x89PNG", ATTR_AVATAR_MIME_TYPE: "image/png"},
        ),
        # Header image: same three resolution paths as the avatar above.
        (
            {
                ATTR_HEADER: {
                    "media_content_id": "media-source://camera/camera.demo_camera",
                    "media_content_type": "image/jpeg",
                }
            },
            {ATTR_HEADER: b"I play the sax\n", ATTR_HEADER_MIME_TYPE: "image/jpeg"},
        ),
        (
            {
                ATTR_HEADER: {
                    "media_content_id": "media-source://image/image.test",
                    "media_content_type": "image/png",
                }
            },
            {ATTR_HEADER: b"\x89PNG", ATTR_HEADER_MIME_TYPE: "image/png"},
        ),
        (
            {
                ATTR_HEADER: {
                    "media_content_id": "media-source://media_source/local/screenshot.jpg",
                    "media_content_type": "image/png",
                }
            },
            {
                ATTR_HEADER: Path(
                    "tests/testing_config/media/screenshot.jpg"
                ).resolve(),
                ATTR_HEADER_MIME_TYPE: "image/jpeg",
            },
        ),
    ],
)
async def test_service_update_profile(
    hass: HomeAssistant,
    mock_mastodon_client: AsyncMock,
    mock_config_entry: MockConfigEntry,
    # NOTE(review): payload values include bools/lists/dicts too, so the
    # dict[str, str] annotations understate the real types — consider
    # dict[str, Any] in a follow-up.
    payload: dict[str, str],
    kwargs: dict[str, str | None],
) -> None:
    """Test the update profile service.

    Calls SERVICE_UPDATE_PROFILE with each parametrized payload and asserts
    the Mastodon client receives the translated keyword arguments.
    """
    # media_source is needed so media-source:// content ids can be resolved.
    assert await async_setup_component(hass, "media_source", {})
    await setup_integration(hass, mock_config_entry)
    assert mock_config_entry.state is ConfigEntryState.LOADED
    with (
        # Stub camera and image entity snapshots so the avatar/header cases
        # resolve to deterministic bytes without real entities.
        patch(
            "homeassistant.components.camera.async_get_image",
            return_value=camera.Image("image/jpeg", b"I play the sax\n"),
        ),
        patch(
            "homeassistant.components.image.async_get_image",
            return_value=image.Image(content_type="image/png", content=b"\x89PNG"),
        ),
    ):
        await hass.services.async_call(
            DOMAIN,
            SERVICE_UPDATE_PROFILE,
            {ATTR_CONFIG_ENTRY_ID: mock_config_entry.entry_id, **payload},
            blocking=True,
            return_response=True,
        )
    # The service must forward exactly the translated kwargs to the client.
    mock_mastodon_client.account_update_credentials.assert_called_with(**kwargs)
@pytest.mark.parametrize(
    ("exception", "translation_key"),
    [
        (MastodonAPIError, "unable_to_update_profile"),
        (MastodonUnauthorizedError, "auth_failed"),
    ],
)
async def test_service_update_profile_exceptions(
    hass: HomeAssistant,
    mock_mastodon_client: AsyncMock,
    mock_config_entry: MockConfigEntry,
    exception: type[Exception],
    translation_key: str,
) -> None:
    """Verify Mastodon client errors surface as HomeAssistantError.

    Each parametrized client exception must be mapped to the matching
    translation key on the raised error.
    """
    await setup_integration(hass, mock_config_entry)
    assert mock_config_entry.state is ConfigEntryState.LOADED

    # Make the mocked client raise when the profile update is attempted.
    mock_mastodon_client.account_update_credentials.side_effect = exception

    service_data = {
        ATTR_CONFIG_ENTRY_ID: mock_config_entry.entry_id,
        ATTR_DISPLAY_NAME: "Test User",
    }
    with pytest.raises(HomeAssistantError) as err:
        await hass.services.async_call(
            DOMAIN,
            SERVICE_UPDATE_PROFILE,
            service_data,
            blocking=True,
            return_response=True,
        )

    # The service must translate the client error to the expected key.
    assert err.value.translation_key == translation_key
@pytest.mark.usefixtures("mock_mastodon_client")
async def test_service_update_profile_media_source_not_supported(
    hass: HomeAssistant,
    mock_config_entry: MockConfigEntry,
) -> None:
    """Ensure a non-image media source for the avatar is rejected."""
    assert await async_setup_component(hass, "tts", {})
    await setup_integration(hass, mock_config_entry)
    assert mock_config_entry.state is ConfigEntryState.LOADED

    # Resolve the avatar media to an audio stream, which the service
    # cannot turn into profile image bytes.
    audio_media = media_source.PlayMedia(
        url="/api/tts_proxy/WDyphPCh3sAoO3koDY87ew.mp3",
        mime_type="audio/mpeg",
        path=None,
    )
    resolver = patch(
        "homeassistant.components.mastodon.services.async_resolve_media",
        return_value=audio_media,
    )

    with resolver, pytest.raises(HomeAssistantError) as err:
        await hass.services.async_call(
            DOMAIN,
            SERVICE_UPDATE_PROFILE,
            {
                ATTR_CONFIG_ENTRY_ID: mock_config_entry.entry_id,
                ATTR_AVATAR: {
                    "media_content_id": "media-source://tts/demo?message=Hello+world%21&language=en",
                    "media_content_type": "audio/mp3",
                },
            },
            blocking=True,
            return_response=True,
        )

    # The failure must be reported with the dedicated translation key.
    assert err.value.translation_key == "media_source_not_supported"

View File

@@ -1,9 +1,10 @@
"""Tests for the Overseerr event platform."""
from datetime import UTC, datetime, timedelta
from datetime import UTC, datetime
from unittest.mock import AsyncMock, patch
from freezegun.api import FrozenDateTimeFactory
from future.backports.datetime import timedelta
import pytest
from python_overseerr import OverseerrConnectionError
from syrupy.assertion import SnapshotAssertion