forked from home-assistant/core
Convert Ollama to subentries
homeassistant/components/ollama/__init__.py
@@ -8,11 +8,11 @@ import logging
 import httpx
 import ollama
 
-from homeassistant.config_entries import ConfigEntry
+from homeassistant.config_entries import ConfigEntry, ConfigSubentry
 from homeassistant.const import CONF_URL, Platform
 from homeassistant.core import HomeAssistant
 from homeassistant.exceptions import ConfigEntryNotReady
-from homeassistant.helpers import config_validation as cv
+from homeassistant.helpers import config_validation as cv, entity_registry as er
 from homeassistant.util.ssl import get_default_context
 
 from .const import (
@@ -22,6 +22,7 @@ from .const import (
     CONF_NUM_CTX,
     CONF_PROMPT,
     CONF_THINK,
+    DEFAULT_CONVERSATION_NAME,
     DEFAULT_TIMEOUT,
     DOMAIN,
 )
@@ -65,3 +66,40 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
         return False
     hass.data[DOMAIN].pop(entry.entry_id)
     return True
+
+
+async def async_migrate_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
+    """Migrate old entry."""
+    if entry.version == 1:
+        # Migrate from version 1 to version 2
+        # Move conversation-specific options to a subentry
+        subentry = ConfigSubentry(
+            data=entry.options,
+            subentry_type="conversation",
+            title=DEFAULT_CONVERSATION_NAME,
+            unique_id=None,
+        )
+        hass.config_entries.async_add_subentry(
+            entry,
+            subentry,
+        )
+
+        # Migrate conversation entity to be linked to subentry
+        ent_reg = er.async_get(hass)
+        for entity_entry in er.async_entries_for_config_entry(ent_reg, entry.entry_id):
+            if entity_entry.domain == Platform.CONVERSATION:
+                ent_reg.async_update_entity(
+                    entity_entry.entity_id,
+                    config_subentry_id=subentry.subentry_id,
+                    new_unique_id=subentry.subentry_id,
+                )
+                break
+
+        # Remove options from the main entry
+        hass.config_entries.async_update_entry(
+            entry,
+            options={},
+            version=2,
+        )
+
+    return True
homeassistant/components/ollama/config_flow.py
@@ -16,10 +16,11 @@ from homeassistant.config_entries import (
     ConfigEntry,
     ConfigFlow,
     ConfigFlowResult,
-    OptionsFlow,
+    ConfigSubentryFlow,
+    SubentryFlowResult,
 )
-from homeassistant.const import CONF_LLM_HASS_API, CONF_URL
-from homeassistant.core import HomeAssistant
+from homeassistant.const import CONF_LLM_HASS_API, CONF_NAME, CONF_URL
+from homeassistant.core import HomeAssistant, callback
 from homeassistant.helpers import llm
 from homeassistant.helpers.selector import (
     BooleanSelector,
@@ -43,6 +44,7 @@ from .const import (
     CONF_NUM_CTX,
     CONF_PROMPT,
     CONF_THINK,
+    DEFAULT_CONVERSATION_NAME,
     DEFAULT_KEEP_ALIVE,
     DEFAULT_MAX_HISTORY,
     DEFAULT_MODEL,
@@ -70,7 +72,7 @@ STEP_USER_DATA_SCHEMA = vol.Schema(
 class OllamaConfigFlow(ConfigFlow, domain=DOMAIN):
     """Handle a config flow for Ollama."""
 
-    VERSION = 1
+    VERSION = 2
 
     def __init__(self) -> None:
         """Initialize config flow."""
@@ -148,6 +150,14 @@ class OllamaConfigFlow(ConfigFlow, domain=DOMAIN):
         return self.async_create_entry(
             title=_get_title(self.model),
             data={CONF_URL: self.url, CONF_MODEL: self.model},
+            subentries=[
+                {
+                    "subentry_type": "conversation",
+                    "data": {},
+                    "title": DEFAULT_CONVERSATION_NAME,
+                    "unique_id": None,
+                }
+            ],
         )
 
     async def async_step_download(
@@ -189,6 +199,14 @@ class OllamaConfigFlow(ConfigFlow, domain=DOMAIN):
         return self.async_create_entry(
             title=_get_title(self.model),
             data={CONF_URL: self.url, CONF_MODEL: self.model},
+            subentries=[
+                {
+                    "subentry_type": "conversation",
+                    "data": {},
+                    "title": DEFAULT_CONVERSATION_NAME,
+                    "unique_id": None,
+                }
+            ],
         )
 
     async def async_step_failed(
@@ -197,41 +215,65 @@ class OllamaConfigFlow(ConfigFlow, domain=DOMAIN):
         """Step after model downloading has failed."""
         return self.async_abort(reason="download_failed")
 
-    @staticmethod
-    def async_get_options_flow(
-        config_entry: ConfigEntry,
-    ) -> OptionsFlow:
-        """Create the options flow."""
-        return OllamaOptionsFlow(config_entry)
+    @classmethod
+    @callback
+    def async_get_supported_subentry_types(
+        cls, config_entry: ConfigEntry
+    ) -> dict[str, type[ConfigSubentryFlow]]:
+        """Return subentries supported by this integration."""
+        return {"conversation": ConversationSubentryFlowHandler}
 
 
-class OllamaOptionsFlow(OptionsFlow):
-    """Ollama options flow."""
+class ConversationSubentryFlowHandler(ConfigSubentryFlow):
+    """Flow for managing conversation subentries."""
 
-    def __init__(self, config_entry: ConfigEntry) -> None:
-        """Initialize options flow."""
-        self.url: str = config_entry.data[CONF_URL]
-        self.model: str = config_entry.data[CONF_MODEL]
+    is_new: bool
+    start_data: dict[str, Any]
 
-    async def async_step_init(
+    async def async_step_user(
         self, user_input: dict[str, Any] | None = None
-    ) -> ConfigFlowResult:
-        """Manage the options."""
+    ) -> SubentryFlowResult:
+        """Add a subentry."""
+        self.is_new = True
+        self.start_data = {}
+        return await self.async_step_set_options()
+
+    async def async_step_reconfigure(
+        self, user_input: dict[str, Any] | None = None
+    ) -> SubentryFlowResult:
+        """Handle reconfiguration of a subentry."""
+        self.is_new = False
+        self.start_data = self._get_reconfigure_subentry().data.copy()
+        return await self.async_step_set_options()
+
+    async def async_step_set_options(
+        self, user_input: dict[str, Any] | None = None
+    ) -> SubentryFlowResult:
+        """Set conversation options."""
+        options = self.start_data
+        errors: dict[str, str] = {}
+
         if user_input is not None:
-            return self.async_create_entry(
-                title=_get_title(self.model), data=user_input
+            if self.is_new:
+                return self.async_create_entry(
+                    title=user_input.pop(CONF_NAME),
+                    data=user_input,
+                )
+
+            return self.async_update_and_abort(
+                self._get_entry(),
+                self._get_reconfigure_subentry(),
+                data=user_input,
             )
 
-        options: Mapping[str, Any] = self.config_entry.options or {}
-        schema = ollama_config_option_schema(self.hass, options)
+        schema = ollama_config_option_schema(self.hass, self.is_new, options)
         return self.async_show_form(
-            step_id="init",
-            data_schema=vol.Schema(schema),
+            step_id="set_options", data_schema=vol.Schema(schema), errors=errors
         )
 
 
 def ollama_config_option_schema(
-    hass: HomeAssistant, options: Mapping[str, Any]
+    hass: HomeAssistant, is_new: bool, options: Mapping[str, Any]
 ) -> dict:
     """Ollama options schema."""
     hass_apis: list[SelectOptionDict] = [
@@ -242,54 +284,72 @@ def ollama_config_option_schema(
         for api in llm.async_get_apis(hass)
     ]
 
-    return {
-        vol.Optional(
-            CONF_PROMPT,
-            description={
-                "suggested_value": options.get(
-                    CONF_PROMPT, llm.DEFAULT_INSTRUCTIONS_PROMPT
-                )
-            },
-        ): TemplateSelector(),
-        vol.Optional(
-            CONF_LLM_HASS_API,
-            description={"suggested_value": options.get(CONF_LLM_HASS_API)},
-        ): SelectSelector(SelectSelectorConfig(options=hass_apis, multiple=True)),
-        vol.Optional(
-            CONF_NUM_CTX,
-            description={"suggested_value": options.get(CONF_NUM_CTX, DEFAULT_NUM_CTX)},
-        ): NumberSelector(
-            NumberSelectorConfig(
-                min=MIN_NUM_CTX, max=MAX_NUM_CTX, step=1, mode=NumberSelectorMode.BOX
-            )
-        ),
-        vol.Optional(
-            CONF_MAX_HISTORY,
-            description={
-                "suggested_value": options.get(CONF_MAX_HISTORY, DEFAULT_MAX_HISTORY)
-            },
-        ): NumberSelector(
-            NumberSelectorConfig(
-                min=0, max=sys.maxsize, step=1, mode=NumberSelectorMode.BOX
-            )
-        ),
-        vol.Optional(
-            CONF_KEEP_ALIVE,
-            description={
-                "suggested_value": options.get(CONF_KEEP_ALIVE, DEFAULT_KEEP_ALIVE)
-            },
-        ): NumberSelector(
-            NumberSelectorConfig(
-                min=-1, max=sys.maxsize, step=1, mode=NumberSelectorMode.BOX
-            )
-        ),
-        vol.Optional(
-            CONF_THINK,
-            description={
-                "suggested_value": options.get("think", DEFAULT_THINK),
-            },
-        ): BooleanSelector(),
-    }
+    if is_new:
+        schema: dict[vol.Required | vol.Optional, Any] = {
+            vol.Required(CONF_NAME, default=DEFAULT_CONVERSATION_NAME): str,
+        }
+    else:
+        schema = {}
+
+    schema.update(
+        {
+            vol.Optional(
+                CONF_PROMPT,
+                description={
+                    "suggested_value": options.get(
+                        CONF_PROMPT, llm.DEFAULT_INSTRUCTIONS_PROMPT
+                    )
+                },
+            ): TemplateSelector(),
+            vol.Optional(
+                CONF_LLM_HASS_API,
+                description={"suggested_value": options.get(CONF_LLM_HASS_API)},
+            ): SelectSelector(SelectSelectorConfig(options=hass_apis, multiple=True)),
+            vol.Optional(
+                CONF_NUM_CTX,
+                description={
+                    "suggested_value": options.get(CONF_NUM_CTX, DEFAULT_NUM_CTX)
+                },
+            ): NumberSelector(
+                NumberSelectorConfig(
+                    min=MIN_NUM_CTX,
+                    max=MAX_NUM_CTX,
+                    step=1,
+                    mode=NumberSelectorMode.BOX,
+                )
+            ),
+            vol.Optional(
+                CONF_MAX_HISTORY,
+                description={
+                    "suggested_value": options.get(
+                        CONF_MAX_HISTORY, DEFAULT_MAX_HISTORY
+                    )
+                },
+            ): NumberSelector(
+                NumberSelectorConfig(
+                    min=0, max=sys.maxsize, step=1, mode=NumberSelectorMode.BOX
+                )
+            ),
+            vol.Optional(
+                CONF_KEEP_ALIVE,
+                description={
+                    "suggested_value": options.get(CONF_KEEP_ALIVE, DEFAULT_KEEP_ALIVE)
+                },
+            ): NumberSelector(
+                NumberSelectorConfig(
+                    min=-1, max=sys.maxsize, step=1, mode=NumberSelectorMode.BOX
+                )
+            ),
+            vol.Optional(
+                CONF_THINK,
+                description={
+                    "suggested_value": options.get("think", DEFAULT_THINK),
+                },
+            ): BooleanSelector(),
+        }
+    )
+
+    return schema
 
 
 def _get_title(model: str) -> str:
homeassistant/components/ollama/const.py
@@ -157,3 +157,5 @@ MODEL_NAMES = [  # https://ollama.com/library
     "zephyr",
 ]
 DEFAULT_MODEL = "llama3.2:latest"
+
+DEFAULT_CONVERSATION_NAME = "Ollama Conversation"
homeassistant/components/ollama/conversation.py
@@ -11,7 +11,7 @@ import ollama
 from voluptuous_openapi import convert
 
 from homeassistant.components import assist_pipeline, conversation
-from homeassistant.config_entries import ConfigEntry
+from homeassistant.config_entries import ConfigEntry, ConfigSubentry
 from homeassistant.const import CONF_LLM_HASS_API, MATCH_ALL
 from homeassistant.core import HomeAssistant
 from homeassistant.exceptions import HomeAssistantError
@@ -25,6 +25,7 @@ from .const import (
     CONF_NUM_CTX,
     CONF_PROMPT,
     CONF_THINK,
+    DEFAULT_CONVERSATION_NAME,
     DEFAULT_KEEP_ALIVE,
     DEFAULT_MAX_HISTORY,
     DEFAULT_NUM_CTX,
@@ -44,8 +45,14 @@ async def async_setup_entry(
     async_add_entities: AddConfigEntryEntitiesCallback,
 ) -> None:
     """Set up conversation entities."""
-    agent = OllamaConversationEntity(config_entry)
-    async_add_entities([agent])
+    for subentry in config_entry.subentries.values():
+        if subentry.subentry_type != "conversation":
+            continue
+
+        async_add_entities(
+            [OllamaConversationEntity(config_entry, subentry)],
+            config_subentry_id=subentry.subentry_id,
+        )
 
 
 def _format_tool(
@@ -174,17 +181,15 @@ class OllamaConversationEntity(
 ):
     """Ollama conversation agent."""
 
     _attr_has_entity_name = True
     _attr_supports_streaming = True
 
-    def __init__(self, entry: ConfigEntry) -> None:
+    def __init__(self, entry: ConfigEntry, subentry: ConfigSubentry) -> None:
         """Initialize the agent."""
         self.entry = entry
-
-        # conversation id -> message history
-        self._attr_name = entry.title
-        self._attr_unique_id = entry.entry_id
-        if self.entry.options.get(CONF_LLM_HASS_API):
+        self.subentry = subentry
+        self._attr_name = subentry.title or DEFAULT_CONVERSATION_NAME
+        self._attr_unique_id = subentry.subentry_id
+        if self.subentry.data.get(CONF_LLM_HASS_API):
             self._attr_supported_features = (
                 conversation.ConversationEntityFeature.CONTROL
             )
@@ -216,7 +221,7 @@ class OllamaConversationEntity(
         chat_log: conversation.ChatLog,
     ) -> conversation.ConversationResult:
         """Call the API."""
-        settings = {**self.entry.data, **self.entry.options}
+        settings = {**self.entry.data, **self.subentry.data}
 
         try:
             await chat_log.async_provide_llm_data(
@@ -248,7 +253,7 @@ class OllamaConversationEntity(
         chat_log: conversation.ChatLog,
     ) -> None:
         """Generate an answer for the chat log."""
-        settings = {**self.entry.data, **self.entry.options}
+        settings = {**self.entry.data, **self.subentry.data}
 
         client = self.hass.data[DOMAIN][self.entry.entry_id]
         model = settings[CONF_MODEL]
homeassistant/components/ollama/strings.json
@@ -22,23 +22,35 @@
       "download": "Please wait while the model is downloaded, which may take a very long time. Check your Ollama server logs for more details."
     }
   },
-  "options": {
-    "step": {
-      "init": {
-        "data": {
-          "prompt": "Instructions",
-          "llm_hass_api": "[%key:common::config_flow::data::llm_hass_api%]",
-          "max_history": "Max history messages",
-          "num_ctx": "Context window size",
-          "keep_alive": "Keep alive",
-          "think": "Think before responding"
-        },
-        "data_description": {
-          "prompt": "Instruct how the LLM should respond. This can be a template.",
-          "keep_alive": "Duration in seconds for Ollama to keep model in memory. -1 = indefinite, 0 = never.",
-          "num_ctx": "Maximum number of text tokens the model can process. Lower to reduce Ollama RAM, or increase for a large number of exposed entities.",
-          "think": "If enabled, the LLM will think before responding. This can improve response quality but may increase latency."
-        }
-      }
-    }
-  }
+  "config_subentries": {
+    "conversation": {
+      "initiate_flow": {
+        "user": "Add conversation agent",
+        "reconfigure": "Reconfigure conversation agent"
+      },
+      "entry_type": "Conversation agent",
+      "step": {
+        "set_options": {
+          "data": {
+            "name": "[%key:common::config_flow::data::name%]",
+            "prompt": "Instructions",
+            "llm_hass_api": "[%key:common::config_flow::data::llm_hass_api%]",
+            "max_history": "Max history messages",
+            "num_ctx": "Context window size",
+            "keep_alive": "Keep alive",
+            "think": "Think before responding"
+          },
+          "data_description": {
+            "prompt": "Instruct how the LLM should respond. This can be a template.",
+            "keep_alive": "Duration in seconds for Ollama to keep model in memory. -1 = indefinite, 0 = never.",
+            "num_ctx": "Maximum number of text tokens the model can process. Lower to reduce Ollama RAM, or increase for a large number of exposed entities.",
+            "think": "If enabled, the LLM will think before responding. This can improve response quality but may increase latency."
+          }
+        }
+      },
+      "abort": {
+        "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]"
+      }
+    }
+  }
 }
tests/components/ollama/conftest.py
@@ -30,7 +30,15 @@ def mock_config_entry(
     entry = MockConfigEntry(
         domain=ollama.DOMAIN,
         data=TEST_USER_DATA,
-        options=mock_config_entry_options,
+        version=2,
+        subentries_data=[
+            {
+                "data": mock_config_entry_options,
+                "subentry_type": "conversation",
+                "title": "Ollama Conversation",
+                "unique_id": None,
+            }
+        ],
     )
     entry.add_to_hass(hass)
     return entry
@@ -41,8 +49,10 @@ def mock_config_entry_with_assist(
     hass: HomeAssistant, mock_config_entry: MockConfigEntry
 ) -> MockConfigEntry:
     """Mock a config entry with assist."""
-    hass.config_entries.async_update_entry(
-        mock_config_entry, options={CONF_LLM_HASS_API: llm.LLM_API_ASSIST}
+    hass.config_entries.async_update_subentry(
+        mock_config_entry,
+        next(iter(mock_config_entry.subentries.values())),
+        data={CONF_LLM_HASS_API: llm.LLM_API_ASSIST},
     )
     return mock_config_entry
tests/components/ollama/test_config_flow.py
@@ -155,14 +155,21 @@ async def test_form_need_download(hass: HomeAssistant) -> None:
     assert len(mock_setup_entry.mock_calls) == 1
 
 
-async def test_options(
+async def test_subentry_options(
     hass: HomeAssistant, mock_config_entry, mock_init_component
 ) -> None:
-    """Test the options form."""
-    options_flow = await hass.config_entries.options.async_init(
-        mock_config_entry.entry_id
+    """Test the subentry options form."""
+    subentry = next(iter(mock_config_entry.subentries.values()))
+
+    # Test reconfiguration
+    options_flow = await mock_config_entry.start_subentry_reconfigure_flow(
+        hass, subentry.subentry_type, subentry.subentry_id
     )
-    options = await hass.config_entries.options.async_configure(
+
+    assert options_flow["type"] is FlowResultType.FORM
+    assert options_flow["step_id"] == "set_options"
+
+    options = await hass.config_entries.subentries.async_configure(
         options_flow["flow_id"],
         {
             ollama.CONF_PROMPT: "test prompt",
@@ -172,8 +179,10 @@ async def test_subentry_options(
         },
     )
     await hass.async_block_till_done()
-    assert options["type"] is FlowResultType.CREATE_ENTRY
-    assert options["data"] == {
+
+    assert options["type"] is FlowResultType.ABORT
+    assert options["reason"] == "reconfigure_successful"
+    assert subentry.data == {
         ollama.CONF_PROMPT: "test prompt",
         ollama.CONF_MAX_HISTORY: 100,
         ollama.CONF_NUM_CTX: 32768,
tests/components/ollama/test_conversation.py
@@ -35,7 +35,7 @@ async def stream_generator(response: dict | list[dict]) -> AsyncGenerator[dict]:
     yield msg
 
 
-@pytest.mark.parametrize("agent_id", [None, "conversation.mock_title"])
+@pytest.mark.parametrize("agent_id", [None, "conversation.ollama_conversation"])
 async def test_chat(
     hass: HomeAssistant,
     mock_config_entry: MockConfigEntry,
@@ -149,9 +149,11 @@ async def test_template_variables(
     mock_user.id = "12345"
     mock_user.name = "Test User"
 
-    hass.config_entries.async_update_entry(
+    subentry = next(iter(mock_config_entry.subentries.values()))
+    hass.config_entries.async_update_subentry(
         mock_config_entry,
-        options={
+        subentry,
+        data={
             "prompt": (
                 "The user name is {{ user_name }}. "
                 "The user id is {{ llm_context.context.user_id }}."
@@ -382,10 +384,12 @@ async def test_unknown_hass_api(
     mock_init_component,
 ) -> None:
     """Test when we reference an API that no longer exists."""
-    hass.config_entries.async_update_entry(
+    subentry = next(iter(mock_config_entry.subentries.values()))
+    hass.config_entries.async_update_subentry(
         mock_config_entry,
-        options={
-            **mock_config_entry.options,
+        subentry,
+        data={
+            **subentry.data,
             CONF_LLM_HASS_API: "non-existing",
         },
     )
@@ -518,8 +522,9 @@ async def test_message_history_unlimited(
     with (
         patch("ollama.AsyncClient.chat", side_effect=stream) as mock_chat,
     ):
-        hass.config_entries.async_update_entry(
-            mock_config_entry, options={ollama.CONF_MAX_HISTORY: 0}
+        subentry = next(iter(mock_config_entry.subentries.values()))
+        hass.config_entries.async_update_subentry(
+            mock_config_entry, subentry, data={ollama.CONF_MAX_HISTORY: 0}
         )
         for i in range(100):
            result = await conversation.async_converse(
@@ -563,9 +568,11 @@ async def test_template_error(
     hass: HomeAssistant, mock_config_entry: MockConfigEntry
 ) -> None:
     """Test that template error handling works."""
-    hass.config_entries.async_update_entry(
+    subentry = next(iter(mock_config_entry.subentries.values()))
+    hass.config_entries.async_update_subentry(
         mock_config_entry,
-        options={
+        subentry,
+        data={
             "prompt": "talk like a {% if True %}smarthome{% else %}pirate please.",
         },
     )
@@ -593,7 +600,7 @@ async def test_conversation_agent(
     )
     assert agent.supported_languages == MATCH_ALL
 
-    state = hass.states.get("conversation.mock_title")
+    state = hass.states.get("conversation.ollama_conversation")
     assert state
     assert state.attributes[ATTR_SUPPORTED_FEATURES] == 0
 
@@ -609,7 +616,7 @@ async def test_conversation_agent_with_assist(
     )
     assert agent.supported_languages == MATCH_ALL
 
-    state = hass.states.get("conversation.mock_title")
+    state = hass.states.get("conversation.ollama_conversation")
     assert state
     assert (
         state.attributes[ATTR_SUPPORTED_FEATURES]
@@ -642,7 +649,7 @@ async def test_options(
         "test message",
         None,
         Context(),
-        agent_id="conversation.mock_title",
+        agent_id="conversation.ollama_conversation",
     )
 
     assert mock_chat.call_count == 1
@@ -667,9 +674,11 @@ async def test_reasoning_filter(
     entry = MockConfigEntry()
     entry.add_to_hass(hass)
 
-    hass.config_entries.async_update_entry(
+    subentry = next(iter(mock_config_entry.subentries.values()))
+    hass.config_entries.async_update_subentry(
         mock_config_entry,
-        options={
+        subentry,
+        data={
             ollama.CONF_THINK: think,
         },
     )