mirror of https://github.com/home-assistant/core.git
synced 2025-08-01 03:35:09 +02:00
Migrate OpenAI to config subentries (#147282)
* Migrate OpenAI to config subentries
* Add latest changes from Google subentries
* Update homeassistant/components/openai_conversation/__init__.py
  Co-authored-by: Joost Lekkerkerker <joostlek@outlook.com>
---------
Co-authored-by: Joost Lekkerkerker <joostlek@outlook.com>
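In short, this PR moves the OpenAI Conversation integration's per-agent options off the config entry and into a "conversation" config subentry, following the pattern already used for the Google subentries migration. The before/after entry shape below is a rough sketch inferred from the migration tests in this diff; the field names mirror the test assertions, while the concrete values are illustrative only.

# Sketch only: a v1 entry keeps the agent settings in entry.options.
v1_entry = {
    "version": 1,
    "data": {"api_key": "1234"},
    "options": {"recommended": True, "prompt": "You are a helpful assistant"},
}

# Sketch only: after migration to v2, entry.options is empty and the same
# settings live on a "conversation" subentry (one per conversation agent).
v2_entry = {
    "version": 2,
    "data": {"api_key": "1234"},
    "options": {},
    "subentries": [
        {
            "subentry_type": "conversation",
            "title": "ChatGPT",
            "unique_id": None,
            "data": {"recommended": True, "prompt": "You are a helpful assistant"},
        }
    ],
}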
@@ -4,6 +4,7 @@ from unittest.mock import patch

import pytest

from homeassistant.components.openai_conversation.const import DEFAULT_CONVERSATION_NAME
from homeassistant.const import CONF_LLM_HASS_API
from homeassistant.core import HomeAssistant
from homeassistant.helpers import llm

@@ -21,6 +22,15 @@ def mock_config_entry(hass: HomeAssistant) -> MockConfigEntry:
        data={
            "api_key": "bla",
        },
        version=2,
        subentries_data=[
            {
                "data": {},
                "subentry_type": "conversation",
                "title": DEFAULT_CONVERSATION_NAME,
                "unique_id": None,
            }
        ],
    )
    entry.add_to_hass(hass)
    return entry

@@ -31,8 +41,10 @@ def mock_config_entry_with_assist(
    hass: HomeAssistant, mock_config_entry: MockConfigEntry
) -> MockConfigEntry:
    """Mock a config entry with assist."""
    hass.config_entries.async_update_entry(
        mock_config_entry, options={CONF_LLM_HASS_API: llm.LLM_API_ASSIST}
    hass.config_entries.async_update_subentry(
        mock_config_entry,
        next(iter(mock_config_entry.subentries.values())),
        data={CONF_LLM_HASS_API: llm.LLM_API_ASSIST},
    )
    return mock_config_entry
@@ -6,7 +6,7 @@
      'role': 'user',
    }),
    dict({
      'agent_id': 'conversation.openai',
      'agent_id': 'conversation.openai_conversation',
      'content': None,
      'role': 'assistant',
      'tool_calls': list([

@@ -20,14 +20,14 @@
      ]),
    }),
    dict({
      'agent_id': 'conversation.openai',
      'agent_id': 'conversation.openai_conversation',
      'role': 'tool_result',
      'tool_call_id': 'call_call_1',
      'tool_name': 'test_tool',
      'tool_result': 'value1',
    }),
    dict({
      'agent_id': 'conversation.openai',
      'agent_id': 'conversation.openai_conversation',
      'content': None,
      'role': 'assistant',
      'tool_calls': list([

@@ -41,14 +41,14 @@
      ]),
    }),
    dict({
      'agent_id': 'conversation.openai',
      'agent_id': 'conversation.openai_conversation',
      'role': 'tool_result',
      'tool_call_id': 'call_call_2',
      'tool_name': 'test_tool',
      'tool_result': 'value2',
    }),
    dict({
      'agent_id': 'conversation.openai',
      'agent_id': 'conversation.openai_conversation',
      'content': 'Cool',
      'role': 'assistant',
      'tool_calls': None,

@@ -62,7 +62,7 @@
      'role': 'user',
    }),
    dict({
      'agent_id': 'conversation.openai',
      'agent_id': 'conversation.openai_conversation',
      'content': None,
      'role': 'assistant',
      'tool_calls': list([

@@ -76,14 +76,14 @@
      ]),
    }),
    dict({
      'agent_id': 'conversation.openai',
      'agent_id': 'conversation.openai_conversation',
      'role': 'tool_result',
      'tool_call_id': 'call_call_1',
      'tool_name': 'test_tool',
      'tool_result': 'value1',
    }),
    dict({
      'agent_id': 'conversation.openai',
      'agent_id': 'conversation.openai_conversation',
      'content': 'Cool',
      'role': 'assistant',
      'tool_calls': None,
@@ -24,12 +24,13 @@ from homeassistant.components.openai_conversation.const import (
    CONF_WEB_SEARCH_REGION,
    CONF_WEB_SEARCH_TIMEZONE,
    CONF_WEB_SEARCH_USER_LOCATION,
    DEFAULT_CONVERSATION_NAME,
    DOMAIN,
    RECOMMENDED_CHAT_MODEL,
    RECOMMENDED_MAX_TOKENS,
    RECOMMENDED_TOP_P,
)
from homeassistant.const import CONF_LLM_HASS_API
from homeassistant.const import CONF_API_KEY, CONF_LLM_HASS_API
from homeassistant.core import HomeAssistant
from homeassistant.data_entry_flow import FlowResultType

@@ -72,42 +73,132 @@ async def test_form(hass: HomeAssistant) -> None:
    assert result2["data"] == {
        "api_key": "bla",
    }
    assert result2["options"] == RECOMMENDED_OPTIONS
    assert result2["options"] == {}
    assert result2["subentries"] == [
        {
            "subentry_type": "conversation",
            "data": RECOMMENDED_OPTIONS,
            "title": DEFAULT_CONVERSATION_NAME,
            "unique_id": None,
        }
    ]
    assert len(mock_setup_entry.mock_calls) == 1


async def test_options_recommended(
async def test_duplicate_entry(hass: HomeAssistant) -> None:
    """Test we abort on duplicate config entry."""
    MockConfigEntry(
        domain=DOMAIN,
        data={CONF_API_KEY: "bla"},
    ).add_to_hass(hass)

    result = await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": config_entries.SOURCE_USER}
    )
    assert result["type"] is FlowResultType.FORM
    assert not result["errors"]

    with patch(
        "homeassistant.components.openai_conversation.config_flow.openai.resources.models.AsyncModels.list",
    ):
        result = await hass.config_entries.flow.async_configure(
            result["flow_id"],
            {
                CONF_API_KEY: "bla",
            },
        )

    assert result["type"] is FlowResultType.ABORT
    assert result["reason"] == "already_configured"


async def test_creating_conversation_subentry(
    hass: HomeAssistant,
    mock_init_component: None,
    mock_config_entry: MockConfigEntry,
) -> None:
    """Test creating a conversation subentry."""
    mock_config_entry.add_to_hass(hass)

    result = await hass.config_entries.subentries.async_init(
        (mock_config_entry.entry_id, "conversation"),
        context={"source": config_entries.SOURCE_USER},
    )

    assert result["type"] is FlowResultType.FORM
    assert result["step_id"] == "init"
    assert not result["errors"]

    result2 = await hass.config_entries.subentries.async_configure(
        result["flow_id"],
        {"name": "My Custom Agent", **RECOMMENDED_OPTIONS},
    )
    await hass.async_block_till_done()

    assert result2["type"] is FlowResultType.CREATE_ENTRY
    assert result2["title"] == "My Custom Agent"

    processed_options = RECOMMENDED_OPTIONS.copy()
    processed_options[CONF_PROMPT] = processed_options[CONF_PROMPT].strip()

    assert result2["data"] == processed_options


async def test_creating_conversation_subentry_not_loaded(
    hass: HomeAssistant,
    mock_init_component,
    mock_config_entry: MockConfigEntry,
) -> None:
    """Test creating a conversation subentry when entry is not loaded."""
    await hass.config_entries.async_unload(mock_config_entry.entry_id)
    with patch(
        "homeassistant.components.openai_conversation.config_flow.openai.resources.models.AsyncModels.list",
        return_value=[],
    ):
        result = await hass.config_entries.subentries.async_init(
            (mock_config_entry.entry_id, "conversation"),
            context={"source": config_entries.SOURCE_USER},
        )

    assert result["type"] is FlowResultType.ABORT
    assert result["reason"] == "entry_not_loaded"


async def test_subentry_recommended(
    hass: HomeAssistant, mock_config_entry, mock_init_component
) -> None:
    """Test the options flow with recommended settings."""
    options_flow = await hass.config_entries.options.async_init(
        mock_config_entry.entry_id
    """Test the subentry flow with recommended settings."""
    subentry = next(iter(mock_config_entry.subentries.values()))
    subentry_flow = await mock_config_entry.start_subentry_reconfigure_flow(
        hass, subentry.subentry_id
    )
    options = await hass.config_entries.options.async_configure(
        options_flow["flow_id"],
    options = await hass.config_entries.subentries.async_configure(
        subentry_flow["flow_id"],
        {
            "prompt": "Speak like a pirate",
            "recommended": True,
        },
    )
    await hass.async_block_till_done()
    assert options["type"] is FlowResultType.CREATE_ENTRY
    assert options["data"]["prompt"] == "Speak like a pirate"
    assert options["type"] is FlowResultType.ABORT
    assert options["reason"] == "reconfigure_successful"
    assert subentry.data["prompt"] == "Speak like a pirate"


async def test_options_unsupported_model(
async def test_subentry_unsupported_model(
    hass: HomeAssistant, mock_config_entry, mock_init_component
) -> None:
    """Test the options form giving error about models not supported."""
    options_flow = await hass.config_entries.options.async_init(
        mock_config_entry.entry_id
    """Test the subentry form giving error about models not supported."""
    subentry = next(iter(mock_config_entry.subentries.values()))
    subentry_flow = await mock_config_entry.start_subentry_reconfigure_flow(
        hass, subentry.subentry_id
    )
    assert options_flow["type"] == FlowResultType.FORM
    assert options_flow["step_id"] == "init"
    assert subentry_flow["type"] == FlowResultType.FORM
    assert subentry_flow["step_id"] == "init"

    # Configure initial step
    options_flow = await hass.config_entries.options.async_configure(
        options_flow["flow_id"],
    subentry_flow = await hass.config_entries.subentries.async_configure(
        subentry_flow["flow_id"],
        {
            CONF_RECOMMENDED: False,
            CONF_PROMPT: "Speak like a pirate",

@@ -115,19 +206,19 @@ async def test_options_unsupported_model(
        },
    )
    await hass.async_block_till_done()
    assert options_flow["type"] == FlowResultType.FORM
    assert options_flow["step_id"] == "advanced"
    assert subentry_flow["type"] == FlowResultType.FORM
    assert subentry_flow["step_id"] == "advanced"

    # Configure advanced step
    options_flow = await hass.config_entries.options.async_configure(
        options_flow["flow_id"],
    subentry_flow = await hass.config_entries.subentries.async_configure(
        subentry_flow["flow_id"],
        {
            CONF_CHAT_MODEL: "o1-mini",
        },
    )
    await hass.async_block_till_done()
    assert options_flow["type"] is FlowResultType.FORM
    assert options_flow["errors"] == {"chat_model": "model_not_supported"}
    assert subentry_flow["type"] is FlowResultType.FORM
    assert subentry_flow["errors"] == {"chat_model": "model_not_supported"}


@pytest.mark.parametrize(

@@ -494,7 +585,7 @@ async def test_form_invalid_auth(hass: HomeAssistant, side_effect, error) -> Non
        ),
    ],
)
async def test_options_switching(
async def test_subentry_switching(
    hass: HomeAssistant,
    mock_config_entry,
    mock_init_component,

@@ -502,16 +593,22 @@ async def test_options_switching(
    new_options,
    expected_options,
) -> None:
    """Test the options form."""
    hass.config_entries.async_update_entry(mock_config_entry, options=current_options)
    options = await hass.config_entries.options.async_init(mock_config_entry.entry_id)
    assert options["step_id"] == "init"
    """Test the subentry form."""
    subentry = next(iter(mock_config_entry.subentries.values()))
    hass.config_entries.async_update_subentry(
        mock_config_entry, subentry, data=current_options
    )
    await hass.async_block_till_done()
    subentry_flow = await mock_config_entry.start_subentry_reconfigure_flow(
        hass, subentry.subentry_id
    )
    assert subentry_flow["step_id"] == "init"

    for step_options in new_options:
        assert options["type"] == FlowResultType.FORM
        assert subentry_flow["type"] == FlowResultType.FORM

        # Test that current options are showed as suggested values:
        for key in options["data_schema"].schema:
        for key in subentry_flow["data_schema"].schema:
            if (
                isinstance(key.description, dict)
                and "suggested_value" in key.description

@@ -523,38 +620,42 @@ async def test_options_switching(
                assert key.description["suggested_value"] == current_option

        # Configure current step
        options = await hass.config_entries.options.async_configure(
            options["flow_id"],
        subentry_flow = await hass.config_entries.subentries.async_configure(
            subentry_flow["flow_id"],
            step_options,
        )
        await hass.async_block_till_done()

    assert options["type"] is FlowResultType.CREATE_ENTRY
    assert options["data"] == expected_options
    assert subentry_flow["type"] is FlowResultType.ABORT
    assert subentry_flow["reason"] == "reconfigure_successful"
    assert subentry.data == expected_options


async def test_options_web_search_user_location(
async def test_subentry_web_search_user_location(
    hass: HomeAssistant, mock_config_entry, mock_init_component
) -> None:
    """Test fetching user location."""
    options = await hass.config_entries.options.async_init(mock_config_entry.entry_id)
    assert options["type"] == FlowResultType.FORM
    assert options["step_id"] == "init"
    subentry = next(iter(mock_config_entry.subentries.values()))
    subentry_flow = await mock_config_entry.start_subentry_reconfigure_flow(
        hass, subentry.subentry_id
    )
    assert subentry_flow["type"] == FlowResultType.FORM
    assert subentry_flow["step_id"] == "init"

    # Configure initial step
    options = await hass.config_entries.options.async_configure(
        options["flow_id"],
    subentry_flow = await hass.config_entries.subentries.async_configure(
        subentry_flow["flow_id"],
        {
            CONF_RECOMMENDED: False,
            CONF_PROMPT: "Speak like a pirate",
        },
    )
    assert options["type"] == FlowResultType.FORM
    assert options["step_id"] == "advanced"
    assert subentry_flow["type"] == FlowResultType.FORM
    assert subentry_flow["step_id"] == "advanced"

    # Configure advanced step
    options = await hass.config_entries.options.async_configure(
        options["flow_id"],
    subentry_flow = await hass.config_entries.subentries.async_configure(
        subentry_flow["flow_id"],
        {
            CONF_TEMPERATURE: 1.0,
            CONF_CHAT_MODEL: RECOMMENDED_CHAT_MODEL,

@@ -563,8 +664,8 @@ async def test_options_web_search_user_location(
        },
    )
    await hass.async_block_till_done()
    assert options["type"] == FlowResultType.FORM
    assert options["step_id"] == "model"
    assert subentry_flow["type"] == FlowResultType.FORM
    assert subentry_flow["step_id"] == "model"

    hass.config.country = "US"
    hass.config.time_zone = "America/Los_Angeles"

@@ -601,8 +702,8 @@ async def test_options_web_search_user_location(
    )

    # Configure model step
    options = await hass.config_entries.options.async_configure(
        options["flow_id"],
    subentry_flow = await hass.config_entries.subentries.async_configure(
        subentry_flow["flow_id"],
        {
            CONF_WEB_SEARCH: True,
            CONF_WEB_SEARCH_CONTEXT_SIZE: "medium",

@@ -614,8 +715,9 @@ async def test_options_web_search_user_location(
        mock_create.call_args.kwargs["input"][0]["content"] == "Where are the following"
        " coordinates located: (37.7749, -122.4194)?"
    )
    assert options["type"] is FlowResultType.CREATE_ENTRY
    assert options["data"] == {
    assert subentry_flow["type"] is FlowResultType.ABORT
    assert subentry_flow["reason"] == "reconfigure_successful"
    assert subentry.data == {
        CONF_RECOMMENDED: False,
        CONF_PROMPT: "Speak like a pirate",
        CONF_TEMPERATURE: 1.0,
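The config flow tests above all move to the same pattern: what used to be an options flow ending in CREATE_ENTRY with the new options in the flow result is now a subentry reconfigure flow ending in ABORT with reason "reconfigure_successful", after which the settings are read from the subentry itself. Condensed from the diff above, using the same fixtures and helpers as this test module:

# The recurring assertion pattern in the updated config flow tests.
subentry = next(iter(mock_config_entry.subentries.values()))
subentry_flow = await mock_config_entry.start_subentry_reconfigure_flow(
    hass, subentry.subentry_id
)
result = await hass.config_entries.subentries.async_configure(
    subentry_flow["flow_id"],
    {"prompt": "Speak like a pirate", "recommended": True},
)
assert result["type"] is FlowResultType.ABORT
assert result["reason"] == "reconfigure_successful"
assert subentry.data["prompt"] == "Speak like a pirate"  # settings now live on the subentry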
@@ -153,20 +153,18 @@ async def test_entity(
    mock_init_component,
) -> None:
    """Test entity properties."""
    state = hass.states.get("conversation.openai")
    state = hass.states.get("conversation.openai_conversation")
    assert state
    assert state.attributes["supported_features"] == 0

    hass.config_entries.async_update_entry(
    hass.config_entries.async_update_subentry(
        mock_config_entry,
        options={
            **mock_config_entry.options,
            CONF_LLM_HASS_API: "assist",
        },
        next(iter(mock_config_entry.subentries.values())),
        data={CONF_LLM_HASS_API: "assist"},
    )
    await hass.config_entries.async_reload(mock_config_entry.entry_id)

    state = hass.states.get("conversation.openai")
    state = hass.states.get("conversation.openai_conversation")
    assert state
    assert (
        state.attributes["supported_features"]

@@ -261,7 +259,7 @@ async def test_incomplete_response(
        "Please tell me a big story",
        "mock-conversation-id",
        Context(),
        agent_id="conversation.openai",
        agent_id="conversation.openai_conversation",
    )

    assert result.response.response_type == intent.IntentResponseType.ERROR, result

@@ -285,7 +283,7 @@ async def test_incomplete_response(
        "please tell me a big story",
        "mock-conversation-id",
        Context(),
        agent_id="conversation.openai",
        agent_id="conversation.openai_conversation",
    )

    assert result.response.response_type == intent.IntentResponseType.ERROR, result

@@ -324,7 +322,7 @@ async def test_failed_response(
        "next natural number please",
        "mock-conversation-id",
        Context(),
        agent_id="conversation.openai",
        agent_id="conversation.openai_conversation",
    )

    assert result.response.response_type == intent.IntentResponseType.ERROR, result

@@ -583,7 +581,7 @@ async def test_function_call(
        "Please call the test function",
        mock_chat_log.conversation_id,
        Context(),
        agent_id="conversation.openai",
        agent_id="conversation.openai_conversation",
    )

    assert mock_create_stream.call_args.kwargs["input"][2] == {

@@ -630,7 +628,7 @@ async def test_function_call_without_reasoning(
        "Please call the test function",
        mock_chat_log.conversation_id,
        Context(),
        agent_id="conversation.openai",
        agent_id="conversation.openai_conversation",
    )

    assert result.response.response_type == intent.IntentResponseType.ACTION_DONE

@@ -686,7 +684,7 @@ async def test_function_call_invalid(
        "Please call the test function",
        "mock-conversation-id",
        Context(),
        agent_id="conversation.openai",
        agent_id="conversation.openai_conversation",
    )

@@ -720,7 +718,7 @@ async def test_assist_api_tools_conversion(
    ]

    await conversation.async_converse(
        hass, "hello", None, Context(), agent_id="conversation.openai"
        hass, "hello", None, Context(), agent_id="conversation.openai_conversation"
    )

    tools = mock_create_stream.mock_calls[0][2]["tools"]

@@ -735,10 +733,12 @@ async def test_web_search(
    mock_chat_log: MockChatLog,  # noqa: F811
) -> None:
    """Test web_search_tool."""
    hass.config_entries.async_update_entry(
    subentry = next(iter(mock_config_entry.subentries.values()))
    hass.config_entries.async_update_subentry(
        mock_config_entry,
        options={
            **mock_config_entry.options,
        subentry,
        data={
            **subentry.data,
            CONF_WEB_SEARCH: True,
            CONF_WEB_SEARCH_CONTEXT_SIZE: "low",
            CONF_WEB_SEARCH_USER_LOCATION: True,

@@ -764,7 +764,7 @@ async def test_web_search(
        "What's on the latest news?",
        mock_chat_log.conversation_id,
        Context(),
        agent_id="conversation.openai",
        agent_id="conversation.openai_conversation",
    )

    assert mock_create_stream.mock_calls[0][2]["tools"] == [
@@ -15,8 +15,10 @@ from openai.types.responses import Response, ResponseOutputMessage, ResponseOutp
import pytest

from homeassistant.components.openai_conversation import CONF_FILENAMES
from homeassistant.components.openai_conversation.const import DOMAIN
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError, ServiceValidationError
from homeassistant.helpers import device_registry as dr, entity_registry as er
from homeassistant.setup import async_setup_component

from tests.common import MockConfigEntry

@@ -536,3 +538,271 @@ async def test_generate_content_service_error(
        blocking=True,
        return_response=True,
    )


async def test_migration_from_v1_to_v2(
    hass: HomeAssistant,
    device_registry: dr.DeviceRegistry,
    entity_registry: er.EntityRegistry,
) -> None:
    """Test migration from version 1 to version 2."""
    # Create a v1 config entry with conversation options and an entity
    OPTIONS = {
        "recommended": True,
        "llm_hass_api": ["assist"],
        "prompt": "You are a helpful assistant",
        "chat_model": "gpt-4o-mini",
    }
    mock_config_entry = MockConfigEntry(
        domain=DOMAIN,
        data={"api_key": "1234"},
        options=OPTIONS,
        version=1,
        title="ChatGPT",
    )
    mock_config_entry.add_to_hass(hass)

    device = device_registry.async_get_or_create(
        config_entry_id=mock_config_entry.entry_id,
        identifiers={(DOMAIN, mock_config_entry.entry_id)},
        name=mock_config_entry.title,
        manufacturer="OpenAI",
        model="ChatGPT",
        entry_type=dr.DeviceEntryType.SERVICE,
    )
    entity = entity_registry.async_get_or_create(
        "conversation",
        DOMAIN,
        mock_config_entry.entry_id,
        config_entry=mock_config_entry,
        device_id=device.id,
        suggested_object_id="google_generative_ai_conversation",
    )

    # Run migration
    with patch(
        "homeassistant.components.openai_conversation.async_setup_entry",
        return_value=True,
    ):
        await hass.config_entries.async_setup(mock_config_entry.entry_id)
        await hass.async_block_till_done()

    assert mock_config_entry.version == 2
    assert mock_config_entry.data == {"api_key": "1234"}
    assert mock_config_entry.options == {}

    assert len(mock_config_entry.subentries) == 1

    subentry = next(iter(mock_config_entry.subentries.values()))
    assert subentry.unique_id is None
    assert subentry.title == "ChatGPT"
    assert subentry.subentry_type == "conversation"
    assert subentry.data == OPTIONS

    migrated_entity = entity_registry.async_get(entity.entity_id)
    assert migrated_entity is not None
    assert migrated_entity.config_entry_id == mock_config_entry.entry_id
    assert migrated_entity.config_subentry_id == subentry.subentry_id
    assert migrated_entity.unique_id == subentry.subentry_id

    # Check device migration
    assert not device_registry.async_get_device(
        identifiers={(DOMAIN, mock_config_entry.entry_id)}
    )
    assert (
        migrated_device := device_registry.async_get_device(
            identifiers={(DOMAIN, subentry.subentry_id)}
        )
    )
    assert migrated_device.identifiers == {(DOMAIN, subentry.subentry_id)}
    assert migrated_device.id == device.id


async def test_migration_from_v1_to_v2_with_multiple_keys(
    hass: HomeAssistant,
    device_registry: dr.DeviceRegistry,
    entity_registry: er.EntityRegistry,
) -> None:
    """Test migration from version 1 to version 2 with different API keys."""
    # Create two v1 config entries with different API keys
    options = {
        "recommended": True,
        "llm_hass_api": ["assist"],
        "prompt": "You are a helpful assistant",
        "chat_model": "gpt-4o-mini",
    }
    mock_config_entry = MockConfigEntry(
        domain=DOMAIN,
        data={"api_key": "1234"},
        options=options,
        version=1,
        title="ChatGPT 1",
    )
    mock_config_entry.add_to_hass(hass)
    mock_config_entry_2 = MockConfigEntry(
        domain=DOMAIN,
        data={"api_key": "12345"},
        options=options,
        version=1,
        title="ChatGPT 2",
    )
    mock_config_entry_2.add_to_hass(hass)

    device = device_registry.async_get_or_create(
        config_entry_id=mock_config_entry.entry_id,
        identifiers={(DOMAIN, mock_config_entry.entry_id)},
        name=mock_config_entry.title,
        manufacturer="OpenAI",
        model="ChatGPT 1",
        entry_type=dr.DeviceEntryType.SERVICE,
    )
    entity_registry.async_get_or_create(
        "conversation",
        DOMAIN,
        mock_config_entry.entry_id,
        config_entry=mock_config_entry,
        device_id=device.id,
        suggested_object_id="chatgpt_1",
    )

    device_2 = device_registry.async_get_or_create(
        config_entry_id=mock_config_entry_2.entry_id,
        identifiers={(DOMAIN, mock_config_entry_2.entry_id)},
        name=mock_config_entry_2.title,
        manufacturer="OpenAI",
        model="ChatGPT 2",
        entry_type=dr.DeviceEntryType.SERVICE,
    )
    entity_registry.async_get_or_create(
        "conversation",
        DOMAIN,
        mock_config_entry_2.entry_id,
        config_entry=mock_config_entry_2,
        device_id=device_2.id,
        suggested_object_id="chatgpt_2",
    )

    # Run migration
    with patch(
        "homeassistant.components.openai_conversation.async_setup_entry",
        return_value=True,
    ):
        await hass.config_entries.async_setup(mock_config_entry.entry_id)
        await hass.async_block_till_done()
        await hass.async_block_till_done()

    entries = hass.config_entries.async_entries(DOMAIN)
    assert len(entries) == 2

    for idx, entry in enumerate(entries):
        assert entry.version == 2
        assert not entry.options
        assert len(entry.subentries) == 1
        subentry = list(entry.subentries.values())[0]
        assert subentry.subentry_type == "conversation"
        assert subentry.data == options
        assert subentry.title == f"ChatGPT {idx + 1}"

        dev = device_registry.async_get_device(
            identifiers={(DOMAIN, list(entry.subentries.values())[0].subentry_id)}
        )
        assert dev is not None


async def test_migration_from_v1_to_v2_with_same_keys(
    hass: HomeAssistant,
    device_registry: dr.DeviceRegistry,
    entity_registry: er.EntityRegistry,
) -> None:
    """Test migration from version 1 to version 2 with same API keys consolidates entries."""
    # Create two v1 config entries with the same API key
    options = {
        "recommended": True,
        "llm_hass_api": ["assist"],
        "prompt": "You are a helpful assistant",
        "chat_model": "gpt-4o-mini",
    }
    mock_config_entry = MockConfigEntry(
        domain=DOMAIN,
        data={"api_key": "1234"},
        options=options,
        version=1,
        title="ChatGPT",
    )
    mock_config_entry.add_to_hass(hass)
    mock_config_entry_2 = MockConfigEntry(
        domain=DOMAIN,
        data={"api_key": "1234"},  # Same API key
        options=options,
        version=1,
        title="ChatGPT 2",
    )
    mock_config_entry_2.add_to_hass(hass)

    device = device_registry.async_get_or_create(
        config_entry_id=mock_config_entry.entry_id,
        identifiers={(DOMAIN, mock_config_entry.entry_id)},
        name=mock_config_entry.title,
        manufacturer="OpenAI",
        model="ChatGPT",
        entry_type=dr.DeviceEntryType.SERVICE,
    )
    entity_registry.async_get_or_create(
        "conversation",
        DOMAIN,
        mock_config_entry.entry_id,
        config_entry=mock_config_entry,
        device_id=device.id,
        suggested_object_id="chatgpt",
    )

    device_2 = device_registry.async_get_or_create(
        config_entry_id=mock_config_entry_2.entry_id,
        identifiers={(DOMAIN, mock_config_entry_2.entry_id)},
        name=mock_config_entry_2.title,
        manufacturer="OpenAI",
        model="ChatGPT",
        entry_type=dr.DeviceEntryType.SERVICE,
    )
    entity_registry.async_get_or_create(
        "conversation",
        DOMAIN,
        mock_config_entry_2.entry_id,
        config_entry=mock_config_entry_2,
        device_id=device_2.id,
        suggested_object_id="chatgpt_2",
    )

    # Run migration
    with patch(
        "homeassistant.components.openai_conversation.async_setup_entry",
        return_value=True,
    ):
        await hass.config_entries.async_setup(mock_config_entry.entry_id)
        await hass.async_block_till_done()
        await hass.async_block_till_done()

    # Should have only one entry left (consolidated)
    entries = hass.config_entries.async_entries(DOMAIN)
    assert len(entries) == 1

    entry = entries[0]
    assert entry.version == 2
    assert not entry.options
    assert len(entry.subentries) == 2  # Two subentries from the two original entries

    # Check both subentries exist with correct data
    subentries = list(entry.subentries.values())
    titles = [sub.title for sub in subentries]
    assert "ChatGPT" in titles
    assert "ChatGPT 2" in titles

    for subentry in subentries:
        assert subentry.subentry_type == "conversation"
        assert subentry.data == options

        # Check devices were migrated correctly
        dev = device_registry.async_get_device(
            identifiers={(DOMAIN, subentry.subentry_id)}
        )
        assert dev is not None
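The last two tests pin down the consolidation rule: after migration there is one v2 entry per distinct API key, and every original v1 entry survives as a conversation subentry on that entry. The standalone sketch below restates that rule in plain Python; it uses no Home Assistant APIs, and the entry dicts are purely illustrative, not the component's actual data structures.

from collections import defaultdict

# Illustrative v1 entries, shaped like the ones built in the migration tests above.
v1_entries = [
    {"title": "ChatGPT", "api_key": "1234", "options": {"chat_model": "gpt-4o-mini"}},
    {"title": "ChatGPT 2", "api_key": "1234", "options": {"chat_model": "gpt-4o-mini"}},
]

# Group by API key: one v2 entry per distinct key, one conversation subentry per old entry.
grouped: dict[str, list[dict]] = defaultdict(list)
for old in v1_entries:
    grouped[old["api_key"]].append(
        {"subentry_type": "conversation", "title": old["title"], "data": old["options"]}
    )

v2_entries = [
    {"version": 2, "data": {"api_key": key}, "options": {}, "subentries": subentries}
    for key, subentries in grouped.items()
]

# Matches test_migration_from_v1_to_v2_with_same_keys: one entry, two subentries.
assert len(v2_entries) == 1
assert len(v2_entries[0]["subentries"]) == 2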