forked from home-assistant/core
Compare commits
88 Commits
via_device
...
llm-task-p
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
17a5815ca1 | ||
|
|
a8d4caab01 | ||
|
|
2be6acec03 | ||
|
|
fa21269f0d | ||
|
|
5f5869ffc6 | ||
|
|
7a2d99a450 | ||
|
|
6b669ce40c | ||
|
|
fdf4ed2aa5 | ||
|
|
1361d10cd7 | ||
|
|
8c7ba11493 | ||
|
|
29ce17abf4 | ||
|
|
c988d1ce36 | ||
|
|
ec02f6d010 | ||
|
|
9f19c4250a | ||
|
|
d7b583ae51 | ||
|
|
152e5254e2 | ||
|
|
3f8f7cd578 | ||
|
|
ed3fb62ffc | ||
|
|
1d14e1f018 | ||
|
|
2ac8901a0d | ||
|
|
6204fd5363 | ||
|
|
ce52ef64db | ||
|
|
059c12798d | ||
|
|
56aa809074 | ||
|
|
3d2dca5f0c | ||
|
|
cdb2b407be | ||
|
|
186ed451a9 | ||
|
|
761a0877e6 | ||
|
|
91bc56b15c | ||
|
|
d1e2c62433 | ||
|
|
524c16fbe1 | ||
|
|
2fdd3d66bc | ||
|
|
6a1e3b60ee | ||
|
|
434cd95a66 | ||
|
|
1a5bc2c7e0 | ||
|
|
a66e9a1a2c | ||
|
|
d880ce6bb4 | ||
|
|
c96023dcae | ||
|
|
2f8ad4d5bf | ||
|
|
038a848d53 | ||
|
|
ff17d79e73 | ||
|
|
a8201009f3 | ||
|
|
a349653282 | ||
|
|
355ee1178e | ||
|
|
30c5df3eaa | ||
|
|
10874af19a | ||
|
|
704118b3d0 | ||
|
|
7c575d0316 | ||
|
|
ab3f11bfe7 | ||
|
|
f0357539ad | ||
|
|
e70a2dd257 | ||
|
|
5ef99a15a5 | ||
|
|
6421973cd6 | ||
|
|
7201171eb5 | ||
|
|
1fb438fa6c | ||
|
|
89ae68c5af | ||
|
|
c78b66d5d5 | ||
|
|
d756cf91ce | ||
|
|
8d13bf93ab | ||
|
|
e86e793842 | ||
|
|
7e6bb021ce | ||
|
|
680b70aa29 | ||
|
|
8eebebc586 | ||
|
|
48e4624ba0 | ||
|
|
b0cf974b34 | ||
|
|
171f7c5f81 | ||
|
|
8807c530a9 | ||
|
|
28bd90aeb0 | ||
|
|
af1eccabce | ||
|
|
afc0a2789d | ||
|
|
78ed1097c4 | ||
|
|
2991726d35 | ||
|
|
c34596e54d | ||
|
|
74a92e2cd8 | ||
|
|
e19f178864 | ||
|
|
9dfbccf0cb | ||
|
|
64e503bc27 | ||
|
|
9d1e60cf7e | ||
|
|
4160521349 | ||
|
|
14c30ef2df | ||
|
|
e14cf8a5b9 | ||
|
|
30dbd5a900 | ||
|
|
25e6eab008 | ||
|
|
8bf562b7b6 | ||
|
|
7cb3c397b2 | ||
|
|
f44f2522ef | ||
|
|
8c9acf5a4d | ||
|
|
e46e7f5a81 |
2
.github/workflows/builder.yml
vendored
2
.github/workflows/builder.yml
vendored
@@ -531,7 +531,7 @@ jobs:
|
||||
|
||||
- name: Generate artifact attestation
|
||||
if: needs.init.outputs.channel != 'dev' && needs.init.outputs.publish == 'true'
|
||||
uses: actions/attest-build-provenance@db473fddc028af60658334401dc6fa3ffd8669fd # v2.3.0
|
||||
uses: actions/attest-build-provenance@e8998f949152b193b063cb0ec769d69d929409be # v2.4.0
|
||||
with:
|
||||
subject-name: ${{ env.HASSFEST_IMAGE_NAME }}
|
||||
subject-digest: ${{ steps.push.outputs.digest }}
|
||||
|
||||
4
.github/workflows/codeql.yml
vendored
4
.github/workflows/codeql.yml
vendored
@@ -24,11 +24,11 @@ jobs:
|
||||
uses: actions/checkout@v4.2.2
|
||||
|
||||
- name: Initialize CodeQL
|
||||
uses: github/codeql-action/init@v3.28.19
|
||||
uses: github/codeql-action/init@v3.29.0
|
||||
with:
|
||||
languages: python
|
||||
|
||||
- name: Perform CodeQL Analysis
|
||||
uses: github/codeql-action/analyze@v3.28.19
|
||||
uses: github/codeql-action/analyze@v3.29.0
|
||||
with:
|
||||
category: "/language:python"
|
||||
|
||||
39
.github/workflows/detect-duplicate-issues.yml
vendored
39
.github/workflows/detect-duplicate-issues.yml
vendored
@@ -133,12 +133,18 @@ jobs:
|
||||
|
||||
// Build search query for issues with any of the current integration labels
|
||||
const labelQueries = integrationLabels.map(label => `label:"${label}"`);
|
||||
|
||||
// Calculate date 6 months ago
|
||||
const sixMonthsAgo = new Date();
|
||||
sixMonthsAgo.setMonth(sixMonthsAgo.getMonth() - 6);
|
||||
const dateFilter = `created:>=${sixMonthsAgo.toISOString().split('T')[0]}`;
|
||||
|
||||
let searchQuery;
|
||||
|
||||
if (labelQueries.length === 1) {
|
||||
searchQuery = `repo:${context.repo.owner}/${context.repo.repo} is:issue ${labelQueries[0]}`;
|
||||
searchQuery = `repo:${context.repo.owner}/${context.repo.repo} is:issue ${labelQueries[0]} ${dateFilter}`;
|
||||
} else {
|
||||
searchQuery = `repo:${context.repo.owner}/${context.repo.repo} is:issue (${labelQueries.join(' OR ')})`;
|
||||
searchQuery = `repo:${context.repo.owner}/${context.repo.repo} is:issue (${labelQueries.join(' OR ')}) ${dateFilter}`;
|
||||
}
|
||||
|
||||
console.log(`Search query: ${searchQuery}`);
|
||||
@@ -227,29 +233,34 @@ jobs:
|
||||
if: steps.extract.outputs.should_continue == 'true' && steps.fetch_similar.outputs.has_similar == 'true'
|
||||
uses: actions/ai-inference@v1.1.0
|
||||
with:
|
||||
model: openai/gpt-4o-mini
|
||||
model: openai/gpt-4o
|
||||
system-prompt: |
|
||||
You are a Home Assistant issue duplicate detector. Your task is to identify potential duplicate issues based on their content.
|
||||
You are a Home Assistant issue duplicate detector. Your task is to identify TRUE DUPLICATES - issues that report the EXACT SAME problem, not just similar or related issues.
|
||||
|
||||
CRITICAL: An issue is ONLY a duplicate if:
|
||||
- It describes the SAME problem with the SAME root cause
|
||||
- Issues about the same integration but different problems are NOT duplicates
|
||||
- Issues with similar symptoms but different causes are NOT duplicates
|
||||
|
||||
Important considerations:
|
||||
- Open issues are more relevant than closed ones for duplicate detection
|
||||
- Recently updated issues may indicate ongoing work or discussion
|
||||
- Issues with more comments are generally more relevant and active
|
||||
- Higher comment count often indicates community engagement and importance
|
||||
- Older closed issues might be resolved differently than newer approaches
|
||||
- Consider the time between issues - very old issues may have different contexts
|
||||
|
||||
Rules:
|
||||
1. Compare the current issue with the provided similar issues
|
||||
1. ONLY mark as duplicate if the issues describe IDENTICAL problems
|
||||
2. Look for issues that report the same problem or request the same functionality
|
||||
3. Consider different wording but same underlying issue as duplicates
|
||||
3. Different error messages = NOT a duplicate (even if same integration)
|
||||
4. For CLOSED issues, only mark as duplicate if they describe the EXACT same problem
|
||||
5. For OPEN issues, use a lower threshold (70%+ similarity)
|
||||
5. For OPEN issues, use a lower threshold (90%+ similarity)
|
||||
6. Prioritize issues with higher comment counts as they indicate more activity/relevance
|
||||
7. Return ONLY a JSON array of issue numbers that are potential duplicates
|
||||
8. If no duplicates are found, return an empty array: []
|
||||
9. Maximum 5 potential duplicates, prioritize open issues with comments
|
||||
10. Consider the age of issues - prefer recent duplicates over very old ones
|
||||
7. When in doubt, do NOT mark as duplicate
|
||||
8. Return ONLY a JSON array of issue numbers that are duplicates
|
||||
9. If no duplicates are found, return an empty array: []
|
||||
10. Maximum 5 potential duplicates, prioritize open issues with comments
|
||||
11. Consider the age of issues - prefer recent duplicates over very old ones
|
||||
|
||||
Example response format:
|
||||
[1234, 5678, 9012]
|
||||
@@ -259,10 +270,10 @@ jobs:
|
||||
Title: ${{ steps.extract.outputs.current_title }}
|
||||
Body: ${{ steps.extract.outputs.current_body }}
|
||||
|
||||
Similar issues to compare against (each includes state, creation date, last update, and comment count):
|
||||
Other issues to compare against (each includes state, creation date, last update, and comment count):
|
||||
${{ steps.fetch_similar.outputs.similar_issues }}
|
||||
|
||||
Analyze these issues and identify which ones are potential duplicates of the current issue. Consider their state (open/closed), how recently they were updated, and their comment count (higher = more relevant).
|
||||
Analyze these issues and identify which ones describe IDENTICAL problems and thus are duplicates of the current issue. When sorting them, consider their state (open/closed), how recently they were updated, and their comment count (higher = more relevant).
|
||||
|
||||
max-tokens: 100
|
||||
|
||||
|
||||
23
.github/workflows/detect-non-english-issues.yml
vendored
23
.github/workflows/detect-non-english-issues.yml
vendored
@@ -64,16 +64,19 @@ jobs:
|
||||
You are a language detection system. Your task is to determine if the provided text is written in English or another language.
|
||||
|
||||
Rules:
|
||||
1. Analyze the text and determine the primary language
|
||||
1. Analyze the text and determine the primary language of the USER'S DESCRIPTION only
|
||||
2. IGNORE markdown headers (lines starting with #, ##, ###, etc.) as these are from issue templates, not user input
|
||||
3. IGNORE all code blocks (text between ``` or ` markers) as they may contain system-generated error messages in other languages
|
||||
4. Consider technical terms, code snippets, and URLs as neutral (they don't indicate non-English)
|
||||
5. Focus on the actual sentences and descriptions written by the user
|
||||
6. Return ONLY a JSON object with two fields:
|
||||
- "is_english": boolean (true if the text is primarily in English, false otherwise)
|
||||
4. IGNORE error messages, logs, and system output even if not in code blocks - these often appear in the user's system language
|
||||
5. Consider technical terms, code snippets, URLs, and file paths as neutral (they don't indicate non-English)
|
||||
6. Focus ONLY on the actual sentences and descriptions written by the user explaining their issue
|
||||
7. If the user's explanation/description is in English but includes non-English error messages or logs, consider it ENGLISH
|
||||
8. Return ONLY a JSON object with two fields:
|
||||
- "is_english": boolean (true if the user's description is primarily in English, false otherwise)
|
||||
- "detected_language": string (the name of the detected language, e.g., "English", "Spanish", "Chinese", etc.)
|
||||
7. Be lenient - if the text is mostly English with minor non-English elements, consider it English
|
||||
8. Common programming terms, error messages, and technical jargon should not be considered as non-English
|
||||
9. Be lenient - if the user's explanation is in English with non-English system output, it's still English
|
||||
10. Common programming terms, error messages, and technical jargon should not be considered as non-English
|
||||
11. If you cannot reliably determine the language, set detected_language to "undefined"
|
||||
|
||||
Example response:
|
||||
{"is_english": false, "detected_language": "Spanish"}
|
||||
@@ -122,6 +125,12 @@ jobs:
|
||||
return;
|
||||
}
|
||||
|
||||
// If language is undefined or not detected, skip processing
|
||||
if (!languageResult.detected_language || languageResult.detected_language === 'undefined') {
|
||||
console.log('Language could not be determined, skipping processing');
|
||||
return;
|
||||
}
|
||||
|
||||
console.log(`Issue detected as non-English: ${languageResult.detected_language}`);
|
||||
|
||||
// Post comment explaining the language requirement
|
||||
|
||||
6
CODEOWNERS
generated
6
CODEOWNERS
generated
@@ -57,6 +57,8 @@ build.json @home-assistant/supervisor
|
||||
/tests/components/aemet/ @Noltari
|
||||
/homeassistant/components/agent_dvr/ @ispysoftware
|
||||
/tests/components/agent_dvr/ @ispysoftware
|
||||
/homeassistant/components/ai_task/ @home-assistant/core
|
||||
/tests/components/ai_task/ @home-assistant/core
|
||||
/homeassistant/components/air_quality/ @home-assistant/core
|
||||
/tests/components/air_quality/ @home-assistant/core
|
||||
/homeassistant/components/airgradient/ @airgradienthq @joostlek
|
||||
@@ -1274,8 +1276,8 @@ build.json @home-assistant/supervisor
|
||||
/tests/components/rehlko/ @bdraco @peterager
|
||||
/homeassistant/components/remote/ @home-assistant/core
|
||||
/tests/components/remote/ @home-assistant/core
|
||||
/homeassistant/components/remote_calendar/ @Thomas55555
|
||||
/tests/components/remote_calendar/ @Thomas55555
|
||||
/homeassistant/components/remote_calendar/ @Thomas55555 @allenporter
|
||||
/tests/components/remote_calendar/ @Thomas55555 @allenporter
|
||||
/homeassistant/components/renault/ @epenet
|
||||
/tests/components/renault/ @epenet
|
||||
/homeassistant/components/renson/ @jimmyd-be
|
||||
|
||||
@@ -6,7 +6,7 @@ from jaraco.abode.exceptions import Exception as AbodeException
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.const import ATTR_ENTITY_ID
|
||||
from homeassistant.core import HomeAssistant, ServiceCall
|
||||
from homeassistant.core import HomeAssistant, ServiceCall, callback
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.helpers.dispatcher import dispatcher_send
|
||||
|
||||
@@ -70,6 +70,7 @@ def _trigger_automation(call: ServiceCall) -> None:
|
||||
dispatcher_send(call.hass, signal)
|
||||
|
||||
|
||||
@callback
|
||||
def async_setup_services(hass: HomeAssistant) -> None:
|
||||
"""Home Assistant services."""
|
||||
|
||||
|
||||
99
homeassistant/components/ai_task/__init__.py
Normal file
99
homeassistant/components/ai_task/__init__.py
Normal file
@@ -0,0 +1,99 @@
|
||||
"""Integration to offer AI tasks to Home Assistant."""
|
||||
|
||||
import logging
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers import config_validation as cv, storage
|
||||
from homeassistant.helpers.entity_component import EntityComponent
|
||||
from homeassistant.helpers.typing import UNDEFINED, ConfigType, UndefinedType
|
||||
|
||||
from .const import DATA_COMPONENT, DATA_PREFERENCES, DOMAIN
|
||||
from .entity import AITaskEntity
|
||||
from .http import async_setup as async_setup_conversation_http
|
||||
from .task import GenTextTask, GenTextTaskResult, async_generate_text
|
||||
|
||||
__all__ = [
|
||||
"DOMAIN",
|
||||
"AITaskEntity",
|
||||
"GenTextTask",
|
||||
"GenTextTaskResult",
|
||||
"async_generate_text",
|
||||
"async_setup",
|
||||
"async_setup_entry",
|
||||
"async_unload_entry",
|
||||
]
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)
|
||||
|
||||
|
||||
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
"""Register the process service."""
|
||||
entity_component = EntityComponent[AITaskEntity](_LOGGER, DOMAIN, hass)
|
||||
hass.data[DATA_COMPONENT] = entity_component
|
||||
hass.data[DATA_PREFERENCES] = AITaskPreferences(hass)
|
||||
await hass.data[DATA_PREFERENCES].async_load()
|
||||
async_setup_conversation_http(hass)
|
||||
return True
|
||||
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
"""Set up a config entry."""
|
||||
return await hass.data[DATA_COMPONENT].async_setup_entry(entry)
|
||||
|
||||
|
||||
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
"""Unload a config entry."""
|
||||
return await hass.data[DATA_COMPONENT].async_unload_entry(entry)
|
||||
|
||||
|
||||
class AITaskPreferences:
|
||||
"""AI Task preferences."""
|
||||
|
||||
gen_text_entity_id: str | None = None
|
||||
|
||||
def __init__(self, hass: HomeAssistant) -> None:
|
||||
"""Initialize the preferences."""
|
||||
self._store: storage.Store[dict[str, str | None]] = storage.Store(
|
||||
hass, 1, DOMAIN
|
||||
)
|
||||
|
||||
async def async_load(self) -> None:
|
||||
"""Load the data from the store."""
|
||||
data = await self._store.async_load()
|
||||
if data is None:
|
||||
return
|
||||
self.gen_text_entity_id = data.get("gen_text_entity_id")
|
||||
|
||||
@callback
|
||||
def async_set_preferences(
|
||||
self,
|
||||
*,
|
||||
gen_text_entity_id: str | None | UndefinedType = UNDEFINED,
|
||||
) -> None:
|
||||
"""Set the preferences."""
|
||||
changed = False
|
||||
for key, value in (("gen_text_entity_id", gen_text_entity_id),):
|
||||
if value is not UNDEFINED:
|
||||
if getattr(self, key) != value:
|
||||
setattr(self, key, value)
|
||||
changed = True
|
||||
|
||||
if not changed:
|
||||
return
|
||||
|
||||
self._store.async_delay_save(
|
||||
lambda: {
|
||||
"gen_text_entity_id": self.gen_text_entity_id,
|
||||
},
|
||||
10,
|
||||
)
|
||||
|
||||
@callback
|
||||
def as_dict(self) -> dict[str, str | None]:
|
||||
"""Get the current preferences."""
|
||||
return {
|
||||
"gen_text_entity_id": self.gen_text_entity_id,
|
||||
}
|
||||
21
homeassistant/components/ai_task/const.py
Normal file
21
homeassistant/components/ai_task/const.py
Normal file
@@ -0,0 +1,21 @@
|
||||
"""Constants for the AI Task integration."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from homeassistant.util.hass_dict import HassKey
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from homeassistant.helpers.entity_component import EntityComponent
|
||||
|
||||
from . import AITaskPreferences
|
||||
from .entity import AITaskEntity
|
||||
|
||||
DOMAIN = "ai_task"
|
||||
DATA_COMPONENT: HassKey[EntityComponent[AITaskEntity]] = HassKey(DOMAIN)
|
||||
DATA_PREFERENCES: HassKey[AITaskPreferences] = HassKey(f"{DOMAIN}_preferences")
|
||||
|
||||
DEFAULT_SYSTEM_PROMPT = (
|
||||
"You are a Home Assistant expert and help users with their tasks."
|
||||
)
|
||||
95
homeassistant/components/ai_task/entity.py
Normal file
95
homeassistant/components/ai_task/entity.py
Normal file
@@ -0,0 +1,95 @@
|
||||
"""Entity for the AI Task integration."""
|
||||
|
||||
from collections.abc import AsyncGenerator
|
||||
import contextlib
|
||||
from typing import final
|
||||
|
||||
from homeassistant.components.conversation import (
|
||||
ChatLog,
|
||||
UserContent,
|
||||
async_get_chat_log,
|
||||
)
|
||||
from homeassistant.const import STATE_UNAVAILABLE, STATE_UNKNOWN
|
||||
from homeassistant.helpers import llm
|
||||
from homeassistant.helpers.chat_session import async_get_chat_session
|
||||
from homeassistant.helpers.restore_state import RestoreEntity
|
||||
from homeassistant.util import dt as dt_util
|
||||
|
||||
from .const import DEFAULT_SYSTEM_PROMPT, DOMAIN
|
||||
from .task import GenTextTask, GenTextTaskResult
|
||||
|
||||
|
||||
class AITaskEntity(RestoreEntity):
|
||||
"""Entity that supports conversations."""
|
||||
|
||||
_attr_should_poll = False
|
||||
__last_activity: str | None = None
|
||||
|
||||
@property
|
||||
@final
|
||||
def state(self) -> str | None:
|
||||
"""Return the state of the entity."""
|
||||
if self.__last_activity is None:
|
||||
return None
|
||||
return self.__last_activity
|
||||
|
||||
async def async_internal_added_to_hass(self) -> None:
|
||||
"""Call when the entity is added to hass."""
|
||||
await super().async_internal_added_to_hass()
|
||||
state = await self.async_get_last_state()
|
||||
if (
|
||||
state is not None
|
||||
and state.state is not None
|
||||
and state.state not in (STATE_UNAVAILABLE, STATE_UNKNOWN)
|
||||
):
|
||||
self.__last_activity = state.state
|
||||
|
||||
@final
|
||||
@contextlib.asynccontextmanager
|
||||
async def _async_get_ai_task_chat_log(
|
||||
self,
|
||||
task: GenTextTask,
|
||||
) -> AsyncGenerator[ChatLog]:
|
||||
"""Context manager used to manage the ChatLog used during an AI Task."""
|
||||
# pylint: disable-next=contextmanager-generator-missing-cleanup
|
||||
with (
|
||||
async_get_chat_session(self.hass) as session,
|
||||
async_get_chat_log(
|
||||
self.hass,
|
||||
session,
|
||||
None,
|
||||
) as chat_log,
|
||||
):
|
||||
await chat_log.async_provide_llm_data(
|
||||
llm.LLMContext(
|
||||
platform=self.platform.domain,
|
||||
context=None,
|
||||
language=None,
|
||||
assistant=DOMAIN,
|
||||
device_id=None,
|
||||
),
|
||||
user_llm_prompt=DEFAULT_SYSTEM_PROMPT,
|
||||
)
|
||||
|
||||
chat_log.async_add_user_content(UserContent(task.instructions))
|
||||
|
||||
yield chat_log
|
||||
|
||||
@final
|
||||
async def internal_async_generate_text(
|
||||
self,
|
||||
task: GenTextTask,
|
||||
) -> GenTextTaskResult:
|
||||
"""Run a gen text task."""
|
||||
self.__last_activity = dt_util.utcnow().isoformat()
|
||||
self.async_write_ha_state()
|
||||
async with self._async_get_ai_task_chat_log(task) as chat_log:
|
||||
return await self._async_generate_text(task, chat_log)
|
||||
|
||||
async def _async_generate_text(
|
||||
self,
|
||||
task: GenTextTask,
|
||||
chat_log: ChatLog,
|
||||
) -> GenTextTaskResult:
|
||||
"""Handle a gen text task."""
|
||||
raise NotImplementedError
|
||||
82
homeassistant/components/ai_task/http.py
Normal file
82
homeassistant/components/ai_task/http.py
Normal file
@@ -0,0 +1,82 @@
|
||||
"""HTTP endpoint for AI Task integration."""
|
||||
|
||||
from typing import Any
|
||||
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components import websocket_api
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
|
||||
from .const import DATA_PREFERENCES
|
||||
from .task import async_generate_text
|
||||
|
||||
|
||||
@callback
|
||||
def async_setup(hass: HomeAssistant) -> None:
|
||||
"""Set up the HTTP API for the conversation integration."""
|
||||
websocket_api.async_register_command(hass, websocket_generate_text)
|
||||
websocket_api.async_register_command(hass, websocket_get_preferences)
|
||||
websocket_api.async_register_command(hass, websocket_set_preferences)
|
||||
|
||||
|
||||
@websocket_api.websocket_command(
|
||||
{
|
||||
vol.Required("type"): "ai_task/generate_text",
|
||||
vol.Required("task_name"): str,
|
||||
vol.Optional("entity_id"): str,
|
||||
vol.Required("instructions"): str,
|
||||
}
|
||||
)
|
||||
@websocket_api.require_admin
|
||||
@websocket_api.async_response
|
||||
async def websocket_generate_text(
|
||||
hass: HomeAssistant,
|
||||
connection: websocket_api.ActiveConnection,
|
||||
msg: dict[str, Any],
|
||||
) -> None:
|
||||
"""Run a generate text task."""
|
||||
msg.pop("type")
|
||||
msg_id = msg.pop("id")
|
||||
try:
|
||||
result = await async_generate_text(hass=hass, **msg)
|
||||
except ValueError as err:
|
||||
connection.send_error(msg_id, websocket_api.const.ERR_UNKNOWN_ERROR, str(err))
|
||||
return
|
||||
connection.send_result(msg_id, result.as_dict())
|
||||
|
||||
|
||||
@websocket_api.websocket_command(
|
||||
{
|
||||
vol.Required("type"): "ai_task/preferences/get",
|
||||
}
|
||||
)
|
||||
@callback
|
||||
def websocket_get_preferences(
|
||||
hass: HomeAssistant,
|
||||
connection: websocket_api.ActiveConnection,
|
||||
msg: dict[str, Any],
|
||||
) -> None:
|
||||
"""Get AI task preferences."""
|
||||
preferences = hass.data[DATA_PREFERENCES]
|
||||
connection.send_result(msg["id"], preferences.as_dict())
|
||||
|
||||
|
||||
@websocket_api.websocket_command(
|
||||
{
|
||||
vol.Required("type"): "ai_task/preferences/set",
|
||||
vol.Optional("gen_text_entity_id"): vol.Any(str, None),
|
||||
}
|
||||
)
|
||||
@websocket_api.require_admin
|
||||
@callback
|
||||
def websocket_set_preferences(
|
||||
hass: HomeAssistant,
|
||||
connection: websocket_api.ActiveConnection,
|
||||
msg: dict[str, Any],
|
||||
) -> None:
|
||||
"""Set AI task preferences."""
|
||||
preferences = hass.data[DATA_PREFERENCES]
|
||||
msg.pop("type")
|
||||
msg_id = msg.pop("id")
|
||||
preferences.async_set_preferences(**msg)
|
||||
connection.send_result(msg_id, preferences.as_dict())
|
||||
9
homeassistant/components/ai_task/manifest.json
Normal file
9
homeassistant/components/ai_task/manifest.json
Normal file
@@ -0,0 +1,9 @@
|
||||
{
|
||||
"domain": "ai_task",
|
||||
"name": "AI Task",
|
||||
"codeowners": ["@home-assistant/core"],
|
||||
"dependencies": ["conversation"],
|
||||
"documentation": "https://www.home-assistant.io/integrations/ai_task",
|
||||
"integration_type": "system",
|
||||
"quality_scale": "internal"
|
||||
}
|
||||
68
homeassistant/components/ai_task/task.py
Normal file
68
homeassistant/components/ai_task/task.py
Normal file
@@ -0,0 +1,68 @@
|
||||
"""AI tasks to be handled by agents."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from dataclasses import dataclass
|
||||
|
||||
from homeassistant.core import HomeAssistant
|
||||
|
||||
from .const import DATA_COMPONENT, DATA_PREFERENCES
|
||||
|
||||
|
||||
async def async_generate_text(
|
||||
hass: HomeAssistant,
|
||||
*,
|
||||
task_name: str,
|
||||
entity_id: str | None = None,
|
||||
instructions: str,
|
||||
) -> GenTextTaskResult:
|
||||
"""Run a task in the AI Task integration."""
|
||||
if entity_id is None:
|
||||
entity_id = hass.data[DATA_PREFERENCES].gen_text_entity_id
|
||||
|
||||
if entity_id is None:
|
||||
raise ValueError("No entity_id provided and no preferred entity set")
|
||||
|
||||
entity = hass.data[DATA_COMPONENT].get_entity(entity_id)
|
||||
if entity is None:
|
||||
raise ValueError(f"AI Task entity {entity_id} not found")
|
||||
|
||||
return await entity.internal_async_generate_text(
|
||||
GenTextTask(
|
||||
name=task_name,
|
||||
instructions=instructions,
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
@dataclass(slots=True)
|
||||
class GenTextTask:
|
||||
"""Gen text task to be processed."""
|
||||
|
||||
name: str
|
||||
"""Name of the task."""
|
||||
|
||||
instructions: str
|
||||
"""Instructions on what needs to be done."""
|
||||
|
||||
def __str__(self) -> str:
|
||||
"""Return task as a string."""
|
||||
return f"<GenTextTask {self.name}: {id(self)}>"
|
||||
|
||||
|
||||
@dataclass(slots=True)
|
||||
class GenTextTaskResult:
|
||||
"""Result of gen text task."""
|
||||
|
||||
conversation_id: str
|
||||
"""Unique identifier for the conversation."""
|
||||
|
||||
result: str
|
||||
"""Result of the task."""
|
||||
|
||||
def as_dict(self) -> dict[str, str]:
|
||||
"""Return result as a dict."""
|
||||
return {
|
||||
"conversation_id": self.conversation_id,
|
||||
"result": self.result,
|
||||
}
|
||||
@@ -8,5 +8,5 @@
|
||||
"iot_class": "cloud_polling",
|
||||
"loggers": ["aioamazondevices"],
|
||||
"quality_scale": "bronze",
|
||||
"requirements": ["aioamazondevices==3.0.6"]
|
||||
"requirements": ["aioamazondevices==3.1.4"]
|
||||
}
|
||||
|
||||
@@ -7,6 +7,7 @@ from dataclasses import dataclass
|
||||
from typing import Any, Final
|
||||
|
||||
from aioamazondevices.api import AmazonDevice, AmazonEchoApi
|
||||
from aioamazondevices.const import SPEAKER_GROUP_FAMILY
|
||||
|
||||
from homeassistant.components.notify import NotifyEntity, NotifyEntityDescription
|
||||
from homeassistant.core import HomeAssistant
|
||||
@@ -22,6 +23,7 @@ PARALLEL_UPDATES = 1
|
||||
class AmazonNotifyEntityDescription(NotifyEntityDescription):
|
||||
"""Alexa Devices notify entity description."""
|
||||
|
||||
is_supported: Callable[[AmazonDevice], bool] = lambda _device: True
|
||||
method: Callable[[AmazonEchoApi, AmazonDevice, str], Awaitable[None]]
|
||||
subkey: str
|
||||
|
||||
@@ -31,6 +33,7 @@ NOTIFY: Final = (
|
||||
key="speak",
|
||||
translation_key="speak",
|
||||
subkey="AUDIO_PLAYER",
|
||||
is_supported=lambda _device: _device.device_family != SPEAKER_GROUP_FAMILY,
|
||||
method=lambda api, device, message: api.call_alexa_speak(device, message),
|
||||
),
|
||||
AmazonNotifyEntityDescription(
|
||||
@@ -58,6 +61,7 @@ async def async_setup_entry(
|
||||
for sensor_desc in NOTIFY
|
||||
for serial_num in coordinator.data
|
||||
if sensor_desc.subkey in coordinator.data[serial_num].capabilities
|
||||
and sensor_desc.is_supported(coordinator.data[serial_num])
|
||||
)
|
||||
|
||||
|
||||
|
||||
@@ -5,7 +5,7 @@ from __future__ import annotations
|
||||
from homeassistant.auth.models import User
|
||||
from homeassistant.auth.permissions.const import POLICY_CONTROL
|
||||
from homeassistant.const import ATTR_ENTITY_ID, ENTITY_MATCH_ALL, ENTITY_MATCH_NONE
|
||||
from homeassistant.core import HomeAssistant, ServiceCall
|
||||
from homeassistant.core import HomeAssistant, ServiceCall, callback
|
||||
from homeassistant.exceptions import Unauthorized, UnknownUser
|
||||
from homeassistant.helpers.dispatcher import async_dispatcher_send
|
||||
from homeassistant.helpers.service import async_extract_entity_ids
|
||||
@@ -15,6 +15,7 @@ from .const import CAMERAS, DATA_AMCREST, DOMAIN
|
||||
from .helpers import service_signal
|
||||
|
||||
|
||||
@callback
|
||||
def async_setup_services(hass: HomeAssistant) -> None:
|
||||
"""Set up the Amcrest IP Camera services."""
|
||||
|
||||
|
||||
@@ -366,15 +366,35 @@ class AnthropicConversationEntity(
|
||||
options = self.entry.options
|
||||
|
||||
try:
|
||||
await chat_log.async_update_llm_data(
|
||||
DOMAIN,
|
||||
user_input,
|
||||
await chat_log.async_provide_llm_data(
|
||||
user_input.as_llm_context(DOMAIN),
|
||||
options.get(CONF_LLM_HASS_API),
|
||||
options.get(CONF_PROMPT),
|
||||
user_input.extra_system_prompt,
|
||||
)
|
||||
except conversation.ConverseError as err:
|
||||
return err.as_conversation_result()
|
||||
|
||||
await self._async_handle_chat_log(chat_log)
|
||||
|
||||
response_content = chat_log.content[-1]
|
||||
if not isinstance(response_content, conversation.AssistantContent):
|
||||
raise TypeError("Last message must be an assistant message")
|
||||
intent_response = intent.IntentResponse(language=user_input.language)
|
||||
intent_response.async_set_speech(response_content.content or "")
|
||||
return conversation.ConversationResult(
|
||||
response=intent_response,
|
||||
conversation_id=chat_log.conversation_id,
|
||||
continue_conversation=chat_log.continue_conversation,
|
||||
)
|
||||
|
||||
async def _async_handle_chat_log(
|
||||
self,
|
||||
chat_log: conversation.ChatLog,
|
||||
) -> None:
|
||||
"""Generate an answer for the chat log."""
|
||||
options = self.entry.options
|
||||
|
||||
tools: list[ToolParam] | None = None
|
||||
if chat_log.llm_api:
|
||||
tools = [
|
||||
@@ -424,7 +444,7 @@ class AnthropicConversationEntity(
|
||||
[
|
||||
content
|
||||
async for content in chat_log.async_add_delta_content_stream(
|
||||
user_input.agent_id,
|
||||
self.entity_id,
|
||||
_transform_stream(chat_log, stream, messages),
|
||||
)
|
||||
if not isinstance(content, conversation.AssistantContent)
|
||||
@@ -435,17 +455,6 @@ class AnthropicConversationEntity(
|
||||
if not chat_log.unresponded_tool_results:
|
||||
break
|
||||
|
||||
response_content = chat_log.content[-1]
|
||||
if not isinstance(response_content, conversation.AssistantContent):
|
||||
raise TypeError("Last message must be an assistant message")
|
||||
intent_response = intent.IntentResponse(language=user_input.language)
|
||||
intent_response.async_set_speech(response_content.content or "")
|
||||
return conversation.ConversationResult(
|
||||
response=intent_response,
|
||||
conversation_id=chat_log.conversation_id,
|
||||
continue_conversation=chat_log.continue_conversation,
|
||||
)
|
||||
|
||||
async def _async_entry_update_listener(
|
||||
self, hass: HomeAssistant, entry: ConfigEntry
|
||||
) -> None:
|
||||
|
||||
@@ -89,7 +89,7 @@ class ArubaDeviceScanner(DeviceScanner):
|
||||
def get_aruba_data(self) -> dict[str, dict[str, str]] | None:
|
||||
"""Retrieve data from Aruba Access Point and return parsed result."""
|
||||
|
||||
connect = f"ssh {self.username}@{self.host} -o HostKeyAlgorithms=ssh-rsa"
|
||||
connect = f"ssh {self.username}@{self.host}"
|
||||
ssh: pexpect.spawn[str] = pexpect.spawn(connect, encoding="utf-8")
|
||||
query = ssh.expect(
|
||||
[
|
||||
|
||||
@@ -27,7 +27,6 @@ from homeassistant.helpers.entity import Entity, EntityDescription
|
||||
from homeassistant.helpers.entity_component import EntityComponent
|
||||
from homeassistant.helpers.temperature import display_temp as show_temp
|
||||
from homeassistant.helpers.typing import ConfigType
|
||||
from homeassistant.loader import async_suggest_report_issue
|
||||
from homeassistant.util.hass_dict import HassKey
|
||||
from homeassistant.util.unit_conversion import TemperatureConverter
|
||||
|
||||
@@ -535,26 +534,6 @@ class ClimateEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
|
||||
return
|
||||
modes_str: str = ", ".join(modes) if modes else ""
|
||||
translation_key = f"not_valid_{mode_type}_mode"
|
||||
if mode_type == "hvac":
|
||||
report_issue = async_suggest_report_issue(
|
||||
self.hass,
|
||||
integration_domain=self.platform.platform_name,
|
||||
module=type(self).__module__,
|
||||
)
|
||||
_LOGGER.warning(
|
||||
(
|
||||
"%s::%s sets the hvac_mode %s which is not "
|
||||
"valid for this entity with modes: %s. "
|
||||
"This will stop working in 2025.4 and raise an error instead. "
|
||||
"Please %s"
|
||||
),
|
||||
self.platform.platform_name,
|
||||
self.__class__.__name__,
|
||||
mode,
|
||||
modes_str,
|
||||
report_issue,
|
||||
)
|
||||
return
|
||||
raise ServiceValidationError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key=translation_key,
|
||||
|
||||
@@ -258,6 +258,9 @@
|
||||
"not_valid_preset_mode": {
|
||||
"message": "Preset mode {mode} is not valid. Valid preset modes are: {modes}."
|
||||
},
|
||||
"not_valid_hvac_mode": {
|
||||
"message": "HVAC mode {mode} is not valid. Valid HVAC modes are: {modes}."
|
||||
},
|
||||
"not_valid_swing_mode": {
|
||||
"message": "Swing mode {mode} is not valid. Valid swing modes are: {modes}."
|
||||
},
|
||||
|
||||
@@ -14,12 +14,11 @@ import voluptuous as vol
|
||||
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.exceptions import HomeAssistantError, TemplateError
|
||||
from homeassistant.helpers import chat_session, intent, llm, template
|
||||
from homeassistant.helpers import chat_session, frame, intent, llm, template
|
||||
from homeassistant.util.hass_dict import HassKey
|
||||
from homeassistant.util.json import JsonObjectType
|
||||
|
||||
from . import trace
|
||||
from .const import DOMAIN
|
||||
from .models import ConversationInput, ConversationResult
|
||||
|
||||
DATA_CHAT_LOGS: HassKey[dict[str, ChatLog]] = HassKey("conversation_chat_logs")
|
||||
@@ -359,7 +358,7 @@ class ChatLog:
|
||||
self,
|
||||
llm_context: llm.LLMContext,
|
||||
prompt: str,
|
||||
language: str,
|
||||
language: str | None,
|
||||
user_name: str | None = None,
|
||||
) -> str:
|
||||
try:
|
||||
@@ -373,7 +372,7 @@ class ChatLog:
|
||||
)
|
||||
except TemplateError as err:
|
||||
LOGGER.error("Error rendering prompt: %s", err)
|
||||
intent_response = intent.IntentResponse(language=language)
|
||||
intent_response = intent.IntentResponse(language=language or "")
|
||||
intent_response.async_set_error(
|
||||
intent.IntentResponseErrorCode.UNKNOWN,
|
||||
"Sorry, I had a problem with my template",
|
||||
@@ -392,15 +391,25 @@ class ChatLog:
|
||||
user_llm_prompt: str | None = None,
|
||||
) -> None:
|
||||
"""Set the LLM system prompt."""
|
||||
llm_context = llm.LLMContext(
|
||||
platform=conversing_domain,
|
||||
context=user_input.context,
|
||||
user_prompt=user_input.text,
|
||||
language=user_input.language,
|
||||
assistant=DOMAIN,
|
||||
device_id=user_input.device_id,
|
||||
frame.report_usage(
|
||||
"ChatLog.async_update_llm_data",
|
||||
breaks_in_ha_version="2026.1",
|
||||
)
|
||||
return await self.async_provide_llm_data(
|
||||
llm_context=user_input.as_llm_context(conversing_domain),
|
||||
user_llm_hass_api=user_llm_hass_api,
|
||||
user_llm_prompt=user_llm_prompt,
|
||||
user_extra_system_prompt=user_input.extra_system_prompt,
|
||||
)
|
||||
|
||||
async def async_provide_llm_data(
|
||||
self,
|
||||
llm_context: llm.LLMContext,
|
||||
user_llm_hass_api: str | list[str] | None = None,
|
||||
user_llm_prompt: str | None = None,
|
||||
user_extra_system_prompt: str | None = None,
|
||||
) -> None:
|
||||
"""Set the LLM system prompt."""
|
||||
llm_api: llm.APIInstance | None = None
|
||||
|
||||
if user_llm_hass_api:
|
||||
@@ -414,10 +423,12 @@ class ChatLog:
|
||||
LOGGER.error(
|
||||
"Error getting LLM API %s for %s: %s",
|
||||
user_llm_hass_api,
|
||||
conversing_domain,
|
||||
llm_context.platform,
|
||||
err,
|
||||
)
|
||||
intent_response = intent.IntentResponse(language=user_input.language)
|
||||
intent_response = intent.IntentResponse(
|
||||
language=llm_context.language or ""
|
||||
)
|
||||
intent_response.async_set_error(
|
||||
intent.IntentResponseErrorCode.UNKNOWN,
|
||||
"Error preparing LLM API",
|
||||
@@ -431,10 +442,10 @@ class ChatLog:
|
||||
user_name: str | None = None
|
||||
|
||||
if (
|
||||
user_input.context
|
||||
and user_input.context.user_id
|
||||
llm_context.context
|
||||
and llm_context.context.user_id
|
||||
and (
|
||||
user := await self.hass.auth.async_get_user(user_input.context.user_id)
|
||||
user := await self.hass.auth.async_get_user(llm_context.context.user_id)
|
||||
)
|
||||
):
|
||||
user_name = user.name
|
||||
@@ -444,7 +455,7 @@ class ChatLog:
|
||||
await self._async_expand_prompt_template(
|
||||
llm_context,
|
||||
(user_llm_prompt or llm.DEFAULT_INSTRUCTIONS_PROMPT),
|
||||
user_input.language,
|
||||
llm_context.language,
|
||||
user_name,
|
||||
)
|
||||
)
|
||||
@@ -456,14 +467,14 @@ class ChatLog:
|
||||
await self._async_expand_prompt_template(
|
||||
llm_context,
|
||||
llm.BASE_PROMPT,
|
||||
user_input.language,
|
||||
llm_context.language,
|
||||
user_name,
|
||||
)
|
||||
)
|
||||
|
||||
if extra_system_prompt := (
|
||||
# Take new system prompt if one was given
|
||||
user_input.extra_system_prompt or self.extra_system_prompt
|
||||
user_extra_system_prompt or self.extra_system_prompt
|
||||
):
|
||||
prompt_parts.append(extra_system_prompt)
|
||||
|
||||
|
||||
@@ -7,7 +7,9 @@ from dataclasses import dataclass
|
||||
from typing import Any, Literal
|
||||
|
||||
from homeassistant.core import Context
|
||||
from homeassistant.helpers import intent
|
||||
from homeassistant.helpers import intent, llm
|
||||
|
||||
from .const import DOMAIN
|
||||
|
||||
|
||||
@dataclass(frozen=True)
|
||||
@@ -56,6 +58,16 @@ class ConversationInput:
|
||||
"extra_system_prompt": self.extra_system_prompt,
|
||||
}
|
||||
|
||||
def as_llm_context(self, conversing_domain: str) -> llm.LLMContext:
|
||||
"""Return input as an LLM context."""
|
||||
return llm.LLMContext(
|
||||
platform=conversing_domain,
|
||||
context=self.context,
|
||||
language=self.language,
|
||||
assistant=DOMAIN,
|
||||
device_id=self.device_id,
|
||||
)
|
||||
|
||||
|
||||
@dataclass(slots=True)
|
||||
class ConversationResult:
|
||||
|
||||
@@ -164,8 +164,6 @@ class DeconzThermostat(DeconzDevice[Thermostat], ClimateEntity):
|
||||
|
||||
async def async_set_hvac_mode(self, hvac_mode: HVACMode) -> None:
|
||||
"""Set new target hvac mode."""
|
||||
if hvac_mode not in self._attr_hvac_modes:
|
||||
raise ValueError(f"Unsupported HVAC mode {hvac_mode}")
|
||||
|
||||
if len(self._attr_hvac_modes) == 2: # Only allow turn on and off thermostat
|
||||
await self.hub.api.sensors.thermostat.set_config(
|
||||
|
||||
@@ -5,5 +5,5 @@
|
||||
"config_flow": true,
|
||||
"documentation": "https://www.home-assistant.io/integrations/dnsip",
|
||||
"iot_class": "cloud_polling",
|
||||
"requirements": ["aiodns==3.4.0"]
|
||||
"requirements": ["aiodns==3.5.0"]
|
||||
}
|
||||
|
||||
@@ -10,7 +10,7 @@ import threading
|
||||
import requests
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.core import HomeAssistant, ServiceCall
|
||||
from homeassistant.core import HomeAssistant, ServiceCall, callback
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.helpers.service import async_register_admin_service
|
||||
from homeassistant.util import raise_if_invalid_filename, raise_if_invalid_path
|
||||
@@ -141,6 +141,7 @@ def download_file(service: ServiceCall) -> None:
|
||||
threading.Thread(target=do_download).start()
|
||||
|
||||
|
||||
@callback
|
||||
def async_setup_services(hass: HomeAssistant) -> None:
|
||||
"""Register the services for the downloader component."""
|
||||
async_register_admin_service(
|
||||
|
||||
@@ -2,7 +2,13 @@
|
||||
"config": {
|
||||
"step": {
|
||||
"pick_implementation": {
|
||||
"title": "[%key:common::config_flow::title::oauth2_pick_implementation%]"
|
||||
"title": "[%key:common::config_flow::title::oauth2_pick_implementation%]",
|
||||
"data": {
|
||||
"implementation": "[%key:common::config_flow::data::implementation%]"
|
||||
},
|
||||
"data_description": {
|
||||
"implementation": "[%key:common::config_flow::description::implementation%]"
|
||||
}
|
||||
},
|
||||
"reauth_confirm": {
|
||||
"title": "[%key:common::config_flow::title::reauth%]",
|
||||
|
||||
@@ -63,6 +63,7 @@ def _set_time_service(service: ServiceCall) -> None:
|
||||
_async_get_elk_panel(service).set_time(dt_util.now())
|
||||
|
||||
|
||||
@callback
|
||||
def async_setup_services(hass: HomeAssistant) -> None:
|
||||
"""Create ElkM1 services."""
|
||||
|
||||
|
||||
@@ -6,7 +6,6 @@ from typing import TYPE_CHECKING
|
||||
|
||||
from eq3btsmart import Thermostat
|
||||
from eq3btsmart.exceptions import Eq3Exception
|
||||
from eq3btsmart.thermostat_config import ThermostatConfig
|
||||
|
||||
from homeassistant.components import bluetooth
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
@@ -53,12 +52,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: Eq3ConfigEntry) -> bool:
|
||||
f"[{eq3_config.mac_address}] Device could not be found"
|
||||
)
|
||||
|
||||
thermostat = Thermostat(
|
||||
thermostat_config=ThermostatConfig(
|
||||
mac_address=mac_address,
|
||||
),
|
||||
ble_device=device,
|
||||
)
|
||||
thermostat = Thermostat(mac_address=device) # type: ignore[arg-type]
|
||||
|
||||
entry.runtime_data = Eq3ConfigEntryData(
|
||||
eq3_config=eq3_config, thermostat=thermostat
|
||||
|
||||
@@ -2,7 +2,6 @@
|
||||
|
||||
from collections.abc import Callable
|
||||
from dataclasses import dataclass
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from eq3btsmart.models import Status
|
||||
|
||||
@@ -80,7 +79,4 @@ class Eq3BinarySensorEntity(Eq3Entity, BinarySensorEntity):
|
||||
def is_on(self) -> bool:
|
||||
"""Return the state of the binary sensor."""
|
||||
|
||||
if TYPE_CHECKING:
|
||||
assert self._thermostat.status is not None
|
||||
|
||||
return self.entity_description.value_func(self._thermostat.status)
|
||||
|
||||
@@ -1,9 +1,16 @@
|
||||
"""Platform for eQ-3 climate entities."""
|
||||
|
||||
from datetime import timedelta
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
from eq3btsmart.const import EQ3BT_MAX_TEMP, EQ3BT_OFF_TEMP, Eq3Preset, OperationMode
|
||||
from eq3btsmart.const import (
|
||||
EQ3_DEFAULT_AWAY_TEMP,
|
||||
EQ3_MAX_TEMP,
|
||||
EQ3_OFF_TEMP,
|
||||
Eq3OperationMode,
|
||||
Eq3Preset,
|
||||
)
|
||||
from eq3btsmart.exceptions import Eq3Exception
|
||||
|
||||
from homeassistant.components.climate import (
|
||||
@@ -20,9 +27,11 @@ from homeassistant.exceptions import ServiceValidationError
|
||||
from homeassistant.helpers import device_registry as dr
|
||||
from homeassistant.helpers.device_registry import CONNECTION_BLUETOOTH
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
import homeassistant.util.dt as dt_util
|
||||
|
||||
from . import Eq3ConfigEntry
|
||||
from .const import (
|
||||
DEFAULT_AWAY_HOURS,
|
||||
EQ_TO_HA_HVAC,
|
||||
HA_TO_EQ_HVAC,
|
||||
CurrentTemperatureSelector,
|
||||
@@ -57,8 +66,8 @@ class Eq3Climate(Eq3Entity, ClimateEntity):
|
||||
| ClimateEntityFeature.TURN_ON
|
||||
)
|
||||
_attr_temperature_unit = UnitOfTemperature.CELSIUS
|
||||
_attr_min_temp = EQ3BT_OFF_TEMP
|
||||
_attr_max_temp = EQ3BT_MAX_TEMP
|
||||
_attr_min_temp = EQ3_OFF_TEMP
|
||||
_attr_max_temp = EQ3_MAX_TEMP
|
||||
_attr_precision = PRECISION_HALVES
|
||||
_attr_hvac_modes = list(HA_TO_EQ_HVAC.keys())
|
||||
_attr_preset_modes = list(Preset)
|
||||
@@ -70,38 +79,21 @@ class Eq3Climate(Eq3Entity, ClimateEntity):
|
||||
_target_temperature: float | None = None
|
||||
|
||||
@callback
|
||||
def _async_on_updated(self) -> None:
|
||||
"""Handle updated data from the thermostat."""
|
||||
|
||||
if self._thermostat.status is not None:
|
||||
self._async_on_status_updated()
|
||||
|
||||
if self._thermostat.device_data is not None:
|
||||
self._async_on_device_updated()
|
||||
|
||||
super()._async_on_updated()
|
||||
|
||||
@callback
|
||||
def _async_on_status_updated(self) -> None:
|
||||
def _async_on_status_updated(self, data: Any) -> None:
|
||||
"""Handle updated status from the thermostat."""
|
||||
|
||||
if self._thermostat.status is None:
|
||||
return
|
||||
|
||||
self._target_temperature = self._thermostat.status.target_temperature.value
|
||||
self._target_temperature = self._thermostat.status.target_temperature
|
||||
self._attr_hvac_mode = EQ_TO_HA_HVAC[self._thermostat.status.operation_mode]
|
||||
self._attr_current_temperature = self._get_current_temperature()
|
||||
self._attr_target_temperature = self._get_target_temperature()
|
||||
self._attr_preset_mode = self._get_current_preset_mode()
|
||||
self._attr_hvac_action = self._get_current_hvac_action()
|
||||
super()._async_on_status_updated(data)
|
||||
|
||||
@callback
|
||||
def _async_on_device_updated(self) -> None:
|
||||
def _async_on_device_updated(self, data: Any) -> None:
|
||||
"""Handle updated device data from the thermostat."""
|
||||
|
||||
if self._thermostat.device_data is None:
|
||||
return
|
||||
|
||||
device_registry = dr.async_get(self.hass)
|
||||
if device := device_registry.async_get_device(
|
||||
connections={(CONNECTION_BLUETOOTH, self._eq3_config.mac_address)},
|
||||
@@ -109,8 +101,9 @@ class Eq3Climate(Eq3Entity, ClimateEntity):
|
||||
device_registry.async_update_device(
|
||||
device.id,
|
||||
sw_version=str(self._thermostat.device_data.firmware_version),
|
||||
serial_number=self._thermostat.device_data.device_serial.value,
|
||||
serial_number=self._thermostat.device_data.device_serial,
|
||||
)
|
||||
super()._async_on_device_updated(data)
|
||||
|
||||
def _get_current_temperature(self) -> float | None:
|
||||
"""Return the current temperature."""
|
||||
@@ -119,17 +112,11 @@ class Eq3Climate(Eq3Entity, ClimateEntity):
|
||||
case CurrentTemperatureSelector.NOTHING:
|
||||
return None
|
||||
case CurrentTemperatureSelector.VALVE:
|
||||
if self._thermostat.status is None:
|
||||
return None
|
||||
|
||||
return float(self._thermostat.status.valve_temperature)
|
||||
case CurrentTemperatureSelector.UI:
|
||||
return self._target_temperature
|
||||
case CurrentTemperatureSelector.DEVICE:
|
||||
if self._thermostat.status is None:
|
||||
return None
|
||||
|
||||
return float(self._thermostat.status.target_temperature.value)
|
||||
return float(self._thermostat.status.target_temperature)
|
||||
case CurrentTemperatureSelector.ENTITY:
|
||||
state = self.hass.states.get(self._eq3_config.external_temp_sensor)
|
||||
if state is not None:
|
||||
@@ -147,16 +134,12 @@ class Eq3Climate(Eq3Entity, ClimateEntity):
|
||||
case TargetTemperatureSelector.TARGET:
|
||||
return self._target_temperature
|
||||
case TargetTemperatureSelector.LAST_REPORTED:
|
||||
if self._thermostat.status is None:
|
||||
return None
|
||||
|
||||
return float(self._thermostat.status.target_temperature.value)
|
||||
return float(self._thermostat.status.target_temperature)
|
||||
|
||||
def _get_current_preset_mode(self) -> str:
|
||||
"""Return the current preset mode."""
|
||||
|
||||
if (status := self._thermostat.status) is None:
|
||||
return PRESET_NONE
|
||||
status = self._thermostat.status
|
||||
if status.is_window_open:
|
||||
return Preset.WINDOW_OPEN
|
||||
if status.is_boost:
|
||||
@@ -165,7 +148,7 @@ class Eq3Climate(Eq3Entity, ClimateEntity):
|
||||
return Preset.LOW_BATTERY
|
||||
if status.is_away:
|
||||
return Preset.AWAY
|
||||
if status.operation_mode is OperationMode.ON:
|
||||
if status.operation_mode is Eq3OperationMode.ON:
|
||||
return Preset.OPEN
|
||||
if status.presets is None:
|
||||
return PRESET_NONE
|
||||
@@ -179,10 +162,7 @@ class Eq3Climate(Eq3Entity, ClimateEntity):
|
||||
def _get_current_hvac_action(self) -> HVACAction:
|
||||
"""Return the current hvac action."""
|
||||
|
||||
if (
|
||||
self._thermostat.status is None
|
||||
or self._thermostat.status.operation_mode is OperationMode.OFF
|
||||
):
|
||||
if self._thermostat.status.operation_mode is Eq3OperationMode.OFF:
|
||||
return HVACAction.OFF
|
||||
if self._thermostat.status.valve == 0:
|
||||
return HVACAction.IDLE
|
||||
@@ -227,7 +207,7 @@ class Eq3Climate(Eq3Entity, ClimateEntity):
|
||||
"""Set new target hvac mode."""
|
||||
|
||||
if hvac_mode is HVACMode.OFF:
|
||||
await self.async_set_temperature(temperature=EQ3BT_OFF_TEMP)
|
||||
await self.async_set_temperature(temperature=EQ3_OFF_TEMP)
|
||||
|
||||
try:
|
||||
await self._thermostat.async_set_mode(HA_TO_EQ_HVAC[hvac_mode])
|
||||
@@ -241,10 +221,11 @@ class Eq3Climate(Eq3Entity, ClimateEntity):
|
||||
case Preset.BOOST:
|
||||
await self._thermostat.async_set_boost(True)
|
||||
case Preset.AWAY:
|
||||
await self._thermostat.async_set_away(True)
|
||||
away_until = dt_util.now() + timedelta(hours=DEFAULT_AWAY_HOURS)
|
||||
await self._thermostat.async_set_away(away_until, EQ3_DEFAULT_AWAY_TEMP)
|
||||
case Preset.ECO:
|
||||
await self._thermostat.async_set_preset(Eq3Preset.ECO)
|
||||
case Preset.COMFORT:
|
||||
await self._thermostat.async_set_preset(Eq3Preset.COMFORT)
|
||||
case Preset.OPEN:
|
||||
await self._thermostat.async_set_mode(OperationMode.ON)
|
||||
await self._thermostat.async_set_mode(Eq3OperationMode.ON)
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
|
||||
from enum import Enum
|
||||
|
||||
from eq3btsmart.const import OperationMode
|
||||
from eq3btsmart.const import Eq3OperationMode
|
||||
|
||||
from homeassistant.components.climate import (
|
||||
PRESET_AWAY,
|
||||
@@ -34,17 +34,17 @@ ENTITY_KEY_AWAY_UNTIL = "away_until"
|
||||
|
||||
GET_DEVICE_TIMEOUT = 5 # seconds
|
||||
|
||||
EQ_TO_HA_HVAC: dict[OperationMode, HVACMode] = {
|
||||
OperationMode.OFF: HVACMode.OFF,
|
||||
OperationMode.ON: HVACMode.HEAT,
|
||||
OperationMode.AUTO: HVACMode.AUTO,
|
||||
OperationMode.MANUAL: HVACMode.HEAT,
|
||||
EQ_TO_HA_HVAC: dict[Eq3OperationMode, HVACMode] = {
|
||||
Eq3OperationMode.OFF: HVACMode.OFF,
|
||||
Eq3OperationMode.ON: HVACMode.HEAT,
|
||||
Eq3OperationMode.AUTO: HVACMode.AUTO,
|
||||
Eq3OperationMode.MANUAL: HVACMode.HEAT,
|
||||
}
|
||||
|
||||
HA_TO_EQ_HVAC = {
|
||||
HVACMode.OFF: OperationMode.OFF,
|
||||
HVACMode.AUTO: OperationMode.AUTO,
|
||||
HVACMode.HEAT: OperationMode.MANUAL,
|
||||
HVACMode.OFF: Eq3OperationMode.OFF,
|
||||
HVACMode.AUTO: Eq3OperationMode.AUTO,
|
||||
HVACMode.HEAT: Eq3OperationMode.MANUAL,
|
||||
}
|
||||
|
||||
|
||||
@@ -81,6 +81,7 @@ class TargetTemperatureSelector(str, Enum):
|
||||
DEFAULT_CURRENT_TEMP_SELECTOR = CurrentTemperatureSelector.DEVICE
|
||||
DEFAULT_TARGET_TEMP_SELECTOR = TargetTemperatureSelector.TARGET
|
||||
DEFAULT_SCAN_INTERVAL = 10 # seconds
|
||||
DEFAULT_AWAY_HOURS = 30 * 24
|
||||
|
||||
SIGNAL_THERMOSTAT_DISCONNECTED = f"{DOMAIN}.thermostat_disconnected"
|
||||
SIGNAL_THERMOSTAT_CONNECTED = f"{DOMAIN}.thermostat_connected"
|
||||
|
||||
@@ -1,5 +1,10 @@
|
||||
"""Base class for all eQ-3 entities."""
|
||||
|
||||
from typing import Any
|
||||
|
||||
from eq3btsmart import Eq3Exception
|
||||
from eq3btsmart.const import Eq3Event
|
||||
|
||||
from homeassistant.core import callback
|
||||
from homeassistant.helpers.device_registry import (
|
||||
CONNECTION_BLUETOOTH,
|
||||
@@ -45,7 +50,15 @@ class Eq3Entity(Entity):
|
||||
async def async_added_to_hass(self) -> None:
|
||||
"""Run when entity about to be added to hass."""
|
||||
|
||||
self._thermostat.register_update_callback(self._async_on_updated)
|
||||
self._thermostat.register_callback(
|
||||
Eq3Event.DEVICE_DATA_RECEIVED, self._async_on_device_updated
|
||||
)
|
||||
self._thermostat.register_callback(
|
||||
Eq3Event.STATUS_RECEIVED, self._async_on_status_updated
|
||||
)
|
||||
self._thermostat.register_callback(
|
||||
Eq3Event.SCHEDULE_RECEIVED, self._async_on_status_updated
|
||||
)
|
||||
|
||||
self.async_on_remove(
|
||||
async_dispatcher_connect(
|
||||
@@ -65,10 +78,25 @@ class Eq3Entity(Entity):
|
||||
async def async_will_remove_from_hass(self) -> None:
|
||||
"""Run when entity will be removed from hass."""
|
||||
|
||||
self._thermostat.unregister_update_callback(self._async_on_updated)
|
||||
self._thermostat.unregister_callback(
|
||||
Eq3Event.DEVICE_DATA_RECEIVED, self._async_on_device_updated
|
||||
)
|
||||
self._thermostat.unregister_callback(
|
||||
Eq3Event.STATUS_RECEIVED, self._async_on_status_updated
|
||||
)
|
||||
self._thermostat.unregister_callback(
|
||||
Eq3Event.SCHEDULE_RECEIVED, self._async_on_status_updated
|
||||
)
|
||||
|
||||
def _async_on_updated(self) -> None:
|
||||
"""Handle updated data from the thermostat."""
|
||||
@callback
|
||||
def _async_on_status_updated(self, data: Any) -> None:
|
||||
"""Handle updated status from the thermostat."""
|
||||
|
||||
self.async_write_ha_state()
|
||||
|
||||
@callback
|
||||
def _async_on_device_updated(self, data: Any) -> None:
|
||||
"""Handle updated device data from the thermostat."""
|
||||
|
||||
self.async_write_ha_state()
|
||||
|
||||
@@ -90,4 +118,9 @@ class Eq3Entity(Entity):
|
||||
def available(self) -> bool:
|
||||
"""Whether the entity is available."""
|
||||
|
||||
return self._thermostat.status is not None and self._attr_available
|
||||
try:
|
||||
_ = self._thermostat.status
|
||||
except Eq3Exception:
|
||||
return False
|
||||
|
||||
return self._attr_available
|
||||
|
||||
@@ -22,5 +22,5 @@
|
||||
"integration_type": "device",
|
||||
"iot_class": "local_polling",
|
||||
"loggers": ["eq3btsmart"],
|
||||
"requirements": ["eq3btsmart==1.4.1", "bleak-esphome==2.16.0"]
|
||||
"requirements": ["eq3btsmart==2.1.0", "bleak-esphome==2.16.0"]
|
||||
}
|
||||
|
||||
@@ -1,17 +1,12 @@
|
||||
"""Platform for eq3 number entities."""
|
||||
|
||||
from collections.abc import Awaitable, Callable
|
||||
from collections.abc import Callable, Coroutine
|
||||
from dataclasses import dataclass
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from eq3btsmart import Thermostat
|
||||
from eq3btsmart.const import (
|
||||
EQ3BT_MAX_OFFSET,
|
||||
EQ3BT_MAX_TEMP,
|
||||
EQ3BT_MIN_OFFSET,
|
||||
EQ3BT_MIN_TEMP,
|
||||
)
|
||||
from eq3btsmart.models import Presets
|
||||
from eq3btsmart.const import EQ3_MAX_OFFSET, EQ3_MAX_TEMP, EQ3_MIN_OFFSET, EQ3_MIN_TEMP
|
||||
from eq3btsmart.models import Presets, Status
|
||||
|
||||
from homeassistant.components.number import (
|
||||
NumberDeviceClass,
|
||||
@@ -42,7 +37,7 @@ class Eq3NumberEntityDescription(NumberEntityDescription):
|
||||
value_func: Callable[[Presets], float]
|
||||
value_set_func: Callable[
|
||||
[Thermostat],
|
||||
Callable[[float], Awaitable[None]],
|
||||
Callable[[float], Coroutine[None, None, Status]],
|
||||
]
|
||||
mode: NumberMode = NumberMode.BOX
|
||||
entity_category: EntityCategory | None = EntityCategory.CONFIG
|
||||
@@ -51,44 +46,44 @@ class Eq3NumberEntityDescription(NumberEntityDescription):
|
||||
NUMBER_ENTITY_DESCRIPTIONS = [
|
||||
Eq3NumberEntityDescription(
|
||||
key=ENTITY_KEY_COMFORT,
|
||||
value_func=lambda presets: presets.comfort_temperature.value,
|
||||
value_func=lambda presets: presets.comfort_temperature,
|
||||
value_set_func=lambda thermostat: thermostat.async_configure_comfort_temperature,
|
||||
translation_key=ENTITY_KEY_COMFORT,
|
||||
native_min_value=EQ3BT_MIN_TEMP,
|
||||
native_max_value=EQ3BT_MAX_TEMP,
|
||||
native_min_value=EQ3_MIN_TEMP,
|
||||
native_max_value=EQ3_MAX_TEMP,
|
||||
native_step=EQ3BT_STEP,
|
||||
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
|
||||
device_class=NumberDeviceClass.TEMPERATURE,
|
||||
),
|
||||
Eq3NumberEntityDescription(
|
||||
key=ENTITY_KEY_ECO,
|
||||
value_func=lambda presets: presets.eco_temperature.value,
|
||||
value_func=lambda presets: presets.eco_temperature,
|
||||
value_set_func=lambda thermostat: thermostat.async_configure_eco_temperature,
|
||||
translation_key=ENTITY_KEY_ECO,
|
||||
native_min_value=EQ3BT_MIN_TEMP,
|
||||
native_max_value=EQ3BT_MAX_TEMP,
|
||||
native_min_value=EQ3_MIN_TEMP,
|
||||
native_max_value=EQ3_MAX_TEMP,
|
||||
native_step=EQ3BT_STEP,
|
||||
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
|
||||
device_class=NumberDeviceClass.TEMPERATURE,
|
||||
),
|
||||
Eq3NumberEntityDescription(
|
||||
key=ENTITY_KEY_WINDOW_OPEN_TEMPERATURE,
|
||||
value_func=lambda presets: presets.window_open_temperature.value,
|
||||
value_func=lambda presets: presets.window_open_temperature,
|
||||
value_set_func=lambda thermostat: thermostat.async_configure_window_open_temperature,
|
||||
translation_key=ENTITY_KEY_WINDOW_OPEN_TEMPERATURE,
|
||||
native_min_value=EQ3BT_MIN_TEMP,
|
||||
native_max_value=EQ3BT_MAX_TEMP,
|
||||
native_min_value=EQ3_MIN_TEMP,
|
||||
native_max_value=EQ3_MAX_TEMP,
|
||||
native_step=EQ3BT_STEP,
|
||||
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
|
||||
device_class=NumberDeviceClass.TEMPERATURE,
|
||||
),
|
||||
Eq3NumberEntityDescription(
|
||||
key=ENTITY_KEY_OFFSET,
|
||||
value_func=lambda presets: presets.offset_temperature.value,
|
||||
value_func=lambda presets: presets.offset_temperature,
|
||||
value_set_func=lambda thermostat: thermostat.async_configure_temperature_offset,
|
||||
translation_key=ENTITY_KEY_OFFSET,
|
||||
native_min_value=EQ3BT_MIN_OFFSET,
|
||||
native_max_value=EQ3BT_MAX_OFFSET,
|
||||
native_min_value=EQ3_MIN_OFFSET,
|
||||
native_max_value=EQ3_MAX_OFFSET,
|
||||
native_step=EQ3BT_STEP,
|
||||
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
|
||||
device_class=NumberDeviceClass.TEMPERATURE,
|
||||
@@ -96,7 +91,7 @@ NUMBER_ENTITY_DESCRIPTIONS = [
|
||||
Eq3NumberEntityDescription(
|
||||
key=ENTITY_KEY_WINDOW_OPEN_TIMEOUT,
|
||||
value_set_func=lambda thermostat: thermostat.async_configure_window_open_duration,
|
||||
value_func=lambda presets: presets.window_open_time.value.total_seconds() / 60,
|
||||
value_func=lambda presets: presets.window_open_time.total_seconds() / 60,
|
||||
translation_key=ENTITY_KEY_WINDOW_OPEN_TIMEOUT,
|
||||
native_min_value=0,
|
||||
native_max_value=60,
|
||||
@@ -137,7 +132,6 @@ class Eq3NumberEntity(Eq3Entity, NumberEntity):
|
||||
"""Return the state of the entity."""
|
||||
|
||||
if TYPE_CHECKING:
|
||||
assert self._thermostat.status is not None
|
||||
assert self._thermostat.status.presets is not None
|
||||
|
||||
return self.entity_description.value_func(self._thermostat.status.presets)
|
||||
@@ -152,7 +146,7 @@ class Eq3NumberEntity(Eq3Entity, NumberEntity):
|
||||
"""Return whether the entity is available."""
|
||||
|
||||
return (
|
||||
self._thermostat.status is not None
|
||||
super().available
|
||||
and self._thermostat.status.presets is not None
|
||||
and self._attr_available
|
||||
)
|
||||
|
||||
@@ -1,12 +1,12 @@
|
||||
"""Voluptuous schemas for eq3btsmart."""
|
||||
|
||||
from eq3btsmart.const import EQ3BT_MAX_TEMP, EQ3BT_MIN_TEMP
|
||||
from eq3btsmart.const import EQ3_MAX_TEMP, EQ3_MIN_TEMP
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.const import CONF_MAC
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
|
||||
SCHEMA_TEMPERATURE = vol.Range(min=EQ3BT_MIN_TEMP, max=EQ3BT_MAX_TEMP)
|
||||
SCHEMA_TEMPERATURE = vol.Range(min=EQ3_MIN_TEMP, max=EQ3_MAX_TEMP)
|
||||
SCHEMA_DEVICE = vol.Schema({vol.Required(CONF_MAC): cv.string})
|
||||
SCHEMA_MAC = vol.Schema(
|
||||
{
|
||||
|
||||
@@ -3,7 +3,6 @@
|
||||
from collections.abc import Callable
|
||||
from dataclasses import dataclass
|
||||
from datetime import datetime
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from eq3btsmart.models import Status
|
||||
|
||||
@@ -40,9 +39,7 @@ SENSOR_ENTITY_DESCRIPTIONS = [
|
||||
Eq3SensorEntityDescription(
|
||||
key=ENTITY_KEY_AWAY_UNTIL,
|
||||
translation_key=ENTITY_KEY_AWAY_UNTIL,
|
||||
value_func=lambda status: (
|
||||
status.away_until.value if status.away_until else None
|
||||
),
|
||||
value_func=lambda status: (status.away_until if status.away_until else None),
|
||||
device_class=SensorDeviceClass.DATE,
|
||||
),
|
||||
]
|
||||
@@ -78,7 +75,4 @@ class Eq3SensorEntity(Eq3Entity, SensorEntity):
|
||||
def native_value(self) -> int | datetime | None:
|
||||
"""Return the value reported by the sensor."""
|
||||
|
||||
if TYPE_CHECKING:
|
||||
assert self._thermostat.status is not None
|
||||
|
||||
return self.entity_description.value_func(self._thermostat.status)
|
||||
|
||||
@@ -1,26 +1,45 @@
|
||||
"""Platform for eq3 switch entities."""
|
||||
|
||||
from collections.abc import Awaitable, Callable
|
||||
from collections.abc import Callable, Coroutine
|
||||
from dataclasses import dataclass
|
||||
from typing import TYPE_CHECKING, Any
|
||||
from datetime import timedelta
|
||||
from functools import partial
|
||||
from typing import Any
|
||||
|
||||
from eq3btsmart import Thermostat
|
||||
from eq3btsmart.const import EQ3_DEFAULT_AWAY_TEMP, Eq3OperationMode
|
||||
from eq3btsmart.models import Status
|
||||
|
||||
from homeassistant.components.switch import SwitchEntity, SwitchEntityDescription
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
import homeassistant.util.dt as dt_util
|
||||
|
||||
from . import Eq3ConfigEntry
|
||||
from .const import ENTITY_KEY_AWAY, ENTITY_KEY_BOOST, ENTITY_KEY_LOCK
|
||||
from .const import (
|
||||
DEFAULT_AWAY_HOURS,
|
||||
ENTITY_KEY_AWAY,
|
||||
ENTITY_KEY_BOOST,
|
||||
ENTITY_KEY_LOCK,
|
||||
)
|
||||
from .entity import Eq3Entity
|
||||
|
||||
|
||||
async def async_set_away(thermostat: Thermostat, enable: bool) -> Status:
|
||||
"""Backport old async_set_away behavior."""
|
||||
|
||||
if not enable:
|
||||
return await thermostat.async_set_mode(Eq3OperationMode.AUTO)
|
||||
|
||||
away_until = dt_util.now() + timedelta(hours=DEFAULT_AWAY_HOURS)
|
||||
return await thermostat.async_set_away(away_until, EQ3_DEFAULT_AWAY_TEMP)
|
||||
|
||||
|
||||
@dataclass(frozen=True, kw_only=True)
|
||||
class Eq3SwitchEntityDescription(SwitchEntityDescription):
|
||||
"""Entity description for eq3 switch entities."""
|
||||
|
||||
toggle_func: Callable[[Thermostat], Callable[[bool], Awaitable[None]]]
|
||||
toggle_func: Callable[[Thermostat], Callable[[bool], Coroutine[None, None, Status]]]
|
||||
value_func: Callable[[Status], bool]
|
||||
|
||||
|
||||
@@ -40,7 +59,7 @@ SWITCH_ENTITY_DESCRIPTIONS = [
|
||||
Eq3SwitchEntityDescription(
|
||||
key=ENTITY_KEY_AWAY,
|
||||
translation_key=ENTITY_KEY_AWAY,
|
||||
toggle_func=lambda thermostat: thermostat.async_set_away,
|
||||
toggle_func=lambda thermostat: partial(async_set_away, thermostat),
|
||||
value_func=lambda status: status.is_away,
|
||||
),
|
||||
]
|
||||
@@ -88,7 +107,4 @@ class Eq3SwitchEntity(Eq3Entity, SwitchEntity):
|
||||
def is_on(self) -> bool:
|
||||
"""Return the state of the switch."""
|
||||
|
||||
if TYPE_CHECKING:
|
||||
assert self._thermostat.status is not None
|
||||
|
||||
return self.entity_description.value_func(self._thermostat.status)
|
||||
|
||||
@@ -5,7 +5,7 @@ from __future__ import annotations
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.const import ATTR_ENTITY_ID
|
||||
from homeassistant.core import HomeAssistant, ServiceCall
|
||||
from homeassistant.core import HomeAssistant, ServiceCall, callback
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.helpers.dispatcher import async_dispatcher_send
|
||||
|
||||
@@ -35,6 +35,7 @@ async def _async_service_handle(service: ServiceCall) -> None:
|
||||
async_dispatcher_send(service.hass, SIGNAL_FFMPEG_RESTART, entity_ids)
|
||||
|
||||
|
||||
@callback
|
||||
def async_setup_services(hass: HomeAssistant) -> None:
|
||||
"""Register FFmpeg services."""
|
||||
|
||||
|
||||
@@ -83,8 +83,8 @@ class FibaroLight(FibaroEntity, LightEntity):
|
||||
)
|
||||
supports_dimming = (
|
||||
fibaro_device.has_interface("levelChange")
|
||||
and "setValue" in fibaro_device.actions
|
||||
)
|
||||
or fibaro_device.type == "com.fibaro.multilevelSwitch"
|
||||
) and "setValue" in fibaro_device.actions
|
||||
|
||||
if supports_color and supports_white_v:
|
||||
self._attr_supported_color_modes = {ColorMode.RGBW}
|
||||
|
||||
@@ -2,7 +2,13 @@
|
||||
"config": {
|
||||
"step": {
|
||||
"pick_implementation": {
|
||||
"title": "[%key:common::config_flow::title::oauth2_pick_implementation%]"
|
||||
"title": "[%key:common::config_flow::title::oauth2_pick_implementation%]",
|
||||
"data": {
|
||||
"implementation": "[%key:common::config_flow::data::implementation%]"
|
||||
},
|
||||
"data_description": {
|
||||
"implementation": "[%key:common::config_flow::description::implementation%]"
|
||||
}
|
||||
},
|
||||
"auth": {
|
||||
"title": "Link Fitbit"
|
||||
|
||||
@@ -125,8 +125,6 @@ class Device(CoordinatorEntity[FreedomproDataUpdateCoordinator], ClimateEntity):
|
||||
|
||||
async def async_set_hvac_mode(self, hvac_mode: HVACMode) -> None:
|
||||
"""Async function to set mode to climate."""
|
||||
if hvac_mode not in SUPPORTED_HVAC_MODES:
|
||||
raise ValueError(f"Got unsupported hvac_mode {hvac_mode}")
|
||||
|
||||
payload = {"heatingCoolingState": HVAC_INVERT_MAP[hvac_mode]}
|
||||
await put_state(
|
||||
|
||||
@@ -33,7 +33,7 @@ CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)
|
||||
|
||||
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
"""Set up fritzboxtools integration."""
|
||||
await async_setup_services(hass)
|
||||
async_setup_services(hass)
|
||||
return True
|
||||
|
||||
|
||||
|
||||
@@ -10,7 +10,7 @@ from fritzconnection.core.exceptions import (
|
||||
from fritzconnection.lib.fritzwlan import DEFAULT_PASSWORD_LENGTH
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.core import HomeAssistant, ServiceCall
|
||||
from homeassistant.core import HomeAssistant, ServiceCall, callback
|
||||
from homeassistant.exceptions import HomeAssistantError, ServiceValidationError
|
||||
from homeassistant.helpers.service import async_extract_config_entry_ids
|
||||
|
||||
@@ -64,7 +64,8 @@ async def _async_set_guest_wifi_password(service_call: ServiceCall) -> None:
|
||||
) from ex
|
||||
|
||||
|
||||
async def async_setup_services(hass: HomeAssistant) -> None:
|
||||
@callback
|
||||
def async_setup_services(hass: HomeAssistant) -> None:
|
||||
"""Set up services for Fritz integration."""
|
||||
|
||||
hass.services.async_register(
|
||||
|
||||
@@ -20,5 +20,5 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/frontend",
|
||||
"integration_type": "system",
|
||||
"quality_scale": "internal",
|
||||
"requirements": ["home-assistant-frontend==20250531.2"]
|
||||
"requirements": ["home-assistant-frontend==20250531.3"]
|
||||
}
|
||||
|
||||
@@ -2,7 +2,13 @@
|
||||
"config": {
|
||||
"step": {
|
||||
"pick_implementation": {
|
||||
"title": "[%key:common::config_flow::title::oauth2_pick_implementation%]"
|
||||
"title": "[%key:common::config_flow::title::oauth2_pick_implementation%]",
|
||||
"data": {
|
||||
"implementation": "[%key:common::config_flow::data::implementation%]"
|
||||
},
|
||||
"data_description": {
|
||||
"implementation": "[%key:common::config_flow::description::implementation%]"
|
||||
}
|
||||
},
|
||||
"reauth_confirm": {
|
||||
"title": "[%key:common::config_flow::title::reauth%]",
|
||||
|
||||
@@ -2,7 +2,13 @@
|
||||
"config": {
|
||||
"step": {
|
||||
"pick_implementation": {
|
||||
"title": "[%key:common::config_flow::title::oauth2_pick_implementation%]"
|
||||
"title": "[%key:common::config_flow::title::oauth2_pick_implementation%]",
|
||||
"data": {
|
||||
"implementation": "[%key:common::config_flow::data::implementation%]"
|
||||
},
|
||||
"data_description": {
|
||||
"implementation": "[%key:common::config_flow::description::implementation%]"
|
||||
}
|
||||
},
|
||||
"reauth_confirm": {
|
||||
"title": "[%key:common::config_flow::title::reauth%]",
|
||||
|
||||
@@ -11,6 +11,7 @@ from homeassistant.core import (
|
||||
ServiceCall,
|
||||
ServiceResponse,
|
||||
SupportsResponse,
|
||||
callback,
|
||||
)
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
|
||||
@@ -49,6 +50,7 @@ async def _send_text_command(call: ServiceCall) -> ServiceResponse:
|
||||
return None
|
||||
|
||||
|
||||
@callback
|
||||
def async_setup_services(hass: HomeAssistant) -> None:
|
||||
"""Add the services for Google Assistant SDK."""
|
||||
|
||||
|
||||
@@ -2,7 +2,13 @@
|
||||
"config": {
|
||||
"step": {
|
||||
"pick_implementation": {
|
||||
"title": "[%key:common::config_flow::title::oauth2_pick_implementation%]"
|
||||
"title": "[%key:common::config_flow::title::oauth2_pick_implementation%]",
|
||||
"data": {
|
||||
"implementation": "[%key:common::config_flow::data::implementation%]"
|
||||
},
|
||||
"data_description": {
|
||||
"implementation": "[%key:common::config_flow::description::implementation%]"
|
||||
}
|
||||
},
|
||||
"reauth_confirm": {
|
||||
"title": "[%key:common::config_flow::title::reauth%]",
|
||||
|
||||
@@ -2,7 +2,13 @@
|
||||
"config": {
|
||||
"step": {
|
||||
"pick_implementation": {
|
||||
"title": "[%key:common::config_flow::title::oauth2_pick_implementation%]"
|
||||
"title": "[%key:common::config_flow::title::oauth2_pick_implementation%]",
|
||||
"data": {
|
||||
"implementation": "[%key:common::config_flow::data::implementation%]"
|
||||
},
|
||||
"data_description": {
|
||||
"implementation": "[%key:common::config_flow::description::implementation%]"
|
||||
}
|
||||
},
|
||||
"reauth_confirm": {
|
||||
"title": "[%key:common::config_flow::title::reauth%]",
|
||||
|
||||
@@ -45,7 +45,10 @@ CONF_IMAGE_FILENAME = "image_filename"
|
||||
CONF_FILENAMES = "filenames"
|
||||
|
||||
CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)
|
||||
PLATFORMS = (Platform.CONVERSATION,)
|
||||
PLATFORMS = (
|
||||
Platform.CONVERSATION,
|
||||
Platform.TTS,
|
||||
)
|
||||
|
||||
type GoogleGenerativeAIConfigEntry = ConfigEntry[Client]
|
||||
|
||||
|
||||
@@ -6,9 +6,11 @@ DOMAIN = "google_generative_ai_conversation"
|
||||
LOGGER = logging.getLogger(__package__)
|
||||
CONF_PROMPT = "prompt"
|
||||
|
||||
ATTR_MODEL = "model"
|
||||
CONF_RECOMMENDED = "recommended"
|
||||
CONF_CHAT_MODEL = "chat_model"
|
||||
RECOMMENDED_CHAT_MODEL = "models/gemini-2.0-flash"
|
||||
RECOMMENDED_TTS_MODEL = "gemini-2.5-flash-preview-tts"
|
||||
CONF_TEMPERATURE = "temperature"
|
||||
RECOMMENDED_TEMPERATURE = 1.0
|
||||
CONF_TOP_P = "top_p"
|
||||
|
||||
@@ -2,63 +2,18 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import codecs
|
||||
from collections.abc import AsyncGenerator, Callable
|
||||
from dataclasses import replace
|
||||
from typing import Any, Literal, cast
|
||||
|
||||
from google.genai.errors import APIError, ClientError
|
||||
from google.genai.types import (
|
||||
AutomaticFunctionCallingConfig,
|
||||
Content,
|
||||
FunctionDeclaration,
|
||||
GenerateContentConfig,
|
||||
GenerateContentResponse,
|
||||
GoogleSearch,
|
||||
HarmCategory,
|
||||
Part,
|
||||
SafetySetting,
|
||||
Schema,
|
||||
Tool,
|
||||
)
|
||||
from voluptuous_openapi import convert
|
||||
from typing import Literal
|
||||
|
||||
from homeassistant.components import assist_pipeline, conversation
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import CONF_LLM_HASS_API, MATCH_ALL
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers import device_registry as dr, intent, llm
|
||||
from homeassistant.helpers import intent
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from .const import (
|
||||
CONF_CHAT_MODEL,
|
||||
CONF_DANGEROUS_BLOCK_THRESHOLD,
|
||||
CONF_HARASSMENT_BLOCK_THRESHOLD,
|
||||
CONF_HATE_BLOCK_THRESHOLD,
|
||||
CONF_MAX_TOKENS,
|
||||
CONF_PROMPT,
|
||||
CONF_SEXUAL_BLOCK_THRESHOLD,
|
||||
CONF_TEMPERATURE,
|
||||
CONF_TOP_K,
|
||||
CONF_TOP_P,
|
||||
CONF_USE_GOOGLE_SEARCH_TOOL,
|
||||
DOMAIN,
|
||||
LOGGER,
|
||||
RECOMMENDED_CHAT_MODEL,
|
||||
RECOMMENDED_HARM_BLOCK_THRESHOLD,
|
||||
RECOMMENDED_MAX_TOKENS,
|
||||
RECOMMENDED_TEMPERATURE,
|
||||
RECOMMENDED_TOP_K,
|
||||
RECOMMENDED_TOP_P,
|
||||
)
|
||||
|
||||
# Max number of back and forth with the LLM to generate a response
|
||||
MAX_TOOL_ITERATIONS = 10
|
||||
|
||||
ERROR_GETTING_RESPONSE = (
|
||||
"Sorry, I had a problem getting a response from Google Generative AI."
|
||||
)
|
||||
from .const import CONF_PROMPT, DOMAIN, LOGGER
|
||||
from .entity import ERROR_GETTING_RESPONSE, GoogleGenerativeAILLMBaseEntity
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
@@ -71,265 +26,18 @@ async def async_setup_entry(
|
||||
async_add_entities([agent])
|
||||
|
||||
|
||||
SUPPORTED_SCHEMA_KEYS = {
|
||||
# Gemini API does not support all of the OpenAPI schema
|
||||
# SoT: https://ai.google.dev/api/caching#Schema
|
||||
"type",
|
||||
"format",
|
||||
"description",
|
||||
"nullable",
|
||||
"enum",
|
||||
"max_items",
|
||||
"min_items",
|
||||
"properties",
|
||||
"required",
|
||||
"items",
|
||||
}
|
||||
|
||||
|
||||
def _camel_to_snake(name: str) -> str:
|
||||
"""Convert camel case to snake case."""
|
||||
return "".join(["_" + c.lower() if c.isupper() else c for c in name]).lstrip("_")
|
||||
|
||||
|
||||
def _format_schema(schema: dict[str, Any]) -> Schema:
|
||||
"""Format the schema to be compatible with Gemini API."""
|
||||
if subschemas := schema.get("allOf"):
|
||||
for subschema in subschemas: # Gemini API does not support allOf keys
|
||||
if "type" in subschema: # Fallback to first subschema with 'type' field
|
||||
return _format_schema(subschema)
|
||||
return _format_schema(
|
||||
subschemas[0]
|
||||
) # Or, if not found, to any of the subschemas
|
||||
|
||||
result = {}
|
||||
for key, val in schema.items():
|
||||
key = _camel_to_snake(key)
|
||||
if key not in SUPPORTED_SCHEMA_KEYS:
|
||||
continue
|
||||
if key == "type":
|
||||
val = val.upper()
|
||||
elif key == "format":
|
||||
# Gemini API does not support all formats, see: https://ai.google.dev/api/caching#Schema
|
||||
# formats that are not supported are ignored
|
||||
if schema.get("type") == "string" and val not in ("enum", "date-time"):
|
||||
continue
|
||||
if schema.get("type") == "number" and val not in ("float", "double"):
|
||||
continue
|
||||
if schema.get("type") == "integer" and val not in ("int32", "int64"):
|
||||
continue
|
||||
if schema.get("type") not in ("string", "number", "integer"):
|
||||
continue
|
||||
elif key == "items":
|
||||
val = _format_schema(val)
|
||||
elif key == "properties":
|
||||
val = {k: _format_schema(v) for k, v in val.items()}
|
||||
result[key] = val
|
||||
|
||||
if result.get("enum") and result.get("type") != "STRING":
|
||||
# enum is only allowed for STRING type. This is safe as long as the schema
|
||||
# contains vol.Coerce for the respective type, for example:
|
||||
# vol.All(vol.Coerce(int), vol.In([1, 2, 3]))
|
||||
result["type"] = "STRING"
|
||||
result["enum"] = [str(item) for item in result["enum"]]
|
||||
|
||||
if result.get("type") == "OBJECT" and not result.get("properties"):
|
||||
# An object with undefined properties is not supported by Gemini API.
|
||||
# Fallback to JSON string. This will probably fail for most tools that want it,
|
||||
# but we don't have a better fallback strategy so far.
|
||||
result["properties"] = {"json": {"type": "STRING"}}
|
||||
result["required"] = []
|
||||
return cast(Schema, result)
|
||||
|
||||
|
||||
def _format_tool(
|
||||
tool: llm.Tool, custom_serializer: Callable[[Any], Any] | None
|
||||
) -> Tool:
|
||||
"""Format tool specification."""
|
||||
|
||||
if tool.parameters.schema:
|
||||
parameters = _format_schema(
|
||||
convert(tool.parameters, custom_serializer=custom_serializer)
|
||||
)
|
||||
else:
|
||||
parameters = None
|
||||
|
||||
return Tool(
|
||||
function_declarations=[
|
||||
FunctionDeclaration(
|
||||
name=tool.name,
|
||||
description=tool.description,
|
||||
parameters=parameters,
|
||||
)
|
||||
]
|
||||
)
|
||||
|
||||
|
||||
def _escape_decode(value: Any) -> Any:
|
||||
"""Recursively call codecs.escape_decode on all values."""
|
||||
if isinstance(value, str):
|
||||
return codecs.escape_decode(bytes(value, "utf-8"))[0].decode("utf-8") # type: ignore[attr-defined]
|
||||
if isinstance(value, list):
|
||||
return [_escape_decode(item) for item in value]
|
||||
if isinstance(value, dict):
|
||||
return {k: _escape_decode(v) for k, v in value.items()}
|
||||
return value
|
||||
|
||||
|
||||
def _create_google_tool_response_parts(
|
||||
parts: list[conversation.ToolResultContent],
|
||||
) -> list[Part]:
|
||||
"""Create Google tool response parts."""
|
||||
return [
|
||||
Part.from_function_response(
|
||||
name=tool_result.tool_name, response=tool_result.tool_result
|
||||
)
|
||||
for tool_result in parts
|
||||
]
|
||||
|
||||
|
||||
def _create_google_tool_response_content(
|
||||
content: list[conversation.ToolResultContent],
|
||||
) -> Content:
|
||||
"""Create a Google tool response content."""
|
||||
return Content(
|
||||
role="user",
|
||||
parts=_create_google_tool_response_parts(content),
|
||||
)
|
||||
|
||||
|
||||
def _convert_content(
|
||||
content: conversation.UserContent
|
||||
| conversation.AssistantContent
|
||||
| conversation.SystemContent,
|
||||
) -> Content:
|
||||
"""Convert HA content to Google content."""
|
||||
if content.role != "assistant" or not content.tool_calls:
|
||||
role = "model" if content.role == "assistant" else content.role
|
||||
return Content(
|
||||
role=role,
|
||||
parts=[
|
||||
Part.from_text(text=content.content if content.content else ""),
|
||||
],
|
||||
)
|
||||
|
||||
# Handle the Assistant content with tool calls.
|
||||
assert type(content) is conversation.AssistantContent
|
||||
parts: list[Part] = []
|
||||
|
||||
if content.content:
|
||||
parts.append(Part.from_text(text=content.content))
|
||||
|
||||
if content.tool_calls:
|
||||
parts.extend(
|
||||
[
|
||||
Part.from_function_call(
|
||||
name=tool_call.tool_name,
|
||||
args=_escape_decode(tool_call.tool_args),
|
||||
)
|
||||
for tool_call in content.tool_calls
|
||||
]
|
||||
)
|
||||
|
||||
return Content(role="model", parts=parts)
|
||||
|
||||
|
||||
async def _transform_stream(
|
||||
result: AsyncGenerator[GenerateContentResponse],
|
||||
) -> AsyncGenerator[conversation.AssistantContentDeltaDict]:
|
||||
new_message = True
|
||||
try:
|
||||
async for response in result:
|
||||
LOGGER.debug("Received response chunk: %s", response)
|
||||
chunk: conversation.AssistantContentDeltaDict = {}
|
||||
|
||||
if new_message:
|
||||
chunk["role"] = "assistant"
|
||||
new_message = False
|
||||
|
||||
# According to the API docs, this would mean no candidate is returned, so we can safely throw an error here.
|
||||
if response.prompt_feedback or not response.candidates:
|
||||
reason = (
|
||||
response.prompt_feedback.block_reason_message
|
||||
if response.prompt_feedback
|
||||
else "unknown"
|
||||
)
|
||||
raise HomeAssistantError(
|
||||
f"The message got blocked due to content violations, reason: {reason}"
|
||||
)
|
||||
|
||||
candidate = response.candidates[0]
|
||||
|
||||
if (
|
||||
candidate.finish_reason is not None
|
||||
and candidate.finish_reason != "STOP"
|
||||
):
|
||||
# The message ended due to a content error as explained in: https://ai.google.dev/api/generate-content#FinishReason
|
||||
LOGGER.error(
|
||||
"Error in Google Generative AI response: %s, see: https://ai.google.dev/api/generate-content#FinishReason",
|
||||
candidate.finish_reason,
|
||||
)
|
||||
raise HomeAssistantError(
|
||||
f"{ERROR_GETTING_RESPONSE} Reason: {candidate.finish_reason}"
|
||||
)
|
||||
|
||||
response_parts = (
|
||||
candidate.content.parts
|
||||
if candidate.content is not None and candidate.content.parts is not None
|
||||
else []
|
||||
)
|
||||
|
||||
content = "".join([part.text for part in response_parts if part.text])
|
||||
tool_calls = []
|
||||
for part in response_parts:
|
||||
if not part.function_call:
|
||||
continue
|
||||
tool_call = part.function_call
|
||||
tool_name = tool_call.name if tool_call.name else ""
|
||||
tool_args = _escape_decode(tool_call.args)
|
||||
tool_calls.append(
|
||||
llm.ToolInput(tool_name=tool_name, tool_args=tool_args)
|
||||
)
|
||||
|
||||
if tool_calls:
|
||||
chunk["tool_calls"] = tool_calls
|
||||
|
||||
chunk["content"] = content
|
||||
yield chunk
|
||||
except (
|
||||
APIError,
|
||||
ValueError,
|
||||
) as err:
|
||||
LOGGER.error("Error sending message: %s %s", type(err), err)
|
||||
if isinstance(err, APIError):
|
||||
message = err.message
|
||||
else:
|
||||
message = type(err).__name__
|
||||
error = f"{ERROR_GETTING_RESPONSE}: {message}"
|
||||
raise HomeAssistantError(error) from err
|
||||
|
||||
|
||||
class GoogleGenerativeAIConversationEntity(
|
||||
conversation.ConversationEntity, conversation.AbstractConversationAgent
|
||||
conversation.ConversationEntity,
|
||||
conversation.AbstractConversationAgent,
|
||||
GoogleGenerativeAILLMBaseEntity,
|
||||
):
|
||||
"""Google Generative AI conversation agent."""
|
||||
|
||||
_attr_has_entity_name = True
|
||||
_attr_name = None
|
||||
_attr_supports_streaming = True
|
||||
|
||||
def __init__(self, entry: ConfigEntry) -> None:
|
||||
"""Initialize the agent."""
|
||||
self.entry = entry
|
||||
self._genai_client = entry.runtime_data
|
||||
self._attr_unique_id = entry.entry_id
|
||||
self._attr_device_info = dr.DeviceInfo(
|
||||
identifiers={(DOMAIN, entry.entry_id)},
|
||||
name=entry.title,
|
||||
manufacturer="Google",
|
||||
model="Generative AI",
|
||||
entry_type=dr.DeviceEntryType.SERVICE,
|
||||
)
|
||||
super().__init__(entry)
|
||||
if self.entry.options.get(CONF_LLM_HASS_API):
|
||||
self._attr_supported_features = (
|
||||
conversation.ConversationEntityFeature.CONTROL
|
||||
@@ -356,13 +64,6 @@ class GoogleGenerativeAIConversationEntity(
|
||||
conversation.async_unset_agent(self.hass, self.entry)
|
||||
await super().async_will_remove_from_hass()
|
||||
|
||||
def _fix_tool_name(self, tool_name: str) -> str:
|
||||
"""Fix tool name if needed."""
|
||||
# The Gemini 2.0+ tokenizer seemingly has a issue with the HassListAddItem tool
|
||||
# name. This makes sure when it incorrectly changes the name, that we change it
|
||||
# back for HA to call.
|
||||
return tool_name if tool_name != "HasListAddItem" else "HassListAddItem"
|
||||
|
||||
async def _async_handle_message(
|
||||
self,
|
||||
user_input: conversation.ConversationInput,
|
||||
@@ -372,162 +73,16 @@ class GoogleGenerativeAIConversationEntity(
|
||||
options = self.entry.options
|
||||
|
||||
try:
|
||||
await chat_log.async_update_llm_data(
|
||||
DOMAIN,
|
||||
user_input,
|
||||
await chat_log.async_provide_llm_data(
|
||||
user_input.as_llm_context(DOMAIN),
|
||||
options.get(CONF_LLM_HASS_API),
|
||||
options.get(CONF_PROMPT),
|
||||
user_input.extra_system_prompt,
|
||||
)
|
||||
except conversation.ConverseError as err:
|
||||
return err.as_conversation_result()
|
||||
|
||||
tools: list[Tool | Callable[..., Any]] | None = None
|
||||
if chat_log.llm_api:
|
||||
tools = [
|
||||
_format_tool(tool, chat_log.llm_api.custom_serializer)
|
||||
for tool in chat_log.llm_api.tools
|
||||
]
|
||||
|
||||
# Using search grounding allows the model to retrieve information from the web,
|
||||
# however, it may interfere with how the model decides to use some tools, or entities
|
||||
# for example weather entity may be disregarded if the model chooses to Google it.
|
||||
if options.get(CONF_USE_GOOGLE_SEARCH_TOOL) is True:
|
||||
tools = tools or []
|
||||
tools.append(Tool(google_search=GoogleSearch()))
|
||||
|
||||
model_name = self.entry.options.get(CONF_CHAT_MODEL, RECOMMENDED_CHAT_MODEL)
|
||||
# Avoid INVALID_ARGUMENT Developer instruction is not enabled for <model>
|
||||
supports_system_instruction = (
|
||||
"gemma" not in model_name
|
||||
and "gemini-2.0-flash-preview-image-generation" not in model_name
|
||||
)
|
||||
|
||||
prompt_content = cast(
|
||||
conversation.SystemContent,
|
||||
chat_log.content[0],
|
||||
)
|
||||
|
||||
if prompt_content.content:
|
||||
prompt = prompt_content.content
|
||||
else:
|
||||
raise HomeAssistantError("Invalid prompt content")
|
||||
|
||||
messages: list[Content] = []
|
||||
|
||||
# Google groups tool results, we do not. Group them before sending.
|
||||
tool_results: list[conversation.ToolResultContent] = []
|
||||
|
||||
for chat_content in chat_log.content[1:-1]:
|
||||
if chat_content.role == "tool_result":
|
||||
tool_results.append(chat_content)
|
||||
continue
|
||||
|
||||
if (
|
||||
not isinstance(chat_content, conversation.ToolResultContent)
|
||||
and chat_content.content == ""
|
||||
):
|
||||
# Skipping is not possible since the number of function calls need to match the number of function responses
|
||||
# and skipping one would mean removing the other and hence this would prevent a proper chat log
|
||||
chat_content = replace(chat_content, content=" ")
|
||||
|
||||
if tool_results:
|
||||
messages.append(_create_google_tool_response_content(tool_results))
|
||||
tool_results.clear()
|
||||
|
||||
messages.append(_convert_content(chat_content))
|
||||
|
||||
# The SDK requires the first message to be a user message
|
||||
# This is not the case if user used `start_conversation`
|
||||
# Workaround from https://github.com/googleapis/python-genai/issues/529#issuecomment-2740964537
|
||||
if messages and messages[0].role != "user":
|
||||
messages.insert(
|
||||
0,
|
||||
Content(role="user", parts=[Part.from_text(text=" ")]),
|
||||
)
|
||||
|
||||
if tool_results:
|
||||
messages.append(_create_google_tool_response_content(tool_results))
|
||||
generateContentConfig = GenerateContentConfig(
|
||||
temperature=self.entry.options.get(
|
||||
CONF_TEMPERATURE, RECOMMENDED_TEMPERATURE
|
||||
),
|
||||
top_k=self.entry.options.get(CONF_TOP_K, RECOMMENDED_TOP_K),
|
||||
top_p=self.entry.options.get(CONF_TOP_P, RECOMMENDED_TOP_P),
|
||||
max_output_tokens=self.entry.options.get(
|
||||
CONF_MAX_TOKENS, RECOMMENDED_MAX_TOKENS
|
||||
),
|
||||
safety_settings=[
|
||||
SafetySetting(
|
||||
category=HarmCategory.HARM_CATEGORY_HATE_SPEECH,
|
||||
threshold=self.entry.options.get(
|
||||
CONF_HATE_BLOCK_THRESHOLD, RECOMMENDED_HARM_BLOCK_THRESHOLD
|
||||
),
|
||||
),
|
||||
SafetySetting(
|
||||
category=HarmCategory.HARM_CATEGORY_HARASSMENT,
|
||||
threshold=self.entry.options.get(
|
||||
CONF_HARASSMENT_BLOCK_THRESHOLD,
|
||||
RECOMMENDED_HARM_BLOCK_THRESHOLD,
|
||||
),
|
||||
),
|
||||
SafetySetting(
|
||||
category=HarmCategory.HARM_CATEGORY_DANGEROUS_CONTENT,
|
||||
threshold=self.entry.options.get(
|
||||
CONF_DANGEROUS_BLOCK_THRESHOLD, RECOMMENDED_HARM_BLOCK_THRESHOLD
|
||||
),
|
||||
),
|
||||
SafetySetting(
|
||||
category=HarmCategory.HARM_CATEGORY_SEXUALLY_EXPLICIT,
|
||||
threshold=self.entry.options.get(
|
||||
CONF_SEXUAL_BLOCK_THRESHOLD, RECOMMENDED_HARM_BLOCK_THRESHOLD
|
||||
),
|
||||
),
|
||||
],
|
||||
tools=tools or None,
|
||||
system_instruction=prompt if supports_system_instruction else None,
|
||||
automatic_function_calling=AutomaticFunctionCallingConfig(
|
||||
disable=True, maximum_remote_calls=None
|
||||
),
|
||||
)
|
||||
|
||||
if not supports_system_instruction:
|
||||
messages = [
|
||||
Content(role="user", parts=[Part.from_text(text=prompt)]),
|
||||
Content(role="model", parts=[Part.from_text(text="Ok")]),
|
||||
*messages,
|
||||
]
|
||||
chat = self._genai_client.aio.chats.create(
|
||||
model=model_name, history=messages, config=generateContentConfig
|
||||
)
|
||||
chat_request: str | list[Part] = user_input.text
|
||||
# To prevent infinite loops, we limit the number of iterations
|
||||
for _iteration in range(MAX_TOOL_ITERATIONS):
|
||||
try:
|
||||
chat_response_generator = await chat.send_message_stream(
|
||||
message=chat_request
|
||||
)
|
||||
except (
|
||||
APIError,
|
||||
ClientError,
|
||||
ValueError,
|
||||
) as err:
|
||||
LOGGER.error("Error sending message: %s %s", type(err), err)
|
||||
error = ERROR_GETTING_RESPONSE
|
||||
raise HomeAssistantError(error) from err
|
||||
|
||||
chat_request = _create_google_tool_response_parts(
|
||||
[
|
||||
content
|
||||
async for content in chat_log.async_add_delta_content_stream(
|
||||
user_input.agent_id,
|
||||
_transform_stream(chat_response_generator),
|
||||
)
|
||||
if isinstance(content, conversation.ToolResultContent)
|
||||
]
|
||||
)
|
||||
|
||||
if not chat_log.unresponded_tool_results:
|
||||
break
|
||||
await self._async_handle_chat_log(chat_log)
|
||||
|
||||
response = intent.IntentResponse(language=user_input.language)
|
||||
if not isinstance(chat_log.content[-1], conversation.AssistantContent):
|
||||
@@ -535,7 +90,7 @@ class GoogleGenerativeAIConversationEntity(
|
||||
"Last content in chat log is not an AssistantContent: %s. This could be due to the model not returning a valid response",
|
||||
chat_log.content[-1],
|
||||
)
|
||||
raise HomeAssistantError(f"{ERROR_GETTING_RESPONSE}")
|
||||
raise HomeAssistantError(ERROR_GETTING_RESPONSE)
|
||||
response.async_set_speech(chat_log.content[-1].content or "")
|
||||
return conversation.ConversationResult(
|
||||
response=response,
|
||||
|
||||
@@ -0,0 +1,475 @@
|
||||
"""Conversation support for the Google Generative AI Conversation integration."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import codecs
|
||||
from collections.abc import AsyncGenerator, Callable
|
||||
from dataclasses import replace
|
||||
from typing import Any, cast
|
||||
|
||||
from google.genai.errors import APIError, ClientError
|
||||
from google.genai.types import (
|
||||
AutomaticFunctionCallingConfig,
|
||||
Content,
|
||||
FunctionDeclaration,
|
||||
GenerateContentConfig,
|
||||
GenerateContentResponse,
|
||||
GoogleSearch,
|
||||
HarmCategory,
|
||||
Part,
|
||||
SafetySetting,
|
||||
Schema,
|
||||
Tool,
|
||||
)
|
||||
from voluptuous_openapi import convert
|
||||
|
||||
from homeassistant.components import conversation
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers import device_registry as dr, llm
|
||||
from homeassistant.helpers.entity import Entity
|
||||
|
||||
from .const import (
|
||||
CONF_CHAT_MODEL,
|
||||
CONF_DANGEROUS_BLOCK_THRESHOLD,
|
||||
CONF_HARASSMENT_BLOCK_THRESHOLD,
|
||||
CONF_HATE_BLOCK_THRESHOLD,
|
||||
CONF_MAX_TOKENS,
|
||||
CONF_SEXUAL_BLOCK_THRESHOLD,
|
||||
CONF_TEMPERATURE,
|
||||
CONF_TOP_K,
|
||||
CONF_TOP_P,
|
||||
CONF_USE_GOOGLE_SEARCH_TOOL,
|
||||
DOMAIN,
|
||||
LOGGER,
|
||||
RECOMMENDED_CHAT_MODEL,
|
||||
RECOMMENDED_HARM_BLOCK_THRESHOLD,
|
||||
RECOMMENDED_MAX_TOKENS,
|
||||
RECOMMENDED_TEMPERATURE,
|
||||
RECOMMENDED_TOP_K,
|
||||
RECOMMENDED_TOP_P,
|
||||
)
|
||||
|
||||
# Max number of back and forth with the LLM to generate a response
|
||||
MAX_TOOL_ITERATIONS = 10
|
||||
|
||||
ERROR_GETTING_RESPONSE = (
|
||||
"Sorry, I had a problem getting a response from Google Generative AI."
|
||||
)
|
||||
|
||||
|
||||
SUPPORTED_SCHEMA_KEYS = {
|
||||
# Gemini API does not support all of the OpenAPI schema
|
||||
# SoT: https://ai.google.dev/api/caching#Schema
|
||||
"type",
|
||||
"format",
|
||||
"description",
|
||||
"nullable",
|
||||
"enum",
|
||||
"max_items",
|
||||
"min_items",
|
||||
"properties",
|
||||
"required",
|
||||
"items",
|
||||
}
|
||||
|
||||
|
||||
def _camel_to_snake(name: str) -> str:
|
||||
"""Convert camel case to snake case."""
|
||||
return "".join(["_" + c.lower() if c.isupper() else c for c in name]).lstrip("_")
|
||||
|
||||
|
||||
def _format_schema(schema: dict[str, Any]) -> Schema:
|
||||
"""Format the schema to be compatible with Gemini API."""
|
||||
if subschemas := schema.get("allOf"):
|
||||
for subschema in subschemas: # Gemini API does not support allOf keys
|
||||
if "type" in subschema: # Fallback to first subschema with 'type' field
|
||||
return _format_schema(subschema)
|
||||
return _format_schema(
|
||||
subschemas[0]
|
||||
) # Or, if not found, to any of the subschemas
|
||||
|
||||
result = {}
|
||||
for key, val in schema.items():
|
||||
key = _camel_to_snake(key)
|
||||
if key not in SUPPORTED_SCHEMA_KEYS:
|
||||
continue
|
||||
if key == "type":
|
||||
val = val.upper()
|
||||
elif key == "format":
|
||||
# Gemini API does not support all formats, see: https://ai.google.dev/api/caching#Schema
|
||||
# formats that are not supported are ignored
|
||||
if schema.get("type") == "string" and val not in ("enum", "date-time"):
|
||||
continue
|
||||
if schema.get("type") == "number" and val not in ("float", "double"):
|
||||
continue
|
||||
if schema.get("type") == "integer" and val not in ("int32", "int64"):
|
||||
continue
|
||||
if schema.get("type") not in ("string", "number", "integer"):
|
||||
continue
|
||||
elif key == "items":
|
||||
val = _format_schema(val)
|
||||
elif key == "properties":
|
||||
val = {k: _format_schema(v) for k, v in val.items()}
|
||||
result[key] = val
|
||||
|
||||
if result.get("enum") and result.get("type") != "STRING":
|
||||
# enum is only allowed for STRING type. This is safe as long as the schema
|
||||
# contains vol.Coerce for the respective type, for example:
|
||||
# vol.All(vol.Coerce(int), vol.In([1, 2, 3]))
|
||||
result["type"] = "STRING"
|
||||
result["enum"] = [str(item) for item in result["enum"]]
|
||||
|
||||
if result.get("type") == "OBJECT" and not result.get("properties"):
|
||||
# An object with undefined properties is not supported by Gemini API.
|
||||
# Fallback to JSON string. This will probably fail for most tools that want it,
|
||||
# but we don't have a better fallback strategy so far.
|
||||
result["properties"] = {"json": {"type": "STRING"}}
|
||||
result["required"] = []
|
||||
return cast(Schema, result)
|
||||
|
||||
|
||||
def _format_tool(
|
||||
tool: llm.Tool, custom_serializer: Callable[[Any], Any] | None
|
||||
) -> Tool:
|
||||
"""Format tool specification."""
|
||||
|
||||
if tool.parameters.schema:
|
||||
parameters = _format_schema(
|
||||
convert(tool.parameters, custom_serializer=custom_serializer)
|
||||
)
|
||||
else:
|
||||
parameters = None
|
||||
|
||||
return Tool(
|
||||
function_declarations=[
|
||||
FunctionDeclaration(
|
||||
name=tool.name,
|
||||
description=tool.description,
|
||||
parameters=parameters,
|
||||
)
|
||||
]
|
||||
)
|
||||
|
||||
|
||||
def _escape_decode(value: Any) -> Any:
|
||||
"""Recursively call codecs.escape_decode on all values."""
|
||||
if isinstance(value, str):
|
||||
return codecs.escape_decode(bytes(value, "utf-8"))[0].decode("utf-8") # type: ignore[attr-defined]
|
||||
if isinstance(value, list):
|
||||
return [_escape_decode(item) for item in value]
|
||||
if isinstance(value, dict):
|
||||
return {k: _escape_decode(v) for k, v in value.items()}
|
||||
return value
|
||||
|
||||
|
||||
def _create_google_tool_response_parts(
|
||||
parts: list[conversation.ToolResultContent],
|
||||
) -> list[Part]:
|
||||
"""Create Google tool response parts."""
|
||||
return [
|
||||
Part.from_function_response(
|
||||
name=tool_result.tool_name, response=tool_result.tool_result
|
||||
)
|
||||
for tool_result in parts
|
||||
]
|
||||
|
||||
|
||||
def _create_google_tool_response_content(
|
||||
content: list[conversation.ToolResultContent],
|
||||
) -> Content:
|
||||
"""Create a Google tool response content."""
|
||||
return Content(
|
||||
role="user",
|
||||
parts=_create_google_tool_response_parts(content),
|
||||
)
|
||||
|
||||
|
||||
def _convert_content(
    content: (
        conversation.UserContent
        | conversation.AssistantContent
        | conversation.SystemContent
    ),
) -> Content:
    """Convert HA content to Google content.

    Plain (non-tool-call) content becomes a single text part; assistant
    content carrying tool calls becomes a "model" message with optional
    text plus one function-call part per tool call.
    """
    # Simple case: any content without tool calls maps to one text part.
    if content.role != "assistant" or not content.tool_calls:
        # Google's role vocabulary uses "model" where HA uses "assistant".
        role = "model" if content.role == "assistant" else content.role
        return Content(
            role=role,
            parts=[
                # Empty/None content is sent as an empty string part.
                Part.from_text(text=content.content if content.content else ""),
            ],
        )

    # Handle the Assistant content with tool calls.
    assert type(content) is conversation.AssistantContent
    parts: list[Part] = []

    if content.content:
        parts.append(Part.from_text(text=content.content))

    if content.tool_calls:
        parts.extend(
            [
                Part.from_function_call(
                    name=tool_call.tool_name,
                    # NOTE(review): args go through _escape_decode, presumably
                    # to undo backslash-escaping in model-produced strings —
                    # confirm against the escaping done by the model output.
                    args=_escape_decode(tool_call.tool_args),
                )
                for tool_call in content.tool_calls
            ]
        )

    return Content(role="model", parts=parts)
|
||||
|
||||
|
||||
async def _transform_stream(
    result: AsyncGenerator[GenerateContentResponse],
) -> AsyncGenerator[conversation.AssistantContentDeltaDict]:
    """Transform a Gemini response stream into HA assistant content deltas.

    Yields one delta dict per received chunk; only the first chunk carries
    the "assistant" role. Raises HomeAssistantError when the prompt is
    blocked, when generation finishes abnormally, or on API/value errors.
    """
    # Tracks whether the next chunk starts a new assistant message.
    new_message = True
    try:
        async for response in result:
            LOGGER.debug("Received response chunk: %s", response)
            chunk: conversation.AssistantContentDeltaDict = {}

            if new_message:
                # Only the first delta of a message announces the role.
                chunk["role"] = "assistant"
                new_message = False

            # According to the API docs, this would mean no candidate is returned, so we can safely throw an error here.
            if response.prompt_feedback or not response.candidates:
                reason = (
                    response.prompt_feedback.block_reason_message
                    if response.prompt_feedback
                    else "unknown"
                )
                raise HomeAssistantError(
                    f"The message got blocked due to content violations, reason: {reason}"
                )

            # Only the first candidate is considered.
            candidate = response.candidates[0]

            if (
                candidate.finish_reason is not None
                and candidate.finish_reason != "STOP"
            ):
                # The message ended due to a content error as explained in: https://ai.google.dev/api/generate-content#FinishReason
                LOGGER.error(
                    "Error in Google Generative AI response: %s, see: https://ai.google.dev/api/generate-content#FinishReason",
                    candidate.finish_reason,
                )
                raise HomeAssistantError(
                    f"{ERROR_GETTING_RESPONSE} Reason: {candidate.finish_reason}"
                )

            # A candidate may arrive with no content/parts; treat as empty.
            response_parts = (
                candidate.content.parts
                if candidate.content is not None and candidate.content.parts is not None
                else []
            )

            # Concatenate all text parts into this delta's content.
            content = "".join([part.text for part in response_parts if part.text])
            tool_calls = []
            for part in response_parts:
                if not part.function_call:
                    continue
                tool_call = part.function_call
                # Missing names are normalized to an empty string.
                tool_name = tool_call.name if tool_call.name else ""
                tool_args = _escape_decode(tool_call.args)
                tool_calls.append(
                    llm.ToolInput(tool_name=tool_name, tool_args=tool_args)
                )

            if tool_calls:
                chunk["tool_calls"] = tool_calls

            chunk["content"] = content
            yield chunk
    except (
        APIError,
        ValueError,
    ) as err:
        # Surface stream failures to the caller as HomeAssistantError with
        # the most specific message available.
        LOGGER.error("Error sending message: %s %s", type(err), err)
        if isinstance(err, APIError):
            message = err.message
        else:
            message = type(err).__name__
        error = f"{ERROR_GETTING_RESPONSE}: {message}"
        raise HomeAssistantError(error) from err
|
||||
|
||||
|
||||
class GoogleGenerativeAILLMBaseEntity(Entity):
|
||||
"""Google Generative AI base entity."""
|
||||
|
||||
_attr_has_entity_name = True
|
||||
_attr_name = None
|
||||
|
||||
def __init__(self, entry: ConfigEntry) -> None:
|
||||
"""Initialize the agent."""
|
||||
self.entry = entry
|
||||
self._genai_client = entry.runtime_data
|
||||
self._attr_unique_id = entry.entry_id
|
||||
self._attr_device_info = dr.DeviceInfo(
|
||||
identifiers={(DOMAIN, entry.entry_id)},
|
||||
name=entry.title,
|
||||
manufacturer="Google",
|
||||
model="Generative AI",
|
||||
entry_type=dr.DeviceEntryType.SERVICE,
|
||||
)
|
||||
|
||||
async def _async_handle_chat_log(
|
||||
self,
|
||||
chat_log: conversation.ChatLog,
|
||||
) -> None:
|
||||
"""Generate an answer for the chat log."""
|
||||
options = self.entry.options
|
||||
|
||||
tools: list[Tool | Callable[..., Any]] | None = None
|
||||
if chat_log.llm_api:
|
||||
tools = [
|
||||
_format_tool(tool, chat_log.llm_api.custom_serializer)
|
||||
for tool in chat_log.llm_api.tools
|
||||
]
|
||||
|
||||
# Using search grounding allows the model to retrieve information from the web,
|
||||
# however, it may interfere with how the model decides to use some tools, or entities
|
||||
# for example weather entity may be disregarded if the model chooses to Google it.
|
||||
if options.get(CONF_USE_GOOGLE_SEARCH_TOOL) is True:
|
||||
tools = tools or []
|
||||
tools.append(Tool(google_search=GoogleSearch()))
|
||||
|
||||
model_name = self.entry.options.get(CONF_CHAT_MODEL, RECOMMENDED_CHAT_MODEL)
|
||||
# Avoid INVALID_ARGUMENT Developer instruction is not enabled for <model>
|
||||
supports_system_instruction = (
|
||||
"gemma" not in model_name
|
||||
and "gemini-2.0-flash-preview-image-generation" not in model_name
|
||||
)
|
||||
|
||||
prompt_content = cast(
|
||||
conversation.SystemContent,
|
||||
chat_log.content[0],
|
||||
)
|
||||
|
||||
if prompt_content.content:
|
||||
prompt = prompt_content.content
|
||||
else:
|
||||
raise HomeAssistantError("Invalid prompt content")
|
||||
|
||||
messages: list[Content] = []
|
||||
|
||||
# Google groups tool results, we do not. Group them before sending.
|
||||
tool_results: list[conversation.ToolResultContent] = []
|
||||
|
||||
for chat_content in chat_log.content[1:-1]:
|
||||
if chat_content.role == "tool_result":
|
||||
tool_results.append(chat_content)
|
||||
continue
|
||||
|
||||
if (
|
||||
not isinstance(chat_content, conversation.ToolResultContent)
|
||||
and chat_content.content == ""
|
||||
):
|
||||
# Skipping is not possible since the number of function calls need to match the number of function responses
|
||||
# and skipping one would mean removing the other and hence this would prevent a proper chat log
|
||||
chat_content = replace(chat_content, content=" ")
|
||||
|
||||
if tool_results:
|
||||
messages.append(_create_google_tool_response_content(tool_results))
|
||||
tool_results.clear()
|
||||
|
||||
messages.append(_convert_content(chat_content))
|
||||
|
||||
# The SDK requires the first message to be a user message
|
||||
# This is not the case if user used `start_conversation`
|
||||
# Workaround from https://github.com/googleapis/python-genai/issues/529#issuecomment-2740964537
|
||||
if messages and messages[0].role != "user":
|
||||
messages.insert(
|
||||
0,
|
||||
Content(role="user", parts=[Part.from_text(text=" ")]),
|
||||
)
|
||||
|
||||
if tool_results:
|
||||
messages.append(_create_google_tool_response_content(tool_results))
|
||||
generateContentConfig = GenerateContentConfig(
|
||||
temperature=self.entry.options.get(
|
||||
CONF_TEMPERATURE, RECOMMENDED_TEMPERATURE
|
||||
),
|
||||
top_k=self.entry.options.get(CONF_TOP_K, RECOMMENDED_TOP_K),
|
||||
top_p=self.entry.options.get(CONF_TOP_P, RECOMMENDED_TOP_P),
|
||||
max_output_tokens=self.entry.options.get(
|
||||
CONF_MAX_TOKENS, RECOMMENDED_MAX_TOKENS
|
||||
),
|
||||
safety_settings=[
|
||||
SafetySetting(
|
||||
category=HarmCategory.HARM_CATEGORY_HATE_SPEECH,
|
||||
threshold=self.entry.options.get(
|
||||
CONF_HATE_BLOCK_THRESHOLD, RECOMMENDED_HARM_BLOCK_THRESHOLD
|
||||
),
|
||||
),
|
||||
SafetySetting(
|
||||
category=HarmCategory.HARM_CATEGORY_HARASSMENT,
|
||||
threshold=self.entry.options.get(
|
||||
CONF_HARASSMENT_BLOCK_THRESHOLD,
|
||||
RECOMMENDED_HARM_BLOCK_THRESHOLD,
|
||||
),
|
||||
),
|
||||
SafetySetting(
|
||||
category=HarmCategory.HARM_CATEGORY_DANGEROUS_CONTENT,
|
||||
threshold=self.entry.options.get(
|
||||
CONF_DANGEROUS_BLOCK_THRESHOLD, RECOMMENDED_HARM_BLOCK_THRESHOLD
|
||||
),
|
||||
),
|
||||
SafetySetting(
|
||||
category=HarmCategory.HARM_CATEGORY_SEXUALLY_EXPLICIT,
|
||||
threshold=self.entry.options.get(
|
||||
CONF_SEXUAL_BLOCK_THRESHOLD, RECOMMENDED_HARM_BLOCK_THRESHOLD
|
||||
),
|
||||
),
|
||||
],
|
||||
tools=tools or None,
|
||||
system_instruction=prompt if supports_system_instruction else None,
|
||||
automatic_function_calling=AutomaticFunctionCallingConfig(
|
||||
disable=True, maximum_remote_calls=None
|
||||
),
|
||||
)
|
||||
|
||||
if not supports_system_instruction:
|
||||
messages = [
|
||||
Content(role="user", parts=[Part.from_text(text=prompt)]),
|
||||
Content(role="model", parts=[Part.from_text(text="Ok")]),
|
||||
*messages,
|
||||
]
|
||||
chat = self._genai_client.aio.chats.create(
|
||||
model=model_name, history=messages, config=generateContentConfig
|
||||
)
|
||||
user_message = chat_log.content[-1]
|
||||
assert isinstance(user_message, conversation.UserContent)
|
||||
chat_request: str | list[Part] = user_message.content
|
||||
# To prevent infinite loops, we limit the number of iterations
|
||||
for _iteration in range(MAX_TOOL_ITERATIONS):
|
||||
try:
|
||||
chat_response_generator = await chat.send_message_stream(
|
||||
message=chat_request
|
||||
)
|
||||
except (
|
||||
APIError,
|
||||
ClientError,
|
||||
ValueError,
|
||||
) as err:
|
||||
LOGGER.error("Error sending message: %s %s", type(err), err)
|
||||
error = ERROR_GETTING_RESPONSE
|
||||
raise HomeAssistantError(error) from err
|
||||
|
||||
chat_request = _create_google_tool_response_parts(
|
||||
[
|
||||
content
|
||||
async for content in chat_log.async_add_delta_content_stream(
|
||||
self.entity_id,
|
||||
_transform_stream(chat_response_generator),
|
||||
)
|
||||
if isinstance(content, conversation.ToolResultContent)
|
||||
]
|
||||
)
|
||||
|
||||
if not chat_log.unresponded_tool_results:
|
||||
break
|
||||
@@ -0,0 +1,216 @@
|
||||
"""Text to speech support for Google Generative AI."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from contextlib import suppress
|
||||
import io
|
||||
import logging
|
||||
from typing import Any
|
||||
import wave
|
||||
|
||||
from google.genai import types
|
||||
|
||||
from homeassistant.components.tts import (
|
||||
ATTR_VOICE,
|
||||
TextToSpeechEntity,
|
||||
TtsAudioType,
|
||||
Voice,
|
||||
)
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers import device_registry as dr
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from .const import ATTR_MODEL, DOMAIN, RECOMMENDED_TTS_MODEL
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
async def async_setup_entry(
    hass: HomeAssistant,
    config_entry: ConfigEntry,
    async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
    """Set up TTS entity."""
    # A single TTS entity is created per config entry.
    async_add_entities([GoogleGenerativeAITextToSpeechEntity(config_entry)])
|
||||
|
||||
|
||||
class GoogleGenerativeAITextToSpeechEntity(TextToSpeechEntity):
    """Google Generative AI text-to-speech entity."""

    _attr_supported_options = [ATTR_VOICE, ATTR_MODEL]
    # See https://ai.google.dev/gemini-api/docs/speech-generation#languages
    _attr_supported_languages = [
        "ar-EG",
        "bn-BD",
        "de-DE",
        "en-IN",
        "en-US",
        "es-US",
        "fr-FR",
        "hi-IN",
        "id-ID",
        "it-IT",
        "ja-JP",
        "ko-KR",
        "mr-IN",
        "nl-NL",
        "pl-PL",
        "pt-BR",
        "ro-RO",
        "ru-RU",
        "ta-IN",
        "te-IN",
        "th-TH",
        "tr-TR",
        "uk-UA",
        "vi-VN",
    ]
    _attr_default_language = "en-US"
    # See https://ai.google.dev/gemini-api/docs/speech-generation#voices
    # The voice id is the lowercased first word; the full string is the
    # human-readable display name.
    _supported_voices = [
        Voice(voice.split(" ", 1)[0].lower(), voice)
        for voice in (
            "Zephyr (Bright)",
            "Puck (Upbeat)",
            "Charon (Informative)",
            "Kore (Firm)",
            "Fenrir (Excitable)",
            "Leda (Youthful)",
            "Orus (Firm)",
            "Aoede (Breezy)",
            "Callirrhoe (Easy-going)",
            "Autonoe (Bright)",
            "Enceladus (Breathy)",
            "Iapetus (Clear)",
            "Umbriel (Easy-going)",
            "Algieba (Smooth)",
            "Despina (Smooth)",
            "Erinome (Clear)",
            "Algenib (Gravelly)",
            "Rasalgethi (Informative)",
            "Laomedeia (Upbeat)",
            "Achernar (Soft)",
            "Alnilam (Firm)",
            "Schedar (Even)",
            "Gacrux (Mature)",
            "Pulcherrima (Forward)",
            "Achird (Friendly)",
            "Zubenelgenubi (Casual)",
            "Vindemiatrix (Gentle)",
            "Sadachbia (Lively)",
            "Sadaltager (Knowledgeable)",
            "Sulafat (Warm)",
        )
    ]

    def __init__(self, entry: ConfigEntry) -> None:
        """Initialize Google Generative AI Conversation speech entity."""
        self.entry = entry
        self._attr_name = "Google Generative AI TTS"
        self._attr_unique_id = f"{entry.entry_id}_tts"
        self._attr_device_info = dr.DeviceInfo(
            identifiers={(DOMAIN, entry.entry_id)},
            name=entry.title,
            manufacturer="Google",
            model="Generative AI",
            entry_type=dr.DeviceEntryType.SERVICE,
        )
        # The genai client is created during config entry setup.
        self._genai_client = entry.runtime_data
        # Used when the caller does not select a voice explicitly.
        self._default_voice_id = self._supported_voices[0].voice_id

    @callback
    def async_get_supported_voices(self, language: str) -> list[Voice] | None:
        """Return a list of supported voices for a language."""
        # The same voice set is offered for every supported language.
        return self._supported_voices

    async def async_get_tts_audio(
        self, message: str, language: str, options: dict[str, Any]
    ) -> TtsAudioType:
        """Load tts audio file from the engine.

        Returns a ("wav", bytes) tuple. Raises HomeAssistantError if
        generating or decoding the audio fails.
        """
        try:
            # Use the client's async surface (``.aio``) so the event loop is
            # not blocked while Google generates the audio; the synchronous
            # ``models.generate_content`` call would stall Home Assistant.
            response = await self._genai_client.aio.models.generate_content(
                model=options.get(ATTR_MODEL, RECOMMENDED_TTS_MODEL),
                contents=message,
                config=types.GenerateContentConfig(
                    response_modalities=["AUDIO"],
                    speech_config=types.SpeechConfig(
                        voice_config=types.VoiceConfig(
                            prebuilt_voice_config=types.PrebuiltVoiceConfig(
                                voice_name=options.get(
                                    ATTR_VOICE, self._default_voice_id
                                )
                            )
                        )
                    ),
                ),
            )

            data = response.candidates[0].content.parts[0].inline_data.data
            mime_type = response.candidates[0].content.parts[0].inline_data.mime_type
        except Exception as exc:
            # Broad catch is intentional: API errors and attribute errors on
            # an unexpected response shape are all surfaced uniformly.
            _LOGGER.warning(
                "Error during processing of TTS request %s", exc, exc_info=True
            )
            raise HomeAssistantError(exc) from exc
        return "wav", self._convert_to_wav(data, mime_type)

    def _convert_to_wav(self, audio_data: bytes, mime_type: str) -> bytes:
        """Generate a WAV file header for the given audio data and parameters.

        Args:
            audio_data: The raw audio data as a bytes object.
            mime_type: Mime type of the audio data.

        Returns:
            A bytes object representing the WAV file header.

        """
        parameters = self._parse_audio_mime_type(mime_type)

        # Wrap the raw audio in a mono WAV container using the sample width
        # and rate parsed from the MIME type.
        wav_buffer = io.BytesIO()
        with wave.open(wav_buffer, "wb") as wf:
            wf.setnchannels(1)
            wf.setsampwidth(parameters["bits_per_sample"] // 8)
            wf.setframerate(parameters["rate"])
            wf.writeframes(audio_data)

        return wav_buffer.getvalue()

    def _parse_audio_mime_type(self, mime_type: str) -> dict[str, int]:
        """Parse bits per sample and rate from an audio MIME type string.

        Assumes bits per sample is encoded like "L16" and rate as "rate=xxxxx".

        Args:
            mime_type: The audio MIME type string (e.g., "audio/L16;rate=24000").

        Returns:
            A dictionary with "bits_per_sample" and "rate" keys. Values will be
            integers if found, otherwise None.

        """
        if not mime_type.startswith("audio/L"):
            _LOGGER.warning("Received unexpected MIME type %s", mime_type)
            raise HomeAssistantError(f"Unsupported audio MIME type: {mime_type}")

        # Defaults used when the MIME type omits the corresponding parameter.
        bits_per_sample = 16
        rate = 24000

        # Walk the ";"-separated segments; the first is the main type
        # ("audio/Lxx"), later ones are parameters like "rate=24000".
        parts = mime_type.split(";")
        for param in parts:
            param = param.strip()
            if param.lower().startswith("rate="):
                # Handle cases like "rate=" with no value or non-integer value and keep rate as default
                with suppress(ValueError, IndexError):
                    rate_str = param.split("=", 1)[1]
                    rate = int(rate_str)
            elif param.startswith("audio/L"):
                # Keep bits_per_sample as default if conversion fails
                with suppress(ValueError, IndexError):
                    bits_per_sample = int(param.split("L", 1)[1])

        return {"bits_per_sample": bits_per_sample, "rate": rate}
|
||||
@@ -2,7 +2,13 @@
|
||||
"config": {
|
||||
"step": {
|
||||
"pick_implementation": {
|
||||
"title": "[%key:common::config_flow::title::oauth2_pick_implementation%]"
|
||||
"title": "[%key:common::config_flow::title::oauth2_pick_implementation%]",
|
||||
"data": {
|
||||
"implementation": "[%key:common::config_flow::data::implementation%]"
|
||||
},
|
||||
"data_description": {
|
||||
"implementation": "[%key:common::config_flow::description::implementation%]"
|
||||
}
|
||||
},
|
||||
"reauth_confirm": {
|
||||
"title": "[%key:common::config_flow::title::reauth%]",
|
||||
|
||||
@@ -16,6 +16,7 @@ from homeassistant.core import (
|
||||
ServiceCall,
|
||||
ServiceResponse,
|
||||
SupportsResponse,
|
||||
callback,
|
||||
)
|
||||
from homeassistant.exceptions import HomeAssistantError, ServiceValidationError
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
@@ -77,85 +78,85 @@ def _read_file_contents(
|
||||
return results
|
||||
|
||||
|
||||
async def _async_handle_upload(call: ServiceCall) -> ServiceResponse:
|
||||
"""Generate content from text and optionally images."""
|
||||
config_entry: GooglePhotosConfigEntry | None = (
|
||||
call.hass.config_entries.async_get_entry(call.data[CONF_CONFIG_ENTRY_ID])
|
||||
)
|
||||
if not config_entry:
|
||||
raise ServiceValidationError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="integration_not_found",
|
||||
translation_placeholders={"target": DOMAIN},
|
||||
)
|
||||
scopes = config_entry.data["token"]["scope"].split(" ")
|
||||
if UPLOAD_SCOPE not in scopes:
|
||||
raise HomeAssistantError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="missing_upload_permission",
|
||||
translation_placeholders={"target": DOMAIN},
|
||||
)
|
||||
coordinator = config_entry.runtime_data
|
||||
client_api = coordinator.client
|
||||
upload_tasks = []
|
||||
file_results = await call.hass.async_add_executor_job(
|
||||
_read_file_contents, call.hass, call.data[CONF_FILENAME]
|
||||
)
|
||||
|
||||
album = call.data[CONF_ALBUM]
|
||||
try:
|
||||
album_id = await coordinator.get_or_create_album(album)
|
||||
except GooglePhotosApiError as err:
|
||||
raise HomeAssistantError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="create_album_error",
|
||||
translation_placeholders={"message": str(err)},
|
||||
) from err
|
||||
|
||||
for mime_type, content in file_results:
|
||||
upload_tasks.append(client_api.upload_content(content, mime_type))
|
||||
try:
|
||||
upload_results = await asyncio.gather(*upload_tasks)
|
||||
except GooglePhotosApiError as err:
|
||||
raise HomeAssistantError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="upload_error",
|
||||
translation_placeholders={"message": str(err)},
|
||||
) from err
|
||||
try:
|
||||
upload_result = await client_api.create_media_items(
|
||||
[
|
||||
NewMediaItem(SimpleMediaItem(upload_token=upload_result.upload_token))
|
||||
for upload_result in upload_results
|
||||
],
|
||||
album_id=album_id,
|
||||
)
|
||||
except GooglePhotosApiError as err:
|
||||
raise HomeAssistantError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="api_error",
|
||||
translation_placeholders={"message": str(err)},
|
||||
) from err
|
||||
if call.return_response:
|
||||
return {
|
||||
"media_items": [
|
||||
{"media_item_id": item_result.media_item.id}
|
||||
for item_result in upload_result.new_media_item_results
|
||||
if item_result.media_item and item_result.media_item.id
|
||||
],
|
||||
"album_id": album_id,
|
||||
}
|
||||
return None
|
||||
|
||||
|
||||
@callback
|
||||
def async_setup_services(hass: HomeAssistant) -> None:
|
||||
"""Register Google Photos services."""
|
||||
|
||||
async def async_handle_upload(call: ServiceCall) -> ServiceResponse:
|
||||
"""Generate content from text and optionally images."""
|
||||
config_entry: GooglePhotosConfigEntry | None = (
|
||||
hass.config_entries.async_get_entry(call.data[CONF_CONFIG_ENTRY_ID])
|
||||
)
|
||||
if not config_entry:
|
||||
raise ServiceValidationError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="integration_not_found",
|
||||
translation_placeholders={"target": DOMAIN},
|
||||
)
|
||||
scopes = config_entry.data["token"]["scope"].split(" ")
|
||||
if UPLOAD_SCOPE not in scopes:
|
||||
raise HomeAssistantError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="missing_upload_permission",
|
||||
translation_placeholders={"target": DOMAIN},
|
||||
)
|
||||
coordinator = config_entry.runtime_data
|
||||
client_api = coordinator.client
|
||||
upload_tasks = []
|
||||
file_results = await hass.async_add_executor_job(
|
||||
_read_file_contents, hass, call.data[CONF_FILENAME]
|
||||
)
|
||||
|
||||
album = call.data[CONF_ALBUM]
|
||||
try:
|
||||
album_id = await coordinator.get_or_create_album(album)
|
||||
except GooglePhotosApiError as err:
|
||||
raise HomeAssistantError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="create_album_error",
|
||||
translation_placeholders={"message": str(err)},
|
||||
) from err
|
||||
|
||||
for mime_type, content in file_results:
|
||||
upload_tasks.append(client_api.upload_content(content, mime_type))
|
||||
try:
|
||||
upload_results = await asyncio.gather(*upload_tasks)
|
||||
except GooglePhotosApiError as err:
|
||||
raise HomeAssistantError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="upload_error",
|
||||
translation_placeholders={"message": str(err)},
|
||||
) from err
|
||||
try:
|
||||
upload_result = await client_api.create_media_items(
|
||||
[
|
||||
NewMediaItem(
|
||||
SimpleMediaItem(upload_token=upload_result.upload_token)
|
||||
)
|
||||
for upload_result in upload_results
|
||||
],
|
||||
album_id=album_id,
|
||||
)
|
||||
except GooglePhotosApiError as err:
|
||||
raise HomeAssistantError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="api_error",
|
||||
translation_placeholders={"message": str(err)},
|
||||
) from err
|
||||
if call.return_response:
|
||||
return {
|
||||
"media_items": [
|
||||
{"media_item_id": item_result.media_item.id}
|
||||
for item_result in upload_result.new_media_item_results
|
||||
if item_result.media_item and item_result.media_item.id
|
||||
],
|
||||
"album_id": album_id,
|
||||
}
|
||||
return None
|
||||
|
||||
hass.services.async_register(
|
||||
DOMAIN,
|
||||
UPLOAD_SERVICE,
|
||||
async_handle_upload,
|
||||
_async_handle_upload,
|
||||
schema=UPLOAD_SERVICE_SCHEMA,
|
||||
supports_response=SupportsResponse.OPTIONAL,
|
||||
)
|
||||
|
||||
@@ -5,7 +5,13 @@
|
||||
"config": {
|
||||
"step": {
|
||||
"pick_implementation": {
|
||||
"title": "[%key:common::config_flow::title::oauth2_pick_implementation%]"
|
||||
"title": "[%key:common::config_flow::title::oauth2_pick_implementation%]",
|
||||
"data": {
|
||||
"implementation": "[%key:common::config_flow::data::implementation%]"
|
||||
},
|
||||
"data_description": {
|
||||
"implementation": "[%key:common::config_flow::description::implementation%]"
|
||||
}
|
||||
},
|
||||
"reauth_confirm": {
|
||||
"title": "[%key:common::config_flow::title::reauth%]",
|
||||
|
||||
@@ -13,7 +13,7 @@ from gspread.utils import ValueInputOption
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.const import CONF_ACCESS_TOKEN, CONF_TOKEN
|
||||
from homeassistant.core import HomeAssistant, ServiceCall
|
||||
from homeassistant.core import HomeAssistant, ServiceCall, callback
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.helpers.selector import ConfigEntrySelector
|
||||
@@ -76,6 +76,7 @@ async def _async_append_to_sheet(call: ServiceCall) -> None:
|
||||
await call.hass.async_add_executor_job(_append_to_sheet, call, entry)
|
||||
|
||||
|
||||
@callback
|
||||
def async_setup_services(hass: HomeAssistant) -> None:
|
||||
"""Add the services for Google Sheets."""
|
||||
|
||||
|
||||
@@ -2,7 +2,13 @@
|
||||
"config": {
|
||||
"step": {
|
||||
"pick_implementation": {
|
||||
"title": "[%key:common::config_flow::title::oauth2_pick_implementation%]"
|
||||
"title": "[%key:common::config_flow::title::oauth2_pick_implementation%]",
|
||||
"data": {
|
||||
"implementation": "[%key:common::config_flow::data::implementation%]"
|
||||
},
|
||||
"data_description": {
|
||||
"implementation": "[%key:common::config_flow::description::implementation%]"
|
||||
}
|
||||
},
|
||||
"reauth_confirm": {
|
||||
"title": "[%key:common::config_flow::title::reauth%]",
|
||||
|
||||
@@ -5,7 +5,13 @@
|
||||
"config": {
|
||||
"step": {
|
||||
"pick_implementation": {
|
||||
"title": "[%key:common::config_flow::title::oauth2_pick_implementation%]"
|
||||
"title": "[%key:common::config_flow::title::oauth2_pick_implementation%]",
|
||||
"data": {
|
||||
"implementation": "[%key:common::config_flow::data::implementation%]"
|
||||
},
|
||||
"data_description": {
|
||||
"implementation": "[%key:common::config_flow::description::implementation%]"
|
||||
}
|
||||
},
|
||||
"reauth_confirm": {
|
||||
"title": "[%key:common::config_flow::title::reauth%]",
|
||||
|
||||
@@ -35,6 +35,7 @@ from homeassistant.core import (
|
||||
ServiceCall,
|
||||
ServiceResponse,
|
||||
SupportsResponse,
|
||||
callback,
|
||||
)
|
||||
from homeassistant.exceptions import HomeAssistantError, ServiceValidationError
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
@@ -249,6 +250,7 @@ def get_config_entry(hass: HomeAssistant, entry_id: str) -> HabiticaConfigEntry:
|
||||
return entry
|
||||
|
||||
|
||||
@callback
|
||||
def async_setup_services(hass: HomeAssistant) -> None: # noqa: C901
|
||||
"""Set up services for Habitica integration."""
|
||||
|
||||
|
||||
@@ -9,7 +9,13 @@
|
||||
"config": {
|
||||
"step": {
|
||||
"pick_implementation": {
|
||||
"title": "[%key:common::config_flow::title::oauth2_pick_implementation%]"
|
||||
"title": "[%key:common::config_flow::title::oauth2_pick_implementation%]",
|
||||
"data": {
|
||||
"implementation": "[%key:common::config_flow::data::implementation%]"
|
||||
},
|
||||
"data_description": {
|
||||
"implementation": "[%key:common::config_flow::description::implementation%]"
|
||||
}
|
||||
},
|
||||
"reauth_confirm": {
|
||||
"title": "[%key:common::config_flow::title::reauth%]",
|
||||
|
||||
@@ -1,9 +1,13 @@
|
||||
"""The homee event platform."""
|
||||
|
||||
from pyHomee.const import AttributeType
|
||||
from pyHomee.const import AttributeType, NodeProfile
|
||||
from pyHomee.model import HomeeAttribute
|
||||
|
||||
from homeassistant.components.event import EventDeviceClass, EventEntity
|
||||
from homeassistant.components.event import (
|
||||
EventDeviceClass,
|
||||
EventEntity,
|
||||
EventEntityDescription,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
@@ -13,6 +17,38 @@ from .entity import HomeeEntity
|
||||
PARALLEL_UPDATES = 0
|
||||
|
||||
|
||||
REMOTE_PROFILES = [
|
||||
NodeProfile.REMOTE,
|
||||
NodeProfile.TWO_BUTTON_REMOTE,
|
||||
NodeProfile.THREE_BUTTON_REMOTE,
|
||||
NodeProfile.FOUR_BUTTON_REMOTE,
|
||||
]
|
||||
|
||||
EVENT_DESCRIPTIONS: dict[AttributeType, EventEntityDescription] = {
|
||||
AttributeType.BUTTON_STATE: EventEntityDescription(
|
||||
key="button_state",
|
||||
device_class=EventDeviceClass.BUTTON,
|
||||
event_types=["upper", "lower", "released"],
|
||||
),
|
||||
AttributeType.UP_DOWN_REMOTE: EventEntityDescription(
|
||||
key="up_down_remote",
|
||||
device_class=EventDeviceClass.BUTTON,
|
||||
event_types=[
|
||||
"released",
|
||||
"up",
|
||||
"down",
|
||||
"stop",
|
||||
"up_long",
|
||||
"down_long",
|
||||
"stop_long",
|
||||
"c_button",
|
||||
"b_button",
|
||||
"a_button",
|
||||
],
|
||||
),
|
||||
}
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
config_entry: HomeeConfigEntry,
|
||||
@@ -21,30 +57,31 @@ async def async_setup_entry(
|
||||
"""Add event entities for homee."""
|
||||
|
||||
async_add_entities(
|
||||
HomeeEvent(attribute, config_entry)
|
||||
HomeeEvent(attribute, config_entry, EVENT_DESCRIPTIONS[attribute.type])
|
||||
for node in config_entry.runtime_data.nodes
|
||||
for attribute in node.attributes
|
||||
if attribute.type == AttributeType.UP_DOWN_REMOTE
|
||||
if attribute.type in EVENT_DESCRIPTIONS
|
||||
and node.profile in REMOTE_PROFILES
|
||||
and not attribute.editable
|
||||
)
|
||||
|
||||
|
||||
class HomeeEvent(HomeeEntity, EventEntity):
|
||||
"""Representation of a homee event."""
|
||||
|
||||
_attr_translation_key = "up_down_remote"
|
||||
_attr_event_types = [
|
||||
"released",
|
||||
"up",
|
||||
"down",
|
||||
"stop",
|
||||
"up_long",
|
||||
"down_long",
|
||||
"stop_long",
|
||||
"c_button",
|
||||
"b_button",
|
||||
"a_button",
|
||||
]
|
||||
_attr_device_class = EventDeviceClass.BUTTON
|
||||
def __init__(
|
||||
self,
|
||||
attribute: HomeeAttribute,
|
||||
entry: HomeeConfigEntry,
|
||||
description: EventEntityDescription,
|
||||
) -> None:
|
||||
"""Initialize the homee event entity."""
|
||||
super().__init__(attribute, entry)
|
||||
self.entity_description = description
|
||||
self._attr_translation_key = description.key
|
||||
if attribute.instance > 0:
|
||||
self._attr_translation_key = f"{self._attr_translation_key}_instance"
|
||||
self._attr_translation_placeholders = {"instance": str(attribute.instance)}
|
||||
|
||||
async def async_added_to_hass(self) -> None:
|
||||
"""Add the homee event entity to home assistant."""
|
||||
@@ -56,6 +93,5 @@ class HomeeEvent(HomeeEntity, EventEntity):
|
||||
@callback
|
||||
def _event_triggered(self, event: HomeeAttribute) -> None:
|
||||
"""Handle a homee event."""
|
||||
if event.type == AttributeType.UP_DOWN_REMOTE:
|
||||
self._trigger_event(self.event_types[int(event.current_value)])
|
||||
self.schedule_update_ha_state()
|
||||
self._trigger_event(self.event_types[int(event.current_value)])
|
||||
self.schedule_update_ha_state()
|
||||
|
||||
@@ -160,12 +160,36 @@
|
||||
}
|
||||
},
|
||||
"event": {
|
||||
"button_state": {
|
||||
"name": "Switch",
|
||||
"state_attributes": {
|
||||
"event_type": {
|
||||
"state": {
|
||||
"upper": "Upper button",
|
||||
"lower": "Lower button",
|
||||
"released": "Released"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"button_state_instance": {
|
||||
"name": "Switch {instance}",
|
||||
"state_attributes": {
|
||||
"event_type": {
|
||||
"state": {
|
||||
"upper": "[%key;component::homee::entity::event::button_state::state_attributes::event_type::state::upper%]",
|
||||
"lower": "[%key;component::homee::entity::event::button_state::state_attributes::event_type::state::lower%]",
|
||||
"released": "[%key;component::homee::entity::event::button_state::state_attributes::event_type::state::released%]"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"up_down_remote": {
|
||||
"name": "Up/down remote",
|
||||
"state_attributes": {
|
||||
"event_type": {
|
||||
"state": {
|
||||
"release": "Released",
|
||||
"release": "[%key;component::homee::entity::event::button_state::state_attributes::event_type::state::released%]",
|
||||
"up": "Up",
|
||||
"down": "Down",
|
||||
"stop": "Stop",
|
||||
|
||||
@@ -63,7 +63,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
)
|
||||
)
|
||||
|
||||
await async_setup_services(hass)
|
||||
async_setup_services(hass)
|
||||
|
||||
return True
|
||||
|
||||
|
||||
@@ -216,8 +216,6 @@ class HomematicipHeatingGroup(HomematicipGenericEntity, ClimateEntity):
|
||||
|
||||
async def async_set_hvac_mode(self, hvac_mode: HVACMode) -> None:
|
||||
"""Set new target hvac mode."""
|
||||
if hvac_mode not in self.hvac_modes:
|
||||
return
|
||||
|
||||
if hvac_mode == HVACMode.AUTO:
|
||||
await self._device.set_control_mode_async(HMIP_AUTOMATIC_CM)
|
||||
|
||||
@@ -128,6 +128,7 @@ class HomematicipHAP:
|
||||
self.config_entry.data.get(HMIPC_AUTHTOKEN),
|
||||
self.config_entry.data.get(HMIPC_NAME),
|
||||
)
|
||||
|
||||
except HmipcConnectionError as err:
|
||||
raise ConfigEntryNotReady from err
|
||||
except Exception as err: # noqa: BLE001
|
||||
@@ -210,41 +211,13 @@ class HomematicipHAP:
|
||||
for device in self.home.devices:
|
||||
device.fire_update_event()
|
||||
|
||||
async def async_connect(self) -> None:
|
||||
"""Start WebSocket connection."""
|
||||
tries = 0
|
||||
while True:
|
||||
retry_delay = 2 ** min(tries, 8)
|
||||
async def async_connect(self, home: AsyncHome) -> None:
|
||||
"""Connect to HomematicIP Cloud Websocket."""
|
||||
await home.enable_events()
|
||||
|
||||
try:
|
||||
await self.home.get_current_state_async()
|
||||
hmip_events = self.home.enable_events()
|
||||
self.home.set_on_connected_handler(self.ws_connected_handler)
|
||||
self.home.set_on_disconnected_handler(self.ws_disconnected_handler)
|
||||
tries = 0
|
||||
await hmip_events
|
||||
except HmipConnectionError:
|
||||
_LOGGER.error(
|
||||
(
|
||||
"Error connecting to HomematicIP with HAP %s. "
|
||||
"Retrying in %d seconds"
|
||||
),
|
||||
self.config_entry.unique_id,
|
||||
retry_delay,
|
||||
)
|
||||
|
||||
if self._ws_close_requested:
|
||||
break
|
||||
self._ws_close_requested = False
|
||||
tries += 1
|
||||
|
||||
try:
|
||||
self._retry_task = self.hass.async_create_task(
|
||||
asyncio.sleep(retry_delay)
|
||||
)
|
||||
await self._retry_task
|
||||
except asyncio.CancelledError:
|
||||
break
|
||||
home.set_on_connected_handler(self.ws_connected_handler)
|
||||
home.set_on_disconnected_handler(self.ws_disconnected_handler)
|
||||
home.set_on_reconnect_handler(self.ws_reconnected_handler)
|
||||
|
||||
async def async_reset(self) -> bool:
|
||||
"""Close the websocket connection."""
|
||||
@@ -272,14 +245,22 @@ class HomematicipHAP:
|
||||
|
||||
async def ws_connected_handler(self) -> None:
|
||||
"""Handle websocket connected."""
|
||||
_LOGGER.debug("WebSocket connection to HomematicIP established")
|
||||
_LOGGER.info("Websocket connection to HomematicIP Cloud established")
|
||||
if self._ws_connection_closed.is_set():
|
||||
await self.get_state()
|
||||
self._ws_connection_closed.clear()
|
||||
|
||||
async def ws_disconnected_handler(self) -> None:
|
||||
"""Handle websocket disconnection."""
|
||||
_LOGGER.warning("WebSocket connection to HomematicIP closed")
|
||||
_LOGGER.warning("Websocket connection to HomematicIP Cloud closed")
|
||||
self._ws_connection_closed.set()
|
||||
|
||||
async def ws_reconnected_handler(self, reason: str) -> None:
|
||||
"""Handle websocket reconnection."""
|
||||
_LOGGER.info(
|
||||
"Websocket connection to HomematicIP Cloud re-established due to reason: %s",
|
||||
reason,
|
||||
)
|
||||
self._ws_connection_closed.set()
|
||||
|
||||
async def get_hap(
|
||||
@@ -306,6 +287,6 @@ class HomematicipHAP:
|
||||
home.on_update(self.async_update)
|
||||
home.on_create(self.async_create_entity)
|
||||
|
||||
hass.loop.create_task(self.async_connect())
|
||||
await self.async_connect(home)
|
||||
|
||||
return home
|
||||
|
||||
@@ -6,5 +6,5 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/homematicip_cloud",
|
||||
"iot_class": "cloud_push",
|
||||
"loggers": ["homematicip"],
|
||||
"requirements": ["homematicip==2.0.4"]
|
||||
"requirements": ["homematicip==2.0.5"]
|
||||
}
|
||||
|
||||
@@ -12,7 +12,7 @@ from homematicip.group import HeatingGroup
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.const import ATTR_ENTITY_ID, ATTR_TEMPERATURE
|
||||
from homeassistant.core import HomeAssistant, ServiceCall
|
||||
from homeassistant.core import HomeAssistant, ServiceCall, callback
|
||||
from homeassistant.exceptions import ServiceValidationError
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.helpers.config_validation import comp_entity_ids
|
||||
@@ -120,7 +120,8 @@ SCHEMA_SET_HOME_COOLING_MODE = vol.Schema(
|
||||
)
|
||||
|
||||
|
||||
async def async_setup_services(hass: HomeAssistant) -> None:
|
||||
@callback
|
||||
def async_setup_services(hass: HomeAssistant) -> None:
|
||||
"""Set up the HomematicIP Cloud services."""
|
||||
|
||||
@verify_domain_control(hass, DOMAIN)
|
||||
|
||||
@@ -4,13 +4,14 @@ from __future__ import annotations
|
||||
|
||||
from typing import Any
|
||||
|
||||
from homematicip.base.enums import DeviceType
|
||||
from homematicip.base.enums import DeviceType, FunctionalChannelType
|
||||
from homematicip.device import (
|
||||
BrandSwitch2,
|
||||
DinRailSwitch,
|
||||
DinRailSwitch4,
|
||||
FullFlushInputSwitch,
|
||||
HeatingSwitch2,
|
||||
MotionDetectorSwitchOutdoor,
|
||||
MultiIOBox,
|
||||
OpenCollector8Module,
|
||||
PlugableSwitch,
|
||||
@@ -47,18 +48,34 @@ async def async_setup_entry(
|
||||
and getattr(device, "deviceType", None) != DeviceType.BRAND_SWITCH_MEASURING
|
||||
):
|
||||
entities.append(HomematicipSwitchMeasuring(hap, device))
|
||||
elif isinstance(device, WiredSwitch8):
|
||||
elif isinstance(
|
||||
device,
|
||||
(
|
||||
WiredSwitch8,
|
||||
OpenCollector8Module,
|
||||
BrandSwitch2,
|
||||
PrintedCircuitBoardSwitch2,
|
||||
HeatingSwitch2,
|
||||
MultiIOBox,
|
||||
MotionDetectorSwitchOutdoor,
|
||||
DinRailSwitch,
|
||||
DinRailSwitch4,
|
||||
),
|
||||
):
|
||||
channel_indices = [
|
||||
ch.index
|
||||
for ch in device.functionalChannels
|
||||
if ch.functionalChannelType
|
||||
in (
|
||||
FunctionalChannelType.SWITCH_CHANNEL,
|
||||
FunctionalChannelType.MULTI_MODE_INPUT_SWITCH_CHANNEL,
|
||||
)
|
||||
]
|
||||
entities.extend(
|
||||
HomematicipMultiSwitch(hap, device, channel=channel)
|
||||
for channel in range(1, 9)
|
||||
)
|
||||
elif isinstance(device, DinRailSwitch):
|
||||
entities.append(HomematicipMultiSwitch(hap, device, channel=1))
|
||||
elif isinstance(device, DinRailSwitch4):
|
||||
entities.extend(
|
||||
HomematicipMultiSwitch(hap, device, channel=channel)
|
||||
for channel in range(1, 5)
|
||||
for channel in channel_indices
|
||||
)
|
||||
|
||||
elif isinstance(
|
||||
device,
|
||||
(
|
||||
@@ -68,24 +85,6 @@ async def async_setup_entry(
|
||||
),
|
||||
):
|
||||
entities.append(HomematicipSwitch(hap, device))
|
||||
elif isinstance(device, OpenCollector8Module):
|
||||
entities.extend(
|
||||
HomematicipMultiSwitch(hap, device, channel=channel)
|
||||
for channel in range(1, 9)
|
||||
)
|
||||
elif isinstance(
|
||||
device,
|
||||
(
|
||||
BrandSwitch2,
|
||||
PrintedCircuitBoardSwitch2,
|
||||
HeatingSwitch2,
|
||||
MultiIOBox,
|
||||
),
|
||||
):
|
||||
entities.extend(
|
||||
HomematicipMultiSwitch(hap, device, channel=channel)
|
||||
for channel in range(1, 3)
|
||||
)
|
||||
|
||||
async_add_entities(entities)
|
||||
|
||||
@@ -108,15 +107,15 @@ class HomematicipMultiSwitch(HomematicipGenericEntity, SwitchEntity):
|
||||
@property
|
||||
def is_on(self) -> bool:
|
||||
"""Return true if switch is on."""
|
||||
return self._device.functionalChannels[self._channel].on
|
||||
return self.functional_channel.on
|
||||
|
||||
async def async_turn_on(self, **kwargs: Any) -> None:
|
||||
"""Turn the switch on."""
|
||||
await self._device.turn_on_async(self._channel)
|
||||
await self.functional_channel.async_turn_on()
|
||||
|
||||
async def async_turn_off(self, **kwargs: Any) -> None:
|
||||
"""Turn the switch off."""
|
||||
await self._device.turn_off_async(self._channel)
|
||||
await self.functional_channel.async_turn_off()
|
||||
|
||||
|
||||
class HomematicipSwitch(HomematicipMultiSwitch, SwitchEntity):
|
||||
|
||||
@@ -12,6 +12,6 @@
|
||||
"iot_class": "local_polling",
|
||||
"loggers": ["homewizard_energy"],
|
||||
"quality_scale": "platinum",
|
||||
"requirements": ["python-homewizard-energy==8.3.3"],
|
||||
"requirements": ["python-homewizard-energy==9.1.1"],
|
||||
"zeroconf": ["_hwenergy._tcp.local.", "_homewizard._tcp.local."]
|
||||
}
|
||||
|
||||
@@ -8,7 +8,7 @@ import logging
|
||||
from aiohue import HueBridgeV1, HueBridgeV2
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.core import HomeAssistant, ServiceCall
|
||||
from homeassistant.core import HomeAssistant, ServiceCall, callback
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.helpers.service import verify_domain_control
|
||||
|
||||
@@ -25,6 +25,7 @@ from .const import (
|
||||
LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@callback
|
||||
def async_setup_services(hass: HomeAssistant) -> None:
|
||||
"""Register services for Hue integration."""
|
||||
|
||||
|
||||
@@ -10,7 +10,13 @@
|
||||
"description": "For the best experience with this integration both the `Authentication API` and the `Automower Connect API` should be connected. Please make sure that both of them are connected to your account in the [Husqvarna Developer Portal]({application_url})."
|
||||
},
|
||||
"pick_implementation": {
|
||||
"title": "[%key:common::config_flow::title::oauth2_pick_implementation%]"
|
||||
"title": "[%key:common::config_flow::title::oauth2_pick_implementation%]",
|
||||
"data": {
|
||||
"implementation": "[%key:common::config_flow::data::implementation%]"
|
||||
},
|
||||
"data_description": {
|
||||
"implementation": "[%key:common::config_flow::description::implementation%]"
|
||||
}
|
||||
}
|
||||
},
|
||||
"abort": {
|
||||
|
||||
@@ -4,7 +4,7 @@ from __future__ import annotations
|
||||
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.core import HomeAssistant, ServiceCall
|
||||
from homeassistant.core import HomeAssistant, ServiceCall, callback
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.util import slugify
|
||||
|
||||
@@ -115,6 +115,7 @@ def _get_account(hass: HomeAssistant, account_identifier: str) -> IcloudAccount:
|
||||
return icloud_account
|
||||
|
||||
|
||||
@callback
|
||||
def async_setup_services(hass: HomeAssistant) -> None:
|
||||
"""Register iCloud services."""
|
||||
|
||||
|
||||
@@ -2,7 +2,13 @@
|
||||
"config": {
|
||||
"step": {
|
||||
"pick_implementation": {
|
||||
"title": "[%key:common::config_flow::title::oauth2_pick_implementation%]"
|
||||
"title": "[%key:common::config_flow::title::oauth2_pick_implementation%]",
|
||||
"data": {
|
||||
"implementation": "[%key:common::config_flow::data::implementation%]"
|
||||
},
|
||||
"data_description": {
|
||||
"implementation": "[%key:common::config_flow::description::implementation%]"
|
||||
}
|
||||
}
|
||||
},
|
||||
"abort": {
|
||||
|
||||
@@ -6,6 +6,6 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/jewish_calendar",
|
||||
"iot_class": "calculated",
|
||||
"loggers": ["hdate"],
|
||||
"requirements": ["hdate[astral]==1.1.1"],
|
||||
"requirements": ["hdate[astral]==1.1.2"],
|
||||
"single_config_entry": true
|
||||
}
|
||||
|
||||
@@ -73,7 +73,7 @@ INFO_SENSORS: tuple[JewishCalendarSensorDescription, ...] = (
|
||||
translation_key="weekly_portion",
|
||||
device_class=SensorDeviceClass.ENUM,
|
||||
options_fn=lambda _: [str(p) for p in Parasha],
|
||||
value_fn=lambda results: str(results.after_tzais_date.upcoming_shabbat.parasha),
|
||||
value_fn=lambda results: results.after_tzais_date.upcoming_shabbat.parasha,
|
||||
),
|
||||
JewishCalendarSensorDescription(
|
||||
key="holiday",
|
||||
@@ -98,17 +98,13 @@ INFO_SENSORS: tuple[JewishCalendarSensorDescription, ...] = (
|
||||
key="omer_count",
|
||||
translation_key="omer_count",
|
||||
entity_registry_enabled_default=False,
|
||||
value_fn=lambda results: (
|
||||
results.after_shkia_date.omer.total_days
|
||||
if results.after_shkia_date.omer
|
||||
else 0
|
||||
),
|
||||
value_fn=lambda results: results.after_shkia_date.omer.total_days,
|
||||
),
|
||||
JewishCalendarSensorDescription(
|
||||
key="daf_yomi",
|
||||
translation_key="daf_yomi",
|
||||
entity_registry_enabled_default=False,
|
||||
value_fn=lambda results: str(results.daytime_date.daf_yomi),
|
||||
value_fn=lambda results: results.daytime_date.daf_yomi,
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
@@ -15,6 +15,7 @@ from homeassistant.core import (
|
||||
ServiceCall,
|
||||
ServiceResponse,
|
||||
SupportsResponse,
|
||||
callback,
|
||||
)
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
@@ -39,6 +40,7 @@ OMER_SCHEMA = vol.Schema(
|
||||
)
|
||||
|
||||
|
||||
@callback
|
||||
def async_setup_services(hass: HomeAssistant) -> None:
|
||||
"""Set up the Jewish Calendar services."""
|
||||
|
||||
|
||||
@@ -9,7 +9,13 @@
|
||||
}
|
||||
},
|
||||
"pick_implementation": {
|
||||
"title": "[%key:common::config_flow::title::oauth2_pick_implementation%]"
|
||||
"title": "[%key:common::config_flow::title::oauth2_pick_implementation%]",
|
||||
"data": {
|
||||
"implementation": "[%key:common::config_flow::data::implementation%]"
|
||||
},
|
||||
"data_description": {
|
||||
"implementation": "[%key:common::config_flow::description::implementation%]"
|
||||
}
|
||||
},
|
||||
"manual_entry": {
|
||||
"data": {
|
||||
|
||||
@@ -8,5 +8,5 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/lcn",
|
||||
"iot_class": "local_push",
|
||||
"loggers": ["pypck"],
|
||||
"requirements": ["pypck==0.8.6", "lcn-frontend==0.2.5"]
|
||||
"requirements": ["pypck==0.8.8", "lcn-frontend==0.2.5"]
|
||||
}
|
||||
|
||||
@@ -16,6 +16,7 @@ from homeassistant.core import (
|
||||
ServiceCall,
|
||||
ServiceResponse,
|
||||
SupportsResponse,
|
||||
callback,
|
||||
)
|
||||
from homeassistant.exceptions import ServiceValidationError
|
||||
from homeassistant.helpers import config_validation as cv, device_registry as dr
|
||||
@@ -438,6 +439,7 @@ SERVICES = (
|
||||
)
|
||||
|
||||
|
||||
@callback
|
||||
def async_setup_services(hass: HomeAssistant) -> None:
|
||||
"""Register services for LCN."""
|
||||
for service_name, service in SERVICES:
|
||||
|
||||
@@ -3,6 +3,7 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
from datetime import timedelta
|
||||
import logging
|
||||
|
||||
from letpot.deviceclient import LetPotDeviceClient
|
||||
@@ -42,6 +43,7 @@ class LetPotDeviceCoordinator(DataUpdateCoordinator[LetPotDeviceStatus]):
|
||||
_LOGGER,
|
||||
config_entry=config_entry,
|
||||
name=f"LetPot {device.serial_number}",
|
||||
update_interval=timedelta(minutes=10),
|
||||
)
|
||||
self._info = info
|
||||
self.device = device
|
||||
|
||||
@@ -5,9 +5,9 @@ rules:
|
||||
comment: |
|
||||
This integration does not provide additional actions.
|
||||
appropriate-polling:
|
||||
status: exempt
|
||||
status: done
|
||||
comment: |
|
||||
This integration only receives push-based updates.
|
||||
Primarily uses push, but polls with a long interval for availability and missed updates.
|
||||
brands: done
|
||||
common-modules: done
|
||||
config-flow-test-coverage: done
|
||||
@@ -39,7 +39,7 @@ rules:
|
||||
comment: |
|
||||
The integration does not have configuration options.
|
||||
docs-installation-parameters: done
|
||||
entity-unavailable: todo
|
||||
entity-unavailable: done
|
||||
integration-owner: done
|
||||
log-when-unavailable: todo
|
||||
parallel-updates: done
|
||||
|
||||
@@ -5,7 +5,7 @@ from __future__ import annotations
|
||||
import asyncio
|
||||
from collections.abc import Callable
|
||||
from datetime import timedelta
|
||||
from typing import Any
|
||||
from typing import TYPE_CHECKING, Any
|
||||
|
||||
import aiolifx_effects
|
||||
from aiolifx_themes.painter import ThemePainter
|
||||
@@ -31,9 +31,12 @@ from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.helpers.service import async_extract_referenced_entity_ids
|
||||
|
||||
from .const import _ATTR_COLOR_TEMP, ATTR_THEME, DATA_LIFX_MANAGER, DOMAIN
|
||||
from .coordinator import LIFXUpdateCoordinator, Light
|
||||
from .coordinator import LIFXUpdateCoordinator
|
||||
from .util import convert_8_to_16, find_hsbk
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from aiolifx.aiolifx import Light
|
||||
|
||||
SCAN_INTERVAL = timedelta(seconds=10)
|
||||
|
||||
SERVICE_EFFECT_COLORLOOP = "effect_colorloop"
|
||||
@@ -426,8 +429,8 @@ class LIFXManager:
|
||||
) -> None:
|
||||
"""Start the firmware-based Sky effect."""
|
||||
palette = kwargs.get(ATTR_PALETTE)
|
||||
theme = Theme()
|
||||
if palette is not None:
|
||||
theme = Theme()
|
||||
for hsbk in palette:
|
||||
theme.add_hsbk(hsbk[0], hsbk[1], hsbk[2], hsbk[3])
|
||||
|
||||
|
||||
@@ -7,6 +7,6 @@
|
||||
"integration_type": "hub",
|
||||
"iot_class": "local_polling",
|
||||
"loggers": ["linkplay"],
|
||||
"requirements": ["python-linkplay==0.2.11"],
|
||||
"requirements": ["python-linkplay==0.2.12"],
|
||||
"zeroconf": ["_linkplay._tcp.local."]
|
||||
}
|
||||
|
||||
@@ -13,5 +13,5 @@
|
||||
"iot_class": "cloud_push",
|
||||
"loggers": ["pylitterbot"],
|
||||
"quality_scale": "bronze",
|
||||
"requirements": ["pylitterbot==2024.0.0"]
|
||||
"requirements": ["pylitterbot==2024.2.0"]
|
||||
}
|
||||
|
||||
@@ -36,11 +36,6 @@ _LOGGER = logging.getLogger(__name__)
|
||||
|
||||
PRODID = "-//homeassistant.io//local_calendar 1.0//EN"
|
||||
|
||||
# The calendar on disk is only changed when this entity is updated, so there
|
||||
# is no need to poll for changes. The calendar enttiy base class will handle
|
||||
# refreshing the entity state based on the start or end time of the event.
|
||||
SCAN_INTERVAL = timedelta(days=1)
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
|
||||
@@ -113,6 +113,7 @@ async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> b
|
||||
"Toggle",
|
||||
"SingleSceneRaiseLower",
|
||||
"MasterRaiseLower",
|
||||
"AdvancedToggle",
|
||||
):
|
||||
# Associate an LED with a button if there is one
|
||||
led = next(
|
||||
|
||||
@@ -5,7 +5,13 @@
|
||||
"config": {
|
||||
"step": {
|
||||
"pick_implementation": {
|
||||
"title": "[%key:common::config_flow::title::oauth2_pick_implementation%]"
|
||||
"title": "[%key:common::config_flow::title::oauth2_pick_implementation%]",
|
||||
"data": {
|
||||
"implementation": "[%key:common::config_flow::data::implementation%]"
|
||||
},
|
||||
"data_description": {
|
||||
"implementation": "[%key:common::config_flow::description::implementation%]"
|
||||
}
|
||||
},
|
||||
"reauth_confirm": {
|
||||
"title": "[%key:common::config_flow::title::reauth%]",
|
||||
|
||||
@@ -133,8 +133,6 @@ class MaxCubeClimate(ClimateEntity):
|
||||
self._set_target(MAX_DEVICE_MODE_MANUAL, temp)
|
||||
elif hvac_mode == HVACMode.AUTO:
|
||||
self._set_target(MAX_DEVICE_MODE_AUTOMATIC, None)
|
||||
else:
|
||||
raise ValueError(f"unsupported HVAC mode {hvac_mode}")
|
||||
|
||||
def _set_target(self, mode: int | None, temp: float | None) -> None:
|
||||
"""Set the mode and/or temperature of the thermostat.
|
||||
|
||||
@@ -12,10 +12,10 @@
|
||||
"pick_implementation": {
|
||||
"title": "[%key:common::config_flow::title::oauth2_pick_implementation%]",
|
||||
"data": {
|
||||
"implementation": "Credentials"
|
||||
"implementation": "[%key:common::config_flow::data::implementation%]"
|
||||
},
|
||||
"data_description": {
|
||||
"implementation": "The credentials to use for the OAuth2 flow"
|
||||
"implementation": "[%key:common::config_flow::description::implementation%]"
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
@@ -88,7 +88,6 @@ class ModelContextProtocolSSEView(HomeAssistantView):
|
||||
context = llm.LLMContext(
|
||||
platform=DOMAIN,
|
||||
context=self.context(request),
|
||||
user_prompt=None,
|
||||
language="*",
|
||||
assistant=conversation.DOMAIN,
|
||||
device_id=None,
|
||||
|
||||
@@ -24,7 +24,7 @@ from .coordinator import (
|
||||
MealieShoppingListCoordinator,
|
||||
MealieStatisticsCoordinator,
|
||||
)
|
||||
from .services import setup_services
|
||||
from .services import async_setup_services
|
||||
from .utils import create_version
|
||||
|
||||
PLATFORMS: list[Platform] = [Platform.CALENDAR, Platform.SENSOR, Platform.TODO]
|
||||
@@ -34,7 +34,7 @@ CONFIG_SCHEMA = cv.empty_config_schema(DOMAIN)
|
||||
|
||||
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
"""Set up the Mealie component."""
|
||||
setup_services(hass)
|
||||
async_setup_services(hass)
|
||||
return True
|
||||
|
||||
|
||||
|
||||
@@ -19,6 +19,7 @@ from homeassistant.core import (
|
||||
ServiceCall,
|
||||
ServiceResponse,
|
||||
SupportsResponse,
|
||||
callback,
|
||||
)
|
||||
from homeassistant.exceptions import HomeAssistantError, ServiceValidationError
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
@@ -98,9 +99,10 @@ SERVICE_SET_MEALPLAN_SCHEMA = vol.Any(
|
||||
)
|
||||
|
||||
|
||||
def async_get_entry(hass: HomeAssistant, config_entry_id: str) -> MealieConfigEntry:
|
||||
def _async_get_entry(call: ServiceCall) -> MealieConfigEntry:
|
||||
"""Get the Mealie config entry."""
|
||||
if not (entry := hass.config_entries.async_get_entry(config_entry_id)):
|
||||
config_entry_id: str = call.data[ATTR_CONFIG_ENTRY_ID]
|
||||
if not (entry := call.hass.config_entries.async_get_entry(config_entry_id)):
|
||||
raise ServiceValidationError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="integration_not_found",
|
||||
@@ -115,143 +117,149 @@ def async_get_entry(hass: HomeAssistant, config_entry_id: str) -> MealieConfigEn
|
||||
return cast(MealieConfigEntry, entry)
|
||||
|
||||
|
||||
def setup_services(hass: HomeAssistant) -> None:
|
||||
"""Set up the services for the Mealie integration."""
|
||||
async def _async_get_mealplan(call: ServiceCall) -> ServiceResponse:
|
||||
"""Get the mealplan for a specific range."""
|
||||
entry = _async_get_entry(call)
|
||||
start_date = call.data.get(ATTR_START_DATE, date.today())
|
||||
end_date = call.data.get(ATTR_END_DATE, date.today())
|
||||
if end_date < start_date:
|
||||
raise ServiceValidationError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="end_date_before_start_date",
|
||||
)
|
||||
client = entry.runtime_data.client
|
||||
try:
|
||||
mealplans = await client.get_mealplans(start_date, end_date)
|
||||
except MealieConnectionError as err:
|
||||
raise HomeAssistantError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="connection_error",
|
||||
) from err
|
||||
return {"mealplan": [asdict(x) for x in mealplans.items]}
|
||||
|
||||
async def async_get_mealplan(call: ServiceCall) -> ServiceResponse:
|
||||
"""Get the mealplan for a specific range."""
|
||||
entry = async_get_entry(hass, call.data[ATTR_CONFIG_ENTRY_ID])
|
||||
start_date = call.data.get(ATTR_START_DATE, date.today())
|
||||
end_date = call.data.get(ATTR_END_DATE, date.today())
|
||||
if end_date < start_date:
|
||||
raise ServiceValidationError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="end_date_before_start_date",
|
||||
)
|
||||
client = entry.runtime_data.client
|
||||
try:
|
||||
mealplans = await client.get_mealplans(start_date, end_date)
|
||||
except MealieConnectionError as err:
|
||||
raise HomeAssistantError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="connection_error",
|
||||
) from err
|
||||
return {"mealplan": [asdict(x) for x in mealplans.items]}
|
||||
|
||||
async def async_get_recipe(call: ServiceCall) -> ServiceResponse:
|
||||
"""Get a recipe."""
|
||||
entry = async_get_entry(hass, call.data[ATTR_CONFIG_ENTRY_ID])
|
||||
recipe_id = call.data[ATTR_RECIPE_ID]
|
||||
client = entry.runtime_data.client
|
||||
try:
|
||||
recipe = await client.get_recipe(recipe_id)
|
||||
except MealieConnectionError as err:
|
||||
raise HomeAssistantError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="connection_error",
|
||||
) from err
|
||||
except MealieNotFoundError as err:
|
||||
raise ServiceValidationError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="recipe_not_found",
|
||||
translation_placeholders={"recipe_id": recipe_id},
|
||||
) from err
|
||||
async def _async_get_recipe(call: ServiceCall) -> ServiceResponse:
|
||||
"""Get a recipe."""
|
||||
entry = _async_get_entry(call)
|
||||
recipe_id = call.data[ATTR_RECIPE_ID]
|
||||
client = entry.runtime_data.client
|
||||
try:
|
||||
recipe = await client.get_recipe(recipe_id)
|
||||
except MealieConnectionError as err:
|
||||
raise HomeAssistantError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="connection_error",
|
||||
) from err
|
||||
except MealieNotFoundError as err:
|
||||
raise ServiceValidationError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="recipe_not_found",
|
||||
translation_placeholders={"recipe_id": recipe_id},
|
||||
) from err
|
||||
return {"recipe": asdict(recipe)}
|
||||
|
||||
|
||||
async def _async_import_recipe(call: ServiceCall) -> ServiceResponse:
|
||||
"""Import a recipe."""
|
||||
entry = _async_get_entry(call)
|
||||
url = call.data[ATTR_URL]
|
||||
include_tags = call.data.get(ATTR_INCLUDE_TAGS, False)
|
||||
client = entry.runtime_data.client
|
||||
try:
|
||||
recipe = await client.import_recipe(url, include_tags)
|
||||
except MealieValidationError as err:
|
||||
raise ServiceValidationError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="could_not_import_recipe",
|
||||
) from err
|
||||
except MealieConnectionError as err:
|
||||
raise HomeAssistantError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="connection_error",
|
||||
) from err
|
||||
if call.return_response:
|
||||
return {"recipe": asdict(recipe)}
|
||||
return None
|
||||
|
||||
async def async_import_recipe(call: ServiceCall) -> ServiceResponse:
|
||||
"""Import a recipe."""
|
||||
entry = async_get_entry(hass, call.data[ATTR_CONFIG_ENTRY_ID])
|
||||
url = call.data[ATTR_URL]
|
||||
include_tags = call.data.get(ATTR_INCLUDE_TAGS, False)
|
||||
client = entry.runtime_data.client
|
||||
try:
|
||||
recipe = await client.import_recipe(url, include_tags)
|
||||
except MealieValidationError as err:
|
||||
raise ServiceValidationError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="could_not_import_recipe",
|
||||
) from err
|
||||
except MealieConnectionError as err:
|
||||
raise HomeAssistantError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="connection_error",
|
||||
) from err
|
||||
if call.return_response:
|
||||
return {"recipe": asdict(recipe)}
|
||||
return None
|
||||
|
||||
async def async_set_random_mealplan(call: ServiceCall) -> ServiceResponse:
|
||||
"""Set a random mealplan."""
|
||||
entry = async_get_entry(hass, call.data[ATTR_CONFIG_ENTRY_ID])
|
||||
mealplan_date = call.data[ATTR_DATE]
|
||||
entry_type = MealplanEntryType(call.data[ATTR_ENTRY_TYPE])
|
||||
client = entry.runtime_data.client
|
||||
try:
|
||||
mealplan = await client.random_mealplan(mealplan_date, entry_type)
|
||||
except MealieConnectionError as err:
|
||||
raise HomeAssistantError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="connection_error",
|
||||
) from err
|
||||
if call.return_response:
|
||||
return {"mealplan": asdict(mealplan)}
|
||||
return None
|
||||
async def _async_set_random_mealplan(call: ServiceCall) -> ServiceResponse:
|
||||
"""Set a random mealplan."""
|
||||
entry = _async_get_entry(call)
|
||||
mealplan_date = call.data[ATTR_DATE]
|
||||
entry_type = MealplanEntryType(call.data[ATTR_ENTRY_TYPE])
|
||||
client = entry.runtime_data.client
|
||||
try:
|
||||
mealplan = await client.random_mealplan(mealplan_date, entry_type)
|
||||
except MealieConnectionError as err:
|
||||
raise HomeAssistantError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="connection_error",
|
||||
) from err
|
||||
if call.return_response:
|
||||
return {"mealplan": asdict(mealplan)}
|
||||
return None
|
||||
|
||||
async def async_set_mealplan(call: ServiceCall) -> ServiceResponse:
|
||||
"""Set a mealplan."""
|
||||
entry = async_get_entry(hass, call.data[ATTR_CONFIG_ENTRY_ID])
|
||||
mealplan_date = call.data[ATTR_DATE]
|
||||
entry_type = MealplanEntryType(call.data[ATTR_ENTRY_TYPE])
|
||||
client = entry.runtime_data.client
|
||||
try:
|
||||
mealplan = await client.set_mealplan(
|
||||
mealplan_date,
|
||||
entry_type,
|
||||
recipe_id=call.data.get(ATTR_RECIPE_ID),
|
||||
note_title=call.data.get(ATTR_NOTE_TITLE),
|
||||
note_text=call.data.get(ATTR_NOTE_TEXT),
|
||||
)
|
||||
except MealieConnectionError as err:
|
||||
raise HomeAssistantError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="connection_error",
|
||||
) from err
|
||||
if call.return_response:
|
||||
return {"mealplan": asdict(mealplan)}
|
||||
return None
|
||||
|
||||
async def _async_set_mealplan(call: ServiceCall) -> ServiceResponse:
|
||||
"""Set a mealplan."""
|
||||
entry = _async_get_entry(call)
|
||||
mealplan_date = call.data[ATTR_DATE]
|
||||
entry_type = MealplanEntryType(call.data[ATTR_ENTRY_TYPE])
|
||||
client = entry.runtime_data.client
|
||||
try:
|
||||
mealplan = await client.set_mealplan(
|
||||
mealplan_date,
|
||||
entry_type,
|
||||
recipe_id=call.data.get(ATTR_RECIPE_ID),
|
||||
note_title=call.data.get(ATTR_NOTE_TITLE),
|
||||
note_text=call.data.get(ATTR_NOTE_TEXT),
|
||||
)
|
||||
except MealieConnectionError as err:
|
||||
raise HomeAssistantError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="connection_error",
|
||||
) from err
|
||||
if call.return_response:
|
||||
return {"mealplan": asdict(mealplan)}
|
||||
return None
|
||||
|
||||
|
||||
@callback
|
||||
def async_setup_services(hass: HomeAssistant) -> None:
|
||||
"""Set up the services for the Mealie integration."""
|
||||
|
||||
hass.services.async_register(
|
||||
DOMAIN,
|
||||
SERVICE_GET_MEALPLAN,
|
||||
async_get_mealplan,
|
||||
_async_get_mealplan,
|
||||
schema=SERVICE_GET_MEALPLAN_SCHEMA,
|
||||
supports_response=SupportsResponse.ONLY,
|
||||
)
|
||||
hass.services.async_register(
|
||||
DOMAIN,
|
||||
SERVICE_GET_RECIPE,
|
||||
async_get_recipe,
|
||||
_async_get_recipe,
|
||||
schema=SERVICE_GET_RECIPE_SCHEMA,
|
||||
supports_response=SupportsResponse.ONLY,
|
||||
)
|
||||
hass.services.async_register(
|
||||
DOMAIN,
|
||||
SERVICE_IMPORT_RECIPE,
|
||||
async_import_recipe,
|
||||
_async_import_recipe,
|
||||
schema=SERVICE_IMPORT_RECIPE_SCHEMA,
|
||||
supports_response=SupportsResponse.OPTIONAL,
|
||||
)
|
||||
hass.services.async_register(
|
||||
DOMAIN,
|
||||
SERVICE_SET_RANDOM_MEALPLAN,
|
||||
async_set_random_mealplan,
|
||||
_async_set_random_mealplan,
|
||||
schema=SERVICE_SET_RANDOM_MEALPLAN_SCHEMA,
|
||||
supports_response=SupportsResponse.OPTIONAL,
|
||||
)
|
||||
hass.services.async_register(
|
||||
DOMAIN,
|
||||
SERVICE_SET_MEALPLAN,
|
||||
async_set_mealplan,
|
||||
_async_set_mealplan,
|
||||
schema=SERVICE_SET_MEALPLAN_SCHEMA,
|
||||
supports_response=SupportsResponse.OPTIONAL,
|
||||
)
|
||||
|
||||
@@ -2,7 +2,13 @@
|
||||
"config": {
|
||||
"step": {
|
||||
"pick_implementation": {
|
||||
"title": "[%key:common::config_flow::title::oauth2_pick_implementation%]"
|
||||
"title": "[%key:common::config_flow::title::oauth2_pick_implementation%]",
|
||||
"data": {
|
||||
"implementation": "[%key:common::config_flow::data::implementation%]"
|
||||
},
|
||||
"data_description": {
|
||||
"implementation": "[%key:common::config_flow::description::implementation%]"
|
||||
}
|
||||
}
|
||||
},
|
||||
"error": {
|
||||
|
||||
@@ -8,7 +8,13 @@
|
||||
"description": "[%key:common::config_flow::description::confirm_setup%]"
|
||||
},
|
||||
"pick_implementation": {
|
||||
"title": "[%key:common::config_flow::title::oauth2_pick_implementation%]"
|
||||
"title": "[%key:common::config_flow::title::oauth2_pick_implementation%]",
|
||||
"data": {
|
||||
"implementation": "[%key:common::config_flow::data::implementation%]"
|
||||
},
|
||||
"data_description": {
|
||||
"implementation": "[%key:common::config_flow::description::implementation%]"
|
||||
}
|
||||
},
|
||||
"reauth_confirm": {
|
||||
"title": "[%key:common::config_flow::title::reauth%]",
|
||||
|
||||
@@ -2,7 +2,13 @@
|
||||
"config": {
|
||||
"step": {
|
||||
"pick_implementation": {
|
||||
"title": "[%key:common::config_flow::title::oauth2_pick_implementation%]"
|
||||
"title": "[%key:common::config_flow::title::oauth2_pick_implementation%]",
|
||||
"data": {
|
||||
"implementation": "[%key:common::config_flow::data::implementation%]"
|
||||
},
|
||||
"data_description": {
|
||||
"implementation": "[%key:common::config_flow::description::implementation%]"
|
||||
}
|
||||
},
|
||||
"reauth_confirm": {
|
||||
"title": "[%key:common::config_flow::title::reauth%]",
|
||||
|
||||
@@ -62,6 +62,7 @@ TILT_DEVICE_MAP = {
|
||||
BlindType.VerticalBlind: CoverDeviceClass.BLIND,
|
||||
BlindType.VerticalBlindLeft: CoverDeviceClass.BLIND,
|
||||
BlindType.VerticalBlindRight: CoverDeviceClass.BLIND,
|
||||
BlindType.RollerTiltMotor: CoverDeviceClass.BLIND,
|
||||
}
|
||||
|
||||
TILT_ONLY_DEVICE_MAP = {
|
||||
|
||||
@@ -21,5 +21,5 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/motion_blinds",
|
||||
"iot_class": "local_push",
|
||||
"loggers": ["motionblinds"],
|
||||
"requirements": ["motionblinds==0.6.27"]
|
||||
"requirements": ["motionblinds==0.6.28"]
|
||||
}
|
||||
|
||||
@@ -5,7 +5,13 @@
|
||||
"config": {
|
||||
"step": {
|
||||
"pick_implementation": {
|
||||
"title": "[%key:common::config_flow::title::oauth2_pick_implementation%]"
|
||||
"title": "[%key:common::config_flow::title::oauth2_pick_implementation%]",
|
||||
"data": {
|
||||
"implementation": "[%key:common::config_flow::data::implementation%]"
|
||||
},
|
||||
"data_description": {
|
||||
"implementation": "[%key:common::config_flow::description::implementation%]"
|
||||
}
|
||||
},
|
||||
"reauth_confirm": {
|
||||
"title": "[%key:common::config_flow::title::reauth%]",
|
||||
|
||||
@@ -2,7 +2,13 @@
|
||||
"config": {
|
||||
"step": {
|
||||
"pick_implementation": {
|
||||
"title": "[%key:common::config_flow::title::oauth2_pick_implementation%]"
|
||||
"title": "[%key:common::config_flow::title::oauth2_pick_implementation%]",
|
||||
"data": {
|
||||
"implementation": "[%key:common::config_flow::data::implementation%]"
|
||||
},
|
||||
"data_description": {
|
||||
"implementation": "[%key:common::config_flow::description::implementation%]"
|
||||
}
|
||||
},
|
||||
"reauth_confirm": {
|
||||
"title": "[%key:common::config_flow::description::confirm_setup%]"
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user