forked from home-assistant/core
Compare commits
339 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
| cfa4740973 | |||
| 97084e9382 | |||
| 9db34fe232 | |||
| c4f0b4ab23 | |||
| 1647afc58a | |||
| 53ea8422f8 | |||
| 0b988b3fac | |||
| 5a4abe3ec1 | |||
| 89abc5ac69 | |||
| 08fe6653bb | |||
| 9aa18c7157 | |||
| cc7929f8fb | |||
| d657298791 | |||
| 05f393560f | |||
| 92da640d4c | |||
| ad3fd151aa | |||
| cd104dc08c | |||
| d3745d2519 | |||
| 931f3fa41a | |||
| 87b5a91212 | |||
| 3b8da62d84 | |||
| 86a48294f4 | |||
| a03884981f | |||
| ab695f90c7 | |||
| efcf8f9555 | |||
| f71903a563 | |||
| 95552e9a5b | |||
| 5da57271b2 | |||
| 62a7139f4d | |||
| a7be26cd95 | |||
| 9c3b0952e0 | |||
| c771f446b4 | |||
| 9a25561017 | |||
| bd870f0537 | |||
| d7f43bddfa | |||
| 87107c5a59 | |||
| 9ce920b35a | |||
| 15aff9662c | |||
| da6fb91886 | |||
| 1e880f7406 | |||
| 81153042d3 | |||
| 493ca261dc | |||
| 7493b340ca | |||
| e85e60ed6a | |||
| 8ff4d5dcbf | |||
| f2838e493b | |||
| a71edcf1a1 | |||
| 47bef74e7c | |||
| b757a7e3fe | |||
| 362ff5724d | |||
| 4f8363a5c2 | |||
| ae3925118c | |||
| b2fcab20a6 | |||
| 6423957d29 | |||
| 835cdad0a9 | |||
| d8d6decb38 | |||
| 16b42cc109 | |||
| a47f27821f | |||
| c797e7a973 | |||
| 245eb64405 | |||
| a895fcf057 | |||
| 5706fb26b8 | |||
| 3f82120cdc | |||
| 20df183470 | |||
| 980216795f | |||
| fdfcd841ba | |||
| 28a09794e9 | |||
| a0c9217375 | |||
| 469176c59b | |||
| 3ece672890 | |||
| c6ebba8843 | |||
| 1f047807a4 | |||
| f1b724c49a | |||
| 5ebed2046c | |||
| d1236a53b8 | |||
| 84f07ee992 | |||
| 360bffa3a9 | |||
| 2214d9b330 | |||
| 6a2d733d85 | |||
| 7392d5a30a | |||
| b3deeca939 | |||
| c38a3a239c | |||
| afa6ed09ef | |||
| deb966128f | |||
| 73707fa231 | |||
| 10ac39f6b2 | |||
| 2e05dc8618 | |||
| d8233b4de5 | |||
| 7cbc3ea65f | |||
| 6f0a9910ea | |||
| b8793760a1 | |||
| 6264f9c67b | |||
| 2a74deb84e | |||
| 9d1ff37a79 | |||
| 2f99164781 | |||
| 80ef32f09d | |||
| 63be0e2e1a | |||
| 74c4553bb0 | |||
| e240707b32 | |||
| 7c867852a9 | |||
| 2d149dc746 | |||
| 7edcddd3e4 | |||
| 71f658b560 | |||
| 9886db5d6d | |||
| c236cd070c | |||
| 9f1a830d32 | |||
| 1e69ce9111 | |||
| 389297155d | |||
| c341b86520 | |||
| 88eef379b2 | |||
| 34767d4058 | |||
| 12c3d54a63 | |||
| 33a185dade | |||
| c1c5776d85 | |||
| eda642554d | |||
| 51f5ce013f | |||
| f7794ea6b5 | |||
| 7a1bea7ff5 | |||
| c7c645776d | |||
| 667cb772e9 | |||
| 933d008e52 | |||
| d868f39aea | |||
| 28d776a0b0 | |||
| b5d541b596 | |||
| 4948499889 | |||
| 7696b101f6 | |||
| fd2987a9fd | |||
| 4c1d32020a | |||
| b40bdab0ae | |||
| d192aecd3b | |||
| d1781f5766 | |||
| 2c4461457a | |||
| 82959081de | |||
| acdac6d5e8 | |||
| d3d7889883 | |||
| 60ece3e1c9 | |||
| a9f8529460 | |||
| ec53b61f9e | |||
| e9f02edd8b | |||
| d1b7898219 | |||
| 8dc21ef619 | |||
| d9f91598a5 | |||
| c540acf2bd | |||
| f702f3efcd | |||
| 9410061405 | |||
| 485b28d9ea | |||
| d59200a9f5 | |||
| 44a92ca81c | |||
| d39fa39a03 | |||
| 36ec857523 | |||
| fcb8cdc146 | |||
| 2322b0b65f | |||
| 87baaf4255 | |||
| b7f0e877f0 | |||
| 5d92a04732 | |||
| 8ff879df22 | |||
| 9fb7ee676e | |||
| 2c855a3986 | |||
| cdd4894e30 | |||
| 5f26226712 | |||
| 8baf61031d | |||
| e90ba40553 | |||
| b38016425f | |||
| ee5e3f7691 | |||
| 7af6a4f493 | |||
| c25f26a290 | |||
| 8d62cb60a6 | |||
| 4f799069ea | |||
| af708b78e0 | |||
| f46e659740 | |||
| 7bd517e6ff | |||
| e9abdab1f5 | |||
| 86eee4f041 | |||
| 9db60c830c | |||
| c43a4682b9 | |||
| 2a4996055a | |||
| 4643fc2c14 | |||
| 6410b90d82 | |||
| e5c00eceae | |||
| fe65579df8 | |||
| 281beecb05 | |||
| 7546b5d269 | |||
| 490e3201b9 | |||
| 04be575139 | |||
| 854cae7f12 | |||
| 109d20978f | |||
| f8d284ec4b | |||
| 06ebe0810f | |||
| 802ad2ff51 | |||
| 9070a8d579 | |||
| e8b2a3de8b | |||
| 39549d5dd4 | |||
| 0c19e47bd4 | |||
| 05507d77e3 | |||
| 94558e2d40 | |||
| 4f22fe8f7f | |||
| 9e7dfbb857 | |||
| 02d182239a | |||
| 4e0f581747 | |||
| 42d97d348c | |||
| 69380c85ca | |||
| b38c647830 | |||
| 2396fd1090 | |||
| aa4eb89eee | |||
| 1b1bc6af95 | |||
| f17003a79c | |||
| ec70e8b0cd | |||
| d888c70ff0 | |||
| f29444002e | |||
| fc66997a36 | |||
| 35513ae072 | |||
| cd363d48c3 | |||
| d47ef835d7 | |||
| 00177c699e | |||
| 11b0086a01 | |||
| ceb177f80e | |||
| fa3832fbd7 | |||
| 2b9c903429 | |||
| a7c43f9b49 | |||
| b428196149 | |||
| e23da1a90f | |||
| 3951c2ea66 | |||
| fee152654d | |||
| 51073c948c | |||
| 91438088a0 | |||
| 427e1abdae | |||
| 6e7ac45ac0 | |||
| 4b3b9ebc29 | |||
| 649d8638ed | |||
| 12c4152dbe | |||
| 8f9572bb05 | |||
| 6d022ff4e0 | |||
| c0c2edb90a | |||
| b014219fdd | |||
| 216b8ef400 | |||
| f2ccd46267 | |||
| e16ba27ce8 | |||
| 506526a6a2 | |||
| a88678cf42 | |||
| d0b61af7ec | |||
| 04f5315ab2 | |||
| 7f9e4ba39e | |||
| 06aaf188ea | |||
| 627f994872 | |||
| 9e81ec5aae | |||
| 69753fca1d | |||
| 7773cc121e | |||
| 3aa56936ad | |||
| e66416c23d | |||
| a592feae3d | |||
| fc0d71e891 | |||
| d4640f1d24 | |||
| 6fe158836e | |||
| 629c0087f4 | |||
| 363bd75129 | |||
| 7592d350a8 | |||
| 8ac8401b4e | |||
| eed075dbfa | |||
| 23dbdedfb6 | |||
| 85ad29e28e | |||
| 35fc81b038 | |||
| 5d45b84cd2 | |||
| 7766649304 | |||
| 07e9020dfa | |||
| f504a759e0 | |||
| 9927de4801 | |||
| 1244fc4682 | |||
| e77a1b12f7 | |||
| 5459daaa10 | |||
| 400131df78 | |||
| 28e1843ff9 | |||
| df777318d1 | |||
| 6ad5e9e89c | |||
| a0bd8deee9 | |||
| 405cbd6a00 | |||
| 3e0eb5ab2c | |||
| fad75a70b6 | |||
| d9720283df | |||
| 14eed1778b | |||
| 049aaa7e8b | |||
| 35717e8216 | |||
| 2a081abc18 | |||
| b7f29c7358 | |||
| 3bb6373df5 | |||
| e1b4edec50 | |||
| 147bee57e1 | |||
| fcdaea64da | |||
| d1512d46be | |||
| 0be7db6270 | |||
| 2af0282725 | |||
| ff458c8417 | |||
| cc93152ff0 | |||
| 9965f01609 | |||
| e9c76ce694 | |||
| 58ab7d350d | |||
| e4d6e20ebd | |||
| 45e273897a | |||
| d9ec7142d7 | |||
| e162499267 | |||
| 67f21429e3 | |||
| a0563f06c9 | |||
| e7c4fdc8bb | |||
| c490e350bc | |||
| e11409ef99 | |||
| 5c8e415a76 | |||
| e795fb9497 | |||
| d0afabb85c | |||
| 4f3e8e9b94 | |||
| 46c1cbbc9c | |||
| 8d9a4ea278 | |||
| 22c83e2393 | |||
| c83a75f6f9 | |||
| 841c727112 | |||
| d8c9655bfd | |||
| 942ed89cc4 | |||
| a1fe6b9cf3 | |||
| 2567181cc2 | |||
| 028e4f6029 | |||
| b82e1a9bef | |||
| 438f226c31 | |||
| 2f139e3cb1 | |||
| 5d75e96fbf | |||
| dcf2ec5c37 | |||
| 2431e1ba98 | |||
| 4ead108c15 | |||
| ec8363fa49 | |||
| e7ff0a3f8b | |||
| f4c0eb4189 | |||
| b1ee5a76e1 | |||
| 6b9e8c301b | |||
| 89c3266c7e | |||
| cff0a632e8 | |||
| e04d8557ae | |||
| ca6286f241 | |||
| 35bcc9d5af | |||
| 25b45ce867 | |||
| d568209bd5 | |||
| 8a43e8af9e | |||
| 785e5b2c16 |
@@ -509,7 +509,7 @@ jobs:
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Build Docker image
|
||||
uses: docker/build-push-action@471d1dc4e07e5cdedd4c2171150001c434f0b7a4 # v6.15.0
|
||||
uses: docker/build-push-action@14487ce63c7a62a4a324b0bfb37086795e31c6c1 # v6.16.0
|
||||
with:
|
||||
context: . # So action will not pull the repository again
|
||||
file: ./script/hassfest/docker/Dockerfile
|
||||
@@ -522,7 +522,7 @@ jobs:
|
||||
- name: Push Docker image
|
||||
if: needs.init.outputs.channel != 'dev' && needs.init.outputs.publish == 'true'
|
||||
id: push
|
||||
uses: docker/build-push-action@471d1dc4e07e5cdedd4c2171150001c434f0b7a4 # v6.15.0
|
||||
uses: docker/build-push-action@14487ce63c7a62a4a324b0bfb37086795e31c6c1 # v6.16.0
|
||||
with:
|
||||
context: . # So action will not pull the repository again
|
||||
file: ./script/hassfest/docker/Dockerfile
|
||||
@@ -531,7 +531,7 @@ jobs:
|
||||
|
||||
- name: Generate artifact attestation
|
||||
if: needs.init.outputs.channel != 'dev' && needs.init.outputs.publish == 'true'
|
||||
uses: actions/attest-build-provenance@c074443f1aee8d4aeeae555aebba3282517141b2 # v2.2.3
|
||||
uses: actions/attest-build-provenance@db473fddc028af60658334401dc6fa3ffd8669fd # v2.3.0
|
||||
with:
|
||||
subject-name: ${{ env.HASSFEST_IMAGE_NAME }}
|
||||
subject-digest: ${{ steps.push.outputs.digest }}
|
||||
|
||||
@@ -463,6 +463,7 @@ homeassistant.components.slack.*
|
||||
homeassistant.components.sleepiq.*
|
||||
homeassistant.components.smhi.*
|
||||
homeassistant.components.smlight.*
|
||||
homeassistant.components.smtp.*
|
||||
homeassistant.components.snooz.*
|
||||
homeassistant.components.solarlog.*
|
||||
homeassistant.components.sonarr.*
|
||||
|
||||
Generated
+2
-1
@@ -1474,7 +1474,8 @@ build.json @home-assistant/supervisor
|
||||
/tests/components/steam_online/ @tkdrob
|
||||
/homeassistant/components/steamist/ @bdraco
|
||||
/tests/components/steamist/ @bdraco
|
||||
/homeassistant/components/stiebel_eltron/ @fucm
|
||||
/homeassistant/components/stiebel_eltron/ @fucm @ThyMYthOS
|
||||
/tests/components/stiebel_eltron/ @fucm @ThyMYthOS
|
||||
/homeassistant/components/stookwijzer/ @fwestenberg
|
||||
/tests/components/stookwijzer/ @fwestenberg
|
||||
/homeassistant/components/stream/ @hunterjm @uvjustin @allenporter
|
||||
|
||||
@@ -93,7 +93,7 @@
|
||||
"name": "Internal temperature"
|
||||
},
|
||||
"last_self_test": {
|
||||
"name": "Last self test"
|
||||
"name": "Last self-test"
|
||||
},
|
||||
"last_transfer": {
|
||||
"name": "Last transfer"
|
||||
@@ -177,7 +177,7 @@
|
||||
"name": "Restore requirement"
|
||||
},
|
||||
"self_test_result": {
|
||||
"name": "Self test result"
|
||||
"name": "Self-test result"
|
||||
},
|
||||
"sensitivity": {
|
||||
"name": "Sensitivity"
|
||||
@@ -195,7 +195,7 @@
|
||||
"name": "Status"
|
||||
},
|
||||
"self_test_interval": {
|
||||
"name": "Self test interval"
|
||||
"name": "Self-test interval"
|
||||
},
|
||||
"time_left": {
|
||||
"name": "Time left"
|
||||
|
||||
@@ -6,5 +6,6 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/apsystems",
|
||||
"integration_type": "device",
|
||||
"iot_class": "local_polling",
|
||||
"requirements": ["apsystems-ez1==2.5.0"]
|
||||
"loggers": ["APsystemsEZ1"],
|
||||
"requirements": ["apsystems-ez1==2.6.0"]
|
||||
}
|
||||
|
||||
@@ -20,9 +20,6 @@ import hass_nabucasa
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components import conversation, stt, tts, wake_word, websocket_api
|
||||
from homeassistant.components.tts import (
|
||||
generate_media_source_id as tts_generate_media_source_id,
|
||||
)
|
||||
from homeassistant.const import ATTR_SUPPORTED_FEATURES, MATCH_ALL
|
||||
from homeassistant.core import Context, HomeAssistant, callback
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
@@ -1276,33 +1273,19 @@ class PipelineRun:
|
||||
)
|
||||
)
|
||||
|
||||
try:
|
||||
# Synthesize audio and get URL
|
||||
tts_media_id = tts_generate_media_source_id(
|
||||
self.hass,
|
||||
tts_input,
|
||||
engine=self.tts_stream.engine,
|
||||
language=self.tts_stream.language,
|
||||
options=self.tts_stream.options,
|
||||
)
|
||||
except Exception as src_error:
|
||||
_LOGGER.exception("Unexpected error during text-to-speech")
|
||||
raise TextToSpeechError(
|
||||
code="tts-failed",
|
||||
message="Unexpected error during text-to-speech",
|
||||
) from src_error
|
||||
|
||||
self.tts_stream.async_set_message(tts_input)
|
||||
|
||||
tts_output = {
|
||||
"media_id": tts_media_id,
|
||||
"token": self.tts_stream.token,
|
||||
"url": self.tts_stream.url,
|
||||
"mime_type": self.tts_stream.content_type,
|
||||
}
|
||||
|
||||
self.process_event(
|
||||
PipelineEvent(PipelineEventType.TTS_END, {"tts_output": tts_output})
|
||||
PipelineEvent(
|
||||
PipelineEventType.TTS_END,
|
||||
{
|
||||
"tts_output": {
|
||||
"token": self.tts_stream.token,
|
||||
"url": self.tts_stream.url,
|
||||
"mime_type": self.tts_stream.content_type,
|
||||
}
|
||||
},
|
||||
)
|
||||
)
|
||||
|
||||
def _capture_chunk(self, audio_bytes: bytes | None) -> None:
|
||||
|
||||
@@ -18,6 +18,7 @@ from homeassistant.const import (
|
||||
ATTR_ENTITY_ID,
|
||||
ATTR_MODE,
|
||||
ATTR_NAME,
|
||||
CONF_ACTIONS,
|
||||
CONF_ALIAS,
|
||||
CONF_CONDITIONS,
|
||||
CONF_DEVICE_ID,
|
||||
@@ -27,6 +28,7 @@ from homeassistant.const import (
|
||||
CONF_MODE,
|
||||
CONF_PATH,
|
||||
CONF_PLATFORM,
|
||||
CONF_TRIGGERS,
|
||||
CONF_VARIABLES,
|
||||
CONF_ZONE,
|
||||
EVENT_HOMEASSISTANT_STARTED,
|
||||
@@ -86,11 +88,9 @@ from homeassistant.util.hass_dict import HassKey
|
||||
|
||||
from .config import AutomationConfig, ValidationStatus
|
||||
from .const import (
|
||||
CONF_ACTIONS,
|
||||
CONF_INITIAL_STATE,
|
||||
CONF_TRACE,
|
||||
CONF_TRIGGER_VARIABLES,
|
||||
CONF_TRIGGERS,
|
||||
DEFAULT_INITIAL_STATE,
|
||||
DOMAIN,
|
||||
LOGGER,
|
||||
|
||||
@@ -14,11 +14,15 @@ from homeassistant.components import blueprint
|
||||
from homeassistant.components.trace import TRACE_CONFIG_SCHEMA
|
||||
from homeassistant.config import config_per_platform, config_without_domain
|
||||
from homeassistant.const import (
|
||||
CONF_ACTION,
|
||||
CONF_ACTIONS,
|
||||
CONF_ALIAS,
|
||||
CONF_CONDITION,
|
||||
CONF_CONDITIONS,
|
||||
CONF_DESCRIPTION,
|
||||
CONF_ID,
|
||||
CONF_TRIGGER,
|
||||
CONF_TRIGGERS,
|
||||
CONF_VARIABLES,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant
|
||||
@@ -30,14 +34,10 @@ from homeassistant.helpers.typing import ConfigType
|
||||
from homeassistant.util.yaml.input import UndefinedSubstitution
|
||||
|
||||
from .const import (
|
||||
CONF_ACTION,
|
||||
CONF_ACTIONS,
|
||||
CONF_HIDE_ENTITY,
|
||||
CONF_INITIAL_STATE,
|
||||
CONF_TRACE,
|
||||
CONF_TRIGGER,
|
||||
CONF_TRIGGER_VARIABLES,
|
||||
CONF_TRIGGERS,
|
||||
DOMAIN,
|
||||
LOGGER,
|
||||
)
|
||||
@@ -58,34 +58,9 @@ _MINIMAL_PLATFORM_SCHEMA = vol.Schema(
|
||||
def _backward_compat_schema(value: Any | None) -> Any:
|
||||
"""Backward compatibility for automations."""
|
||||
|
||||
if not isinstance(value, dict):
|
||||
return value
|
||||
|
||||
# `trigger` has been renamed to `triggers`
|
||||
if CONF_TRIGGER in value:
|
||||
if CONF_TRIGGERS in value:
|
||||
raise vol.Invalid(
|
||||
"Cannot specify both 'trigger' and 'triggers'. Please use 'triggers' only."
|
||||
)
|
||||
value[CONF_TRIGGERS] = value.pop(CONF_TRIGGER)
|
||||
|
||||
# `condition` has been renamed to `conditions`
|
||||
if CONF_CONDITION in value:
|
||||
if CONF_CONDITIONS in value:
|
||||
raise vol.Invalid(
|
||||
"Cannot specify both 'condition' and 'conditions'. Please use 'conditions' only."
|
||||
)
|
||||
value[CONF_CONDITIONS] = value.pop(CONF_CONDITION)
|
||||
|
||||
# `action` has been renamed to `actions`
|
||||
if CONF_ACTION in value:
|
||||
if CONF_ACTIONS in value:
|
||||
raise vol.Invalid(
|
||||
"Cannot specify both 'action' and 'actions'. Please use 'actions' only."
|
||||
)
|
||||
value[CONF_ACTIONS] = value.pop(CONF_ACTION)
|
||||
|
||||
return value
|
||||
value = cv.renamed(CONF_TRIGGER, CONF_TRIGGERS)(value)
|
||||
value = cv.renamed(CONF_ACTION, CONF_ACTIONS)(value)
|
||||
return cv.renamed(CONF_CONDITION, CONF_CONDITIONS)(value)
|
||||
|
||||
|
||||
PLATFORM_SCHEMA = vol.All(
|
||||
|
||||
@@ -2,10 +2,6 @@
|
||||
|
||||
import logging
|
||||
|
||||
CONF_ACTION = "action"
|
||||
CONF_ACTIONS = "actions"
|
||||
CONF_TRIGGER = "trigger"
|
||||
CONF_TRIGGERS = "triggers"
|
||||
CONF_TRIGGER_VARIABLES = "trigger_variables"
|
||||
DOMAIN = "automation"
|
||||
|
||||
|
||||
@@ -19,8 +19,8 @@
|
||||
"bleak-retry-connector==3.9.0",
|
||||
"bluetooth-adapters==0.21.4",
|
||||
"bluetooth-auto-recovery==1.4.5",
|
||||
"bluetooth-data-tools==1.28.0",
|
||||
"bluetooth-data-tools==1.28.1",
|
||||
"dbus-fast==2.43.0",
|
||||
"habluetooth==3.44.0"
|
||||
"habluetooth==3.45.0"
|
||||
]
|
||||
}
|
||||
|
||||
@@ -9,12 +9,14 @@ import logging
|
||||
from typing import Any
|
||||
|
||||
from pyenphase import Envoy, EnvoyError, EnvoyTokenAuth
|
||||
from pyenphase.models.home import EnvoyInterfaceInformation
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import CONF_NAME, CONF_PASSWORD, CONF_TOKEN, CONF_USERNAME
|
||||
from homeassistant.core import CALLBACK_TYPE, HomeAssistant, callback
|
||||
from homeassistant.exceptions import ConfigEntryAuthFailed
|
||||
from homeassistant.helpers.event import async_track_time_interval
|
||||
from homeassistant.helpers import device_registry as dr
|
||||
from homeassistant.helpers.event import async_call_later, async_track_time_interval
|
||||
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
|
||||
from homeassistant.util import dt as dt_util
|
||||
|
||||
@@ -26,7 +28,7 @@ TOKEN_REFRESH_CHECK_INTERVAL = timedelta(days=1)
|
||||
STALE_TOKEN_THRESHOLD = timedelta(days=30).total_seconds()
|
||||
NOTIFICATION_ID = "enphase_envoy_notification"
|
||||
FIRMWARE_REFRESH_INTERVAL = timedelta(hours=4)
|
||||
|
||||
MAC_VERIFICATION_DELAY = timedelta(seconds=34)
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@@ -39,6 +41,7 @@ class EnphaseUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]):
|
||||
envoy_serial_number: str
|
||||
envoy_firmware: str
|
||||
config_entry: EnphaseConfigEntry
|
||||
interface: EnvoyInterfaceInformation | None
|
||||
|
||||
def __init__(
|
||||
self, hass: HomeAssistant, envoy: Envoy, entry: EnphaseConfigEntry
|
||||
@@ -50,8 +53,10 @@ class EnphaseUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]):
|
||||
self.password = entry_data[CONF_PASSWORD]
|
||||
self._setup_complete = False
|
||||
self.envoy_firmware = ""
|
||||
self.interface = None
|
||||
self._cancel_token_refresh: CALLBACK_TYPE | None = None
|
||||
self._cancel_firmware_refresh: CALLBACK_TYPE | None = None
|
||||
self._cancel_mac_verification: CALLBACK_TYPE | None = None
|
||||
super().__init__(
|
||||
hass,
|
||||
_LOGGER,
|
||||
@@ -121,6 +126,66 @@ class EnphaseUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]):
|
||||
self.hass.config_entries.async_reload(self.config_entry.entry_id)
|
||||
)
|
||||
|
||||
def _schedule_mac_verification(
|
||||
self, delay: timedelta = MAC_VERIFICATION_DELAY
|
||||
) -> None:
|
||||
"""Schedule one time job to verify envoy mac address."""
|
||||
self.async_cancel_mac_verification()
|
||||
self._cancel_mac_verification = async_call_later(
|
||||
self.hass,
|
||||
delay,
|
||||
self._async_verify_mac,
|
||||
)
|
||||
|
||||
@callback
|
||||
def _async_verify_mac(self, now: datetime.datetime) -> None:
|
||||
"""Verify Envoy active interface mac address in background."""
|
||||
self.hass.async_create_background_task(
|
||||
self._async_fetch_and_compare_mac(), "{name} verify envoy mac address"
|
||||
)
|
||||
|
||||
async def _async_fetch_and_compare_mac(self) -> None:
|
||||
"""Get Envoy interface information and update mac in device connections."""
|
||||
interface: (
|
||||
EnvoyInterfaceInformation | None
|
||||
) = await self.envoy.interface_settings()
|
||||
if interface is None:
|
||||
_LOGGER.debug("%s: interface information returned None", self.name)
|
||||
return
|
||||
# remember interface information so diagnostics can include in report
|
||||
self.interface = interface
|
||||
|
||||
# Add to or update device registry connections as needed
|
||||
device_registry = dr.async_get(self.hass)
|
||||
envoy_device = device_registry.async_get_device(
|
||||
identifiers={
|
||||
(
|
||||
DOMAIN,
|
||||
self.envoy_serial_number,
|
||||
)
|
||||
}
|
||||
)
|
||||
if envoy_device is None:
|
||||
_LOGGER.error(
|
||||
"No envoy device found in device registry: %s %s",
|
||||
DOMAIN,
|
||||
self.envoy_serial_number,
|
||||
)
|
||||
return
|
||||
|
||||
connection = (dr.CONNECTION_NETWORK_MAC, interface.mac)
|
||||
if connection in envoy_device.connections:
|
||||
_LOGGER.debug(
|
||||
"connection verified as existing: %s in %s", connection, self.name
|
||||
)
|
||||
return
|
||||
|
||||
device_registry.async_update_device(
|
||||
device_id=envoy_device.id,
|
||||
new_connections={connection},
|
||||
)
|
||||
_LOGGER.debug("added connection: %s to %s", connection, self.name)
|
||||
|
||||
@callback
|
||||
def _async_mark_setup_complete(self) -> None:
|
||||
"""Mark setup as complete and setup firmware checks and token refresh if needed."""
|
||||
@@ -132,6 +197,7 @@ class EnphaseUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]):
|
||||
FIRMWARE_REFRESH_INTERVAL,
|
||||
cancel_on_shutdown=True,
|
||||
)
|
||||
self._schedule_mac_verification()
|
||||
self.async_cancel_token_refresh()
|
||||
if not isinstance(self.envoy.auth, EnvoyTokenAuth):
|
||||
return
|
||||
@@ -252,3 +318,10 @@ class EnphaseUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]):
|
||||
if self._cancel_firmware_refresh:
|
||||
self._cancel_firmware_refresh()
|
||||
self._cancel_firmware_refresh = None
|
||||
|
||||
@callback
|
||||
def async_cancel_mac_verification(self) -> None:
|
||||
"""Cancel mac verification."""
|
||||
if self._cancel_mac_verification:
|
||||
self._cancel_mac_verification()
|
||||
self._cancel_mac_verification = None
|
||||
|
||||
@@ -3,6 +3,7 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import copy
|
||||
from datetime import datetime
|
||||
from typing import TYPE_CHECKING, Any
|
||||
|
||||
from attr import asdict
|
||||
@@ -63,6 +64,7 @@ async def _get_fixture_collection(envoy: Envoy, serial: str) -> dict[str, Any]:
|
||||
"/ivp/ensemble/generator",
|
||||
"/ivp/meters",
|
||||
"/ivp/meters/readings",
|
||||
"/home,",
|
||||
]
|
||||
|
||||
for end_point in end_points:
|
||||
@@ -146,11 +148,25 @@ async def async_get_config_entry_diagnostics(
|
||||
"inverters": envoy_data.inverters,
|
||||
"tariff": envoy_data.tariff,
|
||||
}
|
||||
# Add Envoy active interface information to report
|
||||
active_interface: dict[str, Any] = {}
|
||||
if coordinator.interface:
|
||||
active_interface = {
|
||||
"name": (interface := coordinator.interface).primary_interface,
|
||||
"interface type": interface.interface_type,
|
||||
"mac": interface.mac,
|
||||
"uses dhcp": interface.dhcp,
|
||||
"firmware build date": datetime.fromtimestamp(
|
||||
interface.software_build_epoch
|
||||
).strftime("%Y-%m-%d %H:%M:%S"),
|
||||
"envoy timezone": interface.timezone,
|
||||
}
|
||||
|
||||
envoy_properties: dict[str, Any] = {
|
||||
"envoy_firmware": envoy.firmware,
|
||||
"part_number": envoy.part_number,
|
||||
"envoy_model": envoy.envoy_model,
|
||||
"active interface": active_interface,
|
||||
"supported_features": [feature.name for feature in envoy.supported_features],
|
||||
"phase_mode": envoy.phase_mode,
|
||||
"phase_count": envoy.phase_count,
|
||||
|
||||
@@ -6,7 +6,7 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/enphase_envoy",
|
||||
"iot_class": "local_polling",
|
||||
"loggers": ["pyenphase"],
|
||||
"quality_scale": "bronze",
|
||||
"quality_scale": "platinum",
|
||||
"requirements": ["pyenphase==1.26.0"],
|
||||
"zeroconf": [
|
||||
{
|
||||
|
||||
@@ -128,7 +128,7 @@
|
||||
"storage_mode": {
|
||||
"name": "Storage mode",
|
||||
"state": {
|
||||
"self_consumption": "Self consumption",
|
||||
"self_consumption": "Self-consumption",
|
||||
"backup": "Full backup",
|
||||
"savings": "Savings mode"
|
||||
}
|
||||
@@ -393,7 +393,7 @@
|
||||
},
|
||||
"exceptions": {
|
||||
"unexpected_device": {
|
||||
"message": "Unexpected Envoy serial-number found at {host}; expected {expected_serial}, found {actual_serial}"
|
||||
"message": "Unexpected Envoy serial number found at {host}; expected {expected_serial}, found {actual_serial}"
|
||||
},
|
||||
"authentication_error": {
|
||||
"message": "Envoy authentication failure on {host}: {args}"
|
||||
|
||||
@@ -17,7 +17,7 @@
|
||||
"mqtt": ["esphome/discover/#"],
|
||||
"quality_scale": "platinum",
|
||||
"requirements": [
|
||||
"aioesphomeapi==30.0.1",
|
||||
"aioesphomeapi==30.1.0",
|
||||
"esphome-dashboard-api==1.3.0",
|
||||
"bleak-esphome==2.14.0"
|
||||
],
|
||||
|
||||
@@ -5,5 +5,5 @@
|
||||
"config_flow": true,
|
||||
"documentation": "https://www.home-assistant.io/integrations/garages_amsterdam",
|
||||
"iot_class": "cloud_polling",
|
||||
"requirements": ["odp-amsterdam==6.0.2"]
|
||||
"requirements": ["odp-amsterdam==6.1.1"]
|
||||
}
|
||||
|
||||
@@ -164,7 +164,7 @@
|
||||
"name": "Load consumption today (solar)"
|
||||
},
|
||||
"mix_self_consumption_today": {
|
||||
"name": "Self consumption today (solar + battery)"
|
||||
"name": "Self-consumption today (solar + battery)"
|
||||
},
|
||||
"mix_load_consumption_battery_today": {
|
||||
"name": "Load consumption today (battery)"
|
||||
@@ -173,7 +173,7 @@
|
||||
"name": "Import from grid today (load)"
|
||||
},
|
||||
"mix_last_update": {
|
||||
"name": "Last Data Update"
|
||||
"name": "Last data update"
|
||||
},
|
||||
"mix_import_from_grid_today_combined": {
|
||||
"name": "Import from grid today (load + charging)"
|
||||
|
||||
@@ -11,7 +11,7 @@ from aiohomeconnect.model.error import HomeConnectError
|
||||
from aiohomeconnect.model.program import Execution
|
||||
|
||||
from homeassistant.components.select import SelectEntity, SelectEntityDescription
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
@@ -366,16 +366,37 @@ class HomeConnectProgramSelectEntity(HomeConnectEntity, SelectEntity):
|
||||
appliance,
|
||||
desc,
|
||||
)
|
||||
self.set_options()
|
||||
|
||||
def set_options(self) -> None:
|
||||
"""Set the options for the entity."""
|
||||
self._attr_options = [
|
||||
PROGRAMS_TRANSLATION_KEYS_MAP[program.key]
|
||||
for program in appliance.programs
|
||||
for program in self.appliance.programs
|
||||
if program.key != ProgramKey.UNKNOWN
|
||||
and (
|
||||
program.constraints is None
|
||||
or program.constraints.execution in desc.allowed_executions
|
||||
or program.constraints.execution
|
||||
in self.entity_description.allowed_executions
|
||||
)
|
||||
]
|
||||
|
||||
@callback
|
||||
def refresh_options(self) -> None:
|
||||
"""Refresh the options for the entity."""
|
||||
self.set_options()
|
||||
self.async_write_ha_state()
|
||||
|
||||
async def async_added_to_hass(self) -> None:
|
||||
"""When entity is added to hass."""
|
||||
await super().async_added_to_hass()
|
||||
self.async_on_remove(
|
||||
self.coordinator.async_add_listener(
|
||||
self.refresh_options,
|
||||
(self.appliance.info.ha_id, EventKey.BSH_COMMON_APPLIANCE_CONNECTED),
|
||||
)
|
||||
)
|
||||
|
||||
def update_native_value(self) -> None:
|
||||
"""Set the program value."""
|
||||
event = self.appliance.events.get(cast(EventKey, self.bsh_key))
|
||||
|
||||
@@ -40,6 +40,7 @@ from homeassistant.core import callback
|
||||
from homeassistant.helpers.service_info.ssdp import (
|
||||
ATTR_UPNP_FRIENDLY_NAME,
|
||||
ATTR_UPNP_MANUFACTURER,
|
||||
ATTR_UPNP_MODEL_NAME,
|
||||
ATTR_UPNP_PRESENTATION_URL,
|
||||
ATTR_UPNP_SERIAL,
|
||||
ATTR_UPNP_UDN,
|
||||
@@ -276,11 +277,12 @@ class ConfigFlowHandler(ConfigFlow, domain=DOMAIN):
|
||||
if TYPE_CHECKING:
|
||||
assert discovery_info.ssdp_location
|
||||
url = url_normalize(
|
||||
discovery_info.upnp.get(
|
||||
ATTR_UPNP_PRESENTATION_URL,
|
||||
f"http://{urlparse(discovery_info.ssdp_location).hostname}/",
|
||||
)
|
||||
discovery_info.upnp.get(ATTR_UPNP_PRESENTATION_URL)
|
||||
or f"http://{urlparse(discovery_info.ssdp_location).hostname}/"
|
||||
)
|
||||
if TYPE_CHECKING:
|
||||
# url_normalize only returns None if passed None, and we don't do that
|
||||
assert url is not None
|
||||
|
||||
unique_id = discovery_info.upnp.get(
|
||||
ATTR_UPNP_SERIAL, discovery_info.upnp[ATTR_UPNP_UDN]
|
||||
@@ -308,8 +310,11 @@ class ConfigFlowHandler(ConfigFlow, domain=DOMAIN):
|
||||
self.context.update(
|
||||
{
|
||||
"title_placeholders": {
|
||||
CONF_NAME: discovery_info.upnp.get(ATTR_UPNP_FRIENDLY_NAME)
|
||||
or "Huawei LTE"
|
||||
CONF_NAME: (
|
||||
discovery_info.upnp.get(ATTR_UPNP_MODEL_NAME)
|
||||
or discovery_info.upnp.get(ATTR_UPNP_FRIENDLY_NAME)
|
||||
or "Huawei LTE"
|
||||
)
|
||||
}
|
||||
}
|
||||
)
|
||||
|
||||
@@ -9,7 +9,7 @@
|
||||
"requirements": [
|
||||
"huawei-lte-api==1.11.0",
|
||||
"stringcase==1.2.0",
|
||||
"url-normalize==2.2.0"
|
||||
"url-normalize==2.2.1"
|
||||
],
|
||||
"ssdp": [
|
||||
{
|
||||
|
||||
@@ -233,6 +233,11 @@ SENSOR_META: dict[str, HuaweiSensorGroup] = {
|
||||
icon="mdi:antenna",
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
),
|
||||
"ims": HuaweiSensorEntityDescription(
|
||||
key="ims",
|
||||
translation_key="ims",
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
),
|
||||
"lac": HuaweiSensorEntityDescription(
|
||||
key="lac",
|
||||
translation_key="lac",
|
||||
@@ -271,6 +276,12 @@ SENSOR_META: dict[str, HuaweiSensorGroup] = {
|
||||
),
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
),
|
||||
"nei_cellid": HuaweiSensorEntityDescription(
|
||||
key="nei_cellid",
|
||||
translation_key="nei_cellid",
|
||||
icon="mdi:antenna",
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
),
|
||||
"nrbler": HuaweiSensorEntityDescription(
|
||||
key="nrbler",
|
||||
translation_key="nrbler",
|
||||
@@ -423,6 +434,17 @@ SENSOR_META: dict[str, HuaweiSensorGroup] = {
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
entity_registry_enabled_default=True,
|
||||
),
|
||||
"rxlev": HuaweiSensorEntityDescription(
|
||||
key="rxlev",
|
||||
translation_key="rxlev",
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
),
|
||||
"sc": HuaweiSensorEntityDescription(
|
||||
key="sc",
|
||||
translation_key="sc",
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
),
|
||||
"sinr": HuaweiSensorEntityDescription(
|
||||
key="sinr",
|
||||
translation_key="sinr",
|
||||
@@ -480,6 +502,12 @@ SENSOR_META: dict[str, HuaweiSensorGroup] = {
|
||||
device_class=SensorDeviceClass.FREQUENCY,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
),
|
||||
"wdlfreq": HuaweiSensorEntityDescription(
|
||||
key="wdlfreq",
|
||||
translation_key="wdlfreq",
|
||||
device_class=SensorDeviceClass.FREQUENCY,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
),
|
||||
}
|
||||
),
|
||||
#
|
||||
|
||||
@@ -132,6 +132,9 @@
|
||||
"enodeb_id": {
|
||||
"name": "eNodeB ID"
|
||||
},
|
||||
"ims": {
|
||||
"name": "IMS"
|
||||
},
|
||||
"lac": {
|
||||
"name": "LAC"
|
||||
},
|
||||
@@ -144,6 +147,9 @@
|
||||
"mode": {
|
||||
"name": "Mode"
|
||||
},
|
||||
"nei_cellid": {
|
||||
"name": "Neighbor cell ID"
|
||||
},
|
||||
"nrbler": {
|
||||
"name": "5G block error rate"
|
||||
},
|
||||
@@ -207,6 +213,12 @@
|
||||
"rssi": {
|
||||
"name": "RSSI"
|
||||
},
|
||||
"rxlev": {
|
||||
"name": "Received signal level"
|
||||
},
|
||||
"sc": {
|
||||
"name": "Scrambling code"
|
||||
},
|
||||
"sinr": {
|
||||
"name": "SINR"
|
||||
},
|
||||
@@ -231,6 +243,9 @@
|
||||
"uplink_frequency": {
|
||||
"name": "Uplink frequency"
|
||||
},
|
||||
"wdlfreq": {
|
||||
"name": "WCDMA downlink frequency"
|
||||
},
|
||||
"sms_unread": {
|
||||
"name": "SMS unread"
|
||||
},
|
||||
|
||||
@@ -136,7 +136,7 @@
|
||||
"services": {
|
||||
"fetch": {
|
||||
"name": "Fetch message",
|
||||
"description": "Fetch an email message from the server.",
|
||||
"description": "Fetches an email message from the server.",
|
||||
"fields": {
|
||||
"entry": {
|
||||
"name": "Entry",
|
||||
@@ -150,7 +150,7 @@
|
||||
},
|
||||
"seen": {
|
||||
"name": "Mark message as seen",
|
||||
"description": "Mark an email as seen.",
|
||||
"description": "Marks an email as seen.",
|
||||
"fields": {
|
||||
"entry": {
|
||||
"name": "Entry",
|
||||
@@ -164,7 +164,7 @@
|
||||
},
|
||||
"move": {
|
||||
"name": "Move message",
|
||||
"description": "Move an email to a target folder.",
|
||||
"description": "Moves an email to a target folder.",
|
||||
"fields": {
|
||||
"entry": {
|
||||
"name": "[%key:component::imap::services::seen::fields::entry::name%]",
|
||||
@@ -186,7 +186,7 @@
|
||||
},
|
||||
"delete": {
|
||||
"name": "Delete message",
|
||||
"description": "Delete an email.",
|
||||
"description": "Deletes an email.",
|
||||
"fields": {
|
||||
"entry": {
|
||||
"name": "[%key:component::imap::services::seen::fields::entry::name%]",
|
||||
|
||||
@@ -159,10 +159,10 @@
|
||||
"name": "Monitoring grid power flow"
|
||||
},
|
||||
"monitoring_self_consumption": {
|
||||
"name": "Monitoring self consumption"
|
||||
"name": "Monitoring self-consumption"
|
||||
},
|
||||
"monitoring_self_sufficiency": {
|
||||
"name": "Monitoring self sufficiency"
|
||||
"name": "Monitoring self-sufficiency"
|
||||
},
|
||||
"monitoring_solar_production": {
|
||||
"name": "Monitoring solar production"
|
||||
|
||||
@@ -32,6 +32,7 @@
|
||||
"sensor_test": "mdi:thermometer-check",
|
||||
"central_heating": "mdi:radiator",
|
||||
"standby": "mdi:water-boiler-off",
|
||||
"off": "mdi:water-boiler-off",
|
||||
"postrun_boyler": "mdi:water-boiler-auto",
|
||||
"service": "mdi:progress-wrench",
|
||||
"tapwater": "mdi:faucet",
|
||||
|
||||
@@ -11,5 +11,5 @@
|
||||
"iot_class": "local_polling",
|
||||
"loggers": ["incomfortclient"],
|
||||
"quality_scale": "platinum",
|
||||
"requirements": ["incomfort-client==0.6.7"]
|
||||
"requirements": ["incomfort-client==0.6.8"]
|
||||
}
|
||||
|
||||
@@ -119,13 +119,14 @@
|
||||
"sensor_test": "Sensor test",
|
||||
"central_heating": "Central heating",
|
||||
"standby": "[%key:common::state::standby%]",
|
||||
"off": "[%key:common::state::off%]",
|
||||
"postrun_boyler": "Post run boiler",
|
||||
"service": "Service",
|
||||
"tapwater": "Tap water",
|
||||
"postrun_ch": "Post run central heating",
|
||||
"boiler_int": "Boiler internal",
|
||||
"buffer": "Buffer",
|
||||
"sensor_fault_after_self_check_e0": "Sensor fault after self check",
|
||||
"sensor_fault_after_self_check_e0": "Sensor fault after self-check",
|
||||
"cv_temperature_too_high_e1": "Temperature too high",
|
||||
"s1_and_s2_interchanged_e2": "S1 and S2 interchanged",
|
||||
"no_flame_signal_e4": "No flame signal",
|
||||
|
||||
@@ -103,7 +103,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
intent.INTENT_TURN_ON,
|
||||
HOMEASSISTANT_DOMAIN,
|
||||
SERVICE_TURN_ON,
|
||||
description="Turns on/opens a device or entity",
|
||||
description="Turns on/opens a device or entity. For locks, this performs a 'lock' action. Use for requests like 'turn on', 'activate', 'enable', or 'lock'.",
|
||||
device_classes=ONOFF_DEVICE_CLASSES,
|
||||
),
|
||||
)
|
||||
@@ -113,7 +113,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
intent.INTENT_TURN_OFF,
|
||||
HOMEASSISTANT_DOMAIN,
|
||||
SERVICE_TURN_OFF,
|
||||
description="Turns off/closes a device or entity",
|
||||
description="Turns off/closes a device or entity. For locks, this performs an 'unlock' action. Use for requests like 'turn off', 'deactivate', 'disable', or 'unlock'.",
|
||||
device_classes=ONOFF_DEVICE_CLASSES,
|
||||
),
|
||||
)
|
||||
|
||||
@@ -9,7 +9,7 @@ from typing import TYPE_CHECKING, Any
|
||||
from pyecotrend_ista import KeycloakError, LoginError, PyEcotrendIsta, ServerError
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
|
||||
from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlow, ConfigFlowResult
|
||||
from homeassistant.const import CONF_EMAIL, CONF_NAME, CONF_PASSWORD
|
||||
from homeassistant.helpers.selector import (
|
||||
TextSelector,
|
||||
@@ -93,7 +93,11 @@ class IstaConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
"""Dialog that informs the user that reauth is required."""
|
||||
errors: dict[str, str] = {}
|
||||
|
||||
reauth_entry = self._get_reauth_entry()
|
||||
reauth_entry = (
|
||||
self._get_reauth_entry()
|
||||
if self.source == SOURCE_REAUTH
|
||||
else self._get_reconfigure_entry()
|
||||
)
|
||||
if user_input is not None:
|
||||
ista = PyEcotrendIsta(
|
||||
user_input[CONF_EMAIL],
|
||||
@@ -126,7 +130,7 @@ class IstaConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
return self.async_update_reload_and_abort(reauth_entry, data=user_input)
|
||||
|
||||
return self.async_show_form(
|
||||
step_id="reauth_confirm",
|
||||
step_id="reauth_confirm" if self.source == SOURCE_REAUTH else "reconfigure",
|
||||
data_schema=self.add_suggested_values_to_schema(
|
||||
data_schema=STEP_USER_DATA_SCHEMA,
|
||||
suggested_values={
|
||||
@@ -141,3 +145,5 @@ class IstaConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
},
|
||||
errors=errors,
|
||||
)
|
||||
|
||||
async_step_reconfigure = async_step_reauth_confirm
|
||||
|
||||
@@ -3,7 +3,8 @@
|
||||
"abort": {
|
||||
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
|
||||
"reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]",
|
||||
"unique_id_mismatch": "The login details correspond to a different account. Please re-authenticate to the previously configured account."
|
||||
"unique_id_mismatch": "The login details correspond to a different account. Please re-authenticate to the previously configured account.",
|
||||
"reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]"
|
||||
},
|
||||
"error": {
|
||||
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
|
||||
@@ -33,6 +34,18 @@
|
||||
"email": "[%key:component::ista_ecotrend::config::step::user::data_description::email%]",
|
||||
"password": "[%key:component::ista_ecotrend::config::step::user::data_description::password%]"
|
||||
}
|
||||
},
|
||||
"reconfigure": {
|
||||
"title": "Update ista EcoTrend configuration",
|
||||
"description": "Update your credentials if you have changed your **ista EcoTrend** account email or password.",
|
||||
"data": {
|
||||
"email": "[%key:common::config_flow::data::email%]",
|
||||
"password": "[%key:common::config_flow::data::password%]"
|
||||
},
|
||||
"data_description": {
|
||||
"email": "[%key:component::ista_ecotrend::config::step::user::data_description::email%]",
|
||||
"password": "[%key:component::ista_ecotrend::config::step::user::data_description::password%]"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
@@ -131,7 +131,7 @@ async def async_migrate_entry(
|
||||
return {"new_unique_id": new_unique_id}
|
||||
return None
|
||||
|
||||
if config_entry.version > 1:
|
||||
if config_entry.version > 2:
|
||||
# This means the user has downgraded from a future version
|
||||
return False
|
||||
|
||||
@@ -139,4 +139,9 @@ async def async_migrate_entry(
|
||||
await er.async_migrate_entries(hass, config_entry.entry_id, update_unique_id)
|
||||
hass.config_entries.async_update_entry(config_entry, version=2)
|
||||
|
||||
if config_entry.version == 2:
|
||||
new_data = {**config_entry.data}
|
||||
new_data[CONF_LANGUAGE] = config_entry.data[CONF_LANGUAGE][:2]
|
||||
hass.config_entries.async_update_entry(config_entry, data=new_data, version=3)
|
||||
|
||||
return True
|
||||
|
||||
@@ -91,7 +91,6 @@ class JewishCalendarBinarySensor(JewishCalendarEntity, BinarySensorEntity):
|
||||
location=self._location,
|
||||
candle_lighting_offset=self._candle_lighting_offset,
|
||||
havdalah_offset=self._havdalah_offset,
|
||||
language=self._language,
|
||||
)
|
||||
|
||||
async def async_added_to_hass(self) -> None:
|
||||
|
||||
@@ -3,9 +3,10 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
from typing import Any
|
||||
from typing import Any, get_args
|
||||
import zoneinfo
|
||||
|
||||
from hdate.translator import Language
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.config_entries import (
|
||||
@@ -25,8 +26,9 @@ from homeassistant.const import (
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers.selector import (
|
||||
BooleanSelector,
|
||||
LanguageSelector,
|
||||
LanguageSelectorConfig,
|
||||
LocationSelector,
|
||||
SelectOptionDict,
|
||||
SelectSelector,
|
||||
SelectSelectorConfig,
|
||||
)
|
||||
@@ -43,11 +45,6 @@ from .const import (
|
||||
DOMAIN,
|
||||
)
|
||||
|
||||
LANGUAGE = [
|
||||
SelectOptionDict(value="hebrew", label="Hebrew"),
|
||||
SelectOptionDict(value="english", label="English"),
|
||||
]
|
||||
|
||||
OPTIONS_SCHEMA = vol.Schema(
|
||||
{
|
||||
vol.Optional(CONF_CANDLE_LIGHT_MINUTES, default=DEFAULT_CANDLE_LIGHT): int,
|
||||
@@ -72,8 +69,8 @@ async def _get_data_schema(hass: HomeAssistant) -> vol.Schema:
|
||||
return vol.Schema(
|
||||
{
|
||||
vol.Required(CONF_DIASPORA, default=DEFAULT_DIASPORA): BooleanSelector(),
|
||||
vol.Required(CONF_LANGUAGE, default=DEFAULT_LANGUAGE): SelectSelector(
|
||||
SelectSelectorConfig(options=LANGUAGE)
|
||||
vol.Required(CONF_LANGUAGE, default=DEFAULT_LANGUAGE): LanguageSelector(
|
||||
LanguageSelectorConfig(languages=list(get_args(Language)))
|
||||
),
|
||||
vol.Optional(CONF_LOCATION, default=default_location): LocationSelector(),
|
||||
vol.Optional(CONF_ELEVATION, default=hass.config.elevation): int,
|
||||
@@ -87,7 +84,7 @@ async def _get_data_schema(hass: HomeAssistant) -> vol.Schema:
|
||||
class JewishCalendarConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
"""Handle a config flow for Jewish calendar."""
|
||||
|
||||
VERSION = 2
|
||||
VERSION = 3
|
||||
|
||||
@staticmethod
|
||||
@callback
|
||||
|
||||
@@ -13,6 +13,6 @@ DEFAULT_NAME = "Jewish Calendar"
|
||||
DEFAULT_CANDLE_LIGHT = 18
|
||||
DEFAULT_DIASPORA = False
|
||||
DEFAULT_HAVDALAH_OFFSET_MINUTES = 0
|
||||
DEFAULT_LANGUAGE = "english"
|
||||
DEFAULT_LANGUAGE = "en"
|
||||
|
||||
SERVICE_COUNT_OMER = "count_omer"
|
||||
|
||||
@@ -3,7 +3,7 @@
|
||||
from dataclasses import dataclass
|
||||
|
||||
from hdate import Location
|
||||
from hdate.translator import Language
|
||||
from hdate.translator import Language, set_language
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo
|
||||
@@ -44,7 +44,7 @@ class JewishCalendarEntity(Entity):
|
||||
)
|
||||
data = config_entry.runtime_data
|
||||
self._location = data.location
|
||||
self._language = data.language
|
||||
self._candle_lighting_offset = data.candle_lighting_offset
|
||||
self._havdalah_offset = data.havdalah_offset
|
||||
self._diaspora = data.diaspora
|
||||
set_language(data.language)
|
||||
|
||||
@@ -6,6 +6,6 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/jewish_calendar",
|
||||
"iot_class": "calculated",
|
||||
"loggers": ["hdate"],
|
||||
"requirements": ["hdate[astral]==1.0.3"],
|
||||
"requirements": ["hdate[astral]==1.1.0"],
|
||||
"single_config_entry": true
|
||||
}
|
||||
|
||||
@@ -218,9 +218,7 @@ class JewishCalendarSensor(JewishCalendarEntity, SensorEntity):
|
||||
|
||||
_LOGGER.debug("Now: %s Sunset: %s", now, sunset)
|
||||
|
||||
daytime_date = HDateInfo(
|
||||
today, diaspora=self._diaspora, language=self._language
|
||||
)
|
||||
daytime_date = HDateInfo(today, diaspora=self._diaspora)
|
||||
|
||||
# The Jewish day starts after darkness (called "tzais") and finishes at
|
||||
# sunset ("shkia"). The time in between is a gray area
|
||||
@@ -253,7 +251,6 @@ class JewishCalendarSensor(JewishCalendarEntity, SensorEntity):
|
||||
location=self._location,
|
||||
candle_lighting_offset=self._candle_lighting_offset,
|
||||
havdalah_offset=self._havdalah_offset,
|
||||
language=self._language,
|
||||
)
|
||||
|
||||
@property
|
||||
@@ -272,7 +269,6 @@ class JewishCalendarSensor(JewishCalendarEntity, SensorEntity):
|
||||
# refers to "current" or "upcoming" dates.
|
||||
if self.entity_description.key == "date":
|
||||
hdate = after_shkia_date.hdate
|
||||
hdate.month.set_language(self._language)
|
||||
self._attrs = {
|
||||
"hebrew_year": str(hdate.year),
|
||||
"hebrew_month_name": str(hdate.month),
|
||||
@@ -290,9 +286,7 @@ class JewishCalendarSensor(JewishCalendarEntity, SensorEntity):
|
||||
dict.fromkeys(_holiday.type.name for _holiday in _holidays)
|
||||
)
|
||||
self._attrs = {"id": _id, "type": _type}
|
||||
self._attr_options = HolidayDatabase(self._diaspora).get_all_names(
|
||||
self._language
|
||||
)
|
||||
self._attr_options = HolidayDatabase(self._diaspora).get_all_names()
|
||||
return ", ".join(str(holiday) for holiday in _holidays) if _holidays else ""
|
||||
if self.entity_description.key == "omer_count":
|
||||
return after_shkia_date.omer.total_days if after_shkia_date.omer else 0
|
||||
|
||||
@@ -1,11 +1,11 @@
|
||||
"""Services for Jewish Calendar."""
|
||||
|
||||
import datetime
|
||||
from typing import cast
|
||||
from typing import get_args
|
||||
|
||||
from hdate import HebrewDate
|
||||
from hdate.omer import Nusach, Omer
|
||||
from hdate.translator import Language
|
||||
from hdate.translator import Language, set_language
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.const import CONF_LANGUAGE
|
||||
@@ -20,7 +20,6 @@ from homeassistant.helpers.selector import LanguageSelector, LanguageSelectorCon
|
||||
|
||||
from .const import ATTR_DATE, ATTR_NUSACH, DOMAIN, SERVICE_COUNT_OMER
|
||||
|
||||
SUPPORTED_LANGUAGES = {"en": "english", "fr": "french", "he": "hebrew"}
|
||||
OMER_SCHEMA = vol.Schema(
|
||||
{
|
||||
vol.Required(ATTR_DATE, default=datetime.date.today): cv.date,
|
||||
@@ -28,7 +27,7 @@ OMER_SCHEMA = vol.Schema(
|
||||
[nusach.name.lower() for nusach in Nusach]
|
||||
),
|
||||
vol.Required(CONF_LANGUAGE, default="he"): LanguageSelector(
|
||||
LanguageSelectorConfig(languages=list(SUPPORTED_LANGUAGES.keys()))
|
||||
LanguageSelectorConfig(languages=list(get_args(Language)))
|
||||
),
|
||||
}
|
||||
)
|
||||
@@ -41,12 +40,8 @@ def async_setup_services(hass: HomeAssistant) -> None:
|
||||
"""Return the Omer blessing for a given date."""
|
||||
hebrew_date = HebrewDate.from_gdate(call.data["date"])
|
||||
nusach = Nusach[call.data["nusach"].upper()]
|
||||
|
||||
# Currently Omer only supports Hebrew, English, and French and requires
|
||||
# the full language name
|
||||
language = cast(Language, SUPPORTED_LANGUAGES[call.data[CONF_LANGUAGE]])
|
||||
|
||||
omer = Omer(date=hebrew_date, nusach=nusach, language=language)
|
||||
set_language(call.data[CONF_LANGUAGE])
|
||||
omer = Omer(date=hebrew_date, nusach=nusach)
|
||||
return {
|
||||
"message": str(omer.count_str()),
|
||||
"weeks": omer.week,
|
||||
|
||||
@@ -32,6 +32,7 @@ from .coordinator import (
|
||||
LaMarzoccoRuntimeData,
|
||||
LaMarzoccoScheduleUpdateCoordinator,
|
||||
LaMarzoccoSettingsUpdateCoordinator,
|
||||
LaMarzoccoStatisticsUpdateCoordinator,
|
||||
)
|
||||
|
||||
PLATFORMS = [
|
||||
@@ -140,12 +141,14 @@ async def async_setup_entry(hass: HomeAssistant, entry: LaMarzoccoConfigEntry) -
|
||||
LaMarzoccoConfigUpdateCoordinator(hass, entry, device),
|
||||
LaMarzoccoSettingsUpdateCoordinator(hass, entry, device),
|
||||
LaMarzoccoScheduleUpdateCoordinator(hass, entry, device),
|
||||
LaMarzoccoStatisticsUpdateCoordinator(hass, entry, device),
|
||||
)
|
||||
|
||||
await asyncio.gather(
|
||||
coordinators.config_coordinator.async_config_entry_first_refresh(),
|
||||
coordinators.settings_coordinator.async_config_entry_first_refresh(),
|
||||
coordinators.schedule_coordinator.async_config_entry_first_refresh(),
|
||||
coordinators.statistics_coordinator.async_config_entry_first_refresh(),
|
||||
)
|
||||
|
||||
entry.runtime_data = coordinators
|
||||
|
||||
@@ -22,6 +22,7 @@ from .const import DOMAIN
|
||||
SCAN_INTERVAL = timedelta(seconds=15)
|
||||
SETTINGS_UPDATE_INTERVAL = timedelta(hours=1)
|
||||
SCHEDULE_UPDATE_INTERVAL = timedelta(minutes=5)
|
||||
STATISTICS_UPDATE_INTERVAL = timedelta(minutes=15)
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@@ -32,6 +33,7 @@ class LaMarzoccoRuntimeData:
|
||||
config_coordinator: LaMarzoccoConfigUpdateCoordinator
|
||||
settings_coordinator: LaMarzoccoSettingsUpdateCoordinator
|
||||
schedule_coordinator: LaMarzoccoScheduleUpdateCoordinator
|
||||
statistics_coordinator: LaMarzoccoStatisticsUpdateCoordinator
|
||||
|
||||
|
||||
type LaMarzoccoConfigEntry = ConfigEntry[LaMarzoccoRuntimeData]
|
||||
@@ -130,3 +132,14 @@ class LaMarzoccoScheduleUpdateCoordinator(LaMarzoccoUpdateCoordinator):
|
||||
"""Fetch data from API endpoint."""
|
||||
await self.device.get_schedule()
|
||||
_LOGGER.debug("Current schedule: %s", self.device.schedule.to_dict())
|
||||
|
||||
|
||||
class LaMarzoccoStatisticsUpdateCoordinator(LaMarzoccoUpdateCoordinator):
|
||||
"""Coordinator for La Marzocco statistics."""
|
||||
|
||||
_default_update_interval = STATISTICS_UPDATE_INTERVAL
|
||||
|
||||
async def _internal_async_update_data(self) -> None:
|
||||
"""Fetch data from API endpoint."""
|
||||
await self.device.get_coffee_and_flush_counter()
|
||||
_LOGGER.debug("Current statistics: %s", self.device.statistics.to_dict())
|
||||
|
||||
@@ -81,6 +81,12 @@
|
||||
},
|
||||
"steam_boiler_ready_time": {
|
||||
"default": "mdi:av-timer"
|
||||
},
|
||||
"total_coffees_made": {
|
||||
"default": "mdi:coffee"
|
||||
},
|
||||
"total_flushes_done": {
|
||||
"default": "mdi:water-pump"
|
||||
}
|
||||
},
|
||||
"switch": {
|
||||
|
||||
@@ -37,5 +37,5 @@
|
||||
"iot_class": "cloud_push",
|
||||
"loggers": ["pylamarzocco"],
|
||||
"quality_scale": "platinum",
|
||||
"requirements": ["pylamarzocco==2.0.0b3"]
|
||||
"requirements": ["pylamarzocco==2.0.0b6"]
|
||||
}
|
||||
|
||||
@@ -9,6 +9,7 @@ from pylamarzocco.const import ModelName, WidgetType
|
||||
from pylamarzocco.models import (
|
||||
BackFlush,
|
||||
BaseWidgetOutput,
|
||||
CoffeeAndFlushCounter,
|
||||
CoffeeBoiler,
|
||||
SteamBoilerLevel,
|
||||
SteamBoilerTemperature,
|
||||
@@ -18,6 +19,7 @@ from homeassistant.components.sensor import (
|
||||
SensorDeviceClass,
|
||||
SensorEntity,
|
||||
SensorEntityDescription,
|
||||
SensorStateClass,
|
||||
)
|
||||
from homeassistant.const import EntityCategory
|
||||
from homeassistant.core import HomeAssistant
|
||||
@@ -98,6 +100,31 @@ ENTITIES: tuple[LaMarzoccoSensorEntityDescription, ...] = (
|
||||
),
|
||||
)
|
||||
|
||||
STATISTIC_ENTITIES: tuple[LaMarzoccoSensorEntityDescription, ...] = (
|
||||
LaMarzoccoSensorEntityDescription(
|
||||
key="drink_stats_coffee",
|
||||
translation_key="total_coffees_made",
|
||||
state_class=SensorStateClass.TOTAL_INCREASING,
|
||||
value_fn=(
|
||||
lambda statistics: cast(
|
||||
CoffeeAndFlushCounter, statistics[WidgetType.COFFEE_AND_FLUSH_COUNTER]
|
||||
).total_coffee
|
||||
),
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
),
|
||||
LaMarzoccoSensorEntityDescription(
|
||||
key="drink_stats_flushing",
|
||||
translation_key="total_flushes_done",
|
||||
state_class=SensorStateClass.TOTAL_INCREASING,
|
||||
value_fn=(
|
||||
lambda statistics: cast(
|
||||
CoffeeAndFlushCounter, statistics[WidgetType.COFFEE_AND_FLUSH_COUNTER]
|
||||
).total_flush
|
||||
),
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
@@ -107,15 +134,21 @@ async def async_setup_entry(
|
||||
"""Set up sensor entities."""
|
||||
coordinator = entry.runtime_data.config_coordinator
|
||||
|
||||
async_add_entities(
|
||||
entities = [
|
||||
LaMarzoccoSensorEntity(coordinator, description)
|
||||
for description in ENTITIES
|
||||
if description.supported_fn(coordinator)
|
||||
]
|
||||
entities.extend(
|
||||
LaMarzoccoStatisticSensorEntity(coordinator, description)
|
||||
for description in STATISTIC_ENTITIES
|
||||
if description.supported_fn(coordinator)
|
||||
)
|
||||
async_add_entities(entities)
|
||||
|
||||
|
||||
class LaMarzoccoSensorEntity(LaMarzoccoEntity, SensorEntity):
|
||||
"""Sensor representing espresso machine water reservoir status."""
|
||||
"""Sensor for La Marzocco."""
|
||||
|
||||
entity_description: LaMarzoccoSensorEntityDescription
|
||||
|
||||
@@ -125,3 +158,14 @@ class LaMarzoccoSensorEntity(LaMarzoccoEntity, SensorEntity):
|
||||
return self.entity_description.value_fn(
|
||||
self.coordinator.device.dashboard.config
|
||||
)
|
||||
|
||||
|
||||
class LaMarzoccoStatisticSensorEntity(LaMarzoccoSensorEntity):
|
||||
"""Sensor for La Marzocco statistics."""
|
||||
|
||||
@property
|
||||
def native_value(self) -> StateType | datetime | None:
|
||||
"""Return the value of the sensor."""
|
||||
return self.entity_description.value_fn(
|
||||
self.coordinator.device.statistics.widgets
|
||||
)
|
||||
|
||||
@@ -147,6 +147,14 @@
|
||||
"steam_boiler_ready_time": {
|
||||
"name": "Steam boiler ready time"
|
||||
},
|
||||
"total_coffees_made": {
|
||||
"name": "Total coffees made",
|
||||
"unit_of_measurement": "coffees"
|
||||
},
|
||||
"total_flushes_done": {
|
||||
"name": "Total flushes done",
|
||||
"unit_of_measurement": "flushes"
|
||||
},
|
||||
"last_cleaning_time": {
|
||||
"name": "Last cleaning time"
|
||||
}
|
||||
|
||||
@@ -20,5 +20,5 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/ld2410_ble",
|
||||
"integration_type": "device",
|
||||
"iot_class": "local_push",
|
||||
"requirements": ["bluetooth-data-tools==1.28.0", "ld2410-ble==0.1.1"]
|
||||
"requirements": ["bluetooth-data-tools==1.28.1", "ld2410-ble==0.1.1"]
|
||||
}
|
||||
|
||||
@@ -35,5 +35,5 @@
|
||||
"dependencies": ["bluetooth_adapters"],
|
||||
"documentation": "https://www.home-assistant.io/integrations/led_ble",
|
||||
"iot_class": "local_polling",
|
||||
"requirements": ["bluetooth-data-tools==1.28.0", "led-ble==1.1.7"]
|
||||
"requirements": ["bluetooth-data-tools==1.28.1", "led-ble==1.1.7"]
|
||||
}
|
||||
|
||||
@@ -13,7 +13,7 @@ from homeassistant.const import CONF_HOST
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import ConfigEntryNotReady
|
||||
|
||||
from .const import CONTROLLER, CONTROLLER_KEY, DOMAIN, PLATFORMS
|
||||
from .const import DOMAIN, PLATFORMS, SHARED_DATA, LinkPlaySharedData
|
||||
from .utils import async_get_client_session
|
||||
|
||||
|
||||
@@ -44,11 +44,11 @@ async def async_setup_entry(hass: HomeAssistant, entry: LinkPlayConfigEntry) ->
|
||||
# setup the controller and discover multirooms
|
||||
controller: LinkPlayController | None = None
|
||||
hass.data.setdefault(DOMAIN, {})
|
||||
if CONTROLLER not in hass.data[DOMAIN]:
|
||||
if SHARED_DATA not in hass.data[DOMAIN]:
|
||||
controller = LinkPlayController(session)
|
||||
hass.data[DOMAIN][CONTROLLER_KEY] = controller
|
||||
hass.data[DOMAIN][SHARED_DATA] = LinkPlaySharedData(controller, {})
|
||||
else:
|
||||
controller = hass.data[DOMAIN][CONTROLLER_KEY]
|
||||
controller = hass.data[DOMAIN][SHARED_DATA].controller
|
||||
|
||||
await controller.add_bridge(bridge)
|
||||
await controller.discover_multirooms()
|
||||
@@ -62,4 +62,10 @@ async def async_setup_entry(hass: HomeAssistant, entry: LinkPlayConfigEntry) ->
|
||||
async def async_unload_entry(hass: HomeAssistant, entry: LinkPlayConfigEntry) -> bool:
|
||||
"""Unload a config entry."""
|
||||
|
||||
# remove the bridge from the controller and discover multirooms
|
||||
bridge: LinkPlayBridge | None = entry.runtime_data.bridge
|
||||
controller: LinkPlayController = hass.data[DOMAIN][SHARED_DATA].controller
|
||||
await controller.remove_bridge(bridge)
|
||||
await controller.discover_multirooms()
|
||||
|
||||
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
|
||||
|
||||
@@ -1,12 +1,23 @@
|
||||
"""LinkPlay constants."""
|
||||
|
||||
from dataclasses import dataclass
|
||||
|
||||
from linkplay.controller import LinkPlayController
|
||||
|
||||
from homeassistant.const import Platform
|
||||
from homeassistant.util.hass_dict import HassKey
|
||||
|
||||
|
||||
@dataclass
|
||||
class LinkPlaySharedData:
|
||||
"""Shared data for LinkPlay."""
|
||||
|
||||
controller: LinkPlayController
|
||||
entity_to_bridge: dict[str, str]
|
||||
|
||||
|
||||
DOMAIN = "linkplay"
|
||||
CONTROLLER = "controller"
|
||||
CONTROLLER_KEY: HassKey[LinkPlayController] = HassKey(CONTROLLER)
|
||||
SHARED_DATA = "shared_data"
|
||||
SHARED_DATA_KEY: HassKey[LinkPlaySharedData] = HassKey(SHARED_DATA)
|
||||
PLATFORMS = [Platform.BUTTON, Platform.MEDIA_PLAYER]
|
||||
DATA_SESSION = "session"
|
||||
|
||||
@@ -23,19 +23,14 @@ from homeassistant.components.media_player import (
|
||||
RepeatMode,
|
||||
async_process_play_media_url,
|
||||
)
|
||||
from homeassistant.const import Platform
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import HomeAssistantError, ServiceValidationError
|
||||
from homeassistant.helpers import (
|
||||
config_validation as cv,
|
||||
entity_platform,
|
||||
entity_registry as er,
|
||||
)
|
||||
from homeassistant.helpers import config_validation as cv, entity_platform
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
from homeassistant.util.dt import utcnow
|
||||
|
||||
from . import LinkPlayConfigEntry, LinkPlayData
|
||||
from .const import CONTROLLER_KEY, DOMAIN
|
||||
from . import SHARED_DATA, LinkPlayConfigEntry
|
||||
from .const import DOMAIN
|
||||
from .entity import LinkPlayBaseEntity, exception_wrap
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
@@ -163,6 +158,13 @@ class LinkPlayMediaPlayerEntity(LinkPlayBaseEntity, MediaPlayerEntity):
|
||||
mode.value for mode in bridge.player.available_equalizer_modes
|
||||
]
|
||||
|
||||
async def async_added_to_hass(self) -> None:
|
||||
"""Handle common setup when added to hass."""
|
||||
await super().async_added_to_hass()
|
||||
self.hass.data[DOMAIN][SHARED_DATA].entity_to_bridge[self.entity_id] = (
|
||||
self._bridge.device.uuid
|
||||
)
|
||||
|
||||
@exception_wrap
|
||||
async def async_update(self) -> None:
|
||||
"""Update the state of the media player."""
|
||||
@@ -276,62 +278,56 @@ class LinkPlayMediaPlayerEntity(LinkPlayBaseEntity, MediaPlayerEntity):
|
||||
async def async_join_players(self, group_members: list[str]) -> None:
|
||||
"""Join `group_members` as a player group with the current player."""
|
||||
|
||||
controller: LinkPlayController = self.hass.data[DOMAIN][CONTROLLER_KEY]
|
||||
controller: LinkPlayController = self.hass.data[DOMAIN][SHARED_DATA].controller
|
||||
multiroom = self._bridge.multiroom
|
||||
if multiroom is None:
|
||||
multiroom = LinkPlayMultiroom(self._bridge)
|
||||
|
||||
for group_member in group_members:
|
||||
bridge = self._get_linkplay_bridge(group_member)
|
||||
bridge = await self._get_linkplay_bridge(group_member)
|
||||
if bridge:
|
||||
await multiroom.add_follower(bridge)
|
||||
|
||||
await controller.discover_multirooms()
|
||||
|
||||
def _get_linkplay_bridge(self, entity_id: str) -> LinkPlayBridge:
|
||||
async def _get_linkplay_bridge(self, entity_id: str) -> LinkPlayBridge:
|
||||
"""Get linkplay bridge from entity_id."""
|
||||
|
||||
entity_registry = er.async_get(self.hass)
|
||||
shared_data = self.hass.data[DOMAIN][SHARED_DATA]
|
||||
controller = shared_data.controller
|
||||
bridge_uuid = shared_data.entity_to_bridge.get(entity_id, None)
|
||||
bridge = await controller.find_bridge(bridge_uuid)
|
||||
|
||||
# Check for valid linkplay media_player entity
|
||||
entity_entry = entity_registry.async_get(entity_id)
|
||||
|
||||
if (
|
||||
entity_entry is None
|
||||
or entity_entry.domain != Platform.MEDIA_PLAYER
|
||||
or entity_entry.platform != DOMAIN
|
||||
or entity_entry.config_entry_id is None
|
||||
):
|
||||
if bridge is None:
|
||||
raise ServiceValidationError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="invalid_grouping_entity",
|
||||
translation_placeholders={"entity_id": entity_id},
|
||||
)
|
||||
|
||||
config_entry = self.hass.config_entries.async_get_entry(
|
||||
entity_entry.config_entry_id
|
||||
)
|
||||
assert config_entry
|
||||
|
||||
# Return bridge
|
||||
data: LinkPlayData = config_entry.runtime_data
|
||||
return data.bridge
|
||||
return bridge
|
||||
|
||||
@property
|
||||
def group_members(self) -> list[str]:
|
||||
"""List of players which are grouped together."""
|
||||
multiroom = self._bridge.multiroom
|
||||
if multiroom is not None:
|
||||
return [multiroom.leader.device.uuid] + [
|
||||
follower.device.uuid for follower in multiroom.followers
|
||||
]
|
||||
if multiroom is None:
|
||||
return []
|
||||
|
||||
return []
|
||||
shared_data = self.hass.data[DOMAIN][SHARED_DATA]
|
||||
|
||||
return [
|
||||
entity_id
|
||||
for entity_id, bridge in shared_data.entity_to_bridge.items()
|
||||
if bridge
|
||||
in [multiroom.leader.device.uuid]
|
||||
+ [follower.device.uuid for follower in multiroom.followers]
|
||||
]
|
||||
|
||||
@exception_wrap
|
||||
async def async_unjoin_player(self) -> None:
|
||||
"""Remove this player from any group."""
|
||||
controller: LinkPlayController = self.hass.data[DOMAIN][CONTROLLER_KEY]
|
||||
controller: LinkPlayController = self.hass.data[DOMAIN][SHARED_DATA].controller
|
||||
|
||||
multiroom = self._bridge.multiroom
|
||||
if multiroom is not None:
|
||||
|
||||
@@ -6,5 +6,5 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/melcloud",
|
||||
"iot_class": "cloud_polling",
|
||||
"loggers": ["pymelcloud"],
|
||||
"requirements": ["pymelcloud==2.5.9"]
|
||||
"requirements": ["python-melcloud==0.1.0"]
|
||||
}
|
||||
|
||||
@@ -22,6 +22,7 @@ PLATFORMS: list[Platform] = [
|
||||
Platform.BINARY_SENSOR,
|
||||
Platform.BUTTON,
|
||||
Platform.CLIMATE,
|
||||
Platform.FAN,
|
||||
Platform.LIGHT,
|
||||
Platform.SENSOR,
|
||||
Platform.SWITCH,
|
||||
|
||||
@@ -9,15 +9,18 @@ ACTIONS = "actions"
|
||||
POWER_ON = "powerOn"
|
||||
POWER_OFF = "powerOff"
|
||||
PROCESS_ACTION = "processAction"
|
||||
DISABLED_TEMP_ENTITIES = (
|
||||
-32768 / 100,
|
||||
-32766 / 100,
|
||||
)
|
||||
VENTILATION_STEP = "ventilationStep"
|
||||
TARGET_TEMPERATURE = "targetTemperature"
|
||||
AMBIENT_LIGHT = "ambientLight"
|
||||
LIGHT = "light"
|
||||
LIGHT_ON = 1
|
||||
LIGHT_OFF = 2
|
||||
|
||||
DISABLED_TEMP_ENTITIES = (
|
||||
-32768 / 100,
|
||||
-32766 / 100,
|
||||
)
|
||||
|
||||
|
||||
class MieleAppliance(IntEnum):
|
||||
"""Define appliance types."""
|
||||
@@ -160,3 +163,933 @@ PROCESS_ACTIONS = {
|
||||
"start_supercooling": MieleActions.START_SUPERCOOL,
|
||||
"stop_supercooling": MieleActions.STOP_SUPERCOOL,
|
||||
}
|
||||
|
||||
STATE_PROGRAM_PHASE_WASHING_MACHINE = {
|
||||
0: "not_running", # Returned by the API when the machine is switched off entirely.
|
||||
256: "not_running",
|
||||
257: "pre_wash",
|
||||
258: "soak",
|
||||
259: "pre_wash",
|
||||
260: "main_wash",
|
||||
261: "rinse",
|
||||
262: "rinse_hold",
|
||||
263: "cleaning",
|
||||
264: "cooling_down",
|
||||
265: "drain",
|
||||
266: "spin",
|
||||
267: "anti_crease",
|
||||
268: "finished",
|
||||
269: "venting",
|
||||
270: "starch_stop",
|
||||
271: "freshen_up_and_moisten",
|
||||
272: "steam_smoothing",
|
||||
279: "hygiene",
|
||||
280: "drying",
|
||||
285: "disinfecting",
|
||||
295: "steam_smoothing",
|
||||
65535: "not_running", # Seems to be default for some devices.
|
||||
}
|
||||
|
||||
STATE_PROGRAM_PHASE_TUMBLE_DRYER = {
|
||||
0: "not_running",
|
||||
512: "not_running",
|
||||
513: "program_running",
|
||||
514: "drying",
|
||||
515: "machine_iron",
|
||||
516: "hand_iron_2",
|
||||
517: "normal",
|
||||
518: "normal_plus",
|
||||
519: "cooling_down",
|
||||
520: "hand_iron_1",
|
||||
521: "anti_crease",
|
||||
522: "finished",
|
||||
523: "extra_dry",
|
||||
524: "hand_iron",
|
||||
526: "moisten",
|
||||
527: "thermo_spin",
|
||||
528: "timed_drying",
|
||||
529: "warm_air",
|
||||
530: "steam_smoothing",
|
||||
531: "comfort_cooling",
|
||||
532: "rinse_out_lint",
|
||||
533: "rinses",
|
||||
535: "not_running",
|
||||
534: "smoothing",
|
||||
536: "not_running",
|
||||
537: "not_running",
|
||||
538: "slightly_dry",
|
||||
539: "safety_cooling",
|
||||
65535: "not_running",
|
||||
}
|
||||
|
||||
STATE_PROGRAM_PHASE_DISHWASHER = {
|
||||
1792: "not_running",
|
||||
1793: "reactivating",
|
||||
1794: "pre_dishwash",
|
||||
1795: "main_dishwash",
|
||||
1796: "rinse",
|
||||
1797: "interim_rinse",
|
||||
1798: "final_rinse",
|
||||
1799: "drying",
|
||||
1800: "finished",
|
||||
1801: "pre_dishwash",
|
||||
65535: "not_running",
|
||||
}
|
||||
|
||||
STATE_PROGRAM_PHASE_OVEN = {
|
||||
0: "not_running",
|
||||
3073: "heating_up",
|
||||
3074: "process_running",
|
||||
3078: "process_finished",
|
||||
3084: "energy_save",
|
||||
65535: "not_running",
|
||||
}
|
||||
STATE_PROGRAM_PHASE_WARMING_DRAWER = {
|
||||
0: "not_running",
|
||||
3075: "door_open",
|
||||
3094: "keeping_warm",
|
||||
3088: "cooling_down",
|
||||
65535: "not_running",
|
||||
}
|
||||
STATE_PROGRAM_PHASE_MICROWAVE = {
|
||||
0: "not_running",
|
||||
3329: "heating",
|
||||
3330: "process_running",
|
||||
3334: "process_finished",
|
||||
3340: "energy_save",
|
||||
65535: "not_running",
|
||||
}
|
||||
STATE_PROGRAM_PHASE_COFFEE_SYSTEM = {
|
||||
# Coffee system
|
||||
3073: "heating_up",
|
||||
4352: "not_running",
|
||||
4353: "espresso",
|
||||
4355: "milk_foam",
|
||||
4361: "dispensing",
|
||||
4369: "pre_brewing",
|
||||
4377: "grinding",
|
||||
4401: "2nd_grinding",
|
||||
4354: "hot_milk",
|
||||
4393: "2nd_pre_brewing",
|
||||
4385: "2nd_espresso",
|
||||
4404: "dispensing",
|
||||
4405: "rinse",
|
||||
65535: "not_running",
|
||||
}
|
||||
STATE_PROGRAM_PHASE_ROBOT_VACUUM_CLEANER = {
|
||||
0: "not_running",
|
||||
5889: "vacuum_cleaning",
|
||||
5890: "returning",
|
||||
5891: "vacuum_cleaning_paused",
|
||||
5892: "going_to_target_area",
|
||||
5893: "wheel_lifted", # F1
|
||||
5894: "dirty_sensors", # F2
|
||||
5895: "dust_box_missing", # F3
|
||||
5896: "blocked_drive_wheels", # F4
|
||||
5897: "blocked_brushes", # F5
|
||||
5898: "motor_overload", # F6
|
||||
5899: "internal_fault", # F7
|
||||
5900: "blocked_front_wheel", # F8
|
||||
5903: "docked",
|
||||
5904: "docked",
|
||||
5910: "remote_controlled",
|
||||
65535: "not_running",
|
||||
}
|
||||
STATE_PROGRAM_PHASE_MICROWAVE_OVEN_COMBO = {
|
||||
0: "not_running",
|
||||
3863: "steam_reduction",
|
||||
7938: "process_running",
|
||||
7939: "waiting_for_start",
|
||||
7940: "heating_up_phase",
|
||||
7942: "process_finished",
|
||||
65535: "not_running",
|
||||
}
|
||||
|
||||
STATE_PROGRAM_PHASE: dict[int, dict[int, str]] = {
|
||||
MieleAppliance.WASHING_MACHINE: STATE_PROGRAM_PHASE_WASHING_MACHINE,
|
||||
MieleAppliance.WASHING_MACHINE_SEMI_PROFESSIONAL: STATE_PROGRAM_PHASE_WASHING_MACHINE,
|
||||
MieleAppliance.WASHING_MACHINE_PROFESSIONAL: STATE_PROGRAM_PHASE_WASHING_MACHINE,
|
||||
MieleAppliance.TUMBLE_DRYER: STATE_PROGRAM_PHASE_TUMBLE_DRYER,
|
||||
MieleAppliance.DRYER_PROFESSIONAL: STATE_PROGRAM_PHASE_TUMBLE_DRYER,
|
||||
MieleAppliance.TUMBLE_DRYER_SEMI_PROFESSIONAL: STATE_PROGRAM_PHASE_TUMBLE_DRYER,
|
||||
MieleAppliance.DISHWASHER: STATE_PROGRAM_PHASE_DISHWASHER,
|
||||
MieleAppliance.DISHWASHER_SEMI_PROFESSIONAL: STATE_PROGRAM_PHASE_DISHWASHER,
|
||||
MieleAppliance.DISHWASHER_PROFESSIONAL: STATE_PROGRAM_PHASE_DISHWASHER,
|
||||
MieleAppliance.OVEN: STATE_PROGRAM_PHASE_OVEN,
|
||||
MieleAppliance.OVEN_MICROWAVE: STATE_PROGRAM_PHASE_MICROWAVE_OVEN_COMBO,
|
||||
MieleAppliance.STEAM_OVEN: STATE_PROGRAM_PHASE_OVEN,
|
||||
MieleAppliance.DIALOG_OVEN: STATE_PROGRAM_PHASE_OVEN,
|
||||
MieleAppliance.MICROWAVE: STATE_PROGRAM_PHASE_MICROWAVE,
|
||||
MieleAppliance.COFFEE_SYSTEM: STATE_PROGRAM_PHASE_COFFEE_SYSTEM,
|
||||
MieleAppliance.ROBOT_VACUUM_CLEANER: STATE_PROGRAM_PHASE_ROBOT_VACUUM_CLEANER,
|
||||
}
|
||||
|
||||
STATE_PROGRAM_TYPE = {
|
||||
0: "normal_operation_mode",
|
||||
1: "own_program",
|
||||
2: "automatic_program",
|
||||
3: "cleaning_care_program",
|
||||
4: "maintenance_program",
|
||||
}
|
||||
|
||||
WASHING_MACHINE_PROGRAM_ID: dict[int, str] = {
|
||||
-1: "no_program", # Extrapolated from other device types.
|
||||
0: "no_program", # Returned by the API when no program is selected.
|
||||
1: "cottons",
|
||||
3: "minimum_iron",
|
||||
4: "delicates",
|
||||
8: "woollens",
|
||||
9: "silks",
|
||||
17: "starch",
|
||||
18: "rinse",
|
||||
21: "drain_spin",
|
||||
22: "curtains",
|
||||
23: "shirts",
|
||||
24: "denim",
|
||||
27: "proofing",
|
||||
29: "sportswear",
|
||||
31: "automatic_plus",
|
||||
37: "outerwear",
|
||||
39: "pillows",
|
||||
45: "cool_air", # washer-dryer
|
||||
46: "warm_air", # washer-dryer
|
||||
48: "rinse_out_lint", # washer-dryer
|
||||
50: "dark_garments",
|
||||
52: "separate_rinse_starch",
|
||||
53: "first_wash",
|
||||
69: "cottons_hygiene",
|
||||
75: "steam_care", # washer-dryer
|
||||
76: "freshen_up", # washer-dryer
|
||||
77: "trainers",
|
||||
91: "clean_machine",
|
||||
95: "down_duvets",
|
||||
122: "express_20",
|
||||
123: "denim",
|
||||
129: "down_filled_items",
|
||||
133: "cottons_eco",
|
||||
146: "quick_power_wash",
|
||||
190: "eco_40_60",
|
||||
}
|
||||
|
||||
DISHWASHER_PROGRAM_ID: dict[int, str] = {
|
||||
-1: "no_program", # Sometimes returned by the API when the machine is switched off entirely, in conjunection with program phase 65535.
|
||||
0: "no_program", # Returned by the API when the machine is switched off entirely.
|
||||
1: "intensive",
|
||||
2: "maintenance",
|
||||
3: "eco",
|
||||
6: "automatic",
|
||||
7: "automatic",
|
||||
9: "solar_save",
|
||||
10: "gentle",
|
||||
11: "extra_quiet",
|
||||
12: "hygiene",
|
||||
13: "quick_power_wash",
|
||||
14: "pasta_paela",
|
||||
17: "tall_items",
|
||||
19: "glasses_warm",
|
||||
26: "intensive",
|
||||
27: "maintenance", # or maintenance_program?
|
||||
28: "eco",
|
||||
30: "normal",
|
||||
31: "automatic",
|
||||
32: "automatic", # sources disagree on ID
|
||||
34: "solar_save",
|
||||
35: "gentle",
|
||||
36: "extra_quiet",
|
||||
37: "hygiene",
|
||||
38: "quick_power_wash",
|
||||
42: "tall_items",
|
||||
44: "power_wash",
|
||||
}
|
||||
TUMBLE_DRYER_PROGRAM_ID: dict[int, str] = {
|
||||
-1: "no_program", # Extrapolated from other device types.
|
||||
0: "no_program", # Extrapolated from other device types
|
||||
10: "automatic_plus",
|
||||
20: "cottons",
|
||||
23: "cottons_hygiene",
|
||||
30: "minimum_iron",
|
||||
31: "gentle_minimum_iron",
|
||||
40: "woollens_handcare",
|
||||
50: "delicates",
|
||||
60: "warm_air",
|
||||
70: "cool_air",
|
||||
80: "express",
|
||||
90: "cottons",
|
||||
100: "gentle_smoothing",
|
||||
120: "proofing",
|
||||
130: "denim",
|
||||
131: "gentle_denim",
|
||||
150: "sportswear",
|
||||
160: "outerwear",
|
||||
170: "silks_handcare",
|
||||
190: "standard_pillows",
|
||||
220: "basket_program",
|
||||
240: "smoothing",
|
||||
99001: "steam_smoothing",
|
||||
99002: "bed_linen",
|
||||
99003: "cottons_eco",
|
||||
99004: "shirts",
|
||||
99005: "large_pillows",
|
||||
}
|
||||
|
||||
OVEN_PROGRAM_ID: dict[int, str] = {
|
||||
-1: "no_program", # Extrapolated from other device types.
|
||||
0: "no_program", # Extrapolated from other device types
|
||||
1: "defrost",
|
||||
6: "eco_fan_heat",
|
||||
7: "auto_roast",
|
||||
10: "full_grill",
|
||||
11: "economy_grill",
|
||||
13: "fan_plus",
|
||||
14: "intensive_bake",
|
||||
19: "microwave",
|
||||
24: "conventional_heat",
|
||||
25: "top_heat",
|
||||
29: "fan_grill",
|
||||
31: "bottom_heat",
|
||||
35: "moisture_plus_auto_roast",
|
||||
40: "moisture_plus_fan_plus",
|
||||
74: "moisture_plus_intensive_bake",
|
||||
76: "moisture_plus_conventional_heat",
|
||||
49: "moisture_plus_fan_plus",
|
||||
356: "defrost",
|
||||
357: "drying",
|
||||
358: "heat_crockery",
|
||||
361: "steam_cooking",
|
||||
362: "keeping_warm",
|
||||
512: "1_tray",
|
||||
513: "2_trays",
|
||||
529: "baking_tray",
|
||||
621: "prove_15_min",
|
||||
622: "prove_30_min",
|
||||
623: "prove_45_min",
|
||||
99001: "steam_bake",
|
||||
17003: "no_program",
|
||||
}
|
||||
DISH_WARMER_PROGRAM_ID: dict[int, str] = {
|
||||
-1: "no_program",
|
||||
0: "no_program",
|
||||
1: "warm_cups_glasses",
|
||||
2: "warm_dishes_plates",
|
||||
3: "keep_warm",
|
||||
4: "slow_roasting",
|
||||
}
|
||||
ROBOT_VACUUM_CLEANER_PROGRAM_ID: dict[int, str] = {
|
||||
-1: "no_program", # Extrapolated from other device types
|
||||
0: "no_program", # Extrapolated from other device types
|
||||
1: "auto",
|
||||
2: "spot",
|
||||
3: "turbo",
|
||||
4: "silent",
|
||||
}
|
||||
COFFEE_SYSTEM_PROGRAM_ID: dict[int, str] = {
|
||||
-1: "no_program", # Extrapolated from other device types
|
||||
0: "no_program", # Extrapolated from other device types
|
||||
16016: "appliance_settings", # display brightness
|
||||
16018: "appliance_settings", # volume
|
||||
16019: "appliance_settings", # buttons volume
|
||||
16020: "appliance_settings", # child lock
|
||||
16021: "appliance_settings", # water hardness
|
||||
16027: "appliance_settings", # welcome sound
|
||||
16033: "appliance_settings", # connection status
|
||||
16035: "appliance_settings", # remote control
|
||||
16037: "appliance_settings", # remote update
|
||||
17004: "check_appliance",
|
||||
# profile 1
|
||||
24000: "ristretto",
|
||||
24001: "espresso",
|
||||
24002: "coffee",
|
||||
24003: "long_coffee",
|
||||
24004: "cappuccino",
|
||||
24005: "cappuccino_italiano",
|
||||
24006: "latte_macchiato",
|
||||
24007: "espresso_macchiato",
|
||||
24008: "cafe_au_lait",
|
||||
24009: "caffe_latte",
|
||||
24012: "flat_white",
|
||||
24013: "very_hot_water",
|
||||
24014: "hot_water",
|
||||
24015: "hot_milk",
|
||||
24016: "milk_foam",
|
||||
24017: "black_tea",
|
||||
24018: "herbal_tea",
|
||||
24019: "fruit_tea",
|
||||
24020: "green_tea",
|
||||
24021: "white_tea",
|
||||
24022: "japanese_tea",
|
||||
# profile 2
|
||||
24032: "ristretto",
|
||||
24033: "espresso",
|
||||
24034: "coffee",
|
||||
24035: "long_coffee",
|
||||
24036: "cappuccino",
|
||||
24037: "cappuccino_italiano",
|
||||
24038: "latte_macchiato",
|
||||
24039: "espresso_macchiato",
|
||||
24040: "cafe_au_lait",
|
||||
24041: "caffe_latte",
|
||||
24044: "flat_white",
|
||||
24045: "very_hot_water",
|
||||
24046: "hot_water",
|
||||
24047: "hot_milk",
|
||||
24048: "milk_foam",
|
||||
24049: "black_tea",
|
||||
24050: "herbal_tea",
|
||||
24051: "fruit_tea",
|
||||
24052: "green_tea",
|
||||
24053: "white_tea",
|
||||
24054: "japanese_tea",
|
||||
# profile 3
|
||||
24064: "ristretto",
|
||||
24065: "espresso",
|
||||
24066: "coffee",
|
||||
24067: "long_coffee",
|
||||
24068: "cappuccino",
|
||||
24069: "cappuccino_italiano",
|
||||
24070: "latte_macchiato",
|
||||
24071: "espresso_macchiato",
|
||||
24072: "cafe_au_lait",
|
||||
24073: "caffe_latte",
|
||||
24076: "flat_white",
|
||||
24077: "very_hot_water",
|
||||
24078: "hot_water",
|
||||
24079: "hot_milk",
|
||||
24080: "milk_foam",
|
||||
24081: "black_tea",
|
||||
24082: "herbal_tea",
|
||||
24083: "fruit_tea",
|
||||
24084: "green_tea",
|
||||
24085: "white_tea",
|
||||
24086: "japanese_tea",
|
||||
# profile 4
|
||||
24096: "ristretto",
|
||||
24097: "espresso",
|
||||
24098: "coffee",
|
||||
24099: "long_coffee",
|
||||
24100: "cappuccino",
|
||||
24101: "cappuccino_italiano",
|
||||
24102: "latte_macchiato",
|
||||
24103: "espresso_macchiato",
|
||||
24104: "cafe_au_lait",
|
||||
24105: "caffe_latte",
|
||||
24108: "flat_white",
|
||||
24109: "very_hot_water",
|
||||
24110: "hot_water",
|
||||
24111: "hot_milk",
|
||||
24112: "milk_foam",
|
||||
24113: "black_tea",
|
||||
24114: "herbal_tea",
|
||||
24115: "fruit_tea",
|
||||
24116: "green_tea",
|
||||
24117: "white_tea",
|
||||
24118: "japanese_tea",
|
||||
# profile 5
|
||||
24128: "ristretto",
|
||||
24129: "espresso",
|
||||
24130: "coffee",
|
||||
24131: "long_coffee",
|
||||
24132: "cappuccino",
|
||||
24133: "cappuccino_italiano",
|
||||
24134: "latte_macchiato",
|
||||
24135: "espresso_macchiato",
|
||||
24136: "cafe_au_lait",
|
||||
24137: "caffe_latte",
|
||||
24140: "flat_white",
|
||||
24141: "very_hot_water",
|
||||
24142: "hot_water",
|
||||
24143: "hot_milk",
|
||||
24144: "milk_foam",
|
||||
24145: "black_tea",
|
||||
24146: "herbal_tea",
|
||||
24147: "fruit_tea",
|
||||
24148: "green_tea",
|
||||
24149: "white_tea",
|
||||
24150: "japanese_tea",
|
||||
# special programs
|
||||
24400: "coffee_pot",
|
||||
24407: "barista_assistant",
|
||||
# machine settings menu
|
||||
24500: "appliance_settings", # total dispensed
|
||||
24502: "appliance_settings", # lights appliance on
|
||||
24503: "appliance_settings", # lights appliance off
|
||||
24504: "appliance_settings", # turn off lights after
|
||||
24506: "appliance_settings", # altitude
|
||||
24513: "appliance_settings", # performance mode
|
||||
24516: "appliance_settings", # turn off after
|
||||
24537: "appliance_settings", # advanced mode
|
||||
24542: "appliance_settings", # tea timer
|
||||
24549: "appliance_settings", # total coffee dispensed
|
||||
24550: "appliance_settings", # total tea dispensed
|
||||
24551: "appliance_settings", # total ristretto
|
||||
24552: "appliance_settings", # total cappuccino
|
||||
24553: "appliance_settings", # total espresso
|
||||
24554: "appliance_settings", # total coffee
|
||||
24555: "appliance_settings", # total long coffee
|
||||
24556: "appliance_settings", # total italian cappuccino
|
||||
24557: "appliance_settings", # total latte macchiato
|
||||
24558: "appliance_settings", # total caffe latte
|
||||
24560: "appliance_settings", # total espresso macchiato
|
||||
24562: "appliance_settings", # total flat white
|
||||
24563: "appliance_settings", # total coffee with milk
|
||||
24564: "appliance_settings", # total black tea
|
||||
24565: "appliance_settings", # total herbal tea
|
||||
24566: "appliance_settings", # total fruit tea
|
||||
24567: "appliance_settings", # total green tea
|
||||
24568: "appliance_settings", # total white tea
|
||||
24569: "appliance_settings", # total japanese tea
|
||||
24571: "appliance_settings", # total milk foam
|
||||
24572: "appliance_settings", # total hot milk
|
||||
24573: "appliance_settings", # total hot water
|
||||
24574: "appliance_settings", # total very hot water
|
||||
24575: "appliance_settings", # counter to descaling
|
||||
24576: "appliance_settings", # counter to brewing unit degreasing
|
||||
# maintenance
|
||||
24750: "appliance_rinse",
|
||||
24751: "descaling",
|
||||
24753: "brewing_unit_degrease",
|
||||
24754: "milk_pipework_rinse",
|
||||
24759: "appliance_rinse",
|
||||
24773: "appliance_rinse",
|
||||
24787: "appliance_rinse",
|
||||
24788: "appliance_rinse",
|
||||
24789: "milk_pipework_clean",
|
||||
# profiles settings menu
|
||||
24800: "appliance_settings", # add profile
|
||||
24801: "appliance_settings", # ask profile settings
|
||||
24813: "appliance_settings", # modify profile name
|
||||
}
|
||||
|
||||
STEAM_OVEN_MICRO_PROGRAM_ID: dict[int, str] = {
|
||||
8: "steam_cooking",
|
||||
19: "microwave",
|
||||
53: "popcorn",
|
||||
54: "quick_mw",
|
||||
72: "sous_vide",
|
||||
75: "eco_steam_cooking",
|
||||
77: "rapid_steam_cooking",
|
||||
326: "descale",
|
||||
330: "menu_cooking",
|
||||
2018: "reheating_with_steam",
|
||||
2019: "defrosting_with_steam",
|
||||
2020: "blanching",
|
||||
2021: "bottling",
|
||||
2022: "heat_crockery",
|
||||
2023: "prove_dough",
|
||||
2027: "soak",
|
||||
2029: "reheating_with_microwave",
|
||||
2030: "defrosting_with_microwave",
|
||||
2031: "artichokes_small",
|
||||
2032: "artichokes_medium",
|
||||
2033: "artichokes_large",
|
||||
2034: "eggplant_sliced",
|
||||
2035: "eggplant_diced",
|
||||
2036: "cauliflower_whole_small",
|
||||
2039: "cauliflower_whole_medium",
|
||||
2042: "cauliflower_whole_large",
|
||||
2046: "cauliflower_florets_small",
|
||||
2048: "cauliflower_florets_medium",
|
||||
2049: "cauliflower_florets_large",
|
||||
2051: "green_beans_whole",
|
||||
2052: "green_beans_cut",
|
||||
2053: "yellow_beans_whole",
|
||||
2054: "yellow_beans_cut",
|
||||
2055: "broad_beans",
|
||||
2056: "common_beans",
|
||||
2057: "runner_beans_whole",
|
||||
2058: "runner_beans_pieces",
|
||||
2059: "runner_beans_sliced",
|
||||
2060: "broccoli_whole_small",
|
||||
2061: "broccoli_whole_medium",
|
||||
2062: "broccoli_whole_large",
|
||||
2064: "broccoli_florets_small",
|
||||
2066: "broccoli_florets_medium",
|
||||
2068: "broccoli_florets_large",
|
||||
2069: "endive_halved",
|
||||
2070: "endive_quartered",
|
||||
2071: "endive_strips",
|
||||
2072: "chinese_cabbage_cut",
|
||||
2073: "peas",
|
||||
2074: "fennel_halved",
|
||||
2075: "fennel_quartered",
|
||||
2076: "fennel_strips",
|
||||
2077: "kale_cut",
|
||||
2080: "potatoes_in_the_skin_waxy_small_steam_cooking",
|
||||
2081: "potatoes_in_the_skin_waxy_small_rapid_steam_cooking",
|
||||
2083: "potatoes_in_the_skin_waxy_medium_steam_cooking",
|
||||
2084: "potatoes_in_the_skin_waxy_medium_rapid_steam_cooking",
|
||||
2086: "potatoes_in_the_skin_waxy_large_steam_cooking",
|
||||
2087: "potatoes_in_the_skin_waxy_large_rapid_steam_cooking",
|
||||
2088: "potatoes_in_the_skin_floury_small",
|
||||
2091: "potatoes_in_the_skin_floury_medium",
|
||||
2094: "potatoes_in_the_skin_floury_large",
|
||||
2097: "potatoes_in_the_skin_mainly_waxy_small",
|
||||
2100: "potatoes_in_the_skin_mainly_waxy_medium",
|
||||
2103: "potatoes_in_the_skin_mainly_waxy_large",
|
||||
2106: "potatoes_waxy_whole_small",
|
||||
2109: "potatoes_waxy_whole_medium",
|
||||
2112: "potatoes_waxy_whole_large",
|
||||
2115: "potatoes_waxy_halved",
|
||||
2116: "potatoes_waxy_quartered",
|
||||
2117: "potatoes_waxy_diced",
|
||||
2118: "potatoes_mainly_waxy_small",
|
||||
2119: "potatoes_mainly_waxy_medium",
|
||||
2120: "potatoes_mainly_waxy_large",
|
||||
2121: "potatoes_mainly_waxy_halved",
|
||||
2122: "potatoes_mainly_waxy_quartered",
|
||||
2123: "potatoes_mainly_waxy_diced",
|
||||
2124: "potatoes_floury_whole_small",
|
||||
2125: "potatoes_floury_whole_medium",
|
||||
2126: "potatoes_floury_whole_large",
|
||||
2127: "potatoes_floury_halved",
|
||||
2128: "potatoes_floury_quartered",
|
||||
2129: "potatoes_floury_diced",
|
||||
2130: "german_turnip_sliced",
|
||||
2131: "german_turnip_cut_into_batons",
|
||||
2132: "german_turnip_sliced",
|
||||
2133: "pumpkin_diced",
|
||||
2134: "corn_on_the_cob",
|
||||
2135: "mangel_cut",
|
||||
2136: "bunched_carrots_whole_small",
|
||||
2137: "bunched_carrots_whole_medium",
|
||||
2138: "bunched_carrots_whole_large",
|
||||
2139: "bunched_carrots_halved",
|
||||
2140: "bunched_carrots_quartered",
|
||||
2141: "bunched_carrots_diced",
|
||||
2142: "bunched_carrots_cut_into_batons",
|
||||
2143: "bunched_carrots_sliced",
|
||||
2144: "parisian_carrots_small",
|
||||
2145: "parisian_carrots_medium",
|
||||
2146: "parisian_carrots_large",
|
||||
2147: "carrots_whole_small",
|
||||
2148: "carrots_whole_medium",
|
||||
2149: "carrots_whole_large",
|
||||
2150: "carrots_halved",
|
||||
2151: "carrots_quartered",
|
||||
2152: "carrots_diced",
|
||||
2153: "carrots_cut_into_batons",
|
||||
2155: "carrots_sliced",
|
||||
2156: "pepper_halved",
|
||||
2157: "pepper_quartered",
|
||||
2158: "pepper_strips",
|
||||
2159: "pepper_diced",
|
||||
2160: "parsnip_sliced",
|
||||
2161: "parsnip_diced",
|
||||
2162: "parsnip_cut_into_batons",
|
||||
2163: "parsley_root_sliced",
|
||||
2164: "parsley_root_diced",
|
||||
2165: "parsley_root_cut_into_batons",
|
||||
2166: "leek_pieces",
|
||||
2167: "leek_rings",
|
||||
2168: "romanesco_whole_small",
|
||||
2169: "romanesco_whole_medium",
|
||||
2170: "romanesco_whole_large",
|
||||
2171: "romanesco_florets_small",
|
||||
2172: "romanesco_florets_medium",
|
||||
2173: "romanesco_florets_large",
|
||||
2175: "brussels_sprout",
|
||||
2176: "beetroot_whole_small",
|
||||
2177: "beetroot_whole_medium",
|
||||
2178: "beetroot_whole_large",
|
||||
2179: "red_cabbage_cut",
|
||||
2180: "black_salsify_thin",
|
||||
2181: "black_salsify_medium",
|
||||
2182: "black_salsify_thick",
|
||||
2183: "celery_pieces",
|
||||
2184: "celery_sliced",
|
||||
2185: "celeriac_sliced",
|
||||
2186: "celeriac_cut_into_batons",
|
||||
2187: "celeriac_diced",
|
||||
2188: "white_asparagus_thin",
|
||||
2189: "white_asparagus_medium",
|
||||
2190: "white_asparagus_thick",
|
||||
2192: "green_asparagus_thin",
|
||||
2194: "green_asparagus_medium",
|
||||
2196: "green_asparagus_thick",
|
||||
2197: "spinach",
|
||||
2198: "pointed_cabbage_cut",
|
||||
2199: "yam_halved",
|
||||
2200: "yam_quartered",
|
||||
2201: "yam_strips",
|
||||
2202: "swede_diced",
|
||||
2203: "swede_cut_into_batons",
|
||||
2204: "teltow_turnip_sliced",
|
||||
2205: "teltow_turnip_diced",
|
||||
2206: "jerusalem_artichoke_sliced",
|
||||
2207: "jerusalem_artichoke_diced",
|
||||
2208: "green_cabbage_cut",
|
||||
2209: "savoy_cabbage_cut",
|
||||
2210: "courgette_sliced",
|
||||
2211: "courgette_diced",
|
||||
2212: "snow_pea",
|
||||
2214: "perch_whole",
|
||||
2215: "perch_fillet_2_cm",
|
||||
2216: "perch_fillet_3_cm",
|
||||
2217: "gilt_head_bream_whole",
|
||||
2220: "gilt_head_bream_fillet",
|
||||
2221: "codfish_piece",
|
||||
2222: "codfish_fillet",
|
||||
2224: "trout",
|
||||
2225: "pike_fillet",
|
||||
2226: "pike_piece",
|
||||
2227: "halibut_fillet_2_cm",
|
||||
2230: "halibut_fillet_3_cm",
|
||||
2231: "codfish_fillet",
|
||||
2232: "codfish_piece",
|
||||
2233: "carp",
|
||||
2234: "salmon_fillet_2_cm",
|
||||
2235: "salmon_fillet_3_cm",
|
||||
2238: "salmon_steak_2_cm",
|
||||
2239: "salmon_steak_3_cm",
|
||||
2240: "salmon_piece",
|
||||
2241: "salmon_trout",
|
||||
2244: "iridescent_shark_fillet",
|
||||
2245: "red_snapper_fillet_2_cm",
|
||||
2248: "red_snapper_fillet_3_cm",
|
||||
2249: "redfish_fillet_2_cm",
|
||||
2250: "redfish_fillet_3_cm",
|
||||
2251: "redfish_piece",
|
||||
2252: "char",
|
||||
2253: "plaice_whole_2_cm",
|
||||
2254: "plaice_whole_3_cm",
|
||||
2255: "plaice_whole_4_cm",
|
||||
2256: "plaice_fillet_1_cm",
|
||||
2259: "plaice_fillet_2_cm",
|
||||
2260: "coalfish_fillet_2_cm",
|
||||
2261: "coalfish_fillet_3_cm",
|
||||
2262: "coalfish_piece",
|
||||
2263: "sea_devil_fillet_3_cm",
|
||||
2266: "sea_devil_fillet_4_cm",
|
||||
2267: "common_sole_fillet_1_cm",
|
||||
2270: "common_sole_fillet_2_cm",
|
||||
2271: "atlantic_catfish_fillet_1_cm",
|
||||
2272: "atlantic_catfish_fillet_2_cm",
|
||||
2273: "turbot_fillet_2_cm",
|
||||
2276: "turbot_fillet_3_cm",
|
||||
2277: "tuna_steak",
|
||||
2278: "tuna_fillet_2_cm",
|
||||
2279: "tuna_fillet_3_cm",
|
||||
2280: "tilapia_fillet_1_cm",
|
||||
2281: "tilapia_fillet_2_cm",
|
||||
2282: "nile_perch_fillet_2_cm",
|
||||
2283: "nile_perch_fillet_3_cm",
|
||||
2285: "zander_fillet",
|
||||
2288: "soup_hen",
|
||||
2291: "poularde_whole",
|
||||
2292: "poularde_breast",
|
||||
2294: "turkey_breast",
|
||||
2302: "chicken_tikka_masala_with_rice",
|
||||
2312: "veal_fillet_whole",
|
||||
2313: "veal_fillet_medaillons_1_cm",
|
||||
2315: "veal_fillet_medaillons_2_cm",
|
||||
2317: "veal_fillet_medaillons_3_cm",
|
||||
2324: "goulash_soup",
|
||||
2327: "dutch_hash",
|
||||
2328: "stuffed_cabbage",
|
||||
2330: "beef_tenderloin",
|
||||
2333: "beef_tenderloin_medaillons_1_cm_steam_cooking",
|
||||
2334: "beef_tenderloin_medaillons_2_cm_steam_cooking",
|
||||
2335: "beef_tenderloin_medaillons_3_cm_steam_cooking",
|
||||
2339: "silverside_5_cm",
|
||||
2342: "silverside_7_5_cm",
|
||||
2345: "silverside_10_cm",
|
||||
2348: "meat_for_soup_back_or_top_rib",
|
||||
2349: "meat_for_soup_leg_steak",
|
||||
2350: "meat_for_soup_brisket",
|
||||
2353: "viennese_silverside",
|
||||
2354: "whole_ham_steam_cooking",
|
||||
2355: "whole_ham_reheating",
|
||||
2359: "kasseler_piece",
|
||||
2361: "kasseler_slice",
|
||||
2363: "knuckle_of_pork_fresh",
|
||||
2364: "knuckle_of_pork_cured",
|
||||
2367: "pork_tenderloin_medaillons_3_cm",
|
||||
2368: "pork_tenderloin_medaillons_4_cm",
|
||||
2369: "pork_tenderloin_medaillons_5_cm",
|
||||
2429: "pumpkin_soup",
|
||||
2430: "meat_with_rice",
|
||||
2431: "beef_casserole",
|
||||
2450: "risotto",
|
||||
2451: "risotto",
|
||||
2453: "rice_pudding_steam_cooking",
|
||||
2454: "rice_pudding_rapid_steam_cooking",
|
||||
2461: "amaranth",
|
||||
2462: "bulgur",
|
||||
2463: "spelt_whole",
|
||||
2464: "spelt_cracked",
|
||||
2465: "green_spelt_whole",
|
||||
2466: "green_spelt_cracked",
|
||||
2467: "oats_whole",
|
||||
2468: "oats_cracked",
|
||||
2469: "millet",
|
||||
2470: "quinoa",
|
||||
2471: "polenta_swiss_style_fine_polenta",
|
||||
2472: "polenta_swiss_style_medium_polenta",
|
||||
2473: "polenta_swiss_style_coarse_polenta",
|
||||
2474: "polenta",
|
||||
2475: "rye_whole",
|
||||
2476: "rye_cracked",
|
||||
2477: "wheat_whole",
|
||||
2478: "wheat_cracked",
|
||||
2480: "gnocchi_fresh",
|
||||
2481: "yeast_dumplings_fresh",
|
||||
2482: "potato_dumplings_raw_boil_in_bag",
|
||||
2483: "potato_dumplings_raw_deep_frozen",
|
||||
2484: "potato_dumplings_half_half_boil_in_bag",
|
||||
2485: "potato_dumplings_half_half_deep_frozen",
|
||||
2486: "bread_dumplings_boil_in_the_bag",
|
||||
2487: "bread_dumplings_fresh",
|
||||
2488: "ravioli_fresh",
|
||||
2489: "spaetzle_fresh",
|
||||
2490: "tagliatelli_fresh",
|
||||
2491: "schupfnudeln_potato_noodels",
|
||||
2492: "tortellini_fresh",
|
||||
2493: "red_lentils",
|
||||
2494: "brown_lentils",
|
||||
2495: "beluga_lentils",
|
||||
2496: "green_split_peas",
|
||||
2497: "yellow_split_peas",
|
||||
2498: "chick_peas",
|
||||
2499: "white_beans",
|
||||
2500: "pinto_beans",
|
||||
2501: "red_beans",
|
||||
2502: "black_beans",
|
||||
2503: "hens_eggs_size_s_soft",
|
||||
2504: "hens_eggs_size_s_medium",
|
||||
2505: "hens_eggs_size_s_hard",
|
||||
2506: "hens_eggs_size_m_soft",
|
||||
2507: "hens_eggs_size_m_medium",
|
||||
2508: "hens_eggs_size_m_hard",
|
||||
2509: "hens_eggs_size_l_soft",
|
||||
2510: "hens_eggs_size_l_medium",
|
||||
2511: "hens_eggs_size_l_hard",
|
||||
2512: "hens_eggs_size_xl_soft",
|
||||
2513: "hens_eggs_size_xl_medium",
|
||||
2514: "hens_eggs_size_xl_hard",
|
||||
2515: "swiss_toffee_cream_100_ml",
|
||||
2516: "swiss_toffee_cream_150_ml",
|
||||
2518: "toffee_date_dessert_several_small",
|
||||
2520: "cheesecake_several_small",
|
||||
2521: "cheesecake_one_large",
|
||||
2522: "christmas_pudding_cooking",
|
||||
2523: "christmas_pudding_heating",
|
||||
2524: "treacle_sponge_pudding_several_small",
|
||||
2525: "treacle_sponge_pudding_one_large",
|
||||
2526: "sweet_cheese_dumplings",
|
||||
2527: "apples_whole",
|
||||
2528: "apples_halved",
|
||||
2529: "apples_quartered",
|
||||
2530: "apples_sliced",
|
||||
2531: "apples_diced",
|
||||
2532: "apricots_halved_steam_cooking",
|
||||
2533: "apricots_halved_skinning",
|
||||
2534: "apricots_quartered",
|
||||
2535: "apricots_wedges",
|
||||
2536: "pears_halved",
|
||||
2537: "pears_quartered",
|
||||
2538: "pears_wedges",
|
||||
2539: "sweet_cherries",
|
||||
2540: "sour_cherries",
|
||||
2541: "pears_to_cook_small_whole",
|
||||
2542: "pears_to_cook_small_halved",
|
||||
2543: "pears_to_cook_small_quartered",
|
||||
2544: "pears_to_cook_medium_whole",
|
||||
2545: "pears_to_cook_medium_halved",
|
||||
2546: "pears_to_cook_medium_quartered",
|
||||
2547: "pears_to_cook_large_whole",
|
||||
2548: "pears_to_cook_large_halved",
|
||||
2549: "pears_to_cook_large_quartered",
|
||||
2550: "mirabelles",
|
||||
2551: "nectarines_peaches_halved_steam_cooking",
|
||||
2552: "nectarines_peaches_halved_skinning",
|
||||
2553: "nectarines_peaches_quartered",
|
||||
2554: "nectarines_peaches_wedges",
|
||||
2555: "plums_whole",
|
||||
2556: "plums_halved",
|
||||
2557: "cranberries",
|
||||
2558: "quinces_diced",
|
||||
2559: "greenage_plums",
|
||||
2560: "rhubarb_chunks",
|
||||
2561: "gooseberries",
|
||||
2562: "mushrooms_whole",
|
||||
2563: "mushrooms_halved",
|
||||
2564: "mushrooms_sliced",
|
||||
2565: "mushrooms_quartered",
|
||||
2566: "mushrooms_diced",
|
||||
2567: "cep",
|
||||
2568: "chanterelle",
|
||||
2569: "oyster_mushroom_whole",
|
||||
2570: "oyster_mushroom_strips",
|
||||
2571: "oyster_mushroom_diced",
|
||||
2572: "saucisson",
|
||||
2573: "bruehwurst_sausages",
|
||||
2574: "bologna_sausage",
|
||||
2575: "veal_sausages",
|
||||
2577: "crevettes",
|
||||
2579: "prawns",
|
||||
2581: "king_prawns",
|
||||
2583: "small_shrimps",
|
||||
2585: "large_shrimps",
|
||||
2587: "mussels",
|
||||
2589: "scallops",
|
||||
2591: "venus_clams",
|
||||
2592: "goose_barnacles",
|
||||
2593: "cockles",
|
||||
2594: "razor_clams_small",
|
||||
2595: "razor_clams_medium",
|
||||
2596: "razor_clams_large",
|
||||
2597: "mussels_in_sauce",
|
||||
2598: "bottling_soft",
|
||||
2599: "bottling_medium",
|
||||
2600: "bottling_hard",
|
||||
2601: "melt_chocolate",
|
||||
2602: "dissolve_gelatine",
|
||||
2603: "sweat_onions",
|
||||
2604: "cook_bacon",
|
||||
2605: "heating_damp_flannels",
|
||||
2606: "decrystallise_honey",
|
||||
2607: "make_yoghurt",
|
||||
2687: "toffee_date_dessert_one_large",
|
||||
2694: "beef_tenderloin_medaillons_1_cm_low_temperature_cooking",
|
||||
2695: "beef_tenderloin_medaillons_2_cm_low_temperature_cooking",
|
||||
2696: "beef_tenderloin_medaillons_3_cm_low_temperature_cooking",
|
||||
3373: "wild_rice",
|
||||
3376: "wholegrain_rice",
|
||||
3380: "parboiled_rice_steam_cooking",
|
||||
3381: "parboiled_rice_rapid_steam_cooking",
|
||||
3383: "basmati_rice_steam_cooking",
|
||||
3384: "basmati_rice_rapid_steam_cooking",
|
||||
3386: "jasmine_rice_steam_cooking",
|
||||
3387: "jasmine_rice_rapid_steam_cooking",
|
||||
3389: "huanghuanian_steam_cooking",
|
||||
3390: "huanghuanian_rapid_steam_cooking",
|
||||
3392: "simiao_steam_cooking",
|
||||
3393: "simiao_rapid_steam_cooking",
|
||||
3395: "long_grain_rice_general_steam_cooking",
|
||||
3396: "long_grain_rice_general_rapid_steam_cooking",
|
||||
3398: "chongming_steam_cooking",
|
||||
3399: "chongming_rapid_steam_cooking",
|
||||
3401: "wuchang_steam_cooking",
|
||||
3402: "wuchang_rapid_steam_cooking",
|
||||
3404: "uonumma_koshihikari_steam_cooking",
|
||||
3405: "uonumma_koshihikari_rapid_steam_cooking",
|
||||
3407: "sheyang_steam_cooking",
|
||||
3408: "sheyang_rapid_steam_cooking",
|
||||
3410: "round_grain_rice_general_steam_cooking",
|
||||
3411: "round_grain_rice_general_rapid_steam_cooking",
|
||||
}
|
||||
|
||||
STATE_PROGRAM_ID: dict[int, dict[int, str]] = {
|
||||
MieleAppliance.WASHING_MACHINE: WASHING_MACHINE_PROGRAM_ID,
|
||||
MieleAppliance.TUMBLE_DRYER: TUMBLE_DRYER_PROGRAM_ID,
|
||||
MieleAppliance.DISHWASHER: DISHWASHER_PROGRAM_ID,
|
||||
MieleAppliance.DISH_WARMER: DISH_WARMER_PROGRAM_ID,
|
||||
MieleAppliance.OVEN: OVEN_PROGRAM_ID,
|
||||
MieleAppliance.OVEN_MICROWAVE: OVEN_PROGRAM_ID,
|
||||
MieleAppliance.STEAM_OVEN_MK2: OVEN_PROGRAM_ID,
|
||||
MieleAppliance.STEAM_OVEN: OVEN_PROGRAM_ID,
|
||||
MieleAppliance.STEAM_OVEN_COMBI: OVEN_PROGRAM_ID,
|
||||
MieleAppliance.STEAM_OVEN_MICRO: STEAM_OVEN_MICRO_PROGRAM_ID,
|
||||
MieleAppliance.WASHER_DRYER: WASHING_MACHINE_PROGRAM_ID,
|
||||
MieleAppliance.ROBOT_VACUUM_CLEANER: ROBOT_VACUUM_CLEANER_PROGRAM_ID,
|
||||
MieleAppliance.COFFEE_SYSTEM: COFFEE_SYSTEM_PROGRAM_ID,
|
||||
}
|
||||
|
||||
@@ -5,6 +5,8 @@ from __future__ import annotations
|
||||
import hashlib
|
||||
from typing import Any, cast
|
||||
|
||||
from pymiele import completed_warnings
|
||||
|
||||
from homeassistant.components.diagnostics import async_redact_data
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.device_registry import DeviceEntry
|
||||
@@ -32,7 +34,7 @@ async def async_get_config_entry_diagnostics(
|
||||
) -> dict[str, Any]:
|
||||
"""Return diagnostics for a config entry."""
|
||||
|
||||
miele_data = {
|
||||
miele_data: dict[str, Any] = {
|
||||
"devices": redact_identifiers(
|
||||
{
|
||||
device_id: device_data.raw
|
||||
@@ -46,6 +48,9 @@ async def async_get_config_entry_diagnostics(
|
||||
}
|
||||
),
|
||||
}
|
||||
miele_data["missing_code_warnings"] = (
|
||||
sorted(completed_warnings) if len(completed_warnings) > 0 else ["None"]
|
||||
)
|
||||
|
||||
return {
|
||||
"config_entry_data": async_redact_data(dict(config_entry.data), TO_REDACT),
|
||||
@@ -65,7 +70,7 @@ async def async_get_device_diagnostics(
|
||||
coordinator = config_entry.runtime_data
|
||||
|
||||
device_id = cast(str, device.serial_number)
|
||||
miele_data = {
|
||||
miele_data: dict[str, Any] = {
|
||||
"devices": {
|
||||
hash_identifier(device_id): coordinator.data.devices[device_id].raw
|
||||
},
|
||||
@@ -74,6 +79,10 @@ async def async_get_device_diagnostics(
|
||||
},
|
||||
"programs": "Not implemented",
|
||||
}
|
||||
miele_data["missing_code_warnings"] = (
|
||||
sorted(completed_warnings) if len(completed_warnings) > 0 else ["None"]
|
||||
)
|
||||
|
||||
return {
|
||||
"info": async_redact_data(info, TO_REDACT),
|
||||
"data": async_redact_data(config_entry.data, TO_REDACT),
|
||||
|
||||
@@ -0,0 +1,182 @@
|
||||
"""Platform for Miele fan entity."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from dataclasses import dataclass
|
||||
import logging
|
||||
import math
|
||||
from typing import Any, Final
|
||||
|
||||
from aiohttp import ClientResponseError
|
||||
|
||||
from homeassistant.components.fan import (
|
||||
FanEntity,
|
||||
FanEntityDescription,
|
||||
FanEntityFeature,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
from homeassistant.util.percentage import (
|
||||
percentage_to_ranged_value,
|
||||
ranged_value_to_percentage,
|
||||
)
|
||||
from homeassistant.util.scaling import int_states_in_range
|
||||
|
||||
from .const import DOMAIN, POWER_OFF, POWER_ON, VENTILATION_STEP, MieleAppliance
|
||||
from .coordinator import MieleConfigEntry, MieleDataUpdateCoordinator
|
||||
from .entity import MieleEntity
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
SPEED_RANGE = (1, 4)
|
||||
|
||||
|
||||
@dataclass(frozen=True, kw_only=True)
class MieleFanDefinition:
    """Class for defining fan entities."""

    # Appliance types for which this fan entity should be created.
    types: tuple[MieleAppliance, ...]
    # Home Assistant entity description applied to matching devices.
    description: FanEntityDescription
|
||||
|
||||
|
||||
# Fan entity definitions, matched against each device's type at setup.
FAN_TYPES: Final[tuple[MieleFanDefinition, ...]] = (
    # Extractor hood: fully controllable fan.
    MieleFanDefinition(
        types=(MieleAppliance.HOOD,),
        description=FanEntityDescription(
            key="fan",
            translation_key="fan",
        ),
    ),
    # Induction hob with integrated extractor: the "fan_readonly" key makes
    # MieleFan expose no control features for this entity.
    MieleFanDefinition(
        types=(MieleAppliance.HOB_INDUCT_EXTR,),
        description=FanEntityDescription(
            key="fan_readonly",
            translation_key="fan",
        ),
    ),
)
|
||||
|
||||
|
||||
async def async_setup_entry(
    hass: HomeAssistant,
    config_entry: MieleConfigEntry,
    async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
    """Set up the fan platform.

    Creates one MieleFan per device whose type matches a definition in
    FAN_TYPES.
    """
    coordinator = config_entry.runtime_data

    entities: list[MieleFan] = []
    for device_id, device in coordinator.data.devices.items():
        for definition in FAN_TYPES:
            if device.device_type in definition.types:
                entities.append(
                    MieleFan(coordinator, device_id, definition.description)
                )
    async_add_entities(entities)
|
||||
|
||||
|
||||
class MieleFan(MieleEntity, FanEntity):
    """Representation of a Fan."""

    entity_description: FanEntityDescription

    def __init__(
        self,
        coordinator: MieleDataUpdateCoordinator,
        device_id: str,
        description: FanEntityDescription,
    ) -> None:
        """Initialize the fan.

        The "fan_readonly" description (used for the induction hob with
        integrated extractor, see FAN_TYPES) gets no control features at all;
        every other fan supports on/off and speed control.
        """

        self._attr_supported_features: FanEntityFeature = (
            FanEntityFeature(0)
            if description.key == "fan_readonly"
            else FanEntityFeature.SET_SPEED
            | FanEntityFeature.TURN_OFF
            | FanEntityFeature.TURN_ON
        )
        super().__init__(coordinator, device_id, description)

    @property
    def is_on(self) -> bool:
        """Return current on/off state.

        A ventilation step above 0 means the fan is running.
        """
        assert self.device.state_ventilation_step is not None
        return self.device.state_ventilation_step > 0

    @property
    def speed_count(self) -> int:
        """Return the number of speeds the fan supports."""
        return int_states_in_range(SPEED_RANGE)

    @property
    def percentage(self) -> int | None:
        """Return the current speed percentage.

        A missing ventilation step is treated as 0 (off).
        """
        return ranged_value_to_percentage(
            SPEED_RANGE,
            (self.device.state_ventilation_step or 0),
        )

    async def async_set_percentage(self, percentage: int) -> None:
        """Set the speed percentage of the fan.

        Maps the percentage onto SPEED_RANGE; 0 delegates to async_turn_off.
        On success the local ventilation step is updated optimistically so
        the UI reflects the change before the next cloud push.
        """
        _LOGGER.debug("Set_percentage: %s", percentage)
        ventilation_step = math.ceil(
            percentage_to_ranged_value(SPEED_RANGE, percentage)
        )
        _LOGGER.debug("Calc ventilation_step: %s", ventilation_step)
        if ventilation_step == 0:
            await self.async_turn_off()
        else:
            try:
                await self.api.send_action(
                    self._device_id, {VENTILATION_STEP: ventilation_step}
                )
            except ClientResponseError as ex:
                # Surface API failures as a translated HomeAssistantError.
                raise HomeAssistantError(
                    translation_domain=DOMAIN,
                    translation_key="set_state_error",
                    translation_placeholders={
                        "entity": self.entity_id,
                    },
                ) from ex
            # Optimistic local update; the coordinator will confirm later.
            self.device.state_ventilation_step = ventilation_step
            self.async_write_ha_state()

    async def async_turn_on(
        self,
        percentage: int | None = None,
        preset_mode: str | None = None,
        **kwargs: Any,
    ) -> None:
        """Turn on the fan.

        Powers the device on first; if a percentage was requested, the speed
        is then set via async_set_percentage (which also writes state).
        """
        _LOGGER.debug(
            "Turn_on -> percentage: %s, preset_mode: %s", percentage, preset_mode
        )
        try:
            await self.api.send_action(self._device_id, {POWER_ON: True})
        except ClientResponseError as ex:
            raise HomeAssistantError(
                translation_domain=DOMAIN,
                translation_key="set_state_error",
                translation_placeholders={
                    "entity": self.entity_id,
                },
            ) from ex

        if percentage is not None:
            await self.async_set_percentage(percentage)
            return

    async def async_turn_off(self, **kwargs: Any) -> None:
        """Turn the fan off.

        Sends POWER_OFF and optimistically resets the local ventilation step
        to 0 so is_on/percentage update immediately.
        """
        try:
            await self.api.send_action(self._device_id, {POWER_OFF: True})
        except ClientResponseError as ex:
            raise HomeAssistantError(
                translation_domain=DOMAIN,
                translation_key="set_state_error",
                translation_placeholders={
                    "entity": self.entity_id,
                },
            ) from ex

        self.device.state_ventilation_step = 0
        self.async_write_ha_state()
|
||||
@@ -25,6 +25,35 @@
|
||||
"default": "mdi:pause"
|
||||
}
|
||||
},
|
||||
"sensor": {
|
||||
"core_temperature": {
|
||||
"default": "mdi:thermometer-probe"
|
||||
},
|
||||
"core_target_temperature": {
|
||||
"default": "mdi:thermometer-probe"
|
||||
},
|
||||
"program_id": {
|
||||
"default": "mdi:selection-ellipse-arrow-inside"
|
||||
},
|
||||
"program_phase": {
|
||||
"default": "mdi:tray-full"
|
||||
},
|
||||
"elapsed_time": {
|
||||
"default": "mdi:timelapse"
|
||||
},
|
||||
"start_time": {
|
||||
"default": "mdi:clock-start"
|
||||
},
|
||||
"spin_speed": {
|
||||
"default": "mdi:sync"
|
||||
},
|
||||
"program_type": {
|
||||
"default": "mdi:state-machine"
|
||||
},
|
||||
"remaining_time": {
|
||||
"default": "mdi:clock-end"
|
||||
}
|
||||
},
|
||||
"switch": {
|
||||
"power": {
|
||||
"default": "mdi:power"
|
||||
|
||||
@@ -8,7 +8,7 @@
|
||||
"iot_class": "cloud_push",
|
||||
"loggers": ["pymiele"],
|
||||
"quality_scale": "bronze",
|
||||
"requirements": ["pymiele==0.4.0"],
|
||||
"requirements": ["pymiele==0.4.1"],
|
||||
"single_config_entry": true,
|
||||
"zeroconf": ["_mieleathome._tcp.local."]
|
||||
}
|
||||
|
||||
@@ -15,17 +15,38 @@ from homeassistant.components.sensor import (
|
||||
SensorEntityDescription,
|
||||
SensorStateClass,
|
||||
)
|
||||
from homeassistant.const import UnitOfTemperature
|
||||
from homeassistant.const import (
|
||||
REVOLUTIONS_PER_MINUTE,
|
||||
EntityCategory,
|
||||
UnitOfEnergy,
|
||||
UnitOfTemperature,
|
||||
UnitOfTime,
|
||||
UnitOfVolume,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
from homeassistant.helpers.typing import StateType
|
||||
|
||||
from .const import STATE_STATUS_TAGS, MieleAppliance, StateStatus
|
||||
from .const import (
|
||||
STATE_PROGRAM_ID,
|
||||
STATE_PROGRAM_PHASE,
|
||||
STATE_PROGRAM_TYPE,
|
||||
STATE_STATUS_TAGS,
|
||||
MieleAppliance,
|
||||
StateStatus,
|
||||
)
|
||||
from .coordinator import MieleConfigEntry, MieleDataUpdateCoordinator
|
||||
from .entity import MieleEntity
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
DISABLED_TEMPERATURE = -32768
|
||||
|
||||
|
||||
def _convert_duration(value_list: list[int]) -> int | None:
|
||||
"""Convert duration to minutes."""
|
||||
return value_list[0] * 60 + value_list[1] if value_list else None
|
||||
|
||||
|
||||
@dataclass(frozen=True, kw_only=True)
|
||||
class MieleSensorDescription(SensorEntityDescription):
|
||||
@@ -80,7 +101,220 @@ SENSOR_TYPES: Final[tuple[MieleSensorDefinition, ...]] = (
|
||||
translation_key="status",
|
||||
value_fn=lambda value: value.state_status,
|
||||
device_class=SensorDeviceClass.ENUM,
|
||||
options=list(STATE_STATUS_TAGS.values()),
|
||||
options=sorted(set(STATE_STATUS_TAGS.values())),
|
||||
),
|
||||
),
|
||||
MieleSensorDefinition(
|
||||
types=(
|
||||
MieleAppliance.WASHING_MACHINE,
|
||||
MieleAppliance.WASHING_MACHINE_SEMI_PROFESSIONAL,
|
||||
MieleAppliance.TUMBLE_DRYER,
|
||||
MieleAppliance.TUMBLE_DRYER_SEMI_PROFESSIONAL,
|
||||
MieleAppliance.DISHWASHER,
|
||||
MieleAppliance.DISH_WARMER,
|
||||
MieleAppliance.OVEN,
|
||||
MieleAppliance.OVEN_MICROWAVE,
|
||||
MieleAppliance.STEAM_OVEN,
|
||||
MieleAppliance.MICROWAVE,
|
||||
MieleAppliance.COFFEE_SYSTEM,
|
||||
MieleAppliance.ROBOT_VACUUM_CLEANER,
|
||||
MieleAppliance.WASHER_DRYER,
|
||||
MieleAppliance.STEAM_OVEN_COMBI,
|
||||
MieleAppliance.STEAM_OVEN_MICRO,
|
||||
MieleAppliance.DIALOG_OVEN,
|
||||
MieleAppliance.STEAM_OVEN_MK2,
|
||||
),
|
||||
description=MieleSensorDescription(
|
||||
key="state_program_id",
|
||||
translation_key="program_id",
|
||||
device_class=SensorDeviceClass.ENUM,
|
||||
value_fn=lambda value: value.state_program_id,
|
||||
),
|
||||
),
|
||||
MieleSensorDefinition(
|
||||
types=(
|
||||
MieleAppliance.WASHING_MACHINE,
|
||||
MieleAppliance.WASHING_MACHINE_SEMI_PROFESSIONAL,
|
||||
MieleAppliance.TUMBLE_DRYER,
|
||||
MieleAppliance.TUMBLE_DRYER_SEMI_PROFESSIONAL,
|
||||
MieleAppliance.DISHWASHER,
|
||||
MieleAppliance.DISH_WARMER,
|
||||
MieleAppliance.OVEN,
|
||||
MieleAppliance.OVEN_MICROWAVE,
|
||||
MieleAppliance.STEAM_OVEN,
|
||||
MieleAppliance.MICROWAVE,
|
||||
MieleAppliance.COFFEE_SYSTEM,
|
||||
MieleAppliance.ROBOT_VACUUM_CLEANER,
|
||||
MieleAppliance.WASHER_DRYER,
|
||||
MieleAppliance.STEAM_OVEN_COMBI,
|
||||
MieleAppliance.STEAM_OVEN_MICRO,
|
||||
MieleAppliance.DIALOG_OVEN,
|
||||
MieleAppliance.STEAM_OVEN_MK2,
|
||||
),
|
||||
description=MieleSensorDescription(
|
||||
key="state_program_phase",
|
||||
translation_key="program_phase",
|
||||
value_fn=lambda value: value.state_program_phase,
|
||||
device_class=SensorDeviceClass.ENUM,
|
||||
),
|
||||
),
|
||||
MieleSensorDefinition(
|
||||
types=(
|
||||
MieleAppliance.WASHING_MACHINE,
|
||||
MieleAppliance.WASHING_MACHINE_SEMI_PROFESSIONAL,
|
||||
MieleAppliance.TUMBLE_DRYER,
|
||||
MieleAppliance.TUMBLE_DRYER_SEMI_PROFESSIONAL,
|
||||
MieleAppliance.DISHWASHER,
|
||||
MieleAppliance.DISH_WARMER,
|
||||
MieleAppliance.OVEN,
|
||||
MieleAppliance.OVEN_MICROWAVE,
|
||||
MieleAppliance.STEAM_OVEN,
|
||||
MieleAppliance.MICROWAVE,
|
||||
MieleAppliance.ROBOT_VACUUM_CLEANER,
|
||||
MieleAppliance.WASHER_DRYER,
|
||||
MieleAppliance.STEAM_OVEN_COMBI,
|
||||
MieleAppliance.STEAM_OVEN_MICRO,
|
||||
MieleAppliance.DIALOG_OVEN,
|
||||
MieleAppliance.COFFEE_SYSTEM,
|
||||
MieleAppliance.STEAM_OVEN_MK2,
|
||||
),
|
||||
description=MieleSensorDescription(
|
||||
key="state_program_type",
|
||||
translation_key="program_type",
|
||||
value_fn=lambda value: value.state_program_type,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
device_class=SensorDeviceClass.ENUM,
|
||||
options=sorted(set(STATE_PROGRAM_TYPE.values())),
|
||||
),
|
||||
),
|
||||
MieleSensorDefinition(
|
||||
types=(
|
||||
MieleAppliance.WASHING_MACHINE,
|
||||
MieleAppliance.WASHING_MACHINE_SEMI_PROFESSIONAL,
|
||||
MieleAppliance.TUMBLE_DRYER,
|
||||
MieleAppliance.TUMBLE_DRYER_SEMI_PROFESSIONAL,
|
||||
MieleAppliance.DISHWASHER,
|
||||
MieleAppliance.WASHER_DRYER,
|
||||
),
|
||||
description=MieleSensorDescription(
|
||||
key="current_energy_consumption",
|
||||
translation_key="energy_consumption",
|
||||
value_fn=lambda value: value.current_energy_consumption,
|
||||
device_class=SensorDeviceClass.ENERGY,
|
||||
state_class=SensorStateClass.TOTAL_INCREASING,
|
||||
native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
),
|
||||
),
|
||||
MieleSensorDefinition(
|
||||
types=(
|
||||
MieleAppliance.WASHING_MACHINE,
|
||||
MieleAppliance.DISHWASHER,
|
||||
MieleAppliance.WASHER_DRYER,
|
||||
),
|
||||
description=MieleSensorDescription(
|
||||
key="current_water_consumption",
|
||||
translation_key="water_consumption",
|
||||
value_fn=lambda value: value.current_water_consumption,
|
||||
device_class=SensorDeviceClass.WATER,
|
||||
state_class=SensorStateClass.TOTAL_INCREASING,
|
||||
native_unit_of_measurement=UnitOfVolume.LITERS,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
),
|
||||
),
|
||||
MieleSensorDefinition(
|
||||
types=(
|
||||
MieleAppliance.WASHING_MACHINE,
|
||||
MieleAppliance.WASHING_MACHINE_SEMI_PROFESSIONAL,
|
||||
MieleAppliance.WASHER_DRYER,
|
||||
),
|
||||
description=MieleSensorDescription(
|
||||
key="state_spinning_speed",
|
||||
translation_key="spin_speed",
|
||||
value_fn=lambda value: value.state_spinning_speed,
|
||||
native_unit_of_measurement=REVOLUTIONS_PER_MINUTE,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
),
|
||||
),
|
||||
MieleSensorDefinition(
|
||||
types=(
|
||||
MieleAppliance.WASHING_MACHINE,
|
||||
MieleAppliance.WASHING_MACHINE_SEMI_PROFESSIONAL,
|
||||
MieleAppliance.TUMBLE_DRYER,
|
||||
MieleAppliance.TUMBLE_DRYER_SEMI_PROFESSIONAL,
|
||||
MieleAppliance.DISHWASHER,
|
||||
MieleAppliance.OVEN,
|
||||
MieleAppliance.OVEN_MICROWAVE,
|
||||
MieleAppliance.STEAM_OVEN,
|
||||
MieleAppliance.MICROWAVE,
|
||||
MieleAppliance.ROBOT_VACUUM_CLEANER,
|
||||
MieleAppliance.WASHER_DRYER,
|
||||
MieleAppliance.STEAM_OVEN_COMBI,
|
||||
MieleAppliance.STEAM_OVEN_MICRO,
|
||||
MieleAppliance.DIALOG_OVEN,
|
||||
MieleAppliance.STEAM_OVEN_MK2,
|
||||
),
|
||||
description=MieleSensorDescription(
|
||||
key="state_remaining_time",
|
||||
translation_key="remaining_time",
|
||||
value_fn=lambda value: _convert_duration(value.state_remaining_time),
|
||||
device_class=SensorDeviceClass.DURATION,
|
||||
native_unit_of_measurement=UnitOfTime.MINUTES,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
),
|
||||
),
|
||||
MieleSensorDefinition(
|
||||
types=(
|
||||
MieleAppliance.WASHING_MACHINE,
|
||||
MieleAppliance.TUMBLE_DRYER,
|
||||
MieleAppliance.DISHWASHER,
|
||||
MieleAppliance.OVEN,
|
||||
MieleAppliance.OVEN_MICROWAVE,
|
||||
MieleAppliance.STEAM_OVEN,
|
||||
MieleAppliance.MICROWAVE,
|
||||
MieleAppliance.WASHER_DRYER,
|
||||
MieleAppliance.STEAM_OVEN_COMBI,
|
||||
MieleAppliance.STEAM_OVEN_MICRO,
|
||||
MieleAppliance.DIALOG_OVEN,
|
||||
MieleAppliance.ROBOT_VACUUM_CLEANER,
|
||||
MieleAppliance.STEAM_OVEN_MK2,
|
||||
),
|
||||
description=MieleSensorDescription(
|
||||
key="state_elapsed_time",
|
||||
translation_key="elapsed_time",
|
||||
value_fn=lambda value: _convert_duration(value.state_elapsed_time),
|
||||
device_class=SensorDeviceClass.DURATION,
|
||||
native_unit_of_measurement=UnitOfTime.MINUTES,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
),
|
||||
),
|
||||
MieleSensorDefinition(
|
||||
types=(
|
||||
MieleAppliance.WASHING_MACHINE,
|
||||
MieleAppliance.WASHING_MACHINE_SEMI_PROFESSIONAL,
|
||||
MieleAppliance.TUMBLE_DRYER,
|
||||
MieleAppliance.TUMBLE_DRYER_SEMI_PROFESSIONAL,
|
||||
MieleAppliance.DISHWASHER,
|
||||
MieleAppliance.DISH_WARMER,
|
||||
MieleAppliance.OVEN,
|
||||
MieleAppliance.OVEN_MICROWAVE,
|
||||
MieleAppliance.STEAM_OVEN,
|
||||
MieleAppliance.MICROWAVE,
|
||||
MieleAppliance.WASHER_DRYER,
|
||||
MieleAppliance.STEAM_OVEN_COMBI,
|
||||
MieleAppliance.STEAM_OVEN_MICRO,
|
||||
MieleAppliance.DIALOG_OVEN,
|
||||
MieleAppliance.STEAM_OVEN_MK2,
|
||||
),
|
||||
description=MieleSensorDescription(
|
||||
key="state_start_time",
|
||||
translation_key="start_time",
|
||||
value_fn=lambda value: _convert_duration(value.state_start_time),
|
||||
native_unit_of_measurement=UnitOfTime.MINUTES,
|
||||
device_class=SensorDeviceClass.DURATION,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
suggested_display_precision=2,
|
||||
suggested_unit_of_measurement=UnitOfTime.HOURS,
|
||||
),
|
||||
),
|
||||
MieleSensorDefinition(
|
||||
@@ -113,6 +347,76 @@ SENSOR_TYPES: Final[tuple[MieleSensorDefinition, ...]] = (
|
||||
/ 100.0,
|
||||
),
|
||||
),
|
||||
MieleSensorDefinition(
|
||||
types=(
|
||||
MieleAppliance.TUMBLE_DRYER_SEMI_PROFESSIONAL,
|
||||
MieleAppliance.OVEN,
|
||||
MieleAppliance.OVEN_MICROWAVE,
|
||||
MieleAppliance.DISH_WARMER,
|
||||
MieleAppliance.STEAM_OVEN,
|
||||
MieleAppliance.MICROWAVE,
|
||||
MieleAppliance.FRIDGE,
|
||||
MieleAppliance.FREEZER,
|
||||
MieleAppliance.FRIDGE_FREEZER,
|
||||
MieleAppliance.STEAM_OVEN_COMBI,
|
||||
MieleAppliance.WINE_CABINET,
|
||||
MieleAppliance.WINE_CONDITIONING_UNIT,
|
||||
MieleAppliance.WINE_STORAGE_CONDITIONING_UNIT,
|
||||
MieleAppliance.STEAM_OVEN_MICRO,
|
||||
MieleAppliance.DIALOG_OVEN,
|
||||
MieleAppliance.WINE_CABINET_FREEZER,
|
||||
MieleAppliance.STEAM_OVEN_MK2,
|
||||
),
|
||||
description=MieleSensorDescription(
|
||||
key="state_temperature_2",
|
||||
zone=2,
|
||||
device_class=SensorDeviceClass.TEMPERATURE,
|
||||
translation_key="temperature_zone_2",
|
||||
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
value_fn=lambda value: value.state_temperatures[1].temperature / 100.0, # type: ignore [operator]
|
||||
),
|
||||
),
|
||||
MieleSensorDefinition(
|
||||
types=(
|
||||
MieleAppliance.OVEN,
|
||||
MieleAppliance.OVEN_MICROWAVE,
|
||||
MieleAppliance.STEAM_OVEN_COMBI,
|
||||
),
|
||||
description=MieleSensorDescription(
|
||||
key="state_core_target_temperature",
|
||||
translation_key="core_target_temperature",
|
||||
zone=1,
|
||||
device_class=SensorDeviceClass.TEMPERATURE,
|
||||
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
value_fn=(
|
||||
lambda value: cast(
|
||||
int, value.state_core_target_temperature[0].temperature
|
||||
)
|
||||
/ 100.0
|
||||
),
|
||||
),
|
||||
),
|
||||
MieleSensorDefinition(
|
||||
types=(
|
||||
MieleAppliance.OVEN,
|
||||
MieleAppliance.OVEN_MICROWAVE,
|
||||
MieleAppliance.STEAM_OVEN_COMBI,
|
||||
),
|
||||
description=MieleSensorDescription(
|
||||
key="state_core_temperature",
|
||||
translation_key="core_temperature",
|
||||
zone=1,
|
||||
device_class=SensorDeviceClass.TEMPERATURE,
|
||||
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
value_fn=(
|
||||
lambda value: cast(int, value.state_core_temperature[0].temperature)
|
||||
/ 100.0
|
||||
),
|
||||
),
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
@@ -132,8 +436,21 @@ async def async_setup_entry(
|
||||
match definition.description.key:
|
||||
case "state_status":
|
||||
entity_class = MieleStatusSensor
|
||||
case "state_program_id":
|
||||
entity_class = MieleProgramIdSensor
|
||||
case "state_program_phase":
|
||||
entity_class = MielePhaseSensor
|
||||
case "state_program_type":
|
||||
entity_class = MieleTypeSensor
|
||||
case _:
|
||||
entity_class = MieleSensor
|
||||
if (
|
||||
definition.description.device_class == SensorDeviceClass.TEMPERATURE
|
||||
and definition.description.value_fn(device)
|
||||
== DISABLED_TEMPERATURE / 100
|
||||
):
|
||||
# Don't create entity if API signals that datapoint is disabled
|
||||
continue
|
||||
entities.append(
|
||||
entity_class(coordinator, device_id, definition.description)
|
||||
)
|
||||
@@ -209,3 +526,67 @@ class MieleStatusSensor(MieleSensor):
|
||||
"""Return the availability of the entity."""
|
||||
# This sensor should always be available
|
||||
return True
|
||||
|
||||
|
||||
class MielePhaseSensor(MieleSensor):
    """Sensor reporting the current program phase of a device."""

    @property
    def native_value(self) -> StateType:
        """Map the raw phase code to its translation tag, or None if unknown."""
        phase_map = STATE_PROGRAM_PHASE.get(self.device.device_type, {})
        tag = phase_map.get(self.device.state_program_phase)
        if tag is None:
            _LOGGER.debug(
                "Unknown program phase: %s on device type: %s",
                self.device.state_program_phase,
                self.device.device_type,
            )
        return tag

    @property
    def options(self) -> list[str]:
        """Return the sorted, de-duplicated phase tags for this device type."""
        phase_map = STATE_PROGRAM_PHASE.get(self.device.device_type, {})
        return sorted(set(phase_map.values()))
|
||||
|
||||
|
||||
class MieleTypeSensor(MieleSensor):
    """Sensor reporting the program type of a device."""

    @property
    def native_value(self) -> StateType:
        """Map the raw program type code to its translation tag, or None."""
        type_code = int(self.device.state_program_type)
        tag = STATE_PROGRAM_TYPE.get(type_code)
        if tag is None:
            _LOGGER.debug(
                "Unknown program type: %s on device type: %s",
                self.device.state_program_type,
                self.device.device_type,
            )
        return tag
|
||||
|
||||
|
||||
class MieleProgramIdSensor(MieleSensor):
    """Sensor reporting the selected program of a device."""

    @property
    def native_value(self) -> StateType:
        """Map the raw program id to its translation tag, or None if unknown."""
        program_map = STATE_PROGRAM_ID.get(self.device.device_type, {})
        tag = program_map.get(self.device.state_program_id)
        if tag is None:
            _LOGGER.debug(
                "Unknown program id: %s on device type: %s",
                self.device.state_program_id,
                self.device.device_type,
            )
        return tag

    @property
    def options(self) -> list[str]:
        """Return the sorted, de-duplicated program tags for this device type."""
        program_map = STATE_PROGRAM_ID.get(self.device.device_type, {})
        return sorted(set(program_map.values()))
|
||||
|
||||
@@ -115,6 +115,15 @@
|
||||
},
|
||||
"entity": {
|
||||
"binary_sensor": {
|
||||
"door": {
|
||||
"name": "Door"
|
||||
},
|
||||
"failure": {
|
||||
"name": "Failure"
|
||||
},
|
||||
"info": {
|
||||
"name": "Info"
|
||||
},
|
||||
"notification_active": {
|
||||
"name": "Notification active"
|
||||
},
|
||||
@@ -139,6 +148,11 @@
|
||||
"name": "[%key:common::action::pause%]"
|
||||
}
|
||||
},
|
||||
"fan": {
|
||||
"fan": {
|
||||
"name": "[%key:component::fan::title%]"
|
||||
}
|
||||
},
|
||||
"light": {
|
||||
"ambient_light": {
|
||||
"name": "Ambient light"
|
||||
@@ -168,6 +182,647 @@
|
||||
}
|
||||
},
|
||||
"sensor": {
|
||||
"elapsed_time": {
|
||||
"name": "Elapsed time"
|
||||
},
|
||||
"remaining_time": {
|
||||
"name": "Remaining time"
|
||||
},
|
||||
"start_time": {
|
||||
"name": "Start in"
|
||||
},
|
||||
"energy_consumption": {
|
||||
"name": "Energy consumption"
|
||||
},
|
||||
"program_phase": {
|
||||
"name": "Program phase",
|
||||
"state": {
|
||||
"2nd_espresso": "2nd espresso coffee",
|
||||
"2nd_grinding": "2nd grinding",
|
||||
"2nd_pre_brewing": "2nd pre-brewing",
|
||||
"anti_crease": "Anti-crease",
|
||||
"blocked_brushes": "Brushes blocked",
|
||||
"blocked_drive_wheels": "Drive wheels blocked",
|
||||
"blocked_front_wheel": "Front wheel blocked",
|
||||
"cleaning": "Cleaning",
|
||||
"comfort_cooling": "Comfort cooling",
|
||||
"cooling_down": "Cooling down",
|
||||
"dirty_sensors": "Dirty sensors",
|
||||
"disinfecting": "Disinfecting",
|
||||
"dispensing": "Dispensing",
|
||||
"docked": "Docked",
|
||||
"door_open": "Door open",
|
||||
"drain": "Drain",
|
||||
"drying": "Drying",
|
||||
"dust_box_missing": "Missing dust box",
|
||||
"energy_save": "Energy save",
|
||||
"espresso": "Espresso coffee",
|
||||
"extra_dry": "Extra dry",
|
||||
"final_rinse": "Final rinse",
|
||||
"finished": "Finished",
|
||||
"freshen_up_and_moisten": "Freshen up & moisten",
|
||||
"going_to_target_area": "Going to target area",
|
||||
"grinding": "Grinding",
|
||||
"hand_iron": "Hand iron",
|
||||
"hand_iron_1": "Hand iron 1",
|
||||
"hand_iron_2": "Hand iron 2",
|
||||
"heating": "Heating",
|
||||
"heating_up": "Heating up",
|
||||
"heating_up_phase": "Heating up phase",
|
||||
"hot_milk": "Hot milk",
|
||||
"hygiene": "Hygiene",
|
||||
"interim_rinse": "Interim rinse",
|
||||
"keep_warm": "Keep warm",
|
||||
"keeping_warm": "Keeping warm",
|
||||
"machine_iron": "Machine iron",
|
||||
"main_dishwash": "Cleaning",
|
||||
"main_wash": "Main wash",
|
||||
"milk_foam": "Milk foam",
|
||||
"moisten": "Moisten",
|
||||
"motor_overload": "Check dust box and filter",
|
||||
"normal": "Normal",
|
||||
"normal_plus": "Normal plus",
|
||||
"not_running": "Not running",
|
||||
"pre_brewing": "Pre-brewing",
|
||||
"pre_dishwash": "Pre-cleaning",
|
||||
"pre_wash": "Pre-wash",
|
||||
"process_finished": "Process finished",
|
||||
"process_running": "Process running",
|
||||
"program_running": "Program running",
|
||||
"reactivating": "Reactivating",
|
||||
"remote_controlled": "Remote controlled",
|
||||
"returning": "Returning",
|
||||
"rinse": "Rinse",
|
||||
"rinse_hold": "Rinse hold",
|
||||
"rinse_out_lint": "Rinse out lint",
|
||||
"rinses": "Rinses",
|
||||
"safety_cooling": "Safety cooling",
|
||||
"slightly_dry": "Slightly dry",
|
||||
"slow_roasting": "Slow roasting",
|
||||
"smoothing": "Smoothing",
|
||||
"soak": "Soak",
|
||||
"spin": "Spin",
|
||||
"starch_stop": "Starch stop",
|
||||
"steam_reduction": "Steam reduction",
|
||||
"steam_smoothing": "Steam smoothing",
|
||||
"thermo_spin": "Thermo spin",
|
||||
"timed_drying": "Timed drying",
|
||||
"vacuum_cleaning": "Cleaning",
|
||||
"vacuum_cleaning_paused": "Cleaning paused",
|
||||
"vacuum_internal_fault": "Internal fault - reboot",
|
||||
"venting": "Venting",
|
||||
"waiting_for_start": "Waiting for start",
|
||||
"warm_air": "Warm air",
|
||||
"warm_cups_glasses": "Warm cups/glasses",
|
||||
"warm_dishes_plates": "Warm dishes/plates",
|
||||
"wheel_lifted": "Wheel lifted"
|
||||
}
|
||||
},
|
||||
"program_type": {
|
||||
"name": "Program type",
|
||||
"state": {
|
||||
"automatic_program": "Automatic program",
|
||||
"cleaning_care_program": "Cleaning/care program",
|
||||
"maintenance_program": "Maintenance program",
|
||||
"normal_operation_mode": "Normal operation mode",
|
||||
"own_program": "Own program"
|
||||
}
|
||||
},
|
||||
"program_id": {
|
||||
"name": "Program",
|
||||
"state": {
|
||||
"1_tray": "1 tray",
|
||||
"2_trays": "2 trays",
|
||||
"amaranth": "Amaranth",
|
||||
"apples_diced": "Apples (diced)",
|
||||
"apples_halved": "Apples (halved)",
|
||||
"apples_quartered": "Apples (quartered)",
|
||||
"apples_sliced": "Apples (sliced)",
|
||||
"apples_whole": "Apples (whole)",
|
||||
"appliance_rinse": "Appliance rinse",
|
||||
"appliance_settings": "Appliance settings menu",
|
||||
"apricots_halved_skinning": "Apricots (halved, skinning)",
|
||||
"apricots_halved_steam_cooking": "Apricots (halved, steam cooking)",
|
||||
"apricots_quartered": "Apricots (quartered)",
|
||||
"apricots_wedges": "Apricots (wedges)",
|
||||
"artichokes_large": "Artichokes large",
|
||||
"artichokes_medium": "Artichokes medium",
|
||||
"artichokes_small": "Artichokes small",
|
||||
"atlantic_catfish_fillet_1_cm": "Atlantic catfish (fillet, 1 cm)",
|
||||
"atlantic_catfish_fillet_2_cm": "Atlantic catfish (fillet, 2 cm)",
|
||||
"auto": "[%key:common::state::auto%]",
|
||||
"auto_roast": "Auto roast",
|
||||
"automatic": "Automatic",
|
||||
"automatic_plus": "Automatic plus",
|
||||
"baking_tray": "Baking tray",
|
||||
"barista_assistant": "BaristaAssistant",
|
||||
"basket_program": "Basket program",
|
||||
"basmati_rice_rapid_steam_cooking": "Basmati rice (rapid steam cooking)",
|
||||
"basmati_rice_steam_cooking": "Basmati rice (steam cooking)",
|
||||
"bed_linen": "Bed linen",
|
||||
"beef_casserole": "Beef casserole",
|
||||
"beef_tenderloin": "Beef tenderloin",
|
||||
"beef_tenderloin_medaillons_1_cm_low_temperature_cooking": "Beef tenderloin (medaillons, 1 cm, low-temperature cooking)",
|
||||
"beef_tenderloin_medaillons_1_cm_steam_cooking": "Beef tenderloin (medaillons, 1 cm, steam cooking)",
|
||||
"beef_tenderloin_medaillons_2_cm_low_temperature_cooking": "Beef tenderloin (medaillons, 2 cm, low-temperature cooking)",
|
||||
"beef_tenderloin_medaillons_2_cm_steam_cooking": "Beef tenderloin (medaillons, 2 cm, steam cooking)",
|
||||
"beef_tenderloin_medaillons_3_cm_low_temperature_cooking": "Beef tenderloin (medaillons, 3 cm, low-temperature cooking)",
|
||||
"beef_tenderloin_medaillons_3_cm_steam_cooking": "Beef tenderloin (medaillons, 3 cm, steam cooking)",
|
||||
"beetroot_whole_large": "Beetroot (whole, large)",
|
||||
"beetroot_whole_medium": "Beetroot (whole, medium)",
|
||||
"beetroot_whole_small": "Beetroot (whole, small)",
|
||||
"beluga_lentils": "Beluga lentils",
|
||||
"black_beans": "Black beans",
|
||||
"black_salsify_medium": "Black salsify (medium)",
|
||||
"black_salsify_thick": "Black salsify (thick)",
|
||||
"black_salsify_thin": "Black salsify (thin)",
|
||||
"black_tea": "Black tea",
|
||||
"blanching": "Blanching",
|
||||
"bologna_sausage": "Bologna sausage",
|
||||
"bottling": "Bottling",
|
||||
"bottling_hard": "Bottling (hard)",
|
||||
"bottling_medium": "Bottling (medium)",
|
||||
"bottling_soft": "Bottling (soft)",
|
||||
"bottom_heat": "Bottom heat",
|
||||
"bread_dumplings_boil_in_the_bag": "Bread dumplings (boil-in-the-bag)",
|
||||
"bread_dumplings_fresh": "Bread dumplings (fresh)",
|
||||
"brewing_unit_degrease": "Brewing unit degrease",
|
||||
"broad_beans": "Broad beans",
|
||||
"broccoli_florets_large": "Broccoli florets (large)",
|
||||
"broccoli_florets_medium": "Broccoli florets (medium)",
|
||||
"broccoli_florets_small": "Broccoli florets (small)",
|
||||
"broccoli_whole_large": "Broccoli (whole, large)",
|
||||
"broccoli_whole_medium": "Broccoli (whole, medium)",
|
||||
"broccoli_whole_small": "Broccoli (whole, small)",
|
||||
"brown_lentils": "Brown lentils",
|
||||
"bruehwurst_sausages": "Brühwurst sausages",
|
||||
"brussels_sprout": "Brussels sprout",
|
||||
"bulgur": "Bulgur",
|
||||
"bunched_carrots_cut_into_batons": "Bunched carrots (cut into batons)",
|
||||
"bunched_carrots_diced": "Bunched carrots (diced)",
|
||||
"bunched_carrots_halved": "Bunched carrots (halved)",
|
||||
"bunched_carrots_quartered": "Bunched carrots (quartered)",
|
||||
"bunched_carrots_sliced": "Bunched carrots (sliced)",
|
||||
"bunched_carrots_whole_large": "Bunched carrots (whole, large)",
|
||||
"bunched_carrots_whole_medium": "Bunched carrots (whole, medium)",
|
||||
"bunched_carrots_whole_small": "Bunched carrots (whole, small)",
|
||||
"cafe_au_lait": "Café au lait",
|
||||
"caffe_latte": "Caffè latte",
|
||||
"cappuccino": "Cappuccino",
|
||||
"cappuccino_italiano": "Cappuccino Italiano",
|
||||
"carp": "Carp",
|
||||
"carrots_cut_into_batons": "Carrots (cut into batons)",
|
||||
"carrots_diced": "Carrots (diced)",
|
||||
"carrots_halved": "Carrots (halved)",
|
||||
"carrots_quartered": "Carrots (quartered)",
|
||||
"carrots_sliced": "Carrots (sliced)",
|
||||
"carrots_whole_large": "Carrots (whole, large)",
|
||||
"carrots_whole_medium": "Carrots (whole, medium)",
|
||||
"carrots_whole_small": "Carrots (whole, small)",
|
||||
"cauliflower_florets_large": "Cauliflower florets (large)",
|
||||
"cauliflower_florets_medium": "Cauliflower florets (medium)",
|
||||
"cauliflower_florets_small": "Cauliflower florets (small)",
|
||||
"cauliflower_whole_large": "Cauliflower (whole, large)",
|
||||
"cauliflower_whole_medium": "Cauliflower (whole, medium)",
|
||||
"cauliflower_whole_small": "Cauliflower (whole, small)",
|
||||
"celeriac_cut_into_batons": "Celeriac (cut into batons)",
|
||||
"celeriac_diced": "Celeriac (diced)",
|
||||
"celeriac_sliced": "Celeriac (sliced)",
|
||||
"celery_pieces": "Celery (pieces)",
|
||||
"celery_sliced": "Celery (sliced)",
|
||||
"cep": "Cep",
|
||||
"chanterelle": "Chanterelle",
|
||||
"char": "Char",
|
||||
"check_appliance": "Check appliance",
|
||||
"cheesecake_one_large": "Cheesecake (one large)",
|
||||
"cheesecake_several_small": "Cheesecake (several small)",
|
||||
"chick_peas": "Chick peas",
|
||||
"chicken_tikka_masala_with_rice": "Chicken Tikka Masala with rice",
|
||||
"chinese_cabbage_cut": "Chinese cabbage (cut)",
|
||||
"chongming_rapid_steam_cooking": "Chongming (rapid steam cooking)",
|
||||
"chongming_steam_cooking": "Chongming (steam cooking)",
|
||||
"christmas_pudding_cooking": "Christmas pudding (cooking)",
|
||||
"christmas_pudding_heating": "Christmas pudding (heating)",
|
||||
"clean_machine": "Clean machine",
|
||||
"coalfish_fillet_2_cm": "Coalfish (fillet, 2 cm)",
|
||||
"coalfish_fillet_3_cm": "Coalfish (fillet, 3 cm)",
|
||||
"coalfish_piece": "Coalfish (piece)",
|
||||
"cockles": "Cockles",
|
||||
"codfish_fillet": "Codfish (fillet)",
|
||||
"codfish_piece": "Codfish (piece)",
|
||||
"coffee": "Coffee",
|
||||
"coffee_pot": "Coffee pot",
|
||||
"common_beans": "Common beans",
|
||||
"common_sole_fillet_1_cm": "Common sole (fillet, 1 cm)",
|
||||
"common_sole_fillet_2_cm": "Common sole (fillet, 2 cm)",
|
||||
"conventional_heat": "Conventional heat",
|
||||
"cook_bacon": "Cook bacon",
|
||||
"cool_air": "Cool air",
|
||||
"corn_on_the_cob": "Corn on the cob",
|
||||
"cottons": "Cottons",
|
||||
"cottons_eco": "Cottons ECO",
|
||||
"cottons_hygiene": "Cottons hygiene",
|
||||
"courgette_diced": "Courgette (diced)",
|
||||
"courgette_sliced": "Courgette (sliced)",
|
||||
"cranberries": "Cranberries",
|
||||
"crevettes": "Crevettes",
|
||||
"curtains": "Curtains",
|
||||
"dark_garments": "Dark garments",
|
||||
"decrystallise_honey": "Decrystallise honey",
|
||||
"defrost": "Defrost",
|
||||
"defrosting_with_microwave": "Defrosting with microwave",
|
||||
"defrosting_with_steam": "Defrosting with steam",
|
||||
"delicates": "Delicates",
|
||||
"denim": "Denim",
|
||||
"descale": "Descale",
|
||||
"descaling": "Appliance descaling",
|
||||
"dissolve_gelatine": "Dissolve gelatine",
|
||||
"down_duvets": "Down duvets",
|
||||
"down_filled_items": "Down-filled items",
|
||||
"drain_spin": "Drain/spin",
|
||||
"dutch_hash": "Dutch hash",
|
||||
"eco": "ECO",
|
||||
"eco_40_60": "ECO 40-60",
|
||||
"eco_fan_heat": "ECO fan heat",
|
||||
"eco_steam_cooking": "ECO steam cooking",
|
||||
"economy_grill": "Economy grill",
|
||||
"eggplant_diced": "Eggplant (diced)",
|
||||
"eggplant_sliced": "Eggplant (sliced)",
|
||||
"endive_halved": "Endive (halved)",
|
||||
"endive_quartered": "Endive (quartered)",
|
||||
"endive_strips": "Endive (strips)",
|
||||
"espresso": "Espresso",
|
||||
"espresso_macchiato": "Espresso macchiato",
|
||||
"express": "Express",
|
||||
"express_20": "Express 20'",
|
||||
"extra_quiet": "Extra quiet",
|
||||
"fan_grill": "Fan grill",
|
||||
"fan_plus": "Fan plus",
|
||||
"fennel_halved": "Fennel (halved)",
|
||||
"fennel_quartered": "Fennel (quartered)",
|
||||
"fennel_strips": "Fennel (strips)",
|
||||
"first_wash": "First wash",
|
||||
"flat_white": "Flat white",
|
||||
"freshen_up": "Freshen up",
|
||||
"fruit_tea": "Fruit tea",
|
||||
"full_grill": "Full grill",
|
||||
"gentle": "Gentle",
|
||||
"gentle_denim": "Gentle denim",
|
||||
"gentle_minimum_iron": "Gentle minimum iron",
|
||||
"gentle_smoothing": "Gentle smoothing",
|
||||
"german_turnip_cut_into_batons": "German turnip (cut into batons)",
|
||||
"german_turnip_sliced": "German turnip (sliced)",
|
||||
"gilt_head_bream_fillet": "Gilt-head bream (fillet)",
|
||||
"gilt_head_bream_whole": "Gilt-head bream (whole)",
|
||||
"glasses_warm": "Glasses warm",
|
||||
"gnocchi_fresh": "Gnocchi (fresh)",
|
||||
"goose_barnacles": "Goose barnacles",
|
||||
"gooseberries": "Gooseberries",
|
||||
"goulash_soup": "Goulash soup",
|
||||
"green_asparagus_medium": "Green asparagus (medium)",
|
||||
"green_asparagus_thick": "Green asparagus (thick)",
|
||||
"green_asparagus_thin": "Green asparagus (thin)",
|
||||
"green_beans_cut": "Green beans (cut)",
|
||||
"green_beans_whole": "Green beans (whole)",
|
||||
"green_cabbage_cut": "Green cabbage (cut)",
|
||||
"green_spelt_cracked": "Green spelt (cracked)",
|
||||
"green_spelt_whole": "Green spelt (whole)",
|
||||
"green_split_peas": "Green split peas",
|
||||
"green_tea": "Green tea",
|
||||
"greenage_plums": "Greenage plums",
|
||||
"halibut_fillet_2_cm": "Halibut (fillet, 2 cm)",
|
||||
"halibut_fillet_3_cm": "Halibut (fillet, 3 cm)",
|
||||
"heat_crockery": "Heat crockery",
|
||||
"heating_damp_flannels": "Heating damp flannels",
|
||||
"hens_eggs_size_l_hard": "Hen’s eggs (size „L“, hard)",
|
||||
"hens_eggs_size_l_medium": "Hen’s eggs (size „L“, medium)",
|
||||
"hens_eggs_size_l_soft": "Hen’s eggs (size „L“, soft)",
|
||||
"hens_eggs_size_m_hard": "Hen’s eggs (size „M“, hard)",
|
||||
"hens_eggs_size_m_medium": "Hen’s eggs (size „M“, medium)",
|
||||
"hens_eggs_size_m_soft": "Hen’s eggs (size „M“, soft)",
|
||||
"hens_eggs_size_s_hard": "Hen’s eggs (size „S“, hard)",
|
||||
"hens_eggs_size_s_medium": "Hen’s eggs (size „S“, medium)",
|
||||
"hens_eggs_size_s_soft": "Hen’s eggs (size „S“, soft)",
|
||||
"hens_eggs_size_xl_hard": "Hen’s eggs (size „XL“, hard)",
|
||||
"hens_eggs_size_xl_medium": "Hen’s eggs (size „XL“, medium)",
|
||||
"hens_eggs_size_xl_soft": "Hen’s eggs (size „XL“, soft)",
|
||||
"herbal_tea": "Herbal tea",
|
||||
"hot_milk": "Hot milk",
|
||||
"hot_water": "Hot water",
|
||||
"huanghuanian_rapid_steam_cooking": "Huanghuanian (rapid steam cooking)",
|
||||
"huanghuanian_steam_cooking": "Huanghuanian (steam cooking)",
|
||||
"hygiene": "Hygiene",
|
||||
"intensive": "Intensive",
|
||||
"intensive_bake": "Intensive bake",
|
||||
"iridescent_shark_fillet": "Iridescent shark (fillet)",
|
||||
"japanese_tea": "Japanese tea",
|
||||
"jasmine_rice_rapid_steam_cooking": "Jasmine rice (rapid steam cooking)",
|
||||
"jasmine_rice_steam_cooking": "Jasmine rice (steam cooking)",
|
||||
"jerusalem_artichoke_diced": "Jerusalem artichoke (diced)",
|
||||
"jerusalem_artichoke_sliced": "Jerusalem artichoke (sliced)",
|
||||
"kale_cut": "Kale (cut)",
|
||||
"kasseler_piece": "Kasseler (piece)",
|
||||
"kasseler_slice": "Kasseler (slice)",
|
||||
"keeping_warm": "Keeping warm",
|
||||
"king_prawns": "King prawns",
|
||||
"knuckle_of_pork_cured": "Knuckle of pork (cured)",
|
||||
"knuckle_of_pork_fresh": "Knuckle of pork (fresh)",
|
||||
"large_pillows": "Large pillows",
|
||||
"large_shrimps": "Large shrimps",
|
||||
"latte_macchiato": "Latte macchiato",
|
||||
"leek_pieces": "Leek (pieces)",
|
||||
"leek_rings": "Leek (rings)",
|
||||
"long_coffee": "Long coffee",
|
||||
"long_grain_rice_general_rapid_steam_cooking": "Long grain rice (general, rapid steam cooking)",
|
||||
"long_grain_rice_general_steam_cooking": "Long grain rice (general, steam cooking)",
|
||||
"maintenance": "Maintenance program",
|
||||
"make_yoghurt": "Make yoghurt",
|
||||
"mangel_cut": "Mangel (cut)",
|
||||
"meat_for_soup_back_or_top_rib": "Meat for soup (back or top rib)",
|
||||
"meat_for_soup_brisket": "Meat for soup (brisket)",
|
||||
"meat_for_soup_leg_steak": "Meat for soup (leg steak)",
|
||||
"meat_with_rice": "Meat with rice",
|
||||
"melt_chocolate": "Melt chocolate",
|
||||
"menu_cooking": "Menu cooking",
|
||||
"microwave": "Microwave",
|
||||
"milk_foam": "Milk foam",
|
||||
"milk_pipework_clean": "Milk pipework clean",
|
||||
"milk_pipework_rinse": "Milk pipework rinse",
|
||||
"millet": "Millet",
|
||||
"minimum_iron": "Minimum iron",
|
||||
"mirabelles": "Mirabelles",
|
||||
"moisture_plus_auto_roast": "Moisture plus + Auto roast",
|
||||
"moisture_plus_conventional_heat": "Moisture plus + Conventional heat",
|
||||
"moisture_plus_fan_plus": "Moisture plus + Fan plus",
|
||||
"moisture_plus_intensive_bake": "Moisture plus + Intensive bake",
|
||||
"mushrooms_diced": "Mushrooms (diced)",
|
||||
"mushrooms_halved": "Mushrooms (halved)",
|
||||
"mushrooms_quartered": "Mushrooms (quartered)",
|
||||
"mushrooms_sliced": "Mushrooms (sliced)",
|
||||
"mushrooms_whole": "Mushrooms (whole)",
|
||||
"mussels": "Mussels",
|
||||
"mussels_in_sauce": "Mussels in sauce",
|
||||
"nectarines_peaches_halved_skinning": "Nectarines/peaches (halved, skinning)",
|
||||
"nectarines_peaches_halved_steam_cooking": "Nectarines/peaches (halved, steam cooking)",
|
||||
"nectarines_peaches_quartered": "Nectarines/peaches (quartered)",
|
||||
"nectarines_peaches_wedges": "Nectarines/peaches (wedges)",
|
||||
"nile_perch_fillet_2_cm": "Nile perch (fillet, 2 cm)",
|
||||
"nile_perch_fillet_3_cm": "Nile perch (fillet, 3 cm)",
|
||||
"no_program": "No program",
|
||||
"normal": "[%key:common::state::normal%]",
|
||||
"oats_cracked": "Oats (cracked)",
|
||||
"oats_whole": "Oats (whole)",
|
||||
"outerwear": "Outerwear",
|
||||
"oyster_mushroom_diced": "Oyster mushroom (diced)",
|
||||
"oyster_mushroom_strips": "Oyster mushroom (strips)",
|
||||
"oyster_mushroom_whole": "Oyster mushroom (whole)",
|
||||
"parboiled_rice_rapid_steam_cooking": "Parboiled rice (rapid steam cooking)",
|
||||
"parboiled_rice_steam_cooking": "Parboiled rice (steam cooking)",
|
||||
"parisian_carrots_large": "Parisian carrots (large)",
|
||||
"parisian_carrots_medium": "Parisian carrots (medium)",
|
||||
"parisian_carrots_small": "Parisian carrots (small)",
|
||||
"parsley_root_cut_into_batons": "Parsley root (cut into batons)",
|
||||
"parsley_root_diced": "Parsley root (diced)",
|
||||
"parsley_root_sliced": "Parsley root (sliced)",
|
||||
"parsnip_cut_into_batons": "Parsnip (cut into batons)",
|
||||
"parsnip_diced": "Parsnip (diced)",
|
||||
"parsnip_sliced": "Parsnip (sliced)",
|
||||
"pasta_paela": "Pasta/Paela",
|
||||
"pears_halved": "Pears (halved)",
|
||||
"pears_quartered": "Pears (quartered)",
|
||||
"pears_to_cook_large_halved": "Pears to cook (large, halved)",
|
||||
"pears_to_cook_large_quartered": "Pears to cook (large, quartered)",
|
||||
"pears_to_cook_large_whole": "Pears to cook (large, whole)",
|
||||
"pears_to_cook_medium_halved": "Pears to cook (medium, halved)",
|
||||
"pears_to_cook_medium_quartered": "Pears to cook (medium, quartered)",
|
||||
"pears_to_cook_medium_whole": "Pears to cook (medium, whole)",
|
||||
"pears_to_cook_small_halved": "Pears to cook (small, halved)",
|
||||
"pears_to_cook_small_quartered": "Pears to cook (small, quartered)",
|
||||
"pears_to_cook_small_whole": "Pears to cook (small, whole)",
|
||||
"pears_wedges": "Pears (wedges)",
|
||||
"peas": "Peas",
|
||||
"pepper_diced": "Pepper (diced)",
|
||||
"pepper_halved": "Pepper (halved)",
|
||||
"pepper_quartered": "Pepper (quartered)",
|
||||
"pepper_strips": "Pepper (strips)",
|
||||
"perch_fillet_2_cm": "Perch (fillet, 2 cm)",
|
||||
"perch_fillet_3_cm": "Perch (fillet, 3 cm)",
|
||||
"perch_whole": "Perch (whole)",
|
||||
"pike_fillet": "Pike (fillet)",
|
||||
"pike_piece": "Pike (piece)",
|
||||
"pillows": "Pillows",
|
||||
"pinto_beans": "Pinto beans",
|
||||
"plaice_fillet_1_cm": "Plaice (fillet, 1 cm)",
|
||||
"plaice_fillet_2_cm": "Plaice (fillet, 2 cm)",
|
||||
"plaice_whole_2_cm": "Plaice (whole, 2 cm)",
|
||||
"plaice_whole_3_cm": "Plaice (whole, 3 cm)",
|
||||
"plaice_whole_4_cm": "Plaice (whole, 4 cm)",
|
||||
"plums_halved": "Plums (halved)",
|
||||
"plums_whole": "Plums (whole)",
|
||||
"pointed_cabbage_cut": "Pointed cabbage (cut)",
|
||||
"polenta": "Polenta",
|
||||
"polenta_swiss_style_coarse_polenta": "Polenta Swiss style (coarse polenta)",
|
||||
"polenta_swiss_style_fine_polenta": "Polenta Swiss style (fine polenta)",
|
||||
"polenta_swiss_style_medium_polenta": "Polenta Swiss style (medium polenta)",
|
||||
"popcorn": "Popcorn",
|
||||
"pork_tenderloin_medaillons_3_cm": "Pork tenderloin (medaillons, 3 cm)",
|
||||
"pork_tenderloin_medaillons_4_cm": "Pork tenderloin (medaillons, 4 cm)",
|
||||
"pork_tenderloin_medaillons_5_cm": "Pork tenderloin (medaillons, 5 cm)",
|
||||
"potato_dumplings_half_half_boil_in_bag": "Potato dumplings (half/half, boil-in-bag)",
|
||||
"potato_dumplings_half_half_deep_frozen": "Potato dumplings (half/half, deep-frozen)",
|
||||
"potato_dumplings_raw_boil_in_bag": "Potato dumplings (raw, boil-in-bag)",
|
||||
"potato_dumplings_raw_deep_frozen": "Potato dumplings (raw, deep-frozen)",
|
||||
"potatoes_floury_diced": "Potatoes (floury, diced)",
|
||||
"potatoes_floury_halved": "Potatoes (floury, halved)",
|
||||
"potatoes_floury_quartered": "Potatoes (floury, quartered)",
|
||||
"potatoes_floury_whole_large": "Potatoes (floury, whole, large)",
|
||||
"potatoes_floury_whole_medium": "Potatoes (floury, whole, medium)",
|
||||
"potatoes_floury_whole_small": "Potatoes (floury, whole, small)",
|
||||
"potatoes_in_the_skin_floury_large": "Potatoes (in the skin, floury, large)",
|
||||
"potatoes_in_the_skin_floury_medium": "Potatoes (in the skin, floury, medium)",
|
||||
"potatoes_in_the_skin_floury_small": "Potatoes (in the skin, floury, small)",
|
||||
"potatoes_in_the_skin_mainly_waxy_large": "Potatoes (in the skin, mainly waxy, large)",
|
||||
"potatoes_in_the_skin_mainly_waxy_medium": "Potatoes (in the skin, mainly waxy, medium)",
|
||||
"potatoes_in_the_skin_mainly_waxy_small": "Potatoes (in the skin, mainly waxy, small)",
|
||||
"potatoes_in_the_skin_waxy_large_rapid_steam_cooking": "Potatoes (in the skin, waxy, large, rapid steam cooking)",
|
||||
"potatoes_in_the_skin_waxy_large_steam_cooking": "Potatoes (in the skin, waxy, large, steam cooking)",
|
||||
"potatoes_in_the_skin_waxy_medium_rapid_steam_cooking": "Potatoes (in the skin, waxy, medium, rapid steam cooking)",
|
||||
"potatoes_in_the_skin_waxy_medium_steam_cooking": "Potatoes (in the skin, waxy, medium, steam cooking)",
|
||||
"potatoes_in_the_skin_waxy_small_rapid_steam_cooking": "Potatoes (in the skin, waxy, small, rapid steam cooking)",
|
||||
"potatoes_in_the_skin_waxy_small_steam_cooking": "Potatoes (in the skin, waxy, small, steam cooking)",
|
||||
"potatoes_mainly_waxy_diced": "Potatoes (mainly waxy, diced)",
|
||||
"potatoes_mainly_waxy_halved": "Potatoes (mainly waxy, halved)",
|
||||
"potatoes_mainly_waxy_large": "Potatoes (mainly waxy, large)",
|
||||
"potatoes_mainly_waxy_medium": "Potatoes (mainly waxy, medium)",
|
||||
"potatoes_mainly_waxy_quartered": "Potatoes (mainly waxy, quartered)",
|
||||
"potatoes_mainly_waxy_small": "Potatoes (mainly waxy, small)",
|
||||
"potatoes_waxy_diced": "Potatoes (waxy, diced)",
|
||||
"potatoes_waxy_halved": "Potatoes (waxy, halved)",
|
||||
"potatoes_waxy_quartered": "Potatoes (waxy, quartered)",
|
||||
"potatoes_waxy_whole_large": "Potatoes (waxy, whole, large)",
|
||||
"potatoes_waxy_whole_medium": "Potatoes (waxy, whole, medium)",
|
||||
"potatoes_waxy_whole_small": "Potatoes (waxy, whole, small)",
|
||||
"poularde_breast": "Poularde breast",
|
||||
"poularde_whole": "Poularde (whole)",
|
||||
"power_wash": "PowerWash",
|
||||
"prawns": "Prawns",
|
||||
"proofing": "Proofing",
|
||||
"prove_15_min": "Prove for 15 min",
|
||||
"prove_30_min": "Prove for 30 min",
|
||||
"prove_45_min": "Prove for 45 min",
|
||||
"prove_dough": "Prove dough",
|
||||
"pumpkin_diced": "Pumpkin (diced)",
|
||||
"pumpkin_soup": "Pumpkin soup",
|
||||
"quick_mw": "Quick MW",
|
||||
"quick_power_wash": "QuickPowerWash",
|
||||
"quinces_diced": "Quinces (diced)",
|
||||
"quinoa": "Quinoa",
|
||||
"rapid_steam_cooking": "Rapid steam cooking",
|
||||
"ravioli_fresh": "Ravioli (fresh)",
|
||||
"razor_clams_large": "Razor clams (large)",
|
||||
"razor_clams_medium": "Razor clams (medium)",
|
||||
"razor_clams_small": "Razor clams (small)",
|
||||
"red_beans": "Red beans",
|
||||
"red_cabbage_cut": "Red cabbage (cut)",
|
||||
"red_lentils": "Red lentils",
|
||||
"red_snapper_fillet_2_cm": "Red snapper (fillet, 2 cm)",
|
||||
"red_snapper_fillet_3_cm": "Red snapper (fillet, 3 cm)",
|
||||
"redfish_fillet_2_cm": "Redfish (fillet, 2 cm)",
|
||||
"redfish_fillet_3_cm": "Redfish (fillet, 3 cm)",
|
||||
"redfish_piece": "Redfish (piece)",
|
||||
"reheating_with_microwave": "Reheating with microwave",
|
||||
"reheating_with_steam": "Reheating with steam",
|
||||
"rhubarb_chunks": "Rhubarb chunks",
|
||||
"rice_pudding_rapid_steam_cooking": "Rice pudding (rapid steam cooking)",
|
||||
"rice_pudding_steam_cooking": "Rice pudding (steam cooking)",
|
||||
"rinse": "Rinse",
|
||||
"rinse_out_lint": "Rinse out lint",
|
||||
"risotto": "Risotto",
|
||||
"ristretto": "Ristretto",
|
||||
"romanesco_florets_large": "Romanesco florets (large)",
|
||||
"romanesco_florets_medium": "Romanesco florets (medium)",
|
||||
"romanesco_florets_small": "Romanesco florets (small)",
|
||||
"romanesco_whole_large": "Romanesco (whole, large)",
|
||||
"romanesco_whole_medium": "Romanesco (whole, medium)",
|
||||
"romanesco_whole_small": "Romanesco (whole, small)",
|
||||
"round_grain_rice_general_rapid_steam_cooking": "Round grain rice (general, rapid steam cooking)",
|
||||
"round_grain_rice_general_steam_cooking": "Round grain rice (general, steam cooking)",
|
||||
"runner_beans_pieces": "Runner beans (pieces)",
|
||||
"runner_beans_sliced": "Runner beans (sliced)",
|
||||
"runner_beans_whole": "Runner beans (whole)",
|
||||
"rye_cracked": "Rye (cracked)",
|
||||
"rye_whole": "Rye (whole)",
|
||||
"salmon_fillet_2_cm": "Salmon (fillet, 2 cm)",
|
||||
"salmon_fillet_3_cm": "Salmon (fillet, 3 cm)",
|
||||
"salmon_piece": "Salmon (piece)",
|
||||
"salmon_steak_2_cm": "Salmon (steak, 2 cm)",
|
||||
"salmon_steak_3_cm": "Salmon (steak, 3 cm)",
|
||||
"salmon_trout": "Salmon trout",
|
||||
"saucisson": "Saucisson",
|
||||
"savoy_cabbage_cut": "Savoy cabbage (cut)",
|
||||
"scallops": "Scallops",
|
||||
"schupfnudeln_potato_noodels": "Schupfnudeln (potato noodels)",
|
||||
"sea_devil_fillet_3_cm": "Sea devil (fillet, 3 cm)",
|
||||
"sea_devil_fillet_4_cm": "Sea devil (fillet, 4 cm)",
|
||||
"separate_rinse_starch": "Separate rinse/starch",
|
||||
"sheyang_rapid_steam_cooking": "Sheyang (rapid steam cooking)",
|
||||
"sheyang_steam_cooking": "Sheyang (steam cooking)",
|
||||
"shirts": "Shirts",
|
||||
"silent": "Silent",
|
||||
"silks": "Silks",
|
||||
"silks_handcare": "Silks handcare",
|
||||
"silverside_10_cm": "Silverside (10 cm)",
|
||||
"silverside_5_cm": "Silverside (5 cm)",
|
||||
"silverside_7_5_cm": "Silverside (7.5 cm)",
|
||||
"simiao_rapid_steam_cooking": "Simiao (rapid steam cooking)",
|
||||
"simiao_steam_cooking": "Simiao (steam cooking)",
|
||||
"small_shrimps": "Small shrimps",
|
||||
"smoothing": "Smoothing",
|
||||
"snow_pea": "Snow pea",
|
||||
"soak": "Soak",
|
||||
"solar_save": "SolarSave",
|
||||
"soup_hen": "Soup hen",
|
||||
"sour_cherries": "Sour cherries",
|
||||
"sous_vide": "Sous-vide",
|
||||
"spaetzle_fresh": "Spätzle (fresh)",
|
||||
"spelt_cracked": "Spelt (cracked)",
|
||||
"spelt_whole": "Spelt (whole)",
|
||||
"spinach": "Spinach",
|
||||
"sportswear": "Sportswear",
|
||||
"spot": "Spot",
|
||||
"standard_pillows": "Standard pillows",
|
||||
"starch": "Starch",
|
||||
"steam_care": "Steam care",
|
||||
"steam_cooking": "Steam cooking",
|
||||
"steam_smoothing": "Steam smoothing",
|
||||
"stuffed_cabbage": "Stuffed cabbage",
|
||||
"sweat_onions": "Sweat onions",
|
||||
"swede_cut_into_batons": "Swede (cut into batons)",
|
||||
"swede_diced": "Swede (diced)",
|
||||
"sweet_cheese_dumplings": "Sweet cheese dumplings",
|
||||
"sweet_cherries": "Sweet cherries",
|
||||
"swiss_toffee_cream_100_ml": "Swiss toffee cream (100 ml)",
|
||||
"swiss_toffee_cream_150_ml": "Swiss toffee cream (150 ml)",
|
||||
"tagliatelli_fresh": "Tagliatelli (fresh)",
|
||||
"tall_items": "Tall items",
|
||||
"teltow_turnip_diced": "Teltow turnip (diced)",
|
||||
"teltow_turnip_sliced": "Teltow turnip (sliced)",
|
||||
"tilapia_fillet_1_cm": "Tilapia (fillet, 1 cm)",
|
||||
"tilapia_fillet_2_cm": "Tilapia (fillet, 2 cm)",
|
||||
"toffee_date_dessert_one_large": "Toffee-date dessert (one large)",
|
||||
"toffee_date_dessert_several_small": "Toffee-date dessert (several small)",
|
||||
"top_heat": "Top heat",
|
||||
"tortellini_fresh": "Tortellini (fresh)",
|
||||
"trainers": "Trainers",
|
||||
"treacle_sponge_pudding_one_large": "Treacle sponge pudding (one large)",
|
||||
"treacle_sponge_pudding_several_small": "Treacle sponge pudding (several small)",
|
||||
"trout": "Trout",
|
||||
"tuna_fillet_2_cm": "Tuna (fillet, 2 cm)",
|
||||
"tuna_fillet_3_cm": "Tuna (fillet, 3 cm)",
|
||||
"tuna_steak": "Tuna (steak)",
|
||||
"turbo": "Turbo",
|
||||
"turbot_fillet_2_cm": "Turbot (fillet, 2 cm)",
|
||||
"turbot_fillet_3_cm": "Turbot (fillet, 3 cm)",
|
||||
"turkey_breast": "Turkey breast",
|
||||
"uonumma_koshihikari_rapid_steam_cooking": "Uonumma Koshihikari (rapid steam cooking)",
|
||||
"uonumma_koshihikari_steam_cooking": "Uonumma Koshihikari (steam cooking)",
|
||||
"veal_fillet_medaillons_1_cm": "Veal fillet (medaillons, 1 cm)",
|
||||
"veal_fillet_medaillons_2_cm": "Veal fillet (medaillons, 2 cm)",
|
||||
"veal_fillet_medaillons_3_cm": "Veal fillet (medaillons, 3 cm)",
|
||||
"veal_fillet_whole": "Veal fillet (whole)",
|
||||
"veal_sausages": "Veal sausages",
|
||||
"venus_clams": "Venus clams",
|
||||
"very_hot_water": "Very hot water",
|
||||
"viennese_silverside": "Viennese silverside",
|
||||
"warm_air": "Warm air",
|
||||
"wheat_cracked": "Wheat (cracked)",
|
||||
"wheat_whole": "Wheat (whole)",
|
||||
"white_asparagus_medium": "White asparagus (medium)",
|
||||
"white_asparagus_thick": "White asparagus (thick)",
|
||||
"white_asparagus_thin": "White asparagus (thin)",
|
||||
"white_beans": "White beans",
|
||||
"white_tea": "White tea",
|
||||
"whole_ham_reheating": "Whole ham (reheating)",
|
||||
"whole_ham_steam_cooking": "Whole ham (steam cooking)",
|
||||
"wholegrain_rice": "Wholegrain rice",
|
||||
"wild_rice": "Wild rice",
|
||||
"woollens": "Woollens",
|
||||
"woollens_handcare": "Woollens hand care",
|
||||
"wuchang_rapid_steam_cooking": "Wuchang (rapid steam cooking)",
|
||||
"wuchang_steam_cooking": "Wuchang (steam cooking)",
|
||||
"yam_halved": "Yam (halved)",
|
||||
"yam_quartered": "Yam (quartered)",
|
||||
"yam_strips": "Yam (strips)",
|
||||
"yeast_dumplings_fresh": "Yeast dumplings (fresh)",
|
||||
"yellow_beans_cut": "Yellow beans (cut)",
|
||||
"yellow_beans_whole": "Yellow beans (whole)",
|
||||
"yellow_split_peas": "Yellow split peas",
|
||||
"zander_fillet": "Zander (fillet)"
|
||||
}
|
||||
},
|
||||
"spin_speed": {
|
||||
"name": "Spin speed"
|
||||
},
|
||||
"status": {
|
||||
"name": "Status",
|
||||
"state": {
|
||||
@@ -190,6 +845,21 @@
|
||||
"superheating": "Superheating",
|
||||
"waiting_to_start": "Waiting to start"
|
||||
}
|
||||
},
|
||||
"temperature_zone_2": {
|
||||
"name": "Temperature zone 2"
|
||||
},
|
||||
"temperature_zone_3": {
|
||||
"name": "Temperature zone 3"
|
||||
},
|
||||
"water_consumption": {
|
||||
"name": "Water consumption"
|
||||
},
|
||||
"core_temperature": {
|
||||
"name": "Core temperature"
|
||||
},
|
||||
"core_target_temperature": {
|
||||
"name": "Core target temperature"
|
||||
}
|
||||
},
|
||||
"switch": {
|
||||
|
||||
@@ -14,7 +14,7 @@ from homeassistant.exceptions import ConfigEntryNotReady
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
|
||||
from .const import CLOUD, CONNECTION_TYPE, DOMAIN, LOCAL
|
||||
from .coordinator import MillDataUpdateCoordinator
|
||||
from .coordinator import MillDataUpdateCoordinator, MillHistoricDataUpdateCoordinator
|
||||
|
||||
PLATFORMS = [Platform.CLIMATE, Platform.NUMBER, Platform.SENSOR]
|
||||
|
||||
@@ -41,6 +41,12 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
key = entry.data[CONF_USERNAME]
|
||||
conn_type = CLOUD
|
||||
|
||||
historic_data_coordinator = MillHistoricDataUpdateCoordinator(
|
||||
hass,
|
||||
mill_data_connection=mill_data_connection,
|
||||
)
|
||||
historic_data_coordinator.async_add_listener(lambda: None)
|
||||
await historic_data_coordinator.async_config_entry_first_refresh()
|
||||
try:
|
||||
if not await mill_data_connection.connect():
|
||||
raise ConfigEntryNotReady
|
||||
|
||||
@@ -4,18 +4,30 @@ from __future__ import annotations
|
||||
|
||||
from datetime import timedelta
|
||||
import logging
|
||||
from typing import cast
|
||||
|
||||
from mill import Mill
|
||||
from mill import Heater, Mill
|
||||
from mill_local import Mill as MillLocal
|
||||
|
||||
from homeassistant.components.recorder import get_instance
|
||||
from homeassistant.components.recorder.models import StatisticData, StatisticMetaData
|
||||
from homeassistant.components.recorder.statistics import (
|
||||
async_add_external_statistics,
|
||||
get_last_statistics,
|
||||
statistics_during_period,
|
||||
)
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import UnitOfEnergy
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator
|
||||
from homeassistant.util import dt as dt_util, slugify
|
||||
|
||||
from .const import DOMAIN
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
TWO_YEARS = 2 * 365 * 24
|
||||
|
||||
|
||||
class MillDataUpdateCoordinator(DataUpdateCoordinator):
|
||||
"""Class to manage fetching Mill data."""
|
||||
@@ -40,3 +52,104 @@ class MillDataUpdateCoordinator(DataUpdateCoordinator):
|
||||
update_method=mill_data_connection.fetch_heater_and_sensor_data,
|
||||
update_interval=update_interval,
|
||||
)
|
||||
|
||||
|
||||
class MillHistoricDataUpdateCoordinator(DataUpdateCoordinator):
|
||||
"""Class to manage fetching Mill historic data."""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
hass: HomeAssistant,
|
||||
*,
|
||||
mill_data_connection: Mill,
|
||||
) -> None:
|
||||
"""Initialize global Mill data updater."""
|
||||
self.mill_data_connection = mill_data_connection
|
||||
|
||||
super().__init__(
|
||||
hass,
|
||||
_LOGGER,
|
||||
name="MillHistoricDataUpdateCoordinator",
|
||||
)
|
||||
|
||||
async def _async_update_data(self):
|
||||
"""Update historic data via API."""
|
||||
now = dt_util.utcnow()
|
||||
self.update_interval = (
|
||||
timedelta(hours=1) + now.replace(minute=1, second=0) - now
|
||||
)
|
||||
|
||||
recoder_instance = get_instance(self.hass)
|
||||
for dev_id, heater in self.mill_data_connection.devices.items():
|
||||
if not isinstance(heater, Heater):
|
||||
continue
|
||||
statistic_id = f"{DOMAIN}:energy_{slugify(dev_id)}"
|
||||
|
||||
last_stats = await recoder_instance.async_add_executor_job(
|
||||
get_last_statistics, self.hass, 1, statistic_id, True, set()
|
||||
)
|
||||
if not last_stats or not last_stats.get(statistic_id):
|
||||
hourly_data = (
|
||||
await self.mill_data_connection.fetch_historic_energy_usage(
|
||||
dev_id, n_days=TWO_YEARS
|
||||
)
|
||||
)
|
||||
hourly_data = dict(sorted(hourly_data.items(), key=lambda x: x[0]))
|
||||
_sum = 0.0
|
||||
last_stats_time = None
|
||||
else:
|
||||
hourly_data = (
|
||||
await self.mill_data_connection.fetch_historic_energy_usage(
|
||||
dev_id,
|
||||
n_days=(
|
||||
now
|
||||
- dt_util.utc_from_timestamp(
|
||||
last_stats[statistic_id][0]["start"]
|
||||
)
|
||||
).days
|
||||
+ 2,
|
||||
)
|
||||
)
|
||||
if not hourly_data:
|
||||
continue
|
||||
hourly_data = dict(sorted(hourly_data.items(), key=lambda x: x[0]))
|
||||
start_time = next(iter(hourly_data))
|
||||
stats = await recoder_instance.async_add_executor_job(
|
||||
statistics_during_period,
|
||||
self.hass,
|
||||
start_time,
|
||||
None,
|
||||
{statistic_id},
|
||||
"hour",
|
||||
None,
|
||||
{"sum", "state"},
|
||||
)
|
||||
stat = stats[statistic_id][0]
|
||||
|
||||
_sum = cast(float, stat["sum"]) - cast(float, stat["state"])
|
||||
last_stats_time = dt_util.utc_from_timestamp(stat["start"])
|
||||
|
||||
statistics = []
|
||||
|
||||
for start, state in hourly_data.items():
|
||||
if state is None:
|
||||
continue
|
||||
if (last_stats_time and start < last_stats_time) or start > now:
|
||||
continue
|
||||
_sum += state
|
||||
statistics.append(
|
||||
StatisticData(
|
||||
start=start,
|
||||
state=state,
|
||||
sum=_sum,
|
||||
)
|
||||
)
|
||||
metadata = StatisticMetaData(
|
||||
has_mean=False,
|
||||
has_sum=True,
|
||||
name=f"{heater.name}",
|
||||
source=DOMAIN,
|
||||
statistic_id=statistic_id,
|
||||
unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
|
||||
)
|
||||
async_add_external_statistics(self.hass, metadata, statistics)
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
{
|
||||
"domain": "mill",
|
||||
"name": "Mill",
|
||||
"after_dependencies": ["recorder"],
|
||||
"codeowners": ["@danielhiversen"],
|
||||
"config_flow": true,
|
||||
"documentation": "https://www.home-assistant.io/integrations/mill",
|
||||
|
||||
@@ -109,7 +109,7 @@
|
||||
"state": {
|
||||
"charging": "[%key:common::state::charging%]",
|
||||
"error": "[%key:common::state::error%]",
|
||||
"fault": "Fault",
|
||||
"fault": "[%key:common::state::fault%]",
|
||||
"invalid": "Invalid",
|
||||
"no_ev_connected": "No EV connected",
|
||||
"suspended": "Suspended"
|
||||
|
||||
@@ -6,5 +6,5 @@
|
||||
"dependencies": ["bluetooth_adapters"],
|
||||
"documentation": "https://www.home-assistant.io/integrations/private_ble_device",
|
||||
"iot_class": "local_push",
|
||||
"requirements": ["bluetooth-data-tools==1.28.0"]
|
||||
"requirements": ["bluetooth-data-tools==1.28.1"]
|
||||
}
|
||||
|
||||
@@ -156,10 +156,10 @@
|
||||
"ready": "Ready",
|
||||
"charging": "[%key:common::state::charging%]",
|
||||
"mop_washing": "Washing mop",
|
||||
"self_clean_cleaning": "Self clean cleaning",
|
||||
"self_clean_deep_cleaning": "Self clean deep cleaning",
|
||||
"self_clean_rinsing": "Self clean rinsing",
|
||||
"self_clean_dehydrating": "Self clean drying",
|
||||
"self_clean_cleaning": "Self-clean cleaning",
|
||||
"self_clean_deep_cleaning": "Self-clean deep cleaning",
|
||||
"self_clean_rinsing": "Self-clean rinsing",
|
||||
"self_clean_dehydrating": "Self-clean drying",
|
||||
"drying": "Drying",
|
||||
"ventilating": "Ventilating",
|
||||
"reserving": "Reserving",
|
||||
|
||||
@@ -2,6 +2,8 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from pyschlage.lock import AUTO_LOCK_TIMES
|
||||
|
||||
from homeassistant.components.select import SelectEntity, SelectEntityDescription
|
||||
from homeassistant.const import EntityCategory
|
||||
from homeassistant.core import HomeAssistant
|
||||
@@ -15,16 +17,7 @@ _DESCRIPTIONS = (
|
||||
key="auto_lock_time",
|
||||
translation_key="auto_lock_time",
|
||||
entity_category=EntityCategory.CONFIG,
|
||||
# valid values are from Schlage UI and validated by pyschlage
|
||||
options=[
|
||||
"0",
|
||||
"15",
|
||||
"30",
|
||||
"60",
|
||||
"120",
|
||||
"240",
|
||||
"300",
|
||||
],
|
||||
options=[str(n) for n in AUTO_LOCK_TIMES],
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
@@ -36,6 +36,7 @@
|
||||
"name": "Auto-lock time",
|
||||
"state": {
|
||||
"0": "[%key:common::state::disabled%]",
|
||||
"5": "5 seconds",
|
||||
"15": "15 seconds",
|
||||
"30": "30 seconds",
|
||||
"60": "1 minute",
|
||||
|
||||
@@ -293,6 +293,7 @@ async def _async_setup_rpc_entry(hass: HomeAssistant, entry: ShellyConfigEntry)
|
||||
translation_key="firmware_unsupported",
|
||||
translation_placeholders={"device": entry.title},
|
||||
)
|
||||
runtime_data.rpc_zigbee_enabled = device.zigbee_enabled
|
||||
runtime_data.rpc_supports_scripts = await device.supports_scripts()
|
||||
if runtime_data.rpc_supports_scripts:
|
||||
runtime_data.rpc_script_events = await get_rpc_scripts_event_types(
|
||||
|
||||
@@ -475,6 +475,8 @@ class OptionsFlowHandler(OptionsFlow):
|
||||
return self.async_abort(reason="cannot_connect")
|
||||
if not supports_scripts:
|
||||
return self.async_abort(reason="no_scripts_support")
|
||||
if self.config_entry.runtime_data.rpc_zigbee_enabled:
|
||||
return self.async_abort(reason="zigbee_enabled")
|
||||
|
||||
if user_input is not None:
|
||||
return self.async_create_entry(title="", data=user_input)
|
||||
|
||||
@@ -90,6 +90,7 @@ class ShellyEntryData:
|
||||
rpc_poll: ShellyRpcPollingCoordinator | None = None
|
||||
rpc_script_events: dict[int, list[str]] | None = None
|
||||
rpc_supports_scripts: bool | None = None
|
||||
rpc_zigbee_enabled: bool | None = None
|
||||
|
||||
|
||||
type ShellyConfigEntry = ConfigEntry[ShellyEntryData]
|
||||
@@ -717,7 +718,10 @@ class ShellyRpcCoordinator(ShellyCoordinatorBase[RpcDevice]):
|
||||
is updated.
|
||||
"""
|
||||
if not self.sleep_period:
|
||||
if self.config_entry.runtime_data.rpc_supports_scripts:
|
||||
if (
|
||||
self.config_entry.runtime_data.rpc_supports_scripts
|
||||
and not self.config_entry.runtime_data.rpc_zigbee_enabled
|
||||
):
|
||||
await self._async_connect_ble_scanner()
|
||||
else:
|
||||
await self._async_setup_outbound_websocket()
|
||||
|
||||
@@ -104,7 +104,8 @@
|
||||
},
|
||||
"abort": {
|
||||
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
|
||||
"no_scripts_support": "Device does not support scripts and cannot be used as a Bluetooth scanner."
|
||||
"no_scripts_support": "Device does not support scripts and cannot be used as a Bluetooth scanner.",
|
||||
"zigbee_enabled": "Device with Zigbee enabled cannot be used as a Bluetooth scanner. Please disable it to use the device as a Bluetooth scanner."
|
||||
}
|
||||
},
|
||||
"selector": {
|
||||
|
||||
@@ -24,6 +24,7 @@ from pysmartthings import (
|
||||
SmartThingsSinkError,
|
||||
Status,
|
||||
)
|
||||
from pysmartthings.models import HealthStatus
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import (
|
||||
@@ -79,6 +80,7 @@ class FullDevice:
|
||||
|
||||
device: Device
|
||||
status: dict[str, ComponentStatus]
|
||||
online: bool
|
||||
|
||||
|
||||
type SmartThingsConfigEntry = ConfigEntry[SmartThingsData]
|
||||
@@ -192,7 +194,10 @@ async def async_setup_entry(hass: HomeAssistant, entry: SmartThingsConfigEntry)
|
||||
devices = await client.get_devices()
|
||||
for device in devices:
|
||||
status = process_status(await client.get_device_status(device.device_id))
|
||||
device_status[device.device_id] = FullDevice(device=device, status=status)
|
||||
online = await client.get_device_health(device.device_id)
|
||||
device_status[device.device_id] = FullDevice(
|
||||
device=device, status=status, online=online.state == HealthStatus.ONLINE
|
||||
)
|
||||
except SmartThingsAuthenticationFailedError as err:
|
||||
raise ConfigEntryAuthFailed from err
|
||||
|
||||
|
||||
@@ -10,8 +10,10 @@ from pysmartthings import (
|
||||
Command,
|
||||
ComponentStatus,
|
||||
DeviceEvent,
|
||||
DeviceHealthEvent,
|
||||
SmartThings,
|
||||
)
|
||||
from pysmartthings.models import HealthStatus
|
||||
|
||||
from homeassistant.helpers.device_registry import DeviceInfo
|
||||
from homeassistant.helpers.entity import Entity
|
||||
@@ -48,6 +50,7 @@ class SmartThingsEntity(Entity):
|
||||
self._attr_device_info = DeviceInfo(
|
||||
identifiers={(DOMAIN, device.device.device_id)},
|
||||
)
|
||||
self._attr_available = device.online
|
||||
|
||||
async def async_added_to_hass(self) -> None:
|
||||
"""Subscribe to updates."""
|
||||
@@ -61,8 +64,17 @@ class SmartThingsEntity(Entity):
|
||||
self._update_handler,
|
||||
)
|
||||
)
|
||||
self.async_on_remove(
|
||||
self.client.add_device_availability_event_listener(
|
||||
self.device.device.device_id, self._availability_handler
|
||||
)
|
||||
)
|
||||
self._update_attr()
|
||||
|
||||
def _availability_handler(self, event: DeviceHealthEvent) -> None:
|
||||
self._attr_available = event.status != HealthStatus.OFFLINE
|
||||
self.async_write_ha_state()
|
||||
|
||||
def _update_handler(self, event: DeviceEvent) -> None:
|
||||
self._internal_state[event.capability][event.attribute].value = event.value
|
||||
self._internal_state[event.capability][event.attribute].data = event.data
|
||||
|
||||
@@ -30,5 +30,5 @@
|
||||
"iot_class": "cloud_push",
|
||||
"loggers": ["pysmartthings"],
|
||||
"quality_scale": "bronze",
|
||||
"requirements": ["pysmartthings==3.0.5"]
|
||||
"requirements": ["pysmartthings==3.2.0"]
|
||||
}
|
||||
|
||||
@@ -37,7 +37,7 @@ rules:
|
||||
docs-installation-parameters:
|
||||
status: exempt
|
||||
comment: No parameters needed during installation
|
||||
entity-unavailable: todo
|
||||
entity-unavailable: done
|
||||
integration-owner: done
|
||||
log-when-unavailable: todo
|
||||
parallel-updates: todo
|
||||
|
||||
@@ -11,6 +11,9 @@ import logging
|
||||
import os
|
||||
from pathlib import Path
|
||||
import smtplib
|
||||
import socket
|
||||
import ssl
|
||||
from typing import Any
|
||||
|
||||
import voluptuous as vol
|
||||
|
||||
@@ -113,19 +116,19 @@ class MailNotificationService(BaseNotificationService):
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
server,
|
||||
port,
|
||||
timeout,
|
||||
sender,
|
||||
encryption,
|
||||
username,
|
||||
password,
|
||||
recipients,
|
||||
sender_name,
|
||||
debug,
|
||||
verify_ssl,
|
||||
ssl_context,
|
||||
):
|
||||
server: str,
|
||||
port: int,
|
||||
timeout: int,
|
||||
sender: str,
|
||||
encryption: str,
|
||||
username: str | None,
|
||||
password: str | None,
|
||||
recipients: list[str],
|
||||
sender_name: str | None,
|
||||
debug: bool,
|
||||
verify_ssl: bool,
|
||||
ssl_context: ssl.SSLContext | None,
|
||||
) -> None:
|
||||
"""Initialize the SMTP service."""
|
||||
self._server = server
|
||||
self._port = port
|
||||
@@ -141,8 +144,9 @@ class MailNotificationService(BaseNotificationService):
|
||||
self.tries = 2
|
||||
self._ssl_context = ssl_context
|
||||
|
||||
def connect(self):
|
||||
def connect(self) -> smtplib.SMTP_SSL | smtplib.SMTP:
|
||||
"""Connect/authenticate to SMTP Server."""
|
||||
mail: smtplib.SMTP_SSL | smtplib.SMTP
|
||||
if self.encryption == "tls":
|
||||
mail = smtplib.SMTP_SSL(
|
||||
self._server,
|
||||
@@ -161,12 +165,12 @@ class MailNotificationService(BaseNotificationService):
|
||||
mail.login(self.username, self.password)
|
||||
return mail
|
||||
|
||||
def connection_is_valid(self):
|
||||
def connection_is_valid(self) -> bool:
|
||||
"""Check for valid config, verify connectivity."""
|
||||
server = None
|
||||
try:
|
||||
server = self.connect()
|
||||
except (smtplib.socket.gaierror, ConnectionRefusedError):
|
||||
except (socket.gaierror, ConnectionRefusedError):
|
||||
_LOGGER.exception(
|
||||
(
|
||||
"SMTP server not found or refused connection (%s:%s). Please check"
|
||||
@@ -188,7 +192,7 @@ class MailNotificationService(BaseNotificationService):
|
||||
|
||||
return True
|
||||
|
||||
def send_message(self, message="", **kwargs):
|
||||
def send_message(self, message: str, **kwargs: Any) -> None:
|
||||
"""Build and send a message to a user.
|
||||
|
||||
Will send plain text normally, with pictures as attachments if images config is
|
||||
@@ -196,6 +200,7 @@ class MailNotificationService(BaseNotificationService):
|
||||
"""
|
||||
subject = kwargs.get(ATTR_TITLE, ATTR_TITLE_DEFAULT)
|
||||
|
||||
msg: MIMEMultipart | MIMEText
|
||||
if data := kwargs.get(ATTR_DATA):
|
||||
if ATTR_HTML in data:
|
||||
msg = _build_html_msg(
|
||||
@@ -213,20 +218,24 @@ class MailNotificationService(BaseNotificationService):
|
||||
|
||||
msg["Subject"] = subject
|
||||
|
||||
if not (recipients := kwargs.get(ATTR_TARGET)):
|
||||
if targets := kwargs.get(ATTR_TARGET):
|
||||
recipients: list[str] = targets # ensured by NOTIFY_SERVICE_SCHEMA
|
||||
else:
|
||||
recipients = self.recipients
|
||||
msg["To"] = recipients if isinstance(recipients, str) else ",".join(recipients)
|
||||
msg["To"] = ",".join(recipients)
|
||||
|
||||
if self._sender_name:
|
||||
msg["From"] = f"{self._sender_name} <{self._sender}>"
|
||||
else:
|
||||
msg["From"] = self._sender
|
||||
|
||||
msg["X-Mailer"] = "Home Assistant"
|
||||
msg["Date"] = email.utils.format_datetime(dt_util.now())
|
||||
msg["Message-Id"] = email.utils.make_msgid()
|
||||
|
||||
return self._send_email(msg, recipients)
|
||||
|
||||
def _send_email(self, msg, recipients):
|
||||
def _send_email(self, msg: MIMEMultipart | MIMEText, recipients: list[str]) -> None:
|
||||
"""Send the message."""
|
||||
mail = self.connect()
|
||||
for _ in range(self.tries):
|
||||
@@ -246,13 +255,15 @@ class MailNotificationService(BaseNotificationService):
|
||||
mail.quit()
|
||||
|
||||
|
||||
def _build_text_msg(message):
|
||||
def _build_text_msg(message: str) -> MIMEText:
|
||||
"""Build plaintext email."""
|
||||
_LOGGER.debug("Building plain text email")
|
||||
return MIMEText(message)
|
||||
|
||||
|
||||
def _attach_file(hass, atch_name, content_id=""):
|
||||
def _attach_file(
|
||||
hass: HomeAssistant, atch_name: str, content_id: str | None = None
|
||||
) -> MIMEImage | MIMEApplication | None:
|
||||
"""Create a message attachment.
|
||||
|
||||
If MIMEImage is successful and content_id is passed (HTML), add images in-line.
|
||||
@@ -271,7 +282,7 @@ def _attach_file(hass, atch_name, content_id=""):
|
||||
translation_key="remote_path_not_allowed",
|
||||
translation_placeholders={
|
||||
"allow_list": allow_list,
|
||||
"file_path": file_path,
|
||||
"file_path": str(file_path),
|
||||
"file_name": file_name,
|
||||
"url": url,
|
||||
},
|
||||
@@ -282,6 +293,7 @@ def _attach_file(hass, atch_name, content_id=""):
|
||||
_LOGGER.warning("Attachment %s not found. Skipping", atch_name)
|
||||
return None
|
||||
|
||||
attachment: MIMEImage | MIMEApplication
|
||||
try:
|
||||
attachment = MIMEImage(file_bytes)
|
||||
except TypeError:
|
||||
@@ -305,7 +317,9 @@ def _attach_file(hass, atch_name, content_id=""):
|
||||
return attachment
|
||||
|
||||
|
||||
def _build_multipart_msg(hass, message, images):
|
||||
def _build_multipart_msg(
|
||||
hass: HomeAssistant, message: str, images: list[str]
|
||||
) -> MIMEMultipart:
|
||||
"""Build Multipart message with images as attachments."""
|
||||
_LOGGER.debug("Building multipart email with image attachme_build_html_msgnt(s)")
|
||||
msg = MIMEMultipart()
|
||||
@@ -320,7 +334,9 @@ def _build_multipart_msg(hass, message, images):
|
||||
return msg
|
||||
|
||||
|
||||
def _build_html_msg(hass, text, html, images):
|
||||
def _build_html_msg(
|
||||
hass: HomeAssistant, text: str, html: str, images: list[str]
|
||||
) -> MIMEMultipart:
|
||||
"""Build Multipart message with in-line images and rich HTML (UTF-8)."""
|
||||
_LOGGER.debug("Building HTML rich email")
|
||||
msg = MIMEMultipart("related")
|
||||
|
||||
@@ -36,6 +36,7 @@ from homeassistant.helpers.typing import ConfigType
|
||||
from homeassistant.loader import async_get_ssdp, bind_hass
|
||||
from homeassistant.util.logging import catch_log_exception
|
||||
|
||||
from . import websocket_api
|
||||
from .const import DOMAIN, SSDP_SCANNER, UPNP_SERVER
|
||||
from .scanner import (
|
||||
IntegrationMatchers,
|
||||
@@ -213,6 +214,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
|
||||
await scanner.async_start()
|
||||
await server.async_start()
|
||||
websocket_api.async_setup(hass)
|
||||
|
||||
return True
|
||||
|
||||
|
||||
@@ -0,0 +1,60 @@
|
||||
"""The ssdp integration websocket apis."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from dataclasses import asdict
|
||||
from typing import Any, Final
|
||||
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components import websocket_api
|
||||
from homeassistant.core import HassJob, HomeAssistant, callback
|
||||
from homeassistant.helpers.json import json_bytes
|
||||
from homeassistant.helpers.service_info.ssdp import SsdpServiceInfo
|
||||
|
||||
from .const import DOMAIN, SSDP_SCANNER
|
||||
from .scanner import Scanner, SsdpChange
|
||||
|
||||
FIELD_SSDP_ST: Final = "ssdp_st"
|
||||
FIELD_SSDP_LOCATION: Final = "ssdp_location"
|
||||
|
||||
|
||||
@callback
|
||||
def async_setup(hass: HomeAssistant) -> None:
|
||||
"""Set up the ssdp websocket API."""
|
||||
websocket_api.async_register_command(hass, ws_subscribe_discovery)
|
||||
|
||||
|
||||
@websocket_api.require_admin
|
||||
@websocket_api.websocket_command(
|
||||
{
|
||||
vol.Required("type"): "ssdp/subscribe_discovery",
|
||||
}
|
||||
)
|
||||
@websocket_api.async_response
|
||||
async def ws_subscribe_discovery(
|
||||
hass: HomeAssistant, connection: websocket_api.ActiveConnection, msg: dict[str, Any]
|
||||
) -> None:
|
||||
"""Handle subscribe advertisements websocket command."""
|
||||
scanner: Scanner = hass.data[DOMAIN][SSDP_SCANNER]
|
||||
msg_id: int = msg["id"]
|
||||
|
||||
def _async_event_message(message: dict[str, Any]) -> None:
|
||||
connection.send_message(
|
||||
json_bytes(websocket_api.event_message(msg_id, message))
|
||||
)
|
||||
|
||||
@callback
|
||||
def _async_on_data(info: SsdpServiceInfo, change: SsdpChange) -> None:
|
||||
if change is not SsdpChange.BYEBYE:
|
||||
_async_event_message({"add": [asdict(info)]})
|
||||
return
|
||||
remove_msg = {
|
||||
FIELD_SSDP_ST: info.ssdp_st,
|
||||
FIELD_SSDP_LOCATION: info.ssdp_location,
|
||||
}
|
||||
_async_event_message({"remove": [remove_msg]})
|
||||
|
||||
job = HassJob(_async_on_data)
|
||||
connection.send_message(json_bytes(websocket_api.result_message(msg_id)))
|
||||
connection.subscriptions[msg_id] = await scanner.async_register_callback(job, None)
|
||||
@@ -1,22 +1,29 @@
|
||||
"""The component for STIEBEL ELTRON heat pumps with ISGWeb Modbus module."""
|
||||
|
||||
from datetime import timedelta
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
from pymodbus.client import ModbusTcpClient
|
||||
from pystiebeleltron import pystiebeleltron
|
||||
from pystiebeleltron.pystiebeleltron import StiebelEltronAPI
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.const import CONF_NAME, DEVICE_DEFAULT_NAME, Platform
|
||||
from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry
|
||||
from homeassistant.const import (
|
||||
CONF_HOST,
|
||||
CONF_NAME,
|
||||
CONF_PORT,
|
||||
DEVICE_DEFAULT_NAME,
|
||||
Platform,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers import config_validation as cv, discovery
|
||||
from homeassistant.data_entry_flow import FlowResultType
|
||||
from homeassistant.exceptions import ConfigEntryNotReady
|
||||
from homeassistant.helpers import config_validation as cv, issue_registry as ir
|
||||
from homeassistant.helpers.typing import ConfigType
|
||||
from homeassistant.util import Throttle
|
||||
|
||||
CONF_HUB = "hub"
|
||||
DEFAULT_HUB = "modbus_hub"
|
||||
from .const import CONF_HUB, DEFAULT_HUB, DOMAIN
|
||||
|
||||
MODBUS_DOMAIN = "modbus"
|
||||
DOMAIN = "stiebel_eltron"
|
||||
|
||||
CONFIG_SCHEMA = vol.Schema(
|
||||
{
|
||||
@@ -31,39 +38,109 @@ CONFIG_SCHEMA = vol.Schema(
|
||||
)
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
MIN_TIME_BETWEEN_UPDATES = timedelta(seconds=30)
|
||||
_PLATFORMS: list[Platform] = [Platform.CLIMATE]
|
||||
|
||||
|
||||
def setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
"""Set up the STIEBEL ELTRON unit.
|
||||
async def _async_import(hass: HomeAssistant, config: ConfigType) -> None:
|
||||
"""Set up the STIEBEL ELTRON component."""
|
||||
hub_config: dict[str, Any] | None = None
|
||||
if MODBUS_DOMAIN in config:
|
||||
for hub in config[MODBUS_DOMAIN]:
|
||||
if hub[CONF_NAME] == config[DOMAIN][CONF_HUB]:
|
||||
hub_config = hub
|
||||
break
|
||||
if hub_config is None:
|
||||
ir.async_create_issue(
|
||||
hass,
|
||||
DOMAIN,
|
||||
"deprecated_yaml_import_issue_missing_hub",
|
||||
breaks_in_ha_version="2025.11.0",
|
||||
is_fixable=False,
|
||||
issue_domain=DOMAIN,
|
||||
severity=ir.IssueSeverity.WARNING,
|
||||
translation_key="deprecated_yaml_import_issue_missing_hub",
|
||||
translation_placeholders={
|
||||
"domain": DOMAIN,
|
||||
"integration_title": "Stiebel Eltron",
|
||||
},
|
||||
)
|
||||
return
|
||||
result = await hass.config_entries.flow.async_init(
|
||||
DOMAIN,
|
||||
context={"source": SOURCE_IMPORT},
|
||||
data={
|
||||
CONF_HOST: hub_config[CONF_HOST],
|
||||
CONF_PORT: hub_config[CONF_PORT],
|
||||
CONF_NAME: config[DOMAIN][CONF_NAME],
|
||||
},
|
||||
)
|
||||
if (
|
||||
result.get("type") is FlowResultType.ABORT
|
||||
and result.get("reason") != "already_configured"
|
||||
):
|
||||
ir.async_create_issue(
|
||||
hass,
|
||||
DOMAIN,
|
||||
f"deprecated_yaml_import_issue_{result['reason']}",
|
||||
breaks_in_ha_version="2025.11.0",
|
||||
is_fixable=False,
|
||||
issue_domain=DOMAIN,
|
||||
severity=ir.IssueSeverity.WARNING,
|
||||
translation_key=f"deprecated_yaml_import_issue_{result['reason']}",
|
||||
translation_placeholders={
|
||||
"domain": DOMAIN,
|
||||
"integration_title": "Stiebel Eltron",
|
||||
},
|
||||
)
|
||||
return
|
||||
|
||||
Will automatically load climate platform.
|
||||
"""
|
||||
name = config[DOMAIN][CONF_NAME]
|
||||
modbus_client = hass.data[MODBUS_DOMAIN][config[DOMAIN][CONF_HUB]]
|
||||
ir.async_create_issue(
|
||||
hass,
|
||||
DOMAIN,
|
||||
"deprecated_yaml",
|
||||
breaks_in_ha_version="2025.9.0",
|
||||
is_fixable=False,
|
||||
issue_domain=DOMAIN,
|
||||
severity=ir.IssueSeverity.WARNING,
|
||||
translation_key="deprecated_yaml",
|
||||
translation_placeholders={
|
||||
"domain": DOMAIN,
|
||||
"integration_title": "Stiebel Eltron",
|
||||
},
|
||||
)
|
||||
|
||||
hass.data[DOMAIN] = {
|
||||
"name": name,
|
||||
"ste_data": StiebelEltronData(name, modbus_client),
|
||||
}
|
||||
|
||||
discovery.load_platform(hass, Platform.CLIMATE, DOMAIN, {}, config)
|
||||
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
"""Set up the STIEBEL ELTRON component."""
|
||||
if DOMAIN in config:
|
||||
hass.async_create_task(_async_import(hass, config))
|
||||
return True
|
||||
|
||||
|
||||
class StiebelEltronData:
|
||||
"""Get the latest data and update the states."""
|
||||
type StiebelEltronConfigEntry = ConfigEntry[StiebelEltronAPI]
|
||||
|
||||
def __init__(self, name: str, modbus_client: ModbusTcpClient) -> None:
|
||||
"""Init the STIEBEL ELTRON data object."""
|
||||
|
||||
self.api = pystiebeleltron.StiebelEltronAPI(modbus_client, 1)
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant, entry: StiebelEltronConfigEntry
|
||||
) -> bool:
|
||||
"""Set up STIEBEL ELTRON from a config entry."""
|
||||
client = StiebelEltronAPI(
|
||||
ModbusTcpClient(entry.data[CONF_HOST], port=entry.data[CONF_PORT]), 1
|
||||
)
|
||||
|
||||
@Throttle(MIN_TIME_BETWEEN_UPDATES)
|
||||
def update(self) -> None:
|
||||
"""Update unit data."""
|
||||
if not self.api.update():
|
||||
_LOGGER.warning("Modbus read failed")
|
||||
else:
|
||||
_LOGGER.debug("Data updated successfully")
|
||||
success = await hass.async_add_executor_job(client.update)
|
||||
if not success:
|
||||
raise ConfigEntryNotReady("Could not connect to device")
|
||||
|
||||
entry.runtime_data = client
|
||||
|
||||
await hass.config_entries.async_forward_entry_setups(entry, _PLATFORMS)
|
||||
|
||||
return True
|
||||
|
||||
|
||||
async def async_unload_entry(
|
||||
hass: HomeAssistant, entry: StiebelEltronConfigEntry
|
||||
) -> bool:
|
||||
"""Unload a config entry."""
|
||||
return await hass.config_entries.async_unload_platforms(entry, _PLATFORMS)
|
||||
|
||||
@@ -5,6 +5,8 @@ from __future__ import annotations
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
from pystiebeleltron.pystiebeleltron import StiebelEltronAPI
|
||||
|
||||
from homeassistant.components.climate import (
|
||||
PRESET_ECO,
|
||||
ClimateEntity,
|
||||
@@ -13,10 +15,9 @@ from homeassistant.components.climate import (
|
||||
)
|
||||
from homeassistant.const import ATTR_TEMPERATURE, UnitOfTemperature
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from . import DOMAIN as STE_DOMAIN, StiebelEltronData
|
||||
from . import StiebelEltronConfigEntry
|
||||
|
||||
DEPENDENCIES = ["stiebel_eltron"]
|
||||
|
||||
@@ -56,17 +57,14 @@ HA_TO_STE_HVAC = {
|
||||
HA_TO_STE_PRESET = {k: i for i, k in STE_TO_HA_PRESET.items()}
|
||||
|
||||
|
||||
def setup_platform(
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
config: ConfigType,
|
||||
add_entities: AddEntitiesCallback,
|
||||
discovery_info: DiscoveryInfoType | None = None,
|
||||
entry: StiebelEltronConfigEntry,
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up the StiebelEltron platform."""
|
||||
name = hass.data[STE_DOMAIN]["name"]
|
||||
ste_data = hass.data[STE_DOMAIN]["ste_data"]
|
||||
"""Set up STIEBEL ELTRON climate platform."""
|
||||
|
||||
add_entities([StiebelEltron(name, ste_data)], True)
|
||||
async_add_entities([StiebelEltron(entry.title, entry.runtime_data)], True)
|
||||
|
||||
|
||||
class StiebelEltron(ClimateEntity):
|
||||
@@ -81,7 +79,7 @@ class StiebelEltron(ClimateEntity):
|
||||
)
|
||||
_attr_temperature_unit = UnitOfTemperature.CELSIUS
|
||||
|
||||
def __init__(self, name: str, ste_data: StiebelEltronData) -> None:
|
||||
def __init__(self, name: str, client: StiebelEltronAPI) -> None:
|
||||
"""Initialize the unit."""
|
||||
self._name = name
|
||||
self._target_temperature: float | int | None = None
|
||||
@@ -89,19 +87,17 @@ class StiebelEltron(ClimateEntity):
|
||||
self._current_humidity: float | int | None = None
|
||||
self._operation: str | None = None
|
||||
self._filter_alarm: bool | None = None
|
||||
self._force_update: bool = False
|
||||
self._ste_data = ste_data
|
||||
self._client = client
|
||||
|
||||
def update(self) -> None:
|
||||
"""Update unit attributes."""
|
||||
self._ste_data.update(no_throttle=self._force_update)
|
||||
self._force_update = False
|
||||
self._client.update()
|
||||
|
||||
self._target_temperature = self._ste_data.api.get_target_temp()
|
||||
self._current_temperature = self._ste_data.api.get_current_temp()
|
||||
self._current_humidity = self._ste_data.api.get_current_humidity()
|
||||
self._filter_alarm = self._ste_data.api.get_filter_alarm_status()
|
||||
self._operation = self._ste_data.api.get_operation()
|
||||
self._target_temperature = self._client.get_target_temp()
|
||||
self._current_temperature = self._client.get_current_temp()
|
||||
self._current_humidity = self._client.get_current_humidity()
|
||||
self._filter_alarm = self._client.get_filter_alarm_status()
|
||||
self._operation = self._client.get_operation()
|
||||
|
||||
_LOGGER.debug(
|
||||
"Update %s, current temp: %s", self._name, self._current_temperature
|
||||
@@ -170,20 +166,17 @@ class StiebelEltron(ClimateEntity):
|
||||
return
|
||||
new_mode = HA_TO_STE_HVAC.get(hvac_mode)
|
||||
_LOGGER.debug("set_hvac_mode: %s -> %s", self._operation, new_mode)
|
||||
self._ste_data.api.set_operation(new_mode)
|
||||
self._force_update = True
|
||||
self._client.set_operation(new_mode)
|
||||
|
||||
def set_temperature(self, **kwargs: Any) -> None:
|
||||
"""Set new target temperature."""
|
||||
target_temperature = kwargs.get(ATTR_TEMPERATURE)
|
||||
if target_temperature is not None:
|
||||
_LOGGER.debug("set_temperature: %s", target_temperature)
|
||||
self._ste_data.api.set_target_temp(target_temperature)
|
||||
self._force_update = True
|
||||
self._client.set_target_temp(target_temperature)
|
||||
|
||||
def set_preset_mode(self, preset_mode: str) -> None:
|
||||
"""Set new preset mode."""
|
||||
new_mode = HA_TO_STE_PRESET.get(preset_mode)
|
||||
_LOGGER.debug("set_hvac_mode: %s -> %s", self._operation, new_mode)
|
||||
self._ste_data.api.set_operation(new_mode)
|
||||
self._force_update = True
|
||||
self._client.set_operation(new_mode)
|
||||
|
||||
@@ -0,0 +1,82 @@
|
||||
"""Config flow for the STIEBEL ELTRON integration."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
from pymodbus.client import ModbusTcpClient
|
||||
from pystiebeleltron.pystiebeleltron import StiebelEltronAPI
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
|
||||
from homeassistant.const import CONF_HOST, CONF_NAME, CONF_PORT
|
||||
|
||||
from .const import DEFAULT_PORT, DOMAIN
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class StiebelEltronConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
"""Handle a config flow for STIEBEL ELTRON."""
|
||||
|
||||
VERSION = 1
|
||||
|
||||
async def async_step_user(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle the initial step."""
|
||||
errors: dict[str, str] = {}
|
||||
if user_input is not None:
|
||||
self._async_abort_entries_match(
|
||||
{CONF_HOST: user_input[CONF_HOST], CONF_PORT: user_input[CONF_PORT]}
|
||||
)
|
||||
client = StiebelEltronAPI(
|
||||
ModbusTcpClient(user_input[CONF_HOST], port=user_input[CONF_PORT]), 1
|
||||
)
|
||||
try:
|
||||
success = await self.hass.async_add_executor_job(client.update)
|
||||
except Exception:
|
||||
_LOGGER.exception("Unexpected exception")
|
||||
errors["base"] = "unknown"
|
||||
else:
|
||||
if not success:
|
||||
errors["base"] = "cannot_connect"
|
||||
if not errors:
|
||||
return self.async_create_entry(title="Stiebel Eltron", data=user_input)
|
||||
|
||||
return self.async_show_form(
|
||||
step_id="user",
|
||||
data_schema=vol.Schema(
|
||||
{
|
||||
vol.Required(CONF_HOST): str,
|
||||
vol.Required(CONF_PORT, default=DEFAULT_PORT): int,
|
||||
}
|
||||
),
|
||||
errors=errors,
|
||||
)
|
||||
|
||||
async def async_step_import(self, user_input: dict[str, Any]) -> ConfigFlowResult:
|
||||
"""Handle import."""
|
||||
self._async_abort_entries_match(
|
||||
{CONF_HOST: user_input[CONF_HOST], CONF_PORT: user_input[CONF_PORT]}
|
||||
)
|
||||
client = StiebelEltronAPI(
|
||||
ModbusTcpClient(user_input[CONF_HOST], port=user_input[CONF_PORT]), 1
|
||||
)
|
||||
try:
|
||||
success = await self.hass.async_add_executor_job(client.update)
|
||||
except Exception:
|
||||
_LOGGER.exception("Unexpected exception")
|
||||
return self.async_abort(reason="unknown")
|
||||
|
||||
if not success:
|
||||
return self.async_abort(reason="cannot_connect")
|
||||
|
||||
return self.async_create_entry(
|
||||
title=user_input[CONF_NAME],
|
||||
data={
|
||||
CONF_HOST: user_input[CONF_HOST],
|
||||
CONF_PORT: user_input[CONF_PORT],
|
||||
},
|
||||
)
|
||||
@@ -0,0 +1,8 @@
|
||||
"""Constants for the STIEBEL ELTRON integration."""
|
||||
|
||||
DOMAIN = "stiebel_eltron"
|
||||
|
||||
CONF_HUB = "hub"
|
||||
|
||||
DEFAULT_HUB = "modbus_hub"
|
||||
DEFAULT_PORT = 502
|
||||
@@ -1,11 +1,10 @@
|
||||
{
|
||||
"domain": "stiebel_eltron",
|
||||
"name": "STIEBEL ELTRON",
|
||||
"codeowners": ["@fucm"],
|
||||
"dependencies": ["modbus"],
|
||||
"codeowners": ["@fucm", "@ThyMYthOS"],
|
||||
"config_flow": true,
|
||||
"documentation": "https://www.home-assistant.io/integrations/stiebel_eltron",
|
||||
"iot_class": "local_polling",
|
||||
"loggers": ["pymodbus", "pystiebeleltron"],
|
||||
"quality_scale": "legacy",
|
||||
"requirements": ["pystiebeleltron==0.0.1.dev2"]
|
||||
"requirements": ["pystiebeleltron==0.1.0"]
|
||||
}
|
||||
|
||||
@@ -0,0 +1,43 @@
|
||||
{
|
||||
"config": {
|
||||
"step": {
|
||||
"user": {
|
||||
"data": {
|
||||
"host": "[%key:common::config_flow::data::host%]",
|
||||
"port": "[%key:common::config_flow::data::port%]"
|
||||
},
|
||||
"data_description": {
|
||||
"host": "The hostname or IP address of your Stiebel Eltron device.",
|
||||
"port": "The port of your Stiebel Eltron device."
|
||||
}
|
||||
}
|
||||
},
|
||||
"error": {
|
||||
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
|
||||
"unknown": "[%key:common::config_flow::error::unknown%]"
|
||||
},
|
||||
"abort": {
|
||||
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
|
||||
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
|
||||
"unknown": "[%key:common::config_flow::error::unknown%]"
|
||||
}
|
||||
},
|
||||
"issues": {
|
||||
"deprecated_yaml": {
|
||||
"title": "The {integration_title} YAML configuration is being removed",
|
||||
"description": "Configuring {integration_title} using YAML is being removed.\n\nYour existing YAML configuration has been imported into the UI automatically.\n\nRemove both the `{domain}` and the relevant Modbus configuration from your configuration.yaml file and restart Home Assistant to fix this issue."
|
||||
},
|
||||
"deprecated_yaml_import_issue_cannot_connect": {
|
||||
"title": "YAML import failed due to a connection error",
|
||||
"description": "Configuring {integration_title} using YAML is being removed but there was a connect error while importing your existing configuration.\nSetup will not proceed.\n\nVerify that your {integration_title} is operating correctly and restart Home Assistant to attempt the import again.\n\nAlternatively, you may remove the `{domain}` configuration from your configuration.yaml entirely, restart Home Assistant, and add the {integration_title} integration manually."
|
||||
},
|
||||
"deprecated_yaml_import_issue_missing_hub": {
|
||||
"title": "YAML import failed due to incomplete config",
|
||||
"description": "Configuring {integration_title} using YAML is being removed but the configuration was not complete, thus we could not import your configuration.\nSetup will not proceed.\n\nVerify that your {integration_title} is operating correctly and restart Home Assistant to attempt the import again.\n\nAlternatively, you may remove the `{domain}` configuration from your configuration.yaml entirely, restart Home Assistant, and add the {integration_title} integration manually."
|
||||
},
|
||||
"deprecated_yaml_import_issue_unknown": {
|
||||
"title": "YAML import failed due to an unknown error",
|
||||
"description": "Configuring {integration_title} using YAML is being removed but there was an unknown error while importing your existing configuration.\nSetup will not proceed.\n\nVerify that your {integration_title} is operating correctly and restart Home Assistant to attempt the import again.\n\nAlternatively, you may remove the `{domain}` configuration from your configuration.yaml entirely, restart Home Assistant, and add the {integration_title} integration manually."
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -91,6 +91,7 @@ class SwitchbotDataUpdateCoordinator(ActiveBluetoothDataUpdateCoordinator[None])
|
||||
"""Handle the device going unavailable."""
|
||||
super()._async_handle_unavailable(service_info)
|
||||
self._was_unavailable = True
|
||||
_LOGGER.info("Device %s is unavailable", self.device_name)
|
||||
|
||||
@callback
|
||||
def _async_handle_bluetooth_event(
|
||||
@@ -114,6 +115,7 @@ class SwitchbotDataUpdateCoordinator(ActiveBluetoothDataUpdateCoordinator[None])
|
||||
if not self.device.advertisement_changed(adv) and not self._was_unavailable:
|
||||
return
|
||||
self._was_unavailable = False
|
||||
_LOGGER.info("Device %s is online", self.device_name)
|
||||
self.device.update_from_advertisement(adv)
|
||||
super()._async_handle_bluetooth_event(service_info, change)
|
||||
|
||||
|
||||
@@ -32,7 +32,7 @@ rules:
|
||||
docs-installation-parameters: done
|
||||
entity-unavailable: done
|
||||
integration-owner: done
|
||||
log-when-unavailable: todo
|
||||
log-when-unavailable: done
|
||||
parallel-updates: done
|
||||
reauthentication-flow:
|
||||
status: exempt
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
"""Config flow for Samsung SyncThru."""
|
||||
|
||||
import re
|
||||
from typing import Any
|
||||
from typing import TYPE_CHECKING, Any
|
||||
from urllib.parse import urlparse
|
||||
|
||||
from pysyncthru import ConnectionMode, SyncThru, SyncThruAPINotSupported
|
||||
@@ -44,12 +44,14 @@ class SyncThruConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
await self.async_set_unique_id(discovery_info.upnp[ATTR_UPNP_UDN])
|
||||
self._abort_if_unique_id_configured()
|
||||
|
||||
self.url = url_normalize(
|
||||
discovery_info.upnp.get(
|
||||
ATTR_UPNP_PRESENTATION_URL,
|
||||
f"http://{urlparse(discovery_info.ssdp_location or '').hostname}/",
|
||||
)
|
||||
norm_url = url_normalize(
|
||||
discovery_info.upnp.get(ATTR_UPNP_PRESENTATION_URL)
|
||||
or f"http://{urlparse(discovery_info.ssdp_location or '').hostname}/"
|
||||
)
|
||||
if TYPE_CHECKING:
|
||||
# url_normalize only returns None if passed None, and we don't do that
|
||||
assert norm_url is not None
|
||||
self.url = norm_url
|
||||
|
||||
for existing_entry in (
|
||||
x for x in self._async_current_entries() if x.data[CONF_URL] == self.url
|
||||
|
||||
@@ -6,7 +6,7 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/syncthru",
|
||||
"iot_class": "local_polling",
|
||||
"loggers": ["pysyncthru"],
|
||||
"requirements": ["PySyncThru==0.8.0", "url-normalize==2.2.0"],
|
||||
"requirements": ["PySyncThru==0.8.0", "url-normalize==2.2.1"],
|
||||
"ssdp": [
|
||||
{
|
||||
"deviceType": "urn:schemas-upnp-org:device:Printer:1",
|
||||
|
||||
@@ -9,6 +9,7 @@ import logging
|
||||
|
||||
from awesomeversion import AwesomeVersion
|
||||
from synology_dsm import SynologyDSM
|
||||
from synology_dsm.api.core.external_usb import SynoCoreExternalUSB
|
||||
from synology_dsm.api.core.security import SynoCoreSecurity
|
||||
from synology_dsm.api.core.system import SynoCoreSystem
|
||||
from synology_dsm.api.core.upgrade import SynoCoreUpgrade
|
||||
@@ -78,6 +79,7 @@ class SynoApi:
|
||||
self.system: SynoCoreSystem | None = None
|
||||
self.upgrade: SynoCoreUpgrade | None = None
|
||||
self.utilisation: SynoCoreUtilization | None = None
|
||||
self.external_usb: SynoCoreExternalUSB | None = None
|
||||
|
||||
# Should we fetch them
|
||||
self._fetching_entities: dict[str, set[str]] = {}
|
||||
@@ -90,6 +92,7 @@ class SynoApi:
|
||||
self._with_system = True
|
||||
self._with_upgrade = True
|
||||
self._with_utilisation = True
|
||||
self._with_external_usb = True
|
||||
|
||||
self._login_future: asyncio.Future[None] | None = None
|
||||
|
||||
@@ -261,6 +264,9 @@ class SynoApi:
|
||||
self._with_information = bool(
|
||||
self._fetching_entities.get(SynoDSMInformation.API_KEY)
|
||||
)
|
||||
self._with_external_usb = bool(
|
||||
self._fetching_entities.get(SynoCoreExternalUSB.API_KEY)
|
||||
)
|
||||
|
||||
# Reset not used API, information is not reset since it's used in device_info
|
||||
if not self._with_security:
|
||||
@@ -322,6 +328,15 @@ class SynoApi:
|
||||
self.dsm.reset(self.utilisation)
|
||||
self.utilisation = None
|
||||
|
||||
if not self._with_external_usb:
|
||||
LOGGER.debug(
|
||||
"Disable external usb api from being updated for '%s'",
|
||||
self._entry.unique_id,
|
||||
)
|
||||
if self.external_usb:
|
||||
self.dsm.reset(self.external_usb)
|
||||
self.external_usb = None
|
||||
|
||||
async def _fetch_device_configuration(self) -> None:
|
||||
"""Fetch initial device config."""
|
||||
self.network = self.dsm.network
|
||||
@@ -366,6 +381,12 @@ class SynoApi:
|
||||
)
|
||||
self.surveillance_station = self.dsm.surveillance_station
|
||||
|
||||
if self._with_external_usb:
|
||||
LOGGER.debug(
|
||||
"Enable external usb api updates for '%s'", self._entry.unique_id
|
||||
)
|
||||
self.external_usb = self.dsm.external_usb
|
||||
|
||||
async def _syno_api_executer(self, api_call: Callable) -> None:
|
||||
"""Synology api call wrapper."""
|
||||
try:
|
||||
|
||||
@@ -32,6 +32,7 @@ async def async_get_config_entry_diagnostics(
|
||||
"uptime": dsm_info.uptime,
|
||||
"temperature": dsm_info.temperature,
|
||||
},
|
||||
"external_usb": {"devices": {}, "partitions": {}},
|
||||
"network": {"interfaces": {}},
|
||||
"storage": {"disks": {}, "volumes": {}},
|
||||
"surveillance_station": {"cameras": {}, "camera_diagnostics": {}},
|
||||
@@ -43,6 +44,27 @@ async def async_get_config_entry_diagnostics(
|
||||
},
|
||||
}
|
||||
|
||||
if syno_api.external_usb is not None:
|
||||
for device in syno_api.external_usb.get_devices.values():
|
||||
if device is not None:
|
||||
diag_data["external_usb"]["devices"][device.device_id] = {
|
||||
"name": device.device_name,
|
||||
"manufacturer": device.device_manufacturer,
|
||||
"model": device.device_product_name,
|
||||
"type": device.device_type,
|
||||
"status": device.device_status,
|
||||
"size_total": device.device_size_total(False),
|
||||
}
|
||||
for partition in device.device_partitions.values():
|
||||
if partition is not None:
|
||||
diag_data["external_usb"]["partitions"][partition.name_id] = {
|
||||
"name": partition.partition_title,
|
||||
"filesystem": partition.filesystem,
|
||||
"share_name": partition.share_name,
|
||||
"size_used": partition.partition_size_used(False),
|
||||
"size_total": partition.partition_size_total(False),
|
||||
}
|
||||
|
||||
if syno_api.network is not None:
|
||||
for intf in syno_api.network.interfaces:
|
||||
diag_data["network"]["interfaces"][intf["id"]] = {
|
||||
|
||||
@@ -93,6 +93,7 @@ class SynologyDSMDeviceEntity(
|
||||
storage = api.storage
|
||||
information = api.information
|
||||
network = api.network
|
||||
external_usb = api.external_usb
|
||||
assert information is not None
|
||||
assert storage is not None
|
||||
assert network is not None
|
||||
@@ -121,6 +122,26 @@ class SynologyDSMDeviceEntity(
|
||||
self._device_model = disk["model"].strip()
|
||||
self._device_firmware = disk["firm"]
|
||||
self._device_type = disk["diskType"]
|
||||
elif "device" in description.key:
|
||||
assert self._device_id is not None
|
||||
assert external_usb is not None
|
||||
for device in external_usb.get_devices.values():
|
||||
if device.device_name == self._device_id:
|
||||
self._device_name = device.device_name
|
||||
self._device_manufacturer = device.device_manufacturer
|
||||
self._device_model = device.device_product_name
|
||||
self._device_type = device.device_type
|
||||
break
|
||||
elif "partition" in description.key:
|
||||
assert self._device_id is not None
|
||||
assert external_usb is not None
|
||||
for device in external_usb.get_devices.values():
|
||||
for partition in device.device_partitions.values():
|
||||
if partition.partition_title == self._device_id:
|
||||
self._device_name = partition.partition_title
|
||||
self._device_manufacturer = "Synology"
|
||||
self._device_model = partition.filesystem
|
||||
break
|
||||
|
||||
self._attr_unique_id += f"_{self._device_id}"
|
||||
self._attr_device_info = DeviceInfo(
|
||||
|
||||
@@ -22,6 +22,12 @@
|
||||
"cpu_15min_load": {
|
||||
"default": "mdi:chip"
|
||||
},
|
||||
"device_size_total": {
|
||||
"default": "mdi:chart-pie"
|
||||
},
|
||||
"device_status": {
|
||||
"default": "mdi:checkbox-marked-circle-outline"
|
||||
},
|
||||
"memory_real_usage": {
|
||||
"default": "mdi:memory"
|
||||
},
|
||||
@@ -49,6 +55,15 @@
|
||||
"network_down": {
|
||||
"default": "mdi:download"
|
||||
},
|
||||
"partition_percentage_used": {
|
||||
"default": "mdi:chart-pie"
|
||||
},
|
||||
"partition_size_total": {
|
||||
"default": "mdi:chart-pie"
|
||||
},
|
||||
"partition_size_used": {
|
||||
"default": "mdi:chart-pie"
|
||||
},
|
||||
"volume_status": {
|
||||
"default": "mdi:checkbox-marked-circle-outline",
|
||||
"state": {
|
||||
|
||||
@@ -6,6 +6,7 @@ from dataclasses import dataclass
|
||||
from datetime import datetime, timedelta
|
||||
from typing import cast
|
||||
|
||||
from synology_dsm.api.core.external_usb import SynoCoreExternalUSB
|
||||
from synology_dsm.api.core.utilization import SynoCoreUtilization
|
||||
from synology_dsm.api.dsm.information import SynoDSMInformation
|
||||
from synology_dsm.api.storage.storage import SynoStorage
|
||||
@@ -17,6 +18,7 @@ from homeassistant.components.sensor import (
|
||||
SensorStateClass,
|
||||
)
|
||||
from homeassistant.const import (
|
||||
CONF_DEVICES,
|
||||
CONF_DISKS,
|
||||
PERCENTAGE,
|
||||
EntityCategory,
|
||||
@@ -261,6 +263,53 @@ STORAGE_DISK_SENSORS: tuple[SynologyDSMSensorEntityDescription, ...] = (
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
),
|
||||
)
|
||||
EXTERNAL_USB_DISK_SENSORS: tuple[SynologyDSMSensorEntityDescription, ...] = (
|
||||
SynologyDSMSensorEntityDescription(
|
||||
api_key=SynoCoreExternalUSB.API_KEY,
|
||||
key="device_status",
|
||||
translation_key="device_status",
|
||||
),
|
||||
SynologyDSMSensorEntityDescription(
|
||||
api_key=SynoCoreExternalUSB.API_KEY,
|
||||
key="device_size_total",
|
||||
translation_key="device_size_total",
|
||||
native_unit_of_measurement=UnitOfInformation.BYTES,
|
||||
suggested_unit_of_measurement=UnitOfInformation.GIBIBYTES,
|
||||
suggested_display_precision=2,
|
||||
device_class=SensorDeviceClass.DATA_SIZE,
|
||||
entity_registry_enabled_default=False,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
),
|
||||
)
|
||||
EXTERNAL_USB_PARTITION_SENSORS: tuple[SynologyDSMSensorEntityDescription, ...] = (
|
||||
SynologyDSMSensorEntityDescription(
|
||||
api_key=SynoCoreExternalUSB.API_KEY,
|
||||
key="partition_size_total",
|
||||
translation_key="partition_size_total",
|
||||
native_unit_of_measurement=UnitOfInformation.BYTES,
|
||||
suggested_unit_of_measurement=UnitOfInformation.GIBIBYTES,
|
||||
suggested_display_precision=2,
|
||||
device_class=SensorDeviceClass.DATA_SIZE,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
),
|
||||
SynologyDSMSensorEntityDescription(
|
||||
api_key=SynoCoreExternalUSB.API_KEY,
|
||||
key="partition_size_used",
|
||||
translation_key="partition_size_used",
|
||||
native_unit_of_measurement=UnitOfInformation.BYTES,
|
||||
suggested_unit_of_measurement=UnitOfInformation.GIBIBYTES,
|
||||
suggested_display_precision=2,
|
||||
device_class=SensorDeviceClass.DATA_SIZE,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
),
|
||||
SynologyDSMSensorEntityDescription(
|
||||
api_key=SynoCoreExternalUSB.API_KEY,
|
||||
key="partition_percentage_used",
|
||||
translation_key="partition_percentage_used",
|
||||
native_unit_of_measurement=PERCENTAGE,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
),
|
||||
)
|
||||
|
||||
INFORMATION_SENSORS: tuple[SynologyDSMSensorEntityDescription, ...] = (
|
||||
SynologyDSMSensorEntityDescription(
|
||||
@@ -294,8 +343,14 @@ async def async_setup_entry(
|
||||
coordinator = data.coordinator_central
|
||||
storage = api.storage
|
||||
assert storage is not None
|
||||
external_usb = api.external_usb
|
||||
|
||||
entities: list[SynoDSMUtilSensor | SynoDSMStorageSensor | SynoDSMInfoSensor] = [
|
||||
entities: list[
|
||||
SynoDSMUtilSensor
|
||||
| SynoDSMStorageSensor
|
||||
| SynoDSMInfoSensor
|
||||
| SynoDSMExternalUSBSensor
|
||||
] = [
|
||||
SynoDSMUtilSensor(api, coordinator, description)
|
||||
for description in UTILISATION_SENSORS
|
||||
]
|
||||
@@ -320,6 +375,32 @@ async def async_setup_entry(
|
||||
]
|
||||
)
|
||||
|
||||
# Handle all external usb
|
||||
if external_usb is not None and external_usb.get_devices:
|
||||
entities.extend(
|
||||
[
|
||||
SynoDSMExternalUSBSensor(
|
||||
api, coordinator, description, device.device_name
|
||||
)
|
||||
for device in entry.data.get(
|
||||
CONF_DEVICES, external_usb.get_devices.values()
|
||||
)
|
||||
for description in EXTERNAL_USB_DISK_SENSORS
|
||||
]
|
||||
)
|
||||
entities.extend(
|
||||
[
|
||||
SynoDSMExternalUSBSensor(
|
||||
api, coordinator, description, partition.partition_title
|
||||
)
|
||||
for device in entry.data.get(
|
||||
CONF_DEVICES, external_usb.get_devices.values()
|
||||
)
|
||||
for partition in device.device_partitions.values()
|
||||
for description in EXTERNAL_USB_PARTITION_SENSORS
|
||||
]
|
||||
)
|
||||
|
||||
entities.extend(
|
||||
[
|
||||
SynoDSMInfoSensor(api, coordinator, description)
|
||||
@@ -396,6 +477,45 @@ class SynoDSMStorageSensor(SynologyDSMDeviceEntity, SynoDSMSensor):
|
||||
)
|
||||
|
||||
|
||||
class SynoDSMExternalUSBSensor(SynologyDSMDeviceEntity, SynoDSMSensor):
|
||||
"""Representation a Synology Storage sensor."""
|
||||
|
||||
entity_description: SynologyDSMSensorEntityDescription
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
api: SynoApi,
|
||||
coordinator: SynologyDSMCentralUpdateCoordinator,
|
||||
description: SynologyDSMSensorEntityDescription,
|
||||
device_id: str | None = None,
|
||||
) -> None:
|
||||
"""Initialize the Synology DSM external usb sensor entity."""
|
||||
super().__init__(api, coordinator, description, device_id)
|
||||
|
||||
@property
|
||||
def native_value(self) -> StateType:
|
||||
"""Return the state."""
|
||||
external_usb = self._api.external_usb
|
||||
assert external_usb is not None
|
||||
if "device" in self.entity_description.key:
|
||||
for device in external_usb.get_devices.values():
|
||||
if device.device_name == self._device_id:
|
||||
attr = getattr(device, self.entity_description.key)
|
||||
break
|
||||
elif "partition" in self.entity_description.key:
|
||||
for device in external_usb.get_devices.values():
|
||||
for partition in device.device_partitions.values():
|
||||
if partition.partition_title == self._device_id:
|
||||
attr = getattr(partition, self.entity_description.key)
|
||||
break
|
||||
if callable(attr):
|
||||
attr = attr()
|
||||
if attr is None:
|
||||
return None
|
||||
|
||||
return attr # type: ignore[no-any-return]
|
||||
|
||||
|
||||
class SynoDSMInfoSensor(SynoDSMSensor):
|
||||
"""Representation a Synology information sensor."""
|
||||
|
||||
|
||||
@@ -113,6 +113,12 @@
|
||||
"cpu_user_load": {
|
||||
"name": "CPU utilization (user)"
|
||||
},
|
||||
"device_size_total": {
|
||||
"name": "Device size"
|
||||
},
|
||||
"device_status": {
|
||||
"name": "Status"
|
||||
},
|
||||
"disk_smart_status": {
|
||||
"name": "Status (smart)"
|
||||
},
|
||||
@@ -149,6 +155,15 @@
|
||||
"network_up": {
|
||||
"name": "Upload throughput"
|
||||
},
|
||||
"partition_percentage_used": {
|
||||
"name": "Partition used"
|
||||
},
|
||||
"partition_size_total": {
|
||||
"name": "Partition size"
|
||||
},
|
||||
"partition_size_used": {
|
||||
"name": "Partition used space"
|
||||
},
|
||||
"temperature": {
|
||||
"name": "[%key:component::sensor::entity_component::temperature::name%]"
|
||||
},
|
||||
|
||||
@@ -12,6 +12,7 @@ from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import (
|
||||
CONF_DEVICE_ID,
|
||||
CONF_NAME,
|
||||
CONF_TRIGGERS,
|
||||
CONF_UNIQUE_ID,
|
||||
SERVICE_RELOAD,
|
||||
)
|
||||
@@ -27,7 +28,7 @@ from homeassistant.helpers.typing import ConfigType
|
||||
from homeassistant.loader import async_get_integration
|
||||
from homeassistant.util.hass_dict import HassKey
|
||||
|
||||
from .const import CONF_MAX, CONF_MIN, CONF_STEP, CONF_TRIGGER, DOMAIN, PLATFORMS
|
||||
from .const import CONF_MAX, CONF_MIN, CONF_STEP, DOMAIN, PLATFORMS
|
||||
from .coordinator import TriggerUpdateCoordinator
|
||||
from .helpers import async_get_blueprints
|
||||
|
||||
@@ -136,7 +137,7 @@ async def _process_config(hass: HomeAssistant, hass_config: ConfigType) -> None:
|
||||
coordinator_tasks: list[Coroutine[Any, Any, TriggerUpdateCoordinator]] = []
|
||||
|
||||
for conf_section in hass_config[DOMAIN]:
|
||||
if CONF_TRIGGER in conf_section:
|
||||
if CONF_TRIGGERS in conf_section:
|
||||
coordinator_tasks.append(init_coordinator(hass, conf_section))
|
||||
continue
|
||||
|
||||
|
||||
@@ -3,6 +3,7 @@
|
||||
from collections.abc import Callable
|
||||
from contextlib import suppress
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
import voluptuous as vol
|
||||
|
||||
@@ -10,6 +11,7 @@ from homeassistant.components.binary_sensor import DOMAIN as BINARY_SENSOR_DOMAI
|
||||
from homeassistant.components.blueprint import (
|
||||
BLUEPRINT_INSTANCE_FIELDS,
|
||||
is_blueprint_instance_config,
|
||||
schemas as blueprint_schemas,
|
||||
)
|
||||
from homeassistant.components.button import DOMAIN as BUTTON_DOMAIN
|
||||
from homeassistant.components.cover import DOMAIN as COVER_DOMAIN
|
||||
@@ -22,9 +24,15 @@ from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN
|
||||
from homeassistant.components.weather import DOMAIN as WEATHER_DOMAIN
|
||||
from homeassistant.config import async_log_schema_error, config_without_domain
|
||||
from homeassistant.const import (
|
||||
CONF_ACTION,
|
||||
CONF_ACTIONS,
|
||||
CONF_BINARY_SENSORS,
|
||||
CONF_CONDITION,
|
||||
CONF_CONDITIONS,
|
||||
CONF_NAME,
|
||||
CONF_SENSORS,
|
||||
CONF_TRIGGER,
|
||||
CONF_TRIGGERS,
|
||||
CONF_UNIQUE_ID,
|
||||
CONF_VARIABLES,
|
||||
)
|
||||
@@ -47,14 +55,7 @@ from . import (
|
||||
switch as switch_platform,
|
||||
weather as weather_platform,
|
||||
)
|
||||
from .const import (
|
||||
CONF_ACTION,
|
||||
CONF_CONDITION,
|
||||
CONF_TRIGGER,
|
||||
DOMAIN,
|
||||
PLATFORMS,
|
||||
TemplateConfig,
|
||||
)
|
||||
from .const import DOMAIN, PLATFORMS, TemplateConfig
|
||||
from .helpers import async_get_blueprints
|
||||
|
||||
PACKAGE_MERGE_HINT = "list"
|
||||
@@ -67,7 +68,7 @@ def ensure_domains_do_not_have_trigger_or_action(*keys: str) -> Callable[[dict],
|
||||
def validate(obj: dict):
|
||||
options = set(obj.keys())
|
||||
if found_domains := domains.intersection(options):
|
||||
invalid = {CONF_TRIGGER, CONF_ACTION}
|
||||
invalid = {CONF_TRIGGERS, CONF_ACTIONS}
|
||||
if found_invalid := invalid.intersection(set(obj.keys())):
|
||||
raise vol.Invalid(
|
||||
f"Unsupported option(s) found for domain {found_domains.pop()}, please remove ({', '.join(found_invalid)}) from your configuration",
|
||||
@@ -78,13 +79,22 @@ def ensure_domains_do_not_have_trigger_or_action(*keys: str) -> Callable[[dict],
|
||||
return validate
|
||||
|
||||
|
||||
CONFIG_SECTION_SCHEMA = vol.Schema(
|
||||
vol.All(
|
||||
def _backward_compat_schema(value: Any | None) -> Any:
|
||||
"""Backward compatibility for automations."""
|
||||
|
||||
value = cv.renamed(CONF_TRIGGER, CONF_TRIGGERS)(value)
|
||||
value = cv.renamed(CONF_ACTION, CONF_ACTIONS)(value)
|
||||
return cv.renamed(CONF_CONDITION, CONF_CONDITIONS)(value)
|
||||
|
||||
|
||||
CONFIG_SECTION_SCHEMA = vol.All(
|
||||
_backward_compat_schema,
|
||||
vol.Schema(
|
||||
{
|
||||
vol.Optional(CONF_UNIQUE_ID): cv.string,
|
||||
vol.Optional(CONF_TRIGGER): cv.TRIGGER_SCHEMA,
|
||||
vol.Optional(CONF_CONDITION): cv.CONDITIONS_SCHEMA,
|
||||
vol.Optional(CONF_ACTION): cv.SCRIPT_SCHEMA,
|
||||
vol.Optional(CONF_TRIGGERS): cv.TRIGGER_SCHEMA,
|
||||
vol.Optional(CONF_CONDITIONS): cv.CONDITIONS_SCHEMA,
|
||||
vol.Optional(CONF_ACTIONS): cv.SCRIPT_SCHEMA,
|
||||
vol.Optional(CONF_VARIABLES): cv.SCRIPT_VARIABLES_SCHEMA,
|
||||
vol.Optional(NUMBER_DOMAIN): vol.All(
|
||||
cv.ensure_list, [number_platform.NUMBER_SCHEMA]
|
||||
@@ -123,10 +133,12 @@ CONFIG_SECTION_SCHEMA = vol.Schema(
|
||||
cv.ensure_list, [cover_platform.COVER_SCHEMA]
|
||||
),
|
||||
},
|
||||
ensure_domains_do_not_have_trigger_or_action(
|
||||
BUTTON_DOMAIN, COVER_DOMAIN, LIGHT_DOMAIN, SWITCH_DOMAIN
|
||||
),
|
||||
)
|
||||
),
|
||||
ensure_domains_do_not_have_trigger_or_action(BUTTON_DOMAIN, COVER_DOMAIN),
|
||||
)
|
||||
|
||||
TEMPLATE_BLUEPRINT_SCHEMA = vol.All(
|
||||
_backward_compat_schema, blueprint_schemas.BLUEPRINT_SCHEMA
|
||||
)
|
||||
|
||||
TEMPLATE_BLUEPRINT_INSTANCE_SCHEMA = vol.Schema(
|
||||
@@ -169,7 +181,7 @@ async def _async_resolve_blueprints(
|
||||
# house input results for template entities. For Trigger based template entities
|
||||
# CONF_VARIABLES should not be removed because the variables are always
|
||||
# executed between the trigger and action.
|
||||
if CONF_TRIGGER not in config and CONF_VARIABLES in config:
|
||||
if CONF_TRIGGERS not in config and CONF_VARIABLES in config:
|
||||
config[platform][CONF_VARIABLES] = config.pop(CONF_VARIABLES)
|
||||
raw_config = dict(config)
|
||||
|
||||
@@ -187,14 +199,14 @@ async def async_validate_config_section(
|
||||
|
||||
validated_config = await _async_resolve_blueprints(hass, config)
|
||||
|
||||
if CONF_TRIGGER in validated_config:
|
||||
validated_config[CONF_TRIGGER] = await async_validate_trigger_config(
|
||||
hass, validated_config[CONF_TRIGGER]
|
||||
if CONF_TRIGGERS in validated_config:
|
||||
validated_config[CONF_TRIGGERS] = await async_validate_trigger_config(
|
||||
hass, validated_config[CONF_TRIGGERS]
|
||||
)
|
||||
|
||||
if CONF_CONDITION in validated_config:
|
||||
validated_config[CONF_CONDITION] = await async_validate_conditions_config(
|
||||
hass, validated_config[CONF_CONDITION]
|
||||
if CONF_CONDITIONS in validated_config:
|
||||
validated_config[CONF_CONDITIONS] = await async_validate_conditions_config(
|
||||
hass, validated_config[CONF_CONDITIONS]
|
||||
)
|
||||
|
||||
return validated_config
|
||||
|
||||
@@ -1,22 +1,18 @@
|
||||
"""Constants for the Template Platform Components."""
|
||||
|
||||
from homeassistant.components.blueprint import BLUEPRINT_SCHEMA
|
||||
from homeassistant.const import Platform
|
||||
from homeassistant.helpers.typing import ConfigType
|
||||
|
||||
CONF_ACTION = "action"
|
||||
CONF_ATTRIBUTE_TEMPLATES = "attribute_templates"
|
||||
CONF_ATTRIBUTES = "attributes"
|
||||
CONF_AVAILABILITY = "availability"
|
||||
CONF_AVAILABILITY_TEMPLATE = "availability_template"
|
||||
CONF_CONDITION = "condition"
|
||||
CONF_MAX = "max"
|
||||
CONF_MIN = "min"
|
||||
CONF_OBJECT_ID = "object_id"
|
||||
CONF_PICTURE = "picture"
|
||||
CONF_PRESS = "press"
|
||||
CONF_STEP = "step"
|
||||
CONF_TRIGGER = "trigger"
|
||||
CONF_TURN_OFF = "turn_off"
|
||||
CONF_TURN_ON = "turn_on"
|
||||
|
||||
@@ -41,8 +37,6 @@ PLATFORMS = [
|
||||
Platform.WEATHER,
|
||||
]
|
||||
|
||||
TEMPLATE_BLUEPRINT_SCHEMA = BLUEPRINT_SCHEMA
|
||||
|
||||
|
||||
class TemplateConfig(dict):
|
||||
"""Dummy class to allow adding attributes."""
|
||||
|
||||
@@ -5,7 +5,14 @@ import logging
|
||||
from typing import TYPE_CHECKING, Any, cast
|
||||
|
||||
from homeassistant.components.blueprint import CONF_USE_BLUEPRINT
|
||||
from homeassistant.const import CONF_PATH, CONF_VARIABLES, EVENT_HOMEASSISTANT_START
|
||||
from homeassistant.const import (
|
||||
CONF_ACTIONS,
|
||||
CONF_CONDITIONS,
|
||||
CONF_PATH,
|
||||
CONF_TRIGGERS,
|
||||
CONF_VARIABLES,
|
||||
EVENT_HOMEASSISTANT_START,
|
||||
)
|
||||
from homeassistant.core import Context, CoreState, Event, HomeAssistant, callback
|
||||
from homeassistant.helpers import condition, discovery, trigger as trigger_helper
|
||||
from homeassistant.helpers.script import Script
|
||||
@@ -14,7 +21,7 @@ from homeassistant.helpers.trace import trace_get
|
||||
from homeassistant.helpers.typing import ConfigType, TemplateVarsType
|
||||
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator
|
||||
|
||||
from .const import CONF_ACTION, CONF_CONDITION, CONF_TRIGGER, DOMAIN, PLATFORMS
|
||||
from .const import DOMAIN, PLATFORMS
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
@@ -84,17 +91,17 @@ class TriggerUpdateCoordinator(DataUpdateCoordinator):
|
||||
|
||||
async def _attach_triggers(self, start_event: Event | None = None) -> None:
|
||||
"""Attach the triggers."""
|
||||
if CONF_ACTION in self.config:
|
||||
if CONF_ACTIONS in self.config:
|
||||
self._script = Script(
|
||||
self.hass,
|
||||
self.config[CONF_ACTION],
|
||||
self.config[CONF_ACTIONS],
|
||||
self.name,
|
||||
DOMAIN,
|
||||
)
|
||||
|
||||
if CONF_CONDITION in self.config:
|
||||
if CONF_CONDITIONS in self.config:
|
||||
self._cond_func = await condition.async_conditions_from_config(
|
||||
self.hass, self.config[CONF_CONDITION], _LOGGER, "template entity"
|
||||
self.hass, self.config[CONF_CONDITIONS], _LOGGER, "template entity"
|
||||
)
|
||||
|
||||
if start_event is not None:
|
||||
@@ -107,7 +114,7 @@ class TriggerUpdateCoordinator(DataUpdateCoordinator):
|
||||
|
||||
self._unsub_trigger = await trigger_helper.async_initialize_triggers(
|
||||
self.hass,
|
||||
self.config[CONF_TRIGGER],
|
||||
self.config[CONF_TRIGGERS],
|
||||
action,
|
||||
DOMAIN,
|
||||
self.name,
|
||||
|
||||
@@ -8,7 +8,7 @@ from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers.entity_platform import async_get_platforms
|
||||
from homeassistant.helpers.singleton import singleton
|
||||
|
||||
from .const import DOMAIN, TEMPLATE_BLUEPRINT_SCHEMA
|
||||
from .const import DOMAIN
|
||||
from .entity import AbstractTemplateEntity
|
||||
|
||||
DATA_BLUEPRINTS = "template_blueprints"
|
||||
@@ -54,6 +54,9 @@ async def _reload_blueprint_templates(hass: HomeAssistant, blueprint_path: str)
|
||||
@callback
|
||||
def async_get_blueprints(hass: HomeAssistant) -> blueprint.DomainBlueprints:
|
||||
"""Get template blueprints."""
|
||||
# pylint: disable-next=import-outside-toplevel
|
||||
from .config import TEMPLATE_BLUEPRINT_SCHEMA
|
||||
|
||||
return blueprint.DomainBlueprints(
|
||||
hass,
|
||||
DOMAIN,
|
||||
|
||||
@@ -2,6 +2,7 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Generator, Sequence
|
||||
import logging
|
||||
from typing import TYPE_CHECKING, Any
|
||||
|
||||
@@ -18,6 +19,7 @@ from homeassistant.components.light import (
|
||||
ATTR_TRANSITION,
|
||||
DEFAULT_MAX_KELVIN,
|
||||
DEFAULT_MIN_KELVIN,
|
||||
DOMAIN as LIGHT_DOMAIN,
|
||||
ENTITY_ID_FORMAT,
|
||||
PLATFORM_SCHEMA as LIGHT_PLATFORM_SCHEMA,
|
||||
ColorMode,
|
||||
@@ -46,6 +48,7 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
|
||||
from homeassistant.util import color as color_util
|
||||
|
||||
from . import TriggerUpdateCoordinator
|
||||
from .const import CONF_OBJECT_ID, CONF_PICTURE, DOMAIN
|
||||
from .template_entity import (
|
||||
LEGACY_FIELDS as TEMPLATE_ENTITY_LEGACY_FIELDS,
|
||||
@@ -55,6 +58,7 @@ from .template_entity import (
|
||||
TemplateEntity,
|
||||
rewrite_common_legacy_to_modern_conf,
|
||||
)
|
||||
from .trigger_entity import TriggerEntity
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
_VALID_STATES = [STATE_ON, STATE_OFF, "true", "false"]
|
||||
@@ -253,6 +257,13 @@ async def async_setup_platform(
|
||||
)
|
||||
return
|
||||
|
||||
if "coordinator" in discovery_info:
|
||||
async_add_entities(
|
||||
TriggerLightEntity(hass, discovery_info["coordinator"], config)
|
||||
for config in discovery_info["entities"]
|
||||
)
|
||||
return
|
||||
|
||||
_async_create_template_tracking_entities(
|
||||
async_add_entities,
|
||||
hass,
|
||||
@@ -261,27 +272,17 @@ async def async_setup_platform(
|
||||
)
|
||||
|
||||
|
||||
class LightTemplate(TemplateEntity, LightEntity):
|
||||
"""Representation of a templated Light, including dimmable."""
|
||||
|
||||
_attr_should_poll = False
|
||||
class AbstractTemplateLight(LightEntity):
|
||||
"""Representation of a template lights features."""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
hass: HomeAssistant,
|
||||
config: dict[str, Any],
|
||||
unique_id: str | None,
|
||||
self, config: dict[str, Any], initial_state: bool | None = False
|
||||
) -> None:
|
||||
"""Initialize the light."""
|
||||
super().__init__(hass, config=config, fallback_name=None, unique_id=unique_id)
|
||||
if (object_id := config.get(CONF_OBJECT_ID)) is not None:
|
||||
self.entity_id = async_generate_entity_id(
|
||||
ENTITY_ID_FORMAT, object_id, hass=hass
|
||||
)
|
||||
name = self._attr_name
|
||||
if TYPE_CHECKING:
|
||||
assert name is not None
|
||||
"""Initialize the features."""
|
||||
|
||||
self._registered_scripts: list[str] = []
|
||||
|
||||
# Template attributes
|
||||
self._template = config.get(CONF_STATE)
|
||||
self._level_template = config.get(CONF_LEVEL)
|
||||
self._temperature_template = config.get(CONF_TEMPERATURE)
|
||||
@@ -295,12 +296,8 @@ class LightTemplate(TemplateEntity, LightEntity):
|
||||
self._min_mireds_template = config.get(CONF_MIN_MIREDS)
|
||||
self._supports_transition_template = config.get(CONF_SUPPORTS_TRANSITION)
|
||||
|
||||
for action_id in (CONF_ON_ACTION, CONF_OFF_ACTION, CONF_EFFECT_ACTION):
|
||||
# Scripts can be an empty list, therefore we need to check for None
|
||||
if (action_config := config.get(action_id)) is not None:
|
||||
self.add_script(action_id, action_config, name, DOMAIN)
|
||||
|
||||
self._state = False
|
||||
# Stored values for template attributes
|
||||
self._state = initial_state
|
||||
self._brightness = None
|
||||
self._temperature: int | None = None
|
||||
self._hs_color = None
|
||||
@@ -309,14 +306,19 @@ class LightTemplate(TemplateEntity, LightEntity):
|
||||
self._rgbww_color = None
|
||||
self._effect = None
|
||||
self._effect_list = None
|
||||
self._color_mode = None
|
||||
self._max_mireds = None
|
||||
self._min_mireds = None
|
||||
self._supports_transition = False
|
||||
self._supported_color_modes = None
|
||||
self._color_mode: ColorMode | None = None
|
||||
self._supported_color_modes: set[ColorMode] | None = None
|
||||
|
||||
color_modes = {ColorMode.ONOFF}
|
||||
def _register_scripts(
|
||||
self, config: dict[str, Any]
|
||||
) -> Generator[tuple[str, Sequence[dict[str, Any]], ColorMode | None]]:
|
||||
for action_id, color_mode in (
|
||||
(CONF_ON_ACTION, None),
|
||||
(CONF_OFF_ACTION, None),
|
||||
(CONF_EFFECT_ACTION, None),
|
||||
(CONF_TEMPERATURE_ACTION, ColorMode.COLOR_TEMP),
|
||||
(CONF_LEVEL_ACTION, ColorMode.BRIGHTNESS),
|
||||
(CONF_HS_ACTION, ColorMode.HS),
|
||||
@@ -324,21 +326,9 @@ class LightTemplate(TemplateEntity, LightEntity):
|
||||
(CONF_RGBW_ACTION, ColorMode.RGBW),
|
||||
(CONF_RGBWW_ACTION, ColorMode.RGBWW),
|
||||
):
|
||||
# Scripts can be an empty list, therefore we need to check for None
|
||||
if (action_config := config.get(action_id)) is not None:
|
||||
self.add_script(action_id, action_config, name, DOMAIN)
|
||||
color_modes.add(color_mode)
|
||||
self._supported_color_modes = filter_supported_color_modes(color_modes)
|
||||
if len(self._supported_color_modes) > 1:
|
||||
self._color_mode = ColorMode.UNKNOWN
|
||||
if len(self._supported_color_modes) == 1:
|
||||
self._color_mode = next(iter(self._supported_color_modes))
|
||||
|
||||
self._attr_supported_features = LightEntityFeature(0)
|
||||
if (self._action_scripts.get(CONF_EFFECT_ACTION)) is not None:
|
||||
self._attr_supported_features |= LightEntityFeature.EFFECT
|
||||
if self._supports_transition is True:
|
||||
self._attr_supported_features |= LightEntityFeature.TRANSITION
|
||||
self._registered_scripts.append(action_id)
|
||||
yield (action_id, action_config, color_mode)
|
||||
|
||||
@property
|
||||
def brightness(self) -> int | None:
|
||||
@@ -413,107 +403,12 @@ class LightTemplate(TemplateEntity, LightEntity):
|
||||
"""Return true if device is on."""
|
||||
return self._state
|
||||
|
||||
@callback
|
||||
def _async_setup_templates(self) -> None:
|
||||
"""Set up templates."""
|
||||
if self._template:
|
||||
self.add_template_attribute(
|
||||
"_state", self._template, None, self._update_state
|
||||
)
|
||||
if self._level_template:
|
||||
self.add_template_attribute(
|
||||
"_brightness",
|
||||
self._level_template,
|
||||
None,
|
||||
self._update_brightness,
|
||||
none_on_template_error=True,
|
||||
)
|
||||
if self._max_mireds_template:
|
||||
self.add_template_attribute(
|
||||
"_max_mireds_template",
|
||||
self._max_mireds_template,
|
||||
None,
|
||||
self._update_max_mireds,
|
||||
none_on_template_error=True,
|
||||
)
|
||||
if self._min_mireds_template:
|
||||
self.add_template_attribute(
|
||||
"_min_mireds_template",
|
||||
self._min_mireds_template,
|
||||
None,
|
||||
self._update_min_mireds,
|
||||
none_on_template_error=True,
|
||||
)
|
||||
if self._temperature_template:
|
||||
self.add_template_attribute(
|
||||
"_temperature",
|
||||
self._temperature_template,
|
||||
None,
|
||||
self._update_temperature,
|
||||
none_on_template_error=True,
|
||||
)
|
||||
if self._hs_template:
|
||||
self.add_template_attribute(
|
||||
"_hs_color",
|
||||
self._hs_template,
|
||||
None,
|
||||
self._update_hs,
|
||||
none_on_template_error=True,
|
||||
)
|
||||
if self._rgb_template:
|
||||
self.add_template_attribute(
|
||||
"_rgb_color",
|
||||
self._rgb_template,
|
||||
None,
|
||||
self._update_rgb,
|
||||
none_on_template_error=True,
|
||||
)
|
||||
if self._rgbw_template:
|
||||
self.add_template_attribute(
|
||||
"_rgbw_color",
|
||||
self._rgbw_template,
|
||||
None,
|
||||
self._update_rgbw,
|
||||
none_on_template_error=True,
|
||||
)
|
||||
if self._rgbww_template:
|
||||
self.add_template_attribute(
|
||||
"_rgbww_color",
|
||||
self._rgbww_template,
|
||||
None,
|
||||
self._update_rgbww,
|
||||
none_on_template_error=True,
|
||||
)
|
||||
if self._effect_list_template:
|
||||
self.add_template_attribute(
|
||||
"_effect_list",
|
||||
self._effect_list_template,
|
||||
None,
|
||||
self._update_effect_list,
|
||||
none_on_template_error=True,
|
||||
)
|
||||
if self._effect_template:
|
||||
self.add_template_attribute(
|
||||
"_effect",
|
||||
self._effect_template,
|
||||
None,
|
||||
self._update_effect,
|
||||
none_on_template_error=True,
|
||||
)
|
||||
if self._supports_transition_template:
|
||||
self.add_template_attribute(
|
||||
"_supports_transition_template",
|
||||
self._supports_transition_template,
|
||||
None,
|
||||
self._update_supports_transition,
|
||||
none_on_template_error=True,
|
||||
)
|
||||
super()._async_setup_templates()
|
||||
def set_optimistic_attributes(self, **kwargs) -> bool: # noqa: C901
|
||||
"""Update attributes which should be set optimistically.
|
||||
|
||||
async def async_turn_on(self, **kwargs: Any) -> None: # noqa: C901
|
||||
"""Turn the light on."""
|
||||
Returns True if any attribute was updated.
|
||||
"""
|
||||
optimistic_set = False
|
||||
# set optimistic states
|
||||
if self._template is None:
|
||||
self._state = True
|
||||
optimistic_set = True
|
||||
@@ -613,6 +508,10 @@ class LightTemplate(TemplateEntity, LightEntity):
|
||||
self._rgbw_color = None
|
||||
optimistic_set = True
|
||||
|
||||
return optimistic_set
|
||||
|
||||
def get_registered_script(self, **kwargs) -> tuple[str, dict]:
|
||||
"""Get registered script for turn_on."""
|
||||
common_params = {}
|
||||
|
||||
if ATTR_BRIGHTNESS in kwargs:
|
||||
@@ -621,24 +520,23 @@ class LightTemplate(TemplateEntity, LightEntity):
|
||||
if ATTR_TRANSITION in kwargs and self._supports_transition is True:
|
||||
common_params["transition"] = kwargs[ATTR_TRANSITION]
|
||||
|
||||
if ATTR_COLOR_TEMP_KELVIN in kwargs and (
|
||||
temperature_script := self._action_scripts.get(CONF_TEMPERATURE_ACTION)
|
||||
if (
|
||||
ATTR_COLOR_TEMP_KELVIN in kwargs
|
||||
and (script := CONF_TEMPERATURE_ACTION) in self._registered_scripts
|
||||
):
|
||||
common_params["color_temp"] = color_util.color_temperature_kelvin_to_mired(
|
||||
kwargs[ATTR_COLOR_TEMP_KELVIN]
|
||||
)
|
||||
|
||||
await self.async_run_script(
|
||||
temperature_script,
|
||||
run_variables=common_params,
|
||||
context=self._context,
|
||||
)
|
||||
elif ATTR_EFFECT in kwargs and (
|
||||
effect_script := self._action_scripts.get(CONF_EFFECT_ACTION)
|
||||
return (script, common_params)
|
||||
|
||||
if (
|
||||
ATTR_EFFECT in kwargs
|
||||
and (script := CONF_EFFECT_ACTION) in self._registered_scripts
|
||||
):
|
||||
assert self._effect_list is not None
|
||||
effect = kwargs[ATTR_EFFECT]
|
||||
if effect not in self._effect_list:
|
||||
if self._effect_list is not None and effect not in self._effect_list:
|
||||
_LOGGER.error(
|
||||
"Received invalid effect: %s for entity %s. Expected one of: %s",
|
||||
effect,
|
||||
@@ -649,22 +547,22 @@ class LightTemplate(TemplateEntity, LightEntity):
|
||||
|
||||
common_params["effect"] = effect
|
||||
|
||||
await self.async_run_script(
|
||||
effect_script, run_variables=common_params, context=self._context
|
||||
)
|
||||
elif ATTR_HS_COLOR in kwargs and (
|
||||
hs_script := self._action_scripts.get(CONF_HS_ACTION)
|
||||
return (script, common_params)
|
||||
|
||||
if (
|
||||
ATTR_HS_COLOR in kwargs
|
||||
and (script := CONF_HS_ACTION) in self._registered_scripts
|
||||
):
|
||||
hs_value = kwargs[ATTR_HS_COLOR]
|
||||
common_params["hs"] = hs_value
|
||||
common_params["h"] = int(hs_value[0])
|
||||
common_params["s"] = int(hs_value[1])
|
||||
|
||||
await self.async_run_script(
|
||||
hs_script, run_variables=common_params, context=self._context
|
||||
)
|
||||
elif ATTR_RGBWW_COLOR in kwargs and (
|
||||
rgbww_script := self._action_scripts.get(CONF_RGBWW_ACTION)
|
||||
return (script, common_params)
|
||||
|
||||
if (
|
||||
ATTR_RGBWW_COLOR in kwargs
|
||||
and (script := CONF_RGBWW_ACTION) in self._registered_scripts
|
||||
):
|
||||
rgbww_value = kwargs[ATTR_RGBWW_COLOR]
|
||||
common_params["rgbww"] = rgbww_value
|
||||
@@ -679,11 +577,11 @@ class LightTemplate(TemplateEntity, LightEntity):
|
||||
common_params["cw"] = int(rgbww_value[3])
|
||||
common_params["ww"] = int(rgbww_value[4])
|
||||
|
||||
await self.async_run_script(
|
||||
rgbww_script, run_variables=common_params, context=self._context
|
||||
)
|
||||
elif ATTR_RGBW_COLOR in kwargs and (
|
||||
rgbw_script := self._action_scripts.get(CONF_RGBW_ACTION)
|
||||
return (script, common_params)
|
||||
|
||||
if (
|
||||
ATTR_RGBW_COLOR in kwargs
|
||||
and (script := CONF_RGBW_ACTION) in self._registered_scripts
|
||||
):
|
||||
rgbw_value = kwargs[ATTR_RGBW_COLOR]
|
||||
common_params["rgbw"] = rgbw_value
|
||||
@@ -697,11 +595,11 @@ class LightTemplate(TemplateEntity, LightEntity):
|
||||
common_params["b"] = int(rgbw_value[2])
|
||||
common_params["w"] = int(rgbw_value[3])
|
||||
|
||||
await self.async_run_script(
|
||||
rgbw_script, run_variables=common_params, context=self._context
|
||||
)
|
||||
elif ATTR_RGB_COLOR in kwargs and (
|
||||
rgb_script := self._action_scripts.get(CONF_RGB_ACTION)
|
||||
return (script, common_params)
|
||||
|
||||
if (
|
||||
ATTR_RGB_COLOR in kwargs
|
||||
and (script := CONF_RGB_ACTION) in self._registered_scripts
|
||||
):
|
||||
rgb_value = kwargs[ATTR_RGB_COLOR]
|
||||
common_params["rgb"] = rgb_value
|
||||
@@ -709,39 +607,15 @@ class LightTemplate(TemplateEntity, LightEntity):
|
||||
common_params["g"] = int(rgb_value[1])
|
||||
common_params["b"] = int(rgb_value[2])
|
||||
|
||||
await self.async_run_script(
|
||||
rgb_script, run_variables=common_params, context=self._context
|
||||
)
|
||||
elif ATTR_BRIGHTNESS in kwargs and (
|
||||
level_script := self._action_scripts.get(CONF_LEVEL_ACTION)
|
||||
return (script, common_params)
|
||||
|
||||
if (
|
||||
ATTR_BRIGHTNESS in kwargs
|
||||
and (script := CONF_LEVEL_ACTION) in self._registered_scripts
|
||||
):
|
||||
await self.async_run_script(
|
||||
level_script, run_variables=common_params, context=self._context
|
||||
)
|
||||
else:
|
||||
await self.async_run_script(
|
||||
self._action_scripts[CONF_ON_ACTION],
|
||||
run_variables=common_params,
|
||||
context=self._context,
|
||||
)
|
||||
return (script, common_params)
|
||||
|
||||
if optimistic_set:
|
||||
self.async_write_ha_state()
|
||||
|
||||
async def async_turn_off(self, **kwargs: Any) -> None:
|
||||
"""Turn the light off."""
|
||||
off_script = self._action_scripts[CONF_OFF_ACTION]
|
||||
if ATTR_TRANSITION in kwargs and self._supports_transition is True:
|
||||
await self.async_run_script(
|
||||
off_script,
|
||||
run_variables={"transition": kwargs[ATTR_TRANSITION]},
|
||||
context=self._context,
|
||||
)
|
||||
else:
|
||||
await self.async_run_script(off_script, context=self._context)
|
||||
if self._template is None:
|
||||
self._state = False
|
||||
self.async_write_ha_state()
|
||||
return (CONF_ON_ACTION, common_params)
|
||||
|
||||
@callback
|
||||
def _update_brightness(self, brightness):
|
||||
@@ -809,33 +683,6 @@ class LightTemplate(TemplateEntity, LightEntity):
|
||||
|
||||
self._effect = effect
|
||||
|
||||
@callback
|
||||
def _update_state(self, result):
|
||||
"""Update the state from the template."""
|
||||
if isinstance(result, TemplateError):
|
||||
# This behavior is legacy
|
||||
self._state = False
|
||||
if not self._availability_template:
|
||||
self._attr_available = True
|
||||
return
|
||||
|
||||
if isinstance(result, bool):
|
||||
self._state = result
|
||||
return
|
||||
|
||||
state = str(result).lower()
|
||||
if state in _VALID_STATES:
|
||||
self._state = state in ("true", STATE_ON)
|
||||
return
|
||||
|
||||
_LOGGER.error(
|
||||
"Received invalid light is_on state: %s for entity %s. Expected: %s",
|
||||
state,
|
||||
self.entity_id,
|
||||
", ".join(_VALID_STATES),
|
||||
)
|
||||
self._state = None
|
||||
|
||||
@callback
|
||||
def _update_temperature(self, render):
|
||||
"""Update the temperature from the template."""
|
||||
@@ -1092,3 +939,338 @@ class LightTemplate(TemplateEntity, LightEntity):
|
||||
self._supports_transition = bool(render)
|
||||
if self._supports_transition:
|
||||
self._attr_supported_features |= LightEntityFeature.TRANSITION
|
||||
|
||||
|
||||
class LightTemplate(TemplateEntity, AbstractTemplateLight):
|
||||
"""Representation of a templated Light, including dimmable."""
|
||||
|
||||
_attr_should_poll = False
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
hass: HomeAssistant,
|
||||
config: dict[str, Any],
|
||||
unique_id: str | None,
|
||||
) -> None:
|
||||
"""Initialize the light."""
|
||||
TemplateEntity.__init__(
|
||||
self, hass, config=config, fallback_name=None, unique_id=unique_id
|
||||
)
|
||||
AbstractTemplateLight.__init__(self, config)
|
||||
if (object_id := config.get(CONF_OBJECT_ID)) is not None:
|
||||
self.entity_id = async_generate_entity_id(
|
||||
ENTITY_ID_FORMAT, object_id, hass=hass
|
||||
)
|
||||
name = self._attr_name
|
||||
if TYPE_CHECKING:
|
||||
assert name is not None
|
||||
|
||||
color_modes = {ColorMode.ONOFF}
|
||||
for action_id, action_config, color_mode in self._register_scripts(config):
|
||||
self.add_script(action_id, action_config, name, DOMAIN)
|
||||
if color_mode:
|
||||
color_modes.add(color_mode)
|
||||
|
||||
self._supported_color_modes = filter_supported_color_modes(color_modes)
|
||||
if len(self._supported_color_modes) > 1:
|
||||
self._color_mode = ColorMode.UNKNOWN
|
||||
if len(self._supported_color_modes) == 1:
|
||||
self._color_mode = next(iter(self._supported_color_modes))
|
||||
|
||||
self._attr_supported_features = LightEntityFeature(0)
|
||||
if self._action_scripts.get(CONF_EFFECT_ACTION):
|
||||
self._attr_supported_features |= LightEntityFeature.EFFECT
|
||||
if self._supports_transition is True:
|
||||
self._attr_supported_features |= LightEntityFeature.TRANSITION
|
||||
|
||||
@callback
|
||||
def _async_setup_templates(self) -> None:
|
||||
"""Set up templates."""
|
||||
if self._template:
|
||||
self.add_template_attribute(
|
||||
"_state", self._template, None, self._update_state
|
||||
)
|
||||
if self._level_template:
|
||||
self.add_template_attribute(
|
||||
"_brightness",
|
||||
self._level_template,
|
||||
None,
|
||||
self._update_brightness,
|
||||
none_on_template_error=True,
|
||||
)
|
||||
if self._max_mireds_template:
|
||||
self.add_template_attribute(
|
||||
"_max_mireds_template",
|
||||
self._max_mireds_template,
|
||||
None,
|
||||
self._update_max_mireds,
|
||||
none_on_template_error=True,
|
||||
)
|
||||
if self._min_mireds_template:
|
||||
self.add_template_attribute(
|
||||
"_min_mireds_template",
|
||||
self._min_mireds_template,
|
||||
None,
|
||||
self._update_min_mireds,
|
||||
none_on_template_error=True,
|
||||
)
|
||||
if self._temperature_template:
|
||||
self.add_template_attribute(
|
||||
"_temperature",
|
||||
self._temperature_template,
|
||||
None,
|
||||
self._update_temperature,
|
||||
none_on_template_error=True,
|
||||
)
|
||||
if self._hs_template:
|
||||
self.add_template_attribute(
|
||||
"_hs_color",
|
||||
self._hs_template,
|
||||
None,
|
||||
self._update_hs,
|
||||
none_on_template_error=True,
|
||||
)
|
||||
if self._rgb_template:
|
||||
self.add_template_attribute(
|
||||
"_rgb_color",
|
||||
self._rgb_template,
|
||||
None,
|
||||
self._update_rgb,
|
||||
none_on_template_error=True,
|
||||
)
|
||||
if self._rgbw_template:
|
||||
self.add_template_attribute(
|
||||
"_rgbw_color",
|
||||
self._rgbw_template,
|
||||
None,
|
||||
self._update_rgbw,
|
||||
none_on_template_error=True,
|
||||
)
|
||||
if self._rgbww_template:
|
||||
self.add_template_attribute(
|
||||
"_rgbww_color",
|
||||
self._rgbww_template,
|
||||
None,
|
||||
self._update_rgbww,
|
||||
none_on_template_error=True,
|
||||
)
|
||||
if self._effect_list_template:
|
||||
self.add_template_attribute(
|
||||
"_effect_list",
|
||||
self._effect_list_template,
|
||||
None,
|
||||
self._update_effect_list,
|
||||
none_on_template_error=True,
|
||||
)
|
||||
if self._effect_template:
|
||||
self.add_template_attribute(
|
||||
"_effect",
|
||||
self._effect_template,
|
||||
None,
|
||||
self._update_effect,
|
||||
none_on_template_error=True,
|
||||
)
|
||||
if self._supports_transition_template:
|
||||
self.add_template_attribute(
|
||||
"_supports_transition_template",
|
||||
self._supports_transition_template,
|
||||
None,
|
||||
self._update_supports_transition,
|
||||
none_on_template_error=True,
|
||||
)
|
||||
super()._async_setup_templates()
|
||||
|
||||
@callback
|
||||
def _update_state(self, result):
|
||||
"""Update the state from the template."""
|
||||
if isinstance(result, TemplateError):
|
||||
# This behavior is legacy
|
||||
self._state = False
|
||||
if not self._availability_template:
|
||||
self._attr_available = True
|
||||
return
|
||||
|
||||
if isinstance(result, bool):
|
||||
self._state = result
|
||||
return
|
||||
|
||||
state = str(result).lower()
|
||||
if state in _VALID_STATES:
|
||||
self._state = state in ("true", STATE_ON)
|
||||
return
|
||||
|
||||
_LOGGER.error(
|
||||
"Received invalid light is_on state: %s for entity %s. Expected: %s",
|
||||
state,
|
||||
self.entity_id,
|
||||
", ".join(_VALID_STATES),
|
||||
)
|
||||
self._state = None
|
||||
|
||||
async def async_turn_on(self, **kwargs: Any) -> None:
|
||||
"""Turn the light on."""
|
||||
optimistic_set = self.set_optimistic_attributes(**kwargs)
|
||||
script_id, script_params = self.get_registered_script(**kwargs)
|
||||
await self.async_run_script(
|
||||
self._action_scripts[script_id],
|
||||
run_variables=script_params,
|
||||
context=self._context,
|
||||
)
|
||||
|
||||
if optimistic_set:
|
||||
self.async_write_ha_state()
|
||||
|
||||
async def async_turn_off(self, **kwargs: Any) -> None:
|
||||
"""Turn the light off."""
|
||||
off_script = self._action_scripts[CONF_OFF_ACTION]
|
||||
if ATTR_TRANSITION in kwargs and self._supports_transition is True:
|
||||
await self.async_run_script(
|
||||
off_script,
|
||||
run_variables={"transition": kwargs[ATTR_TRANSITION]},
|
||||
context=self._context,
|
||||
)
|
||||
else:
|
||||
await self.async_run_script(off_script, context=self._context)
|
||||
if self._template is None:
|
||||
self._state = False
|
||||
self.async_write_ha_state()
|
||||
|
||||
|
||||
class TriggerLightEntity(TriggerEntity, AbstractTemplateLight):
|
||||
"""Light entity based on trigger data."""
|
||||
|
||||
domain = LIGHT_DOMAIN
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
hass: HomeAssistant,
|
||||
coordinator: TriggerUpdateCoordinator,
|
||||
config: ConfigType,
|
||||
) -> None:
|
||||
"""Initialize the entity."""
|
||||
TriggerEntity.__init__(self, hass, coordinator, config)
|
||||
AbstractTemplateLight.__init__(self, config, None)
|
||||
|
||||
# Render the _attr_name before initializing TemplateLightEntity
|
||||
self._attr_name = name = self._rendered.get(CONF_NAME, DEFAULT_NAME)
|
||||
|
||||
self._optimistic_attrs: dict[str, str] = {}
|
||||
self._optimistic = True
|
||||
for key in (
|
||||
CONF_STATE,
|
||||
CONF_LEVEL,
|
||||
CONF_TEMPERATURE,
|
||||
CONF_RGB,
|
||||
CONF_RGBW,
|
||||
CONF_RGBWW,
|
||||
CONF_EFFECT,
|
||||
CONF_MAX_MIREDS,
|
||||
CONF_MIN_MIREDS,
|
||||
CONF_SUPPORTS_TRANSITION,
|
||||
):
|
||||
if isinstance(config.get(key), template.Template):
|
||||
if key == CONF_STATE:
|
||||
self._optimistic = False
|
||||
self._to_render_simple.append(key)
|
||||
self._parse_result.add(key)
|
||||
|
||||
for key in (CONF_EFFECT_LIST, CONF_HS):
|
||||
if isinstance(config.get(key), template.Template):
|
||||
self._to_render_complex.append(key)
|
||||
self._parse_result.add(key)
|
||||
|
||||
color_modes = {ColorMode.ONOFF}
|
||||
for action_id, action_config, color_mode in self._register_scripts(config):
|
||||
self.add_script(action_id, action_config, name, DOMAIN)
|
||||
if color_mode:
|
||||
color_modes.add(color_mode)
|
||||
|
||||
self._supported_color_modes = filter_supported_color_modes(color_modes)
|
||||
if len(self._supported_color_modes) > 1:
|
||||
self._color_mode = ColorMode.UNKNOWN
|
||||
if len(self._supported_color_modes) == 1:
|
||||
self._color_mode = next(iter(self._supported_color_modes))
|
||||
|
||||
self._attr_supported_features = LightEntityFeature(0)
|
||||
if self._action_scripts.get(CONF_EFFECT_ACTION):
|
||||
self._attr_supported_features |= LightEntityFeature.EFFECT
|
||||
if self._supports_transition is True:
|
||||
self._attr_supported_features |= LightEntityFeature.TRANSITION
|
||||
|
||||
@callback
|
||||
def _handle_coordinator_update(self) -> None:
|
||||
"""Handle update of the data."""
|
||||
self._process_data()
|
||||
|
||||
if not self.available:
|
||||
self.async_write_ha_state()
|
||||
return
|
||||
|
||||
write_ha_state = False
|
||||
for key, updater in (
|
||||
(CONF_LEVEL, self._update_brightness),
|
||||
(CONF_EFFECT_LIST, self._update_effect_list),
|
||||
(CONF_EFFECT, self._update_effect),
|
||||
(CONF_TEMPERATURE, self._update_temperature),
|
||||
(CONF_HS, self._update_hs),
|
||||
(CONF_RGB, self._update_rgb),
|
||||
(CONF_RGBW, self._update_rgbw),
|
||||
(CONF_RGBWW, self._update_rgbww),
|
||||
(CONF_MAX_MIREDS, self._update_max_mireds),
|
||||
(CONF_MIN_MIREDS, self._update_min_mireds),
|
||||
):
|
||||
if (rendered := self._rendered.get(key)) is not None:
|
||||
updater(rendered)
|
||||
write_ha_state = True
|
||||
|
||||
if (rendered := self._rendered.get(CONF_SUPPORTS_TRANSITION)) is not None:
|
||||
self._update_supports_transition(rendered)
|
||||
write_ha_state = True
|
||||
|
||||
if not self._optimistic:
|
||||
raw = self._rendered.get(CONF_STATE)
|
||||
self._state = template.result_as_boolean(raw)
|
||||
|
||||
self.async_set_context(self.coordinator.data["context"])
|
||||
write_ha_state = True
|
||||
elif self._optimistic and len(self._rendered) > 0:
|
||||
# In case any non optimistic template
|
||||
write_ha_state = True
|
||||
|
||||
if write_ha_state:
|
||||
self.async_write_ha_state()
|
||||
|
||||
async def async_turn_on(self, **kwargs: Any) -> None:
|
||||
"""Turn the light on."""
|
||||
optimistic_set = self.set_optimistic_attributes(**kwargs)
|
||||
script_id, script_params = self.get_registered_script(**kwargs)
|
||||
if self._template and self._state is None:
|
||||
# Ensure an optimistic state is set on the entity when turn_on
|
||||
# is called and the main state hasn't rendered. This will only
|
||||
# occur when the state is unknown, the template hasn't triggered,
|
||||
# and turn_on is called.
|
||||
self._state = True
|
||||
|
||||
await self.async_run_script(
|
||||
self._action_scripts[script_id],
|
||||
run_variables=script_params,
|
||||
context=self._context,
|
||||
)
|
||||
|
||||
if optimistic_set:
|
||||
self.async_write_ha_state()
|
||||
|
||||
async def async_turn_off(self, **kwargs: Any) -> None:
|
||||
"""Turn the light off."""
|
||||
off_script = self._action_scripts[CONF_OFF_ACTION]
|
||||
if ATTR_TRANSITION in kwargs and self._supports_transition is True:
|
||||
await self.async_run_script(
|
||||
off_script,
|
||||
run_variables={"transition": kwargs[ATTR_TRANSITION]},
|
||||
context=self._context,
|
||||
)
|
||||
else:
|
||||
await self.async_run_script(off_script, context=self._context)
|
||||
if self._template is None:
|
||||
self._state = False
|
||||
self.async_write_ha_state()
|
||||
|
||||
@@ -33,6 +33,8 @@ from homeassistant.const import (
|
||||
CONF_NAME,
|
||||
CONF_SENSORS,
|
||||
CONF_STATE,
|
||||
CONF_TRIGGER,
|
||||
CONF_TRIGGERS,
|
||||
CONF_UNIQUE_ID,
|
||||
CONF_UNIT_OF_MEASUREMENT,
|
||||
CONF_VALUE_TEMPLATE,
|
||||
@@ -53,12 +55,7 @@ from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
|
||||
from homeassistant.util import dt as dt_util
|
||||
|
||||
from . import TriggerUpdateCoordinator
|
||||
from .const import (
|
||||
CONF_ATTRIBUTE_TEMPLATES,
|
||||
CONF_AVAILABILITY_TEMPLATE,
|
||||
CONF_OBJECT_ID,
|
||||
CONF_TRIGGER,
|
||||
)
|
||||
from .const import CONF_ATTRIBUTE_TEMPLATES, CONF_AVAILABILITY_TEMPLATE, CONF_OBJECT_ID
|
||||
from .template_entity import (
|
||||
TEMPLATE_ENTITY_COMMON_SCHEMA,
|
||||
TemplateEntity,
|
||||
@@ -132,7 +129,7 @@ LEGACY_SENSOR_SCHEMA = vol.All(
|
||||
|
||||
def extra_validation_checks(val):
|
||||
"""Run extra validation checks."""
|
||||
if CONF_TRIGGER in val:
|
||||
if CONF_TRIGGERS in val or CONF_TRIGGER in val:
|
||||
raise vol.Invalid(
|
||||
"You can only add triggers to template entities if they are defined under"
|
||||
" `template:`. See the template documentation for more information:"
|
||||
@@ -170,6 +167,7 @@ PLATFORM_SCHEMA = vol.All(
|
||||
SENSOR_PLATFORM_SCHEMA.extend(
|
||||
{
|
||||
vol.Optional(CONF_TRIGGER): cv.match_all, # to raise custom warning
|
||||
vol.Optional(CONF_TRIGGERS): cv.match_all, # to raise custom warning
|
||||
vol.Required(CONF_SENSORS): cv.schema_with_slug_keys(LEGACY_SENSOR_SCHEMA),
|
||||
}
|
||||
),
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user