mirror of
https://github.com/home-assistant/core.git
synced 2026-02-28 04:51:41 +01:00
Compare commits
107 Commits
debounce-m
...
dev
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
642864959a | ||
|
|
7ef6c34149 | ||
|
|
5b32e42b8c | ||
|
|
1be8b8e525 | ||
|
|
3fae15c430 | ||
|
|
c7e78568d0 | ||
|
|
492b542136 | ||
|
|
0f4852d8c2 | ||
|
|
737c0c1823 | ||
|
|
5fadcb01e9 | ||
|
|
2b4f46a739 | ||
|
|
44fe37da1f | ||
|
|
abd4e89577 | ||
|
|
033798835a | ||
|
|
83c77957c1 | ||
|
|
b1bc1dc102 | ||
|
|
40b8a2c380 | ||
|
|
fb23a6fbf8 | ||
|
|
faad3de02c | ||
|
|
5f30f532e5 | ||
|
|
667e8c4d38 | ||
|
|
74240ecd26 | ||
|
|
c81ee53265 | ||
|
|
8835f1d5e6 | ||
|
|
2ca84182d8 | ||
|
|
3f0d1bc071 | ||
|
|
350f462bdf | ||
|
|
2f98e68ed8 | ||
|
|
5b7fac94e5 | ||
|
|
c32ce3da5c | ||
|
|
0e1d1fbaed | ||
|
|
57d7f364f4 | ||
|
|
7cc5777b47 | ||
|
|
5e3f23b6a2 | ||
|
|
6873a40407 | ||
|
|
ddaa2fb293 | ||
|
|
53b6223459 | ||
|
|
7329cfb927 | ||
|
|
44b80dde0c | ||
|
|
8c125e4e4f | ||
|
|
227a258382 | ||
|
|
addc2a6766 | ||
|
|
97bcea9727 | ||
|
|
4f05c807b0 | ||
|
|
177a918c26 | ||
|
|
9705770c6c | ||
|
|
7309351165 | ||
|
|
d0401de70d | ||
|
|
6b89359a73 | ||
|
|
b31bafab99 | ||
|
|
84c556bb63 | ||
|
|
225ea02d9a | ||
|
|
ebd1cc994c | ||
|
|
9ec22ba158 | ||
|
|
2ff85d2134 | ||
|
|
3eb7f04510 | ||
|
|
54613ac8d9 | ||
|
|
044522a8ab | ||
|
|
19bf41496a | ||
|
|
a7efba098d | ||
|
|
042ad3b759 | ||
|
|
4270e4c793 | ||
|
|
cb11c22e76 | ||
|
|
c6e23fec93 | ||
|
|
553cecb397 | ||
|
|
bb7d5897d1 | ||
|
|
3e050ebe59 | ||
|
|
856a9e695a | ||
|
|
1944a8bd3a | ||
|
|
3f11af8084 | ||
|
|
46a87cd9dd | ||
|
|
f8a657cf01 | ||
|
|
75ed7b2fa2 | ||
|
|
e63e54820c | ||
|
|
37d2c946e8 | ||
|
|
e8a35ea69d | ||
|
|
28b950c64a | ||
|
|
e7cf6cbe72 | ||
|
|
5ad71453b8 | ||
|
|
ab9c8093c3 | ||
|
|
51acdeb563 | ||
|
|
bf60d57cc2 | ||
|
|
d94f15b985 | ||
|
|
8a621e6570 | ||
|
|
dd44b15b7b | ||
|
|
23ec28bbbf | ||
|
|
7a6a479b53 | ||
|
|
f9ffaad7f1 | ||
|
|
d4aa52ecc3 | ||
|
|
1b5eea5fae | ||
|
|
39dce8eb31 | ||
|
|
b651e62c7f | ||
|
|
1e807dc9da | ||
|
|
cba69e7e69 | ||
|
|
802a7aafec | ||
|
|
db5e7b3e3b | ||
|
|
75798bfb5e | ||
|
|
06a25de0d5 | ||
|
|
892da4a03e | ||
|
|
91e8e3da7a | ||
|
|
144b8768a1 | ||
|
|
cb6d86f86d | ||
|
|
422007577e | ||
|
|
7c2904bf48 | ||
|
|
3240fd7fc8 | ||
|
|
7dc2dff4e7 | ||
|
|
7e8de9bb9c |
@@ -34,6 +34,7 @@ base_platforms: &base_platforms
|
||||
- homeassistant/components/humidifier/**
|
||||
- homeassistant/components/image/**
|
||||
- homeassistant/components/image_processing/**
|
||||
- homeassistant/components/infrared/**
|
||||
- homeassistant/components/lawn_mower/**
|
||||
- homeassistant/components/light/**
|
||||
- homeassistant/components/lock/**
|
||||
|
||||
2
.github/workflows/ci.yaml
vendored
2
.github/workflows/ci.yaml
vendored
@@ -605,7 +605,7 @@ jobs:
|
||||
with:
|
||||
persist-credentials: false
|
||||
- name: Dependency review
|
||||
uses: actions/dependency-review-action@3c4e3dcb1aa7874d2c16be7d79418e9b7efd6261 # v4.8.2
|
||||
uses: actions/dependency-review-action@05fe4576374b728f0c523d6a13d64c25081e0803 # v4.8.3
|
||||
with:
|
||||
license-check: false # We use our own license audit checks
|
||||
|
||||
|
||||
@@ -289,6 +289,7 @@ homeassistant.components.imgw_pib.*
|
||||
homeassistant.components.immich.*
|
||||
homeassistant.components.incomfort.*
|
||||
homeassistant.components.inels.*
|
||||
homeassistant.components.infrared.*
|
||||
homeassistant.components.input_button.*
|
||||
homeassistant.components.input_select.*
|
||||
homeassistant.components.input_text.*
|
||||
|
||||
8
CODEOWNERS
generated
8
CODEOWNERS
generated
@@ -401,8 +401,6 @@ build.json @home-assistant/supervisor
|
||||
/tests/components/dsmr_reader/ @sorted-bits @glodenox @erwindouna
|
||||
/homeassistant/components/duckdns/ @tr4nt0r
|
||||
/tests/components/duckdns/ @tr4nt0r
|
||||
/homeassistant/components/duke_energy/ @hunterjm
|
||||
/tests/components/duke_energy/ @hunterjm
|
||||
/homeassistant/components/duotecno/ @cereal2nd
|
||||
/tests/components/duotecno/ @cereal2nd
|
||||
/homeassistant/components/dwd_weather_warnings/ @runningman84 @stephan192
|
||||
@@ -794,6 +792,8 @@ build.json @home-assistant/supervisor
|
||||
/tests/components/inels/ @epdevlab
|
||||
/homeassistant/components/influxdb/ @mdegat01 @Robbie1221
|
||||
/tests/components/influxdb/ @mdegat01 @Robbie1221
|
||||
/homeassistant/components/infrared/ @home-assistant/core
|
||||
/tests/components/infrared/ @home-assistant/core
|
||||
/homeassistant/components/inkbird/ @bdraco
|
||||
/tests/components/inkbird/ @bdraco
|
||||
/homeassistant/components/input_boolean/ @home-assistant/core
|
||||
@@ -1899,8 +1899,8 @@ build.json @home-assistant/supervisor
|
||||
/tests/components/withings/ @joostlek
|
||||
/homeassistant/components/wiz/ @sbidy @arturpragacz
|
||||
/tests/components/wiz/ @sbidy @arturpragacz
|
||||
/homeassistant/components/wled/ @frenck
|
||||
/tests/components/wled/ @frenck
|
||||
/homeassistant/components/wled/ @frenck @mik-laj
|
||||
/tests/components/wled/ @frenck @mik-laj
|
||||
/homeassistant/components/wmspro/ @mback2k
|
||||
/tests/components/wmspro/ @mback2k
|
||||
/homeassistant/components/wolflink/ @adamkrol93 @mtielen
|
||||
|
||||
@@ -12,10 +12,6 @@ from homeassistant.helpers.dispatcher import dispatcher_send
|
||||
|
||||
from .const import DOMAIN, DOMAIN_DATA, LOGGER
|
||||
|
||||
SERVICE_SETTINGS = "change_setting"
|
||||
SERVICE_CAPTURE_IMAGE = "capture_image"
|
||||
SERVICE_TRIGGER_AUTOMATION = "trigger_automation"
|
||||
|
||||
ATTR_SETTING = "setting"
|
||||
ATTR_VALUE = "value"
|
||||
|
||||
@@ -75,16 +71,13 @@ def async_setup_services(hass: HomeAssistant) -> None:
|
||||
"""Home Assistant services."""
|
||||
|
||||
hass.services.async_register(
|
||||
DOMAIN, SERVICE_SETTINGS, _change_setting, schema=CHANGE_SETTING_SCHEMA
|
||||
DOMAIN, "change_setting", _change_setting, schema=CHANGE_SETTING_SCHEMA
|
||||
)
|
||||
|
||||
hass.services.async_register(
|
||||
DOMAIN, SERVICE_CAPTURE_IMAGE, _capture_image, schema=CAPTURE_IMAGE_SCHEMA
|
||||
DOMAIN, "capture_image", _capture_image, schema=CAPTURE_IMAGE_SCHEMA
|
||||
)
|
||||
|
||||
hass.services.async_register(
|
||||
DOMAIN,
|
||||
SERVICE_TRIGGER_AUTOMATION,
|
||||
_trigger_automation,
|
||||
schema=AUTOMATION_SCHEMA,
|
||||
DOMAIN, "trigger_automation", _trigger_automation, schema=AUTOMATION_SCHEMA
|
||||
)
|
||||
|
||||
@@ -10,8 +10,6 @@ from homeassistant.helpers import config_validation as cv, service
|
||||
|
||||
from .const import DOMAIN
|
||||
|
||||
ADVANTAGE_AIR_SERVICE_SET_TIME_TO = "set_time_to"
|
||||
|
||||
|
||||
@callback
|
||||
def async_setup_services(hass: HomeAssistant) -> None:
|
||||
@@ -20,7 +18,7 @@ def async_setup_services(hass: HomeAssistant) -> None:
|
||||
service.async_register_platform_entity_service(
|
||||
hass,
|
||||
DOMAIN,
|
||||
ADVANTAGE_AIR_SERVICE_SET_TIME_TO,
|
||||
"set_time_to",
|
||||
entity_domain=SENSOR_DOMAIN,
|
||||
schema={vol.Required("minutes"): cv.positive_int},
|
||||
func="set_time_to",
|
||||
|
||||
@@ -8,18 +8,12 @@ from homeassistant.helpers import service
|
||||
|
||||
from .const import DOMAIN
|
||||
|
||||
_DEV_EN_ALT = "enable_alerts"
|
||||
_DEV_DS_ALT = "disable_alerts"
|
||||
_DEV_EN_REC = "start_recording"
|
||||
_DEV_DS_REC = "stop_recording"
|
||||
_DEV_SNAP = "snapshot"
|
||||
|
||||
CAMERA_SERVICES = {
|
||||
_DEV_EN_ALT: "async_enable_alerts",
|
||||
_DEV_DS_ALT: "async_disable_alerts",
|
||||
_DEV_EN_REC: "async_start_recording",
|
||||
_DEV_DS_REC: "async_stop_recording",
|
||||
_DEV_SNAP: "async_snapshot",
|
||||
"enable_alerts": "async_enable_alerts",
|
||||
"disable_alerts": "async_disable_alerts",
|
||||
"start_recording": "async_start_recording",
|
||||
"stop_recording": "async_stop_recording",
|
||||
"snapshot": "async_snapshot",
|
||||
}
|
||||
|
||||
|
||||
|
||||
@@ -13,9 +13,6 @@ from homeassistant.helpers import config_validation as cv, service
|
||||
|
||||
from .const import DOMAIN
|
||||
|
||||
SERVICE_ALARM_TOGGLE_CHIME = "alarm_toggle_chime"
|
||||
|
||||
SERVICE_ALARM_KEYPRESS = "alarm_keypress"
|
||||
ATTR_KEYPRESS = "keypress"
|
||||
|
||||
|
||||
@@ -26,7 +23,7 @@ def async_setup_services(hass: HomeAssistant) -> None:
|
||||
service.async_register_platform_entity_service(
|
||||
hass,
|
||||
DOMAIN,
|
||||
SERVICE_ALARM_TOGGLE_CHIME,
|
||||
"alarm_toggle_chime",
|
||||
entity_domain=ALARM_CONTROL_PANEL_DOMAIN,
|
||||
schema={
|
||||
vol.Required(ATTR_CODE): cv.string,
|
||||
@@ -37,7 +34,7 @@ def async_setup_services(hass: HomeAssistant) -> None:
|
||||
service.async_register_platform_entity_service(
|
||||
hass,
|
||||
DOMAIN,
|
||||
SERVICE_ALARM_KEYPRESS,
|
||||
"alarm_keypress",
|
||||
entity_domain=ALARM_CONTROL_PANEL_DOMAIN,
|
||||
schema={
|
||||
vol.Required(ATTR_KEYPRESS): cv.string,
|
||||
|
||||
@@ -16,9 +16,6 @@ from .coordinator import AmazonConfigEntry
|
||||
ATTR_TEXT_COMMAND = "text_command"
|
||||
ATTR_SOUND = "sound"
|
||||
ATTR_INFO_SKILL = "info_skill"
|
||||
SERVICE_TEXT_COMMAND = "send_text_command"
|
||||
SERVICE_SOUND_NOTIFICATION = "send_sound"
|
||||
SERVICE_INFO_SKILL = "send_info_skill"
|
||||
|
||||
SCHEMA_SOUND_SERVICE = vol.Schema(
|
||||
{
|
||||
@@ -128,17 +125,17 @@ def async_setup_services(hass: HomeAssistant) -> None:
|
||||
"""Set up the services for the Amazon Devices integration."""
|
||||
for service_name, method, schema in (
|
||||
(
|
||||
SERVICE_SOUND_NOTIFICATION,
|
||||
"send_sound",
|
||||
async_send_sound_notification,
|
||||
SCHEMA_SOUND_SERVICE,
|
||||
),
|
||||
(
|
||||
SERVICE_TEXT_COMMAND,
|
||||
"send_text_command",
|
||||
async_send_text_command,
|
||||
SCHEMA_CUSTOM_COMMAND,
|
||||
),
|
||||
(
|
||||
SERVICE_INFO_SKILL,
|
||||
"send_info_skill",
|
||||
async_send_info_skill,
|
||||
SCHEMA_INFO_SKILL,
|
||||
),
|
||||
|
||||
@@ -16,8 +16,6 @@ ATTRIBUTION = "Data provided by Amber Electric"
|
||||
LOGGER = logging.getLogger(__package__)
|
||||
PLATFORMS = [Platform.BINARY_SENSOR, Platform.SENSOR]
|
||||
|
||||
SERVICE_GET_FORECASTS = "get_forecasts"
|
||||
|
||||
GENERAL_CHANNEL = "general"
|
||||
CONTROLLED_LOAD_CHANNEL = "controlled_load"
|
||||
FEED_IN_CHANNEL = "feed_in"
|
||||
|
||||
@@ -22,7 +22,6 @@ from .const import (
|
||||
DOMAIN,
|
||||
FEED_IN_CHANNEL,
|
||||
GENERAL_CHANNEL,
|
||||
SERVICE_GET_FORECASTS,
|
||||
)
|
||||
from .coordinator import AmberConfigEntry
|
||||
from .helpers import format_cents_to_dollars, normalize_descriptor
|
||||
@@ -101,7 +100,7 @@ def async_setup_services(hass: HomeAssistant) -> None:
|
||||
|
||||
hass.services.async_register(
|
||||
DOMAIN,
|
||||
SERVICE_GET_FORECASTS,
|
||||
"get_forecasts",
|
||||
handle_get_forecasts,
|
||||
GET_FORECASTS_SCHEMA,
|
||||
supports_response=SupportsResponse.ONLY,
|
||||
|
||||
@@ -49,18 +49,6 @@ SCAN_INTERVAL = timedelta(seconds=15)
|
||||
|
||||
STREAM_SOURCE_LIST = ["snapshot", "mjpeg", "rtsp"]
|
||||
|
||||
_SRV_EN_REC = "enable_recording"
|
||||
_SRV_DS_REC = "disable_recording"
|
||||
_SRV_EN_AUD = "enable_audio"
|
||||
_SRV_DS_AUD = "disable_audio"
|
||||
_SRV_EN_MOT_REC = "enable_motion_recording"
|
||||
_SRV_DS_MOT_REC = "disable_motion_recording"
|
||||
_SRV_GOTO = "goto_preset"
|
||||
_SRV_CBW = "set_color_bw"
|
||||
_SRV_TOUR_ON = "start_tour"
|
||||
_SRV_TOUR_OFF = "stop_tour"
|
||||
|
||||
_SRV_PTZ_CTRL = "ptz_control"
|
||||
_ATTR_PTZ_TT = "travel_time"
|
||||
_ATTR_PTZ_MOV = "movement"
|
||||
_MOV = [
|
||||
@@ -103,17 +91,17 @@ _SRV_PTZ_SCHEMA = _SRV_SCHEMA.extend(
|
||||
)
|
||||
|
||||
CAMERA_SERVICES = {
|
||||
_SRV_EN_REC: (_SRV_SCHEMA, "async_enable_recording", ()),
|
||||
_SRV_DS_REC: (_SRV_SCHEMA, "async_disable_recording", ()),
|
||||
_SRV_EN_AUD: (_SRV_SCHEMA, "async_enable_audio", ()),
|
||||
_SRV_DS_AUD: (_SRV_SCHEMA, "async_disable_audio", ()),
|
||||
_SRV_EN_MOT_REC: (_SRV_SCHEMA, "async_enable_motion_recording", ()),
|
||||
_SRV_DS_MOT_REC: (_SRV_SCHEMA, "async_disable_motion_recording", ()),
|
||||
_SRV_GOTO: (_SRV_GOTO_SCHEMA, "async_goto_preset", (_ATTR_PRESET,)),
|
||||
_SRV_CBW: (_SRV_CBW_SCHEMA, "async_set_color_bw", (_ATTR_COLOR_BW,)),
|
||||
_SRV_TOUR_ON: (_SRV_SCHEMA, "async_start_tour", ()),
|
||||
_SRV_TOUR_OFF: (_SRV_SCHEMA, "async_stop_tour", ()),
|
||||
_SRV_PTZ_CTRL: (
|
||||
"enable_recording": (_SRV_SCHEMA, "async_enable_recording", ()),
|
||||
"disable_recording": (_SRV_SCHEMA, "async_disable_recording", ()),
|
||||
"enable_audio": (_SRV_SCHEMA, "async_enable_audio", ()),
|
||||
"disable_audio": (_SRV_SCHEMA, "async_disable_audio", ()),
|
||||
"enable_motion_recording": (_SRV_SCHEMA, "async_enable_motion_recording", ()),
|
||||
"disable_motion_recording": (_SRV_SCHEMA, "async_disable_motion_recording", ()),
|
||||
"goto_preset": (_SRV_GOTO_SCHEMA, "async_goto_preset", (_ATTR_PRESET,)),
|
||||
"set_color_bw": (_SRV_CBW_SCHEMA, "async_set_color_bw", (_ATTR_COLOR_BW,)),
|
||||
"start_tour": (_SRV_SCHEMA, "async_start_tour", ()),
|
||||
"stop_tour": (_SRV_SCHEMA, "async_stop_tour", ()),
|
||||
"ptz_control": (
|
||||
_SRV_PTZ_SCHEMA,
|
||||
"async_ptz_control",
|
||||
(_ATTR_PTZ_MOV, _ATTR_PTZ_TT),
|
||||
|
||||
@@ -36,7 +36,7 @@ from .const import (
|
||||
SIGNAL_CONFIG_ENTITY,
|
||||
)
|
||||
from .entity import AndroidTVEntity, adb_decorator
|
||||
from .services import ATTR_ADB_RESPONSE, ATTR_HDMI_INPUT, SERVICE_LEARN_SENDEVENT
|
||||
from .services import ATTR_ADB_RESPONSE, ATTR_HDMI_INPUT
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
@@ -271,7 +271,7 @@ class ADBDevice(AndroidTVEntity, MediaPlayerEntity):
|
||||
self.async_write_ha_state()
|
||||
|
||||
msg = (
|
||||
f"Output from service '{SERVICE_LEARN_SENDEVENT}' from"
|
||||
f"Output from service 'learn_sendevent' from"
|
||||
f" {self.entity_id}: '{output}'"
|
||||
)
|
||||
persistent_notification.async_create(
|
||||
|
||||
@@ -16,11 +16,6 @@ ATTR_DEVICE_PATH = "device_path"
|
||||
ATTR_HDMI_INPUT = "hdmi_input"
|
||||
ATTR_LOCAL_PATH = "local_path"
|
||||
|
||||
SERVICE_ADB_COMMAND = "adb_command"
|
||||
SERVICE_DOWNLOAD = "download"
|
||||
SERVICE_LEARN_SENDEVENT = "learn_sendevent"
|
||||
SERVICE_UPLOAD = "upload"
|
||||
|
||||
|
||||
@callback
|
||||
def async_setup_services(hass: HomeAssistant) -> None:
|
||||
@@ -29,7 +24,7 @@ def async_setup_services(hass: HomeAssistant) -> None:
|
||||
service.async_register_platform_entity_service(
|
||||
hass,
|
||||
DOMAIN,
|
||||
SERVICE_ADB_COMMAND,
|
||||
"adb_command",
|
||||
entity_domain=MEDIA_PLAYER_DOMAIN,
|
||||
schema={vol.Required(ATTR_COMMAND): cv.string},
|
||||
func="adb_command",
|
||||
@@ -37,7 +32,7 @@ def async_setup_services(hass: HomeAssistant) -> None:
|
||||
service.async_register_platform_entity_service(
|
||||
hass,
|
||||
DOMAIN,
|
||||
SERVICE_LEARN_SENDEVENT,
|
||||
"learn_sendevent",
|
||||
entity_domain=MEDIA_PLAYER_DOMAIN,
|
||||
schema=None,
|
||||
func="learn_sendevent",
|
||||
@@ -45,7 +40,7 @@ def async_setup_services(hass: HomeAssistant) -> None:
|
||||
service.async_register_platform_entity_service(
|
||||
hass,
|
||||
DOMAIN,
|
||||
SERVICE_DOWNLOAD,
|
||||
"download",
|
||||
entity_domain=MEDIA_PLAYER_DOMAIN,
|
||||
schema={
|
||||
vol.Required(ATTR_DEVICE_PATH): cv.string,
|
||||
@@ -56,7 +51,7 @@ def async_setup_services(hass: HomeAssistant) -> None:
|
||||
service.async_register_platform_entity_service(
|
||||
hass,
|
||||
DOMAIN,
|
||||
SERVICE_UPLOAD,
|
||||
"upload",
|
||||
entity_domain=MEDIA_PLAYER_DOMAIN,
|
||||
schema={
|
||||
vol.Required(ATTR_DEVICE_PATH): cv.string,
|
||||
|
||||
@@ -858,6 +858,11 @@ class AnthropicBaseLLMEntity(Entity):
|
||||
]
|
||||
)
|
||||
messages.extend(new_messages)
|
||||
except anthropic.AuthenticationError as err:
|
||||
self.entry.async_start_reauth(self.hass)
|
||||
raise HomeAssistantError(
|
||||
"Authentication error with Anthropic API, reauthentication required"
|
||||
) from err
|
||||
except anthropic.AnthropicError as err:
|
||||
raise HomeAssistantError(
|
||||
f"Sorry, I had a problem talking to Anthropic: {err}"
|
||||
|
||||
55
homeassistant/components/aws_s3/diagnostics.py
Normal file
55
homeassistant/components/aws_s3/diagnostics.py
Normal file
@@ -0,0 +1,55 @@
|
||||
"""Diagnostics support for AWS S3."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import dataclasses
|
||||
from typing import Any
|
||||
|
||||
from homeassistant.components.backup import (
|
||||
DATA_MANAGER as BACKUP_DATA_MANAGER,
|
||||
BackupManager,
|
||||
)
|
||||
from homeassistant.components.diagnostics import async_redact_data
|
||||
from homeassistant.core import HomeAssistant
|
||||
|
||||
from .const import (
|
||||
CONF_ACCESS_KEY_ID,
|
||||
CONF_BUCKET,
|
||||
CONF_PREFIX,
|
||||
CONF_SECRET_ACCESS_KEY,
|
||||
DOMAIN,
|
||||
)
|
||||
from .coordinator import S3ConfigEntry
|
||||
from .helpers import async_list_backups_from_s3
|
||||
|
||||
TO_REDACT = (CONF_ACCESS_KEY_ID, CONF_SECRET_ACCESS_KEY)
|
||||
|
||||
|
||||
async def async_get_config_entry_diagnostics(
    hass: HomeAssistant,
    entry: S3ConfigEntry,
) -> dict[str, Any]:
    """Return diagnostics for a config entry."""
    s3_coordinator = entry.runtime_data
    manager: BackupManager = hass.data[BACKUP_DATA_MANAGER]
    s3_backups = await async_list_backups_from_s3(
        s3_coordinator.client,
        bucket=entry.data[CONF_BUCKET],
        prefix=entry.data.get(CONF_PREFIX, ""),
    )

    # Only report the backup agents that belong to this integration.
    own_agents = [
        {"name": agent.name}
        for agent in manager.backup_agents.values()
        if agent.domain == DOMAIN
    ]

    diagnostics: dict[str, Any] = {
        "coordinator_data": dataclasses.asdict(s3_coordinator.data),
        "config": {
            **entry.data,
            **entry.options,
        },
        "backup_agents": own_agents,
        "backup": [backup.as_dict() for backup in s3_backups],
    }

    # Strip the AWS credentials before the data leaves the instance.
    return async_redact_data(diagnostics, TO_REDACT)
|
||||
@@ -38,14 +38,14 @@ rules:
|
||||
docs-installation-parameters: done
|
||||
entity-unavailable: done
|
||||
integration-owner: done
|
||||
log-when-unavailable: todo
|
||||
log-when-unavailable: done
|
||||
parallel-updates: done
|
||||
reauthentication-flow: todo
|
||||
test-coverage: done
|
||||
|
||||
# Gold
|
||||
devices: done
|
||||
diagnostics: todo
|
||||
diagnostics: done
|
||||
discovery-update-info:
|
||||
status: exempt
|
||||
comment: S3 is a cloud service that is not discovered on the network.
|
||||
|
||||
@@ -43,11 +43,11 @@
|
||||
"title": "The backup location {agent_id} is unavailable"
|
||||
},
|
||||
"automatic_backup_failed_addons": {
|
||||
"description": "Add-ons {failed_addons} could not be included in automatic backup. Please check the Supervisor logs for more information. Another attempt will be made at the next scheduled time if a backup schedule is configured.",
|
||||
"title": "Not all add-ons could be included in automatic backup"
|
||||
"description": "Apps {failed_addons} could not be included in automatic backup. Please check the Supervisor logs for more information. Another attempt will be made at the next scheduled time if a backup schedule is configured.",
|
||||
"title": "Not all apps could be included in automatic backup"
|
||||
},
|
||||
"automatic_backup_failed_agents_addons_folders": {
|
||||
"description": "The automatic backup was created with errors:\n* Locations which the backup could not be uploaded to: {failed_agents}\n* Add-ons which could not be backed up: {failed_addons}\n* Folders which could not be backed up: {failed_folders}\n\nPlease check the Core and Supervisor logs for more information. Another attempt will be made at the next scheduled time if a backup schedule is configured.",
|
||||
"description": "The automatic backup was created with errors:\n* Locations which the backup could not be uploaded to: {failed_agents}\n* Apps which could not be backed up: {failed_addons}\n* Folders which could not be backed up: {failed_folders}\n\nPlease check the Core and Supervisor logs for more information. Another attempt will be made at the next scheduled time if a backup schedule is configured.",
|
||||
"title": "Automatic backup was created with errors"
|
||||
},
|
||||
"automatic_backup_failed_create": {
|
||||
|
||||
@@ -64,6 +64,8 @@ SENSOR_TYPES: tuple[BSBLanSensorEntityDescription, ...] = (
|
||||
device_class=SensorDeviceClass.ENERGY,
|
||||
native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
|
||||
state_class=SensorStateClass.TOTAL_INCREASING,
|
||||
suggested_display_precision=0,
|
||||
entity_registry_enabled_default=False,
|
||||
value_fn=lambda data: (
|
||||
data.sensor.total_energy.value
|
||||
if data.sensor.total_energy is not None
|
||||
|
||||
@@ -31,10 +31,6 @@ ATTR_FRIDAY_SLOTS = "friday_slots"
|
||||
ATTR_SATURDAY_SLOTS = "saturday_slots"
|
||||
ATTR_SUNDAY_SLOTS = "sunday_slots"
|
||||
|
||||
# Service names
|
||||
SERVICE_SET_HOT_WATER_SCHEDULE = "set_hot_water_schedule"
|
||||
SERVICE_SYNC_TIME = "sync_time"
|
||||
|
||||
|
||||
# Schema for a single time slot
|
||||
_SLOT_SCHEMA = vol.Schema(
|
||||
@@ -260,14 +256,14 @@ def async_setup_services(hass: HomeAssistant) -> None:
|
||||
"""Register the BSB-LAN services."""
|
||||
hass.services.async_register(
|
||||
DOMAIN,
|
||||
SERVICE_SET_HOT_WATER_SCHEDULE,
|
||||
"set_hot_water_schedule",
|
||||
set_hot_water_schedule,
|
||||
schema=SERVICE_SET_HOT_WATER_SCHEDULE_SCHEMA,
|
||||
)
|
||||
|
||||
hass.services.async_register(
|
||||
DOMAIN,
|
||||
SERVICE_SYNC_TIME,
|
||||
"sync_time",
|
||||
async_sync_time,
|
||||
schema=SYNC_TIME_SCHEMA,
|
||||
)
|
||||
|
||||
@@ -807,6 +807,7 @@ class CastMediaPlayerEntity(CastDevice, MediaPlayerEntity):
|
||||
# The lovelace app loops media to prevent timing out, don't show that
|
||||
if self.app_id == CAST_APP_ID_HOMEASSISTANT_LOVELACE:
|
||||
return MediaPlayerState.PLAYING
|
||||
|
||||
if (media_status := self._media_status()[0]) is not None:
|
||||
if media_status.player_state == MEDIA_PLAYER_STATE_PLAYING:
|
||||
return MediaPlayerState.PLAYING
|
||||
@@ -817,19 +818,19 @@ class CastMediaPlayerEntity(CastDevice, MediaPlayerEntity):
|
||||
if media_status.player_is_idle:
|
||||
return MediaPlayerState.IDLE
|
||||
|
||||
if self._chromecast is not None and self._chromecast.is_idle:
|
||||
# If library consider us idle, that is our off state
|
||||
# it takes HDMI status into account for cast devices.
|
||||
return MediaPlayerState.OFF
|
||||
|
||||
if self.app_id in APP_IDS_UNRELIABLE_MEDIA_INFO:
|
||||
# Some apps don't report media status, show the player as playing
|
||||
return MediaPlayerState.PLAYING
|
||||
|
||||
if self.app_id is not None:
|
||||
if self.app_id is not None and self.app_id != pychromecast.config.APP_BACKDROP:
|
||||
# We have an active app
|
||||
return MediaPlayerState.IDLE
|
||||
|
||||
if self._chromecast is not None and self._chromecast.is_idle:
|
||||
# If library consider us idle, that is our off state
|
||||
# it takes HDMI status into account for cast devices.
|
||||
return MediaPlayerState.OFF
|
||||
|
||||
return None
|
||||
|
||||
@property
|
||||
|
||||
@@ -329,8 +329,8 @@
|
||||
"nano_nr_3": "Nano 3",
|
||||
"nano_nr_4": "Nano 4",
|
||||
"nano_nr_5": "Nano 5",
|
||||
"off": "Off",
|
||||
"on": "On",
|
||||
"off": "[%key:common::state::off%]",
|
||||
"on": "[%key:common::state::on%]",
|
||||
"summer": "Summer",
|
||||
"winter": "Winter"
|
||||
}
|
||||
@@ -368,8 +368,8 @@
|
||||
"pump_status": {
|
||||
"name": "Pump status",
|
||||
"state": {
|
||||
"off": "Off",
|
||||
"on": "On"
|
||||
"off": "[%key:common::state::off%]",
|
||||
"on": "[%key:common::state::on%]"
|
||||
}
|
||||
},
|
||||
"return_circuit_temperature": {
|
||||
|
||||
def _zone_temperature_lists(device: Appliance) -> tuple[list[str], list[str]]:
    """Return the (heating, cooling) zone temperature entries for *device*.

    ``device.represent(...)`` presumably yields a ``(key, values)`` pair —
    TODO confirm against pydaikin; we only use index ``[1]``. Devices that
    do not expose zone temperature registers yield empty lists.
    """
    try:
        heating = device.represent(DAIKIN_ZONE_TEMP_HEAT)[1]
        cooling = device.represent(DAIKIN_ZONE_TEMP_COOL)[1]
    except (AttributeError, KeyError):
        # Fix: `except AttributeError, KeyError:` is Python 2 syntax and a
        # SyntaxError on Python 3 — multiple exceptions must be a tuple.
        return ([], [])
    return (list(heating or []), list(cooling or []))
|
||||
|
||||
|
||||
@@ -1,22 +0,0 @@
|
||||
"""The Duke Energy integration."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from homeassistant.core import HomeAssistant
|
||||
|
||||
from .coordinator import DukeEnergyConfigEntry, DukeEnergyCoordinator
|
||||
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: DukeEnergyConfigEntry) -> bool:
    """Set up Duke Energy from a config entry."""
    # Refresh eagerly so setup fails fast on bad credentials or
    # connectivity problems instead of succeeding with no data.
    coordinator = DukeEnergyCoordinator(hass, entry)
    await coordinator.async_config_entry_first_refresh()
    entry.runtime_data = coordinator
    return True
|
||||
|
||||
|
||||
async def async_unload_entry(hass: HomeAssistant, entry: DukeEnergyConfigEntry) -> bool:
    """Unload a config entry."""
    # Nothing to tear down here: the coordinator's cleanup is registered
    # via the entry itself, so unloading always succeeds.
    return True
|
||||
@@ -1,67 +0,0 @@
|
||||
"""Config flow for Duke Energy integration."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
from aiodukeenergy import DukeEnergy
|
||||
from aiohttp import ClientError, ClientResponseError
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
|
||||
from homeassistant.const import CONF_EMAIL, CONF_PASSWORD, CONF_USERNAME
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
|
||||
from .const import DOMAIN
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
STEP_USER_DATA_SCHEMA = vol.Schema(
|
||||
{
|
||||
vol.Required(CONF_USERNAME): str,
|
||||
vol.Required(CONF_PASSWORD): str,
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
class DukeEnergyConfigFlow(ConfigFlow, domain=DOMAIN):
    """Handle a config flow for Duke Energy."""

    VERSION = 1

    async def async_step_user(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Handle the initial step.

        Authenticates against the Duke Energy API, derives the unique ID
        from the lowercased internal user ID, and creates the entry titled
        with the login e-mail address.
        """
        errors: dict[str, str] = {}
        if user_input is not None:
            session = async_get_clientsession(self.hass)
            api = DukeEnergy(
                user_input[CONF_USERNAME], user_input[CONF_PASSWORD], session
            )
            try:
                auth = await api.authenticate()
            except ClientResponseError as e:
                # A 404 from the auth endpoint indicates bad credentials;
                # any other HTTP error is treated as a connectivity issue.
                errors["base"] = "invalid_auth" if e.status == 404 else "cannot_connect"
            except (ClientError, TimeoutError):
                # Fix: `except ClientError, TimeoutError:` is Python 2 syntax
                # and a SyntaxError on Python 3 — multiple exception types
                # must be grouped in a tuple.
                errors["base"] = "cannot_connect"
            except Exception:
                _LOGGER.exception("Unexpected exception")
                errors["base"] = "unknown"
            else:
                username = auth["internalUserID"].lower()
                await self.async_set_unique_id(username)
                self._abort_if_unique_id_configured()
                email = auth["loginEmailAddress"].lower()
                data = {
                    CONF_EMAIL: email,
                    CONF_USERNAME: username,
                    CONF_PASSWORD: user_input[CONF_PASSWORD],
                }
                self._async_abort_entries_match(data)
                return self.async_create_entry(title=email, data=data)

        return self.async_show_form(
            step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=errors
        )
|
||||
@@ -1,3 +0,0 @@
|
||||
"""Constants for the Duke Energy integration."""
|
||||
|
||||
DOMAIN = "duke_energy"
|
||||
@@ -1,222 +0,0 @@
|
||||
"""Coordinator to handle Duke Energy connections."""
|
||||
|
||||
from datetime import datetime, timedelta
|
||||
import logging
|
||||
from typing import Any, cast
|
||||
|
||||
from aiodukeenergy import DukeEnergy
|
||||
from aiohttp import ClientError
|
||||
|
||||
from homeassistant.components.recorder import get_instance
|
||||
from homeassistant.components.recorder.models import (
|
||||
StatisticData,
|
||||
StatisticMeanType,
|
||||
StatisticMetaData,
|
||||
)
|
||||
from homeassistant.components.recorder.statistics import (
|
||||
async_add_external_statistics,
|
||||
get_last_statistics,
|
||||
statistics_during_period,
|
||||
)
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import CONF_PASSWORD, CONF_USERNAME, UnitOfEnergy, UnitOfVolume
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator
|
||||
from homeassistant.util import dt as dt_util
|
||||
from homeassistant.util.unit_conversion import EnergyConverter
|
||||
|
||||
from .const import DOMAIN
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
_SUPPORTED_METER_TYPES = ("ELECTRIC",)
|
||||
|
||||
type DukeEnergyConfigEntry = ConfigEntry[DukeEnergyCoordinator]
|
||||
|
||||
|
||||
class DukeEnergyCoordinator(DataUpdateCoordinator[None]):
|
||||
"""Handle inserting statistics."""
|
||||
|
||||
config_entry: DukeEnergyConfigEntry
|
||||
|
||||
def __init__(
|
||||
self, hass: HomeAssistant, config_entry: DukeEnergyConfigEntry
|
||||
) -> None:
|
||||
"""Initialize the data handler."""
|
||||
super().__init__(
|
||||
hass,
|
||||
_LOGGER,
|
||||
config_entry=config_entry,
|
||||
name="Duke Energy",
|
||||
# Data is updated daily on Duke Energy.
|
||||
# Refresh every 12h to be at most 12h behind.
|
||||
update_interval=timedelta(hours=12),
|
||||
)
|
||||
self.api = DukeEnergy(
|
||||
config_entry.data[CONF_USERNAME],
|
||||
config_entry.data[CONF_PASSWORD],
|
||||
async_get_clientsession(hass),
|
||||
)
|
||||
self._statistic_ids: set = set()
|
||||
|
||||
@callback
|
||||
def _dummy_listener() -> None:
|
||||
pass
|
||||
|
||||
# Force the coordinator to periodically update by registering at least one listener.
|
||||
# Duke Energy does not provide forecast data, so all information is historical.
|
||||
# This makes _async_update_data get periodically called so we can insert statistics.
|
||||
self.async_add_listener(_dummy_listener)
|
||||
|
||||
self.config_entry.async_on_unload(self._clear_statistics)
|
||||
|
||||
def _clear_statistics(self) -> None:
|
||||
"""Clear statistics."""
|
||||
get_instance(self.hass).async_clear_statistics(list(self._statistic_ids))
|
||||
|
||||
async def _async_update_data(self) -> None:
|
||||
"""Insert Duke Energy statistics."""
|
||||
meters: dict[str, dict[str, Any]] = await self.api.get_meters()
|
||||
for serial_number, meter in meters.items():
|
||||
if (
|
||||
not isinstance(meter["serviceType"], str)
|
||||
or meter["serviceType"] not in _SUPPORTED_METER_TYPES
|
||||
):
|
||||
_LOGGER.debug(
|
||||
"Skipping unsupported meter type %s", meter["serviceType"]
|
||||
)
|
||||
continue
|
||||
|
||||
id_prefix = f"{meter['serviceType'].lower()}_{serial_number}"
|
||||
consumption_statistic_id = f"{DOMAIN}:{id_prefix}_energy_consumption"
|
||||
self._statistic_ids.add(consumption_statistic_id)
|
||||
_LOGGER.debug(
|
||||
"Updating Statistics for %s",
|
||||
consumption_statistic_id,
|
||||
)
|
||||
|
||||
last_stat = await get_instance(self.hass).async_add_executor_job(
|
||||
get_last_statistics, self.hass, 1, consumption_statistic_id, True, set()
|
||||
)
|
||||
if not last_stat:
|
||||
_LOGGER.debug("Updating statistic for the first time")
|
||||
usage = await self._async_get_energy_usage(meter)
|
||||
consumption_sum = 0.0
|
||||
last_stats_time = None
|
||||
else:
|
||||
usage = await self._async_get_energy_usage(
|
||||
meter,
|
||||
last_stat[consumption_statistic_id][0]["start"],
|
||||
)
|
||||
if not usage:
|
||||
_LOGGER.debug("No recent usage data. Skipping update")
|
||||
continue
|
||||
stats = await get_instance(self.hass).async_add_executor_job(
|
||||
statistics_during_period,
|
||||
self.hass,
|
||||
min(usage.keys()),
|
||||
None,
|
||||
{consumption_statistic_id},
|
||||
"hour",
|
||||
None,
|
||||
{"sum"},
|
||||
)
|
||||
consumption_sum = cast(float, stats[consumption_statistic_id][0]["sum"])
|
||||
last_stats_time = stats[consumption_statistic_id][0]["start"]
|
||||
|
||||
consumption_statistics = []
|
||||
|
||||
for start, data in usage.items():
|
||||
if last_stats_time is not None and start.timestamp() <= last_stats_time:
|
||||
continue
|
||||
consumption_sum += data["energy"]
|
||||
|
||||
consumption_statistics.append(
|
||||
StatisticData(
|
||||
start=start, state=data["energy"], sum=consumption_sum
|
||||
)
|
||||
)
|
||||
|
||||
name_prefix = (
|
||||
f"Duke Energy {meter['serviceType'].capitalize()} {serial_number}"
|
||||
)
|
||||
consumption_metadata = StatisticMetaData(
|
||||
mean_type=StatisticMeanType.NONE,
|
||||
has_sum=True,
|
||||
name=f"{name_prefix} Consumption",
|
||||
source=DOMAIN,
|
||||
statistic_id=consumption_statistic_id,
|
||||
unit_class=EnergyConverter.UNIT_CLASS,
|
||||
unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR
|
||||
if meter["serviceType"] == "ELECTRIC"
|
||||
else UnitOfVolume.CENTUM_CUBIC_FEET,
|
||||
)
|
||||
|
||||
_LOGGER.debug(
|
||||
"Adding %s statistics for %s",
|
||||
len(consumption_statistics),
|
||||
consumption_statistic_id,
|
||||
)
|
||||
async_add_external_statistics(
|
||||
self.hass, consumption_metadata, consumption_statistics
|
||||
)
|
||||
|
||||
async def _async_get_energy_usage(
|
||||
self, meter: dict[str, Any], start_time: float | None = None
|
||||
) -> dict[datetime, dict[str, float | int]]:
|
||||
"""Get energy usage.
|
||||
|
||||
If start_time is None, get usage since account activation (or as far back as possible),
|
||||
otherwise since start_time - 30 days to allow corrections in data.
|
||||
|
||||
Duke Energy provides hourly data all the way back to ~3 years.
|
||||
"""
|
||||
|
||||
# All of Duke Energy Service Areas are currently in America/New_York timezone
|
||||
# May need to re-think this if that ever changes and determine timezone based
|
||||
# on the service address somehow.
|
||||
tz = await dt_util.async_get_time_zone("America/New_York")
|
||||
lookback = timedelta(days=30)
|
||||
one = timedelta(days=1)
|
||||
if start_time is None:
|
||||
# Max 3 years of data
|
||||
start = dt_util.now(tz) - timedelta(days=3 * 365)
|
||||
else:
|
||||
start = datetime.fromtimestamp(start_time, tz=tz) - lookback
|
||||
agreement_date = dt_util.parse_datetime(meter["agreementActiveDate"])
|
||||
if agreement_date is not None:
|
||||
start = max(agreement_date.replace(tzinfo=tz), start)
|
||||
|
||||
start = start.replace(hour=0, minute=0, second=0, microsecond=0)
|
||||
end = dt_util.now(tz).replace(hour=0, minute=0, second=0, microsecond=0) - one
|
||||
_LOGGER.debug("Data lookup range: %s - %s", start, end)
|
||||
|
||||
start_step = max(end - lookback, start)
|
||||
end_step = end
|
||||
usage: dict[datetime, dict[str, float | int]] = {}
|
||||
while True:
|
||||
_LOGGER.debug("Getting hourly usage: %s - %s", start_step, end_step)
|
||||
try:
|
||||
# Get data
|
||||
results = await self.api.get_energy_usage(
|
||||
meter["serialNum"], "HOURLY", "DAY", start_step, end_step
|
||||
)
|
||||
usage = {**results["data"], **usage}
|
||||
|
||||
for missing in results["missing"]:
|
||||
_LOGGER.debug("Missing data: %s", missing)
|
||||
|
||||
# Set next range
|
||||
end_step = start_step - one
|
||||
start_step = max(start_step - lookback, start)
|
||||
|
||||
# Make sure we don't go back too far
|
||||
if end_step < start:
|
||||
break
|
||||
except TimeoutError, ClientError:
|
||||
# ClientError is raised when there is no more data for the range
|
||||
break
|
||||
|
||||
_LOGGER.debug("Got %s meter usage reads", len(usage))
|
||||
return usage
|
||||
@@ -1,11 +0,0 @@
|
||||
{
|
||||
"domain": "duke_energy",
|
||||
"name": "Duke Energy",
|
||||
"codeowners": ["@hunterjm"],
|
||||
"config_flow": true,
|
||||
"dependencies": ["recorder"],
|
||||
"documentation": "https://www.home-assistant.io/integrations/duke_energy",
|
||||
"integration_type": "service",
|
||||
"iot_class": "cloud_polling",
|
||||
"requirements": ["aiodukeenergy==0.3.0"]
|
||||
}
|
||||
@@ -1,20 +0,0 @@
|
||||
{
|
||||
"config": {
|
||||
"abort": {
|
||||
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]"
|
||||
},
|
||||
"error": {
|
||||
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
|
||||
"invalid_auth": "[%key:common::config_flow::error::invalid_auth%]",
|
||||
"unknown": "[%key:common::config_flow::error::unknown%]"
|
||||
},
|
||||
"step": {
|
||||
"user": {
|
||||
"data": {
|
||||
"password": "[%key:common::config_flow::data::password%]",
|
||||
"username": "[%key:common::config_flow::data::username%]"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -33,6 +33,8 @@ DUNEHD_PLAYER_SUPPORT: Final[MediaPlayerEntityFeature] = (
|
||||
| MediaPlayerEntityFeature.PLAY
|
||||
| MediaPlayerEntityFeature.PLAY_MEDIA
|
||||
| MediaPlayerEntityFeature.BROWSE_MEDIA
|
||||
| MediaPlayerEntityFeature.VOLUME_STEP
|
||||
| MediaPlayerEntityFeature.VOLUME_MUTE
|
||||
)
|
||||
|
||||
|
||||
|
||||
@@ -405,8 +405,13 @@ CT_SENSORS = (
|
||||
)
|
||||
for cttype, key in (
|
||||
(CtType.NET_CONSUMPTION, "lifetime_net_consumption"),
|
||||
# Production CT energy_delivered is not used
|
||||
(CtType.PRODUCTION, "production_ct_energy_delivered"),
|
||||
(CtType.STORAGE, "lifetime_battery_discharged"),
|
||||
(CtType.TOTAL_CONSUMPTION, "total_consumption_ct_energy_delivered"),
|
||||
(CtType.BACKFEED, "backfeed_ct_energy_delivered"),
|
||||
(CtType.LOAD, "load_ct_energy_delivered"),
|
||||
(CtType.EVSE, "evse_ct_energy_delivered"),
|
||||
(CtType.PV3P, "pv3p_ct_energy_delivered"),
|
||||
)
|
||||
]
|
||||
+ [
|
||||
@@ -423,8 +428,13 @@ CT_SENSORS = (
|
||||
)
|
||||
for cttype, key in (
|
||||
(CtType.NET_CONSUMPTION, "lifetime_net_production"),
|
||||
# Production CT energy_received is not used
|
||||
(CtType.PRODUCTION, "production_ct_energy_received"),
|
||||
(CtType.STORAGE, "lifetime_battery_charged"),
|
||||
(CtType.TOTAL_CONSUMPTION, "total_consumption_ct_energy_received"),
|
||||
(CtType.BACKFEED, "backfeed_ct_energy_received"),
|
||||
(CtType.LOAD, "load_ct_energy_received"),
|
||||
(CtType.EVSE, "evse_ct_energy_received"),
|
||||
(CtType.PV3P, "pv3p_ct_energy_received"),
|
||||
)
|
||||
]
|
||||
+ [
|
||||
@@ -441,8 +451,13 @@ CT_SENSORS = (
|
||||
)
|
||||
for cttype, key in (
|
||||
(CtType.NET_CONSUMPTION, "net_consumption"),
|
||||
# Production CT active_power is not used
|
||||
(CtType.PRODUCTION, "production_ct_power"),
|
||||
(CtType.STORAGE, "battery_discharge"),
|
||||
(CtType.TOTAL_CONSUMPTION, "total_consumption_ct_power"),
|
||||
(CtType.BACKFEED, "backfeed_ct_power"),
|
||||
(CtType.LOAD, "load_ct_power"),
|
||||
(CtType.EVSE, "evse_ct_power"),
|
||||
(CtType.PV3P, "pv3p_ct_power"),
|
||||
)
|
||||
]
|
||||
+ [
|
||||
@@ -461,6 +476,11 @@ CT_SENSORS = (
|
||||
(CtType.NET_CONSUMPTION, "frequency", "net_ct_frequency"),
|
||||
(CtType.PRODUCTION, "production_ct_frequency", ""),
|
||||
(CtType.STORAGE, "storage_ct_frequency", ""),
|
||||
(CtType.TOTAL_CONSUMPTION, "total_consumption_ct_frequency", ""),
|
||||
(CtType.BACKFEED, "backfeed_ct_frequency", ""),
|
||||
(CtType.LOAD, "load_ct_frequency", ""),
|
||||
(CtType.EVSE, "evse_ct_frequency", ""),
|
||||
(CtType.PV3P, "pv3p_ct_frequency", ""),
|
||||
)
|
||||
]
|
||||
+ [
|
||||
@@ -480,6 +500,11 @@ CT_SENSORS = (
|
||||
(CtType.NET_CONSUMPTION, "voltage", "net_ct_voltage"),
|
||||
(CtType.PRODUCTION, "production_ct_voltage", ""),
|
||||
(CtType.STORAGE, "storage_voltage", "storage_ct_voltage"),
|
||||
(CtType.TOTAL_CONSUMPTION, "total_consumption_ct_voltage", ""),
|
||||
(CtType.BACKFEED, "backfeed_ct_voltage", ""),
|
||||
(CtType.LOAD, "load_ct_voltage", ""),
|
||||
(CtType.EVSE, "evse_ct_voltage", ""),
|
||||
(CtType.PV3P, "pv3p_ct_voltage", ""),
|
||||
)
|
||||
]
|
||||
+ [
|
||||
@@ -499,6 +524,11 @@ CT_SENSORS = (
|
||||
(CtType.NET_CONSUMPTION, "net_ct_current"),
|
||||
(CtType.PRODUCTION, "production_ct_current"),
|
||||
(CtType.STORAGE, "storage_ct_current"),
|
||||
(CtType.TOTAL_CONSUMPTION, "total_consumption_ct_current"),
|
||||
(CtType.BACKFEED, "backfeed_ct_current"),
|
||||
(CtType.LOAD, "load_ct_current"),
|
||||
(CtType.EVSE, "evse_ct_current"),
|
||||
(CtType.PV3P, "pv3p_ct_current"),
|
||||
)
|
||||
]
|
||||
+ [
|
||||
@@ -516,6 +546,11 @@ CT_SENSORS = (
|
||||
(CtType.NET_CONSUMPTION, "net_ct_powerfactor"),
|
||||
(CtType.PRODUCTION, "production_ct_powerfactor"),
|
||||
(CtType.STORAGE, "storage_ct_powerfactor"),
|
||||
(CtType.TOTAL_CONSUMPTION, "total_consumption_ct_powerfactor"),
|
||||
(CtType.BACKFEED, "backfeed_ct_powerfactor"),
|
||||
(CtType.LOAD, "load_ct_powerfactor"),
|
||||
(CtType.EVSE, "evse_ct_powerfactor"),
|
||||
(CtType.PV3P, "pv3p_ct_powerfactor"),
|
||||
)
|
||||
]
|
||||
+ [
|
||||
@@ -537,6 +572,11 @@ CT_SENSORS = (
|
||||
),
|
||||
(CtType.PRODUCTION, "production_ct_metering_status", ""),
|
||||
(CtType.STORAGE, "storage_ct_metering_status", ""),
|
||||
(CtType.TOTAL_CONSUMPTION, "total_consumption_ct_metering_status", ""),
|
||||
(CtType.BACKFEED, "backfeed_ct_metering_status", ""),
|
||||
(CtType.LOAD, "load_ct_metering_status", ""),
|
||||
(CtType.EVSE, "evse_ct_metering_status", ""),
|
||||
(CtType.PV3P, "pv3p_ct_metering_status", ""),
|
||||
)
|
||||
]
|
||||
+ [
|
||||
@@ -557,6 +597,11 @@ CT_SENSORS = (
|
||||
),
|
||||
(CtType.PRODUCTION, "production_ct_status_flags", ""),
|
||||
(CtType.STORAGE, "storage_ct_status_flags", ""),
|
||||
(CtType.TOTAL_CONSUMPTION, "total_consumption_ct_status_flags", ""),
|
||||
(CtType.BACKFEED, "backfeed_ct_status_flags", ""),
|
||||
(CtType.LOAD, "load_ct_status_flags", ""),
|
||||
(CtType.EVSE, "evse_ct_status_flags", ""),
|
||||
(CtType.PV3P, "pv3p_ct_status_flags", ""),
|
||||
)
|
||||
]
|
||||
)
|
||||
|
||||
@@ -160,6 +160,60 @@
|
||||
"available_energy": {
|
||||
"name": "Available battery energy"
|
||||
},
|
||||
"backfeed_ct_current": {
|
||||
"name": "Backfeed CT current"
|
||||
},
|
||||
"backfeed_ct_current_phase": {
|
||||
"name": "Backfeed CT current {phase_name}"
|
||||
},
|
||||
"backfeed_ct_energy_delivered": {
|
||||
"name": "Backfeed CT energy delivered"
|
||||
},
|
||||
"backfeed_ct_energy_delivered_phase": {
|
||||
"name": "Backfeed CT energy delivered {phase_name}"
|
||||
},
|
||||
"backfeed_ct_energy_received": {
|
||||
"name": "Backfeed CT energy received"
|
||||
},
|
||||
"backfeed_ct_energy_received_phase": {
|
||||
"name": "Backfeed CT energy received {phase_name}"
|
||||
},
|
||||
"backfeed_ct_frequency": {
|
||||
"name": "Frequency backfeed CT"
|
||||
},
|
||||
"backfeed_ct_frequency_phase": {
|
||||
"name": "Frequency backfeed CT {phase_name}"
|
||||
},
|
||||
"backfeed_ct_metering_status": {
|
||||
"name": "Metering status backfeed CT"
|
||||
},
|
||||
"backfeed_ct_metering_status_phase": {
|
||||
"name": "Metering status backfeed CT {phase_name}"
|
||||
},
|
||||
"backfeed_ct_power": {
|
||||
"name": "Backfeed CT power"
|
||||
},
|
||||
"backfeed_ct_power_phase": {
|
||||
"name": "Backfeed CT power {phase_name}"
|
||||
},
|
||||
"backfeed_ct_powerfactor": {
|
||||
"name": "Power factor backfeed CT"
|
||||
},
|
||||
"backfeed_ct_powerfactor_phase": {
|
||||
"name": "Power factor backfeed CT {phase_name}"
|
||||
},
|
||||
"backfeed_ct_status_flags": {
|
||||
"name": "Meter status flags active backfeed CT"
|
||||
},
|
||||
"backfeed_ct_status_flags_phase": {
|
||||
"name": "Meter status flags active backfeed CT {phase_name}"
|
||||
},
|
||||
"backfeed_ct_voltage": {
|
||||
"name": "Voltage backfeed CT"
|
||||
},
|
||||
"backfeed_ct_voltage_phase": {
|
||||
"name": "Voltage backfeed CT {phase_name}"
|
||||
},
|
||||
"balanced_net_consumption": {
|
||||
"name": "Balanced net power consumption"
|
||||
},
|
||||
@@ -211,6 +265,60 @@
|
||||
"energy_today": {
|
||||
"name": "[%key:component::enphase_envoy::entity::sensor::daily_production::name%]"
|
||||
},
|
||||
"evse_ct_current": {
|
||||
"name": "EVSE CT current"
|
||||
},
|
||||
"evse_ct_current_phase": {
|
||||
"name": "EVSE CT current {phase_name}"
|
||||
},
|
||||
"evse_ct_energy_delivered": {
|
||||
"name": "EVSE CT energy delivered"
|
||||
},
|
||||
"evse_ct_energy_delivered_phase": {
|
||||
"name": "EVSE CT energy delivered {phase_name}"
|
||||
},
|
||||
"evse_ct_energy_received": {
|
||||
"name": "EVSE CT energy received"
|
||||
},
|
||||
"evse_ct_energy_received_phase": {
|
||||
"name": "EVSE CT energy received {phase_name}"
|
||||
},
|
||||
"evse_ct_frequency": {
|
||||
"name": "Frequency EVSE CT"
|
||||
},
|
||||
"evse_ct_frequency_phase": {
|
||||
"name": "Frequency EVSE CT {phase_name}"
|
||||
},
|
||||
"evse_ct_metering_status": {
|
||||
"name": "Metering status EVSE CT"
|
||||
},
|
||||
"evse_ct_metering_status_phase": {
|
||||
"name": "Metering status EVSE CT {phase_name}"
|
||||
},
|
||||
"evse_ct_power": {
|
||||
"name": "EVSE CT power"
|
||||
},
|
||||
"evse_ct_power_phase": {
|
||||
"name": "EVSE CT power {phase_name}"
|
||||
},
|
||||
"evse_ct_powerfactor": {
|
||||
"name": "Power factor EVSE CT"
|
||||
},
|
||||
"evse_ct_powerfactor_phase": {
|
||||
"name": "Power factor EVSE CT {phase_name}"
|
||||
},
|
||||
"evse_ct_status_flags": {
|
||||
"name": "Meter status flags active EVSE CT"
|
||||
},
|
||||
"evse_ct_status_flags_phase": {
|
||||
"name": "Meter status flags active EVSE CT {phase_name}"
|
||||
},
|
||||
"evse_ct_voltage": {
|
||||
"name": "Voltage EVSE CT"
|
||||
},
|
||||
"evse_ct_voltage_phase": {
|
||||
"name": "Voltage EVSE CT {phase_name}"
|
||||
},
|
||||
"grid_status": {
|
||||
"name": "[%key:component::enphase_envoy::entity::binary_sensor::grid_status::name%]",
|
||||
"state": {
|
||||
@@ -270,6 +378,60 @@
|
||||
"lifetime_production_phase": {
|
||||
"name": "Lifetime energy production {phase_name}"
|
||||
},
|
||||
"load_ct_current": {
|
||||
"name": "Load CT current"
|
||||
},
|
||||
"load_ct_current_phase": {
|
||||
"name": "Load CT current {phase_name}"
|
||||
},
|
||||
"load_ct_energy_delivered": {
|
||||
"name": "Load CT energy delivered"
|
||||
},
|
||||
"load_ct_energy_delivered_phase": {
|
||||
"name": "Load CT energy delivered {phase_name}"
|
||||
},
|
||||
"load_ct_energy_received": {
|
||||
"name": "Load CT energy received"
|
||||
},
|
||||
"load_ct_energy_received_phase": {
|
||||
"name": "Load CT energy received {phase_name}"
|
||||
},
|
||||
"load_ct_frequency": {
|
||||
"name": "Frequency load CT"
|
||||
},
|
||||
"load_ct_frequency_phase": {
|
||||
"name": "Frequency load CT {phase_name}"
|
||||
},
|
||||
"load_ct_metering_status": {
|
||||
"name": "Metering status load CT"
|
||||
},
|
||||
"load_ct_metering_status_phase": {
|
||||
"name": "Metering status load CT {phase_name}"
|
||||
},
|
||||
"load_ct_power": {
|
||||
"name": "Load CT power"
|
||||
},
|
||||
"load_ct_power_phase": {
|
||||
"name": "Load CT power {phase_name}"
|
||||
},
|
||||
"load_ct_powerfactor": {
|
||||
"name": "Power factor load CT"
|
||||
},
|
||||
"load_ct_powerfactor_phase": {
|
||||
"name": "Power factor load CT {phase_name}"
|
||||
},
|
||||
"load_ct_status_flags": {
|
||||
"name": "Meter status flags active load CT"
|
||||
},
|
||||
"load_ct_status_flags_phase": {
|
||||
"name": "Meter status flags active load CT {phase_name}"
|
||||
},
|
||||
"load_ct_voltage": {
|
||||
"name": "Voltage load CT"
|
||||
},
|
||||
"load_ct_voltage_phase": {
|
||||
"name": "Voltage load CT {phase_name}"
|
||||
},
|
||||
"max_capacity": {
|
||||
"name": "Battery capacity"
|
||||
},
|
||||
@@ -331,6 +493,18 @@
|
||||
"production_ct_current_phase": {
|
||||
"name": "Production CT current {phase_name}"
|
||||
},
|
||||
"production_ct_energy_delivered": {
|
||||
"name": "Production CT energy delivered"
|
||||
},
|
||||
"production_ct_energy_delivered_phase": {
|
||||
"name": "Production CT energy delivered {phase_name}"
|
||||
},
|
||||
"production_ct_energy_received": {
|
||||
"name": "Production CT energy received"
|
||||
},
|
||||
"production_ct_energy_received_phase": {
|
||||
"name": "Production CT energy received {phase_name}"
|
||||
},
|
||||
"production_ct_frequency": {
|
||||
"name": "Frequency production CT"
|
||||
},
|
||||
@@ -343,6 +517,12 @@
|
||||
"production_ct_metering_status_phase": {
|
||||
"name": "Metering status production CT {phase_name}"
|
||||
},
|
||||
"production_ct_power": {
|
||||
"name": "Production CT power"
|
||||
},
|
||||
"production_ct_power_phase": {
|
||||
"name": "Production CT power {phase_name}"
|
||||
},
|
||||
"production_ct_powerfactor": {
|
||||
"name": "Power factor production CT"
|
||||
},
|
||||
@@ -361,6 +541,60 @@
|
||||
"production_ct_voltage_phase": {
|
||||
"name": "Voltage production CT {phase_name}"
|
||||
},
|
||||
"pv3p_ct_current": {
|
||||
"name": "PV3P CT current"
|
||||
},
|
||||
"pv3p_ct_current_phase": {
|
||||
"name": "PV3P CT current {phase_name}"
|
||||
},
|
||||
"pv3p_ct_energy_delivered": {
|
||||
"name": "PV3P CT energy delivered"
|
||||
},
|
||||
"pv3p_ct_energy_delivered_phase": {
|
||||
"name": "PV3P CT energy delivered {phase_name}"
|
||||
},
|
||||
"pv3p_ct_energy_received": {
|
||||
"name": "PV3P CT energy received"
|
||||
},
|
||||
"pv3p_ct_energy_received_phase": {
|
||||
"name": "PV3P CT energy received {phase_name}"
|
||||
},
|
||||
"pv3p_ct_frequency": {
|
||||
"name": "Frequency PV3P CT"
|
||||
},
|
||||
"pv3p_ct_frequency_phase": {
|
||||
"name": "Frequency PV3P CT {phase_name}"
|
||||
},
|
||||
"pv3p_ct_metering_status": {
|
||||
"name": "Metering status PV3P CT"
|
||||
},
|
||||
"pv3p_ct_metering_status_phase": {
|
||||
"name": "Metering status PV3P CT {phase_name}"
|
||||
},
|
||||
"pv3p_ct_power": {
|
||||
"name": "PV3P CT power"
|
||||
},
|
||||
"pv3p_ct_power_phase": {
|
||||
"name": "PV3P CT power {phase_name}"
|
||||
},
|
||||
"pv3p_ct_powerfactor": {
|
||||
"name": "Power factor PV3P CT"
|
||||
},
|
||||
"pv3p_ct_powerfactor_phase": {
|
||||
"name": "Power factor PV3P CT {phase_name}"
|
||||
},
|
||||
"pv3p_ct_status_flags": {
|
||||
"name": "Meter status flags active PV3P CT"
|
||||
},
|
||||
"pv3p_ct_status_flags_phase": {
|
||||
"name": "Meter status flags active PV3P CT {phase_name}"
|
||||
},
|
||||
"pv3p_ct_voltage": {
|
||||
"name": "Voltage PV3P CT"
|
||||
},
|
||||
"pv3p_ct_voltage_phase": {
|
||||
"name": "Voltage PV3P CT {phase_name}"
|
||||
},
|
||||
"reserve_energy": {
|
||||
"name": "Reserve battery energy"
|
||||
},
|
||||
@@ -414,6 +648,60 @@
|
||||
},
|
||||
"storage_ct_voltage_phase": {
|
||||
"name": "Voltage storage CT {phase_name}"
|
||||
},
|
||||
"total_consumption_ct_current": {
|
||||
"name": "Total consumption CT current"
|
||||
},
|
||||
"total_consumption_ct_current_phase": {
|
||||
"name": "Total consumption CT current {phase_name}"
|
||||
},
|
||||
"total_consumption_ct_energy_delivered": {
|
||||
"name": "Total consumption CT energy delivered"
|
||||
},
|
||||
"total_consumption_ct_energy_delivered_phase": {
|
||||
"name": "Total consumption CT energy delivered {phase_name}"
|
||||
},
|
||||
"total_consumption_ct_energy_received": {
|
||||
"name": "Total consumption CT energy received"
|
||||
},
|
||||
"total_consumption_ct_energy_received_phase": {
|
||||
"name": "Total consumption CT energy received {phase_name}"
|
||||
},
|
||||
"total_consumption_ct_frequency": {
|
||||
"name": "Frequency total consumption CT"
|
||||
},
|
||||
"total_consumption_ct_frequency_phase": {
|
||||
"name": "Frequency total consumption CT {phase_name}"
|
||||
},
|
||||
"total_consumption_ct_metering_status": {
|
||||
"name": "Metering status total consumption CT"
|
||||
},
|
||||
"total_consumption_ct_metering_status_phase": {
|
||||
"name": "Metering status total consumption CT {phase_name}"
|
||||
},
|
||||
"total_consumption_ct_power": {
|
||||
"name": "Total consumption CT power"
|
||||
},
|
||||
"total_consumption_ct_power_phase": {
|
||||
"name": "Total consumption CT power {phase_name}"
|
||||
},
|
||||
"total_consumption_ct_powerfactor": {
|
||||
"name": "Power factor total consumption CT"
|
||||
},
|
||||
"total_consumption_ct_powerfactor_phase": {
|
||||
"name": "Power factor total consumption CT {phase_name}"
|
||||
},
|
||||
"total_consumption_ct_status_flags": {
|
||||
"name": "Meter status flags active total consumption CT"
|
||||
},
|
||||
"total_consumption_ct_status_flags_phase": {
|
||||
"name": "Meter status flags active total consumption CT {phase_name}"
|
||||
},
|
||||
"total_consumption_ct_voltage": {
|
||||
"name": "Voltage total consumption CT"
|
||||
},
|
||||
"total_consumption_ct_voltage_phase": {
|
||||
"name": "Voltage total consumption CT {phase_name}"
|
||||
}
|
||||
},
|
||||
"switch": {
|
||||
|
||||
@@ -36,12 +36,12 @@ from homeassistant.const import (
|
||||
UnitOfTemperature,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.exceptions import HomeAssistantError, ServiceValidationError
|
||||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
|
||||
from homeassistant.util import dt as dt_util
|
||||
|
||||
from .const import ATTR_DURATION, ATTR_PERIOD, ATTR_SETPOINT, EVOHOME_DATA, EvoService
|
||||
from .const import ATTR_DURATION, ATTR_PERIOD, DOMAIN, EVOHOME_DATA, EvoService
|
||||
from .coordinator import EvoDataUpdateCoordinator
|
||||
from .entity import EvoChild, EvoEntity
|
||||
|
||||
@@ -132,6 +132,24 @@ class EvoClimateEntity(EvoEntity, ClimateEntity):
|
||||
_attr_hvac_modes = [HVACMode.OFF, HVACMode.HEAT]
|
||||
_attr_temperature_unit = UnitOfTemperature.CELSIUS
|
||||
|
||||
async def async_clear_zone_override(self) -> None:
|
||||
"""Clear the zone override; only supported by zones."""
|
||||
raise ServiceValidationError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="zone_only_service",
|
||||
translation_placeholders={"service": EvoService.CLEAR_ZONE_OVERRIDE},
|
||||
)
|
||||
|
||||
async def async_set_zone_override(
|
||||
self, setpoint: float, duration: timedelta | None = None
|
||||
) -> None:
|
||||
"""Set the zone override; only supported by zones."""
|
||||
raise ServiceValidationError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="zone_only_service",
|
||||
translation_placeholders={"service": EvoService.SET_ZONE_OVERRIDE},
|
||||
)
|
||||
|
||||
|
||||
class EvoZone(EvoChild, EvoClimateEntity):
|
||||
"""Base for any evohome-compatible heating zone."""
|
||||
@@ -170,22 +188,22 @@ class EvoZone(EvoChild, EvoClimateEntity):
|
||||
| ClimateEntityFeature.TURN_ON
|
||||
)
|
||||
|
||||
async def async_zone_svc_request(self, service: str, data: dict[str, Any]) -> None:
|
||||
"""Process a service request (setpoint override) for a zone."""
|
||||
if service == EvoService.CLEAR_ZONE_OVERRIDE:
|
||||
await self.coordinator.call_client_api(self._evo_device.reset())
|
||||
return
|
||||
async def async_clear_zone_override(self) -> None:
|
||||
"""Clear the zone's override, if any."""
|
||||
await self.coordinator.call_client_api(self._evo_device.reset())
|
||||
|
||||
# otherwise it is EvoService.SET_ZONE_OVERRIDE
|
||||
temperature = max(min(data[ATTR_SETPOINT], self.max_temp), self.min_temp)
|
||||
async def async_set_zone_override(
|
||||
self, setpoint: float, duration: timedelta | None = None
|
||||
) -> None:
|
||||
"""Set the zone's override (mode/setpoint)."""
|
||||
temperature = max(min(setpoint, self.max_temp), self.min_temp)
|
||||
|
||||
if ATTR_DURATION in data:
|
||||
duration: timedelta = data[ATTR_DURATION]
|
||||
if duration is not None:
|
||||
if duration.total_seconds() == 0:
|
||||
await self._update_schedule()
|
||||
until = self.setpoints.get("next_sp_from")
|
||||
else:
|
||||
until = dt_util.now() + data[ATTR_DURATION]
|
||||
until = dt_util.now() + duration
|
||||
else:
|
||||
until = None # indefinitely
|
||||
|
||||
|
||||
@@ -12,7 +12,7 @@ from homeassistant.core import callback
|
||||
from homeassistant.helpers.dispatcher import async_dispatcher_connect
|
||||
from homeassistant.helpers.update_coordinator import CoordinatorEntity
|
||||
|
||||
from .const import DOMAIN, EvoService
|
||||
from .const import DOMAIN
|
||||
from .coordinator import EvoDataUpdateCoordinator
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
@@ -47,22 +47,12 @@ class EvoEntity(CoordinatorEntity[EvoDataUpdateCoordinator]):
|
||||
raise NotImplementedError
|
||||
if payload["unique_id"] != self._attr_unique_id:
|
||||
return
|
||||
if payload["service"] in (
|
||||
EvoService.SET_ZONE_OVERRIDE,
|
||||
EvoService.CLEAR_ZONE_OVERRIDE,
|
||||
):
|
||||
await self.async_zone_svc_request(payload["service"], payload["data"])
|
||||
return
|
||||
await self.async_tcs_svc_request(payload["service"], payload["data"])
|
||||
|
||||
async def async_tcs_svc_request(self, service: str, data: dict[str, Any]) -> None:
|
||||
"""Process a service request (system mode) for a controller."""
|
||||
raise NotImplementedError
|
||||
|
||||
async def async_zone_svc_request(self, service: str, data: dict[str, Any]) -> None:
|
||||
"""Process a service request (setpoint override) for a zone."""
|
||||
raise NotImplementedError
|
||||
|
||||
@property
|
||||
def extra_state_attributes(self) -> Mapping[str, Any]:
|
||||
"""Return the evohome-specific state attributes."""
|
||||
|
||||
@@ -3,7 +3,7 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import timedelta
|
||||
from typing import Final
|
||||
from typing import Any, Final
|
||||
|
||||
from evohomeasync2.const import SZ_CAN_BE_TEMPORARY, SZ_SYSTEM_MODE, SZ_TIMING_MODE
|
||||
from evohomeasync2.schemas.const import (
|
||||
@@ -13,9 +13,10 @@ from evohomeasync2.schemas.const import (
|
||||
)
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.const import ATTR_ENTITY_ID, ATTR_MODE
|
||||
from homeassistant.components.climate import DOMAIN as CLIMATE_DOMAIN
|
||||
from homeassistant.const import ATTR_MODE
|
||||
from homeassistant.core import HomeAssistant, ServiceCall, callback
|
||||
from homeassistant.helpers import config_validation as cv, entity_registry as er
|
||||
from homeassistant.helpers import config_validation as cv, service
|
||||
from homeassistant.helpers.dispatcher import async_dispatcher_send
|
||||
from homeassistant.helpers.service import verify_domain_control
|
||||
|
||||
@@ -25,21 +26,38 @@ from .coordinator import EvoDataUpdateCoordinator
|
||||
# system mode schemas are built dynamically when the services are registered
|
||||
# because supported modes can vary for edge-case systems
|
||||
|
||||
CLEAR_ZONE_OVERRIDE_SCHEMA: Final = vol.Schema(
|
||||
{vol.Required(ATTR_ENTITY_ID): cv.entity_id}
|
||||
)
|
||||
SET_ZONE_OVERRIDE_SCHEMA: Final = vol.Schema(
|
||||
{
|
||||
vol.Required(ATTR_ENTITY_ID): cv.entity_id,
|
||||
vol.Required(ATTR_SETPOINT): vol.All(
|
||||
vol.Coerce(float), vol.Range(min=4.0, max=35.0)
|
||||
),
|
||||
vol.Optional(ATTR_DURATION): vol.All(
|
||||
cv.time_period,
|
||||
vol.Range(min=timedelta(days=0), max=timedelta(days=1)),
|
||||
),
|
||||
}
|
||||
)
|
||||
# Zone service schemas (registered as entity services)
|
||||
CLEAR_ZONE_OVERRIDE_SCHEMA: Final[dict[str | vol.Marker, Any]] = {}
|
||||
SET_ZONE_OVERRIDE_SCHEMA: Final[dict[str | vol.Marker, Any]] = {
|
||||
vol.Required(ATTR_SETPOINT): vol.All(
|
||||
vol.Coerce(float), vol.Range(min=4.0, max=35.0)
|
||||
),
|
||||
vol.Optional(ATTR_DURATION): vol.All(
|
||||
cv.time_period,
|
||||
vol.Range(min=timedelta(days=0), max=timedelta(days=1)),
|
||||
),
|
||||
}
|
||||
|
||||
|
||||
def _register_zone_entity_services(hass: HomeAssistant) -> None:
|
||||
"""Register entity-level services for zones."""
|
||||
|
||||
service.async_register_platform_entity_service(
|
||||
hass,
|
||||
DOMAIN,
|
||||
EvoService.CLEAR_ZONE_OVERRIDE,
|
||||
entity_domain=CLIMATE_DOMAIN,
|
||||
schema=CLEAR_ZONE_OVERRIDE_SCHEMA,
|
||||
func="async_clear_zone_override",
|
||||
)
|
||||
service.async_register_platform_entity_service(
|
||||
hass,
|
||||
DOMAIN,
|
||||
EvoService.SET_ZONE_OVERRIDE,
|
||||
entity_domain=CLIMATE_DOMAIN,
|
||||
schema=SET_ZONE_OVERRIDE_SCHEMA,
|
||||
func="async_set_zone_override",
|
||||
)
|
||||
|
||||
|
||||
@callback
|
||||
@@ -51,8 +69,6 @@ def setup_service_functions(
|
||||
Not all Honeywell TCC-compatible systems support all operating modes. In addition,
|
||||
each mode will require any of four distinct service schemas. This has to be
|
||||
enumerated before registering the appropriate handlers.
|
||||
|
||||
It appears that all TCC-compatible systems support the same three zones modes.
|
||||
"""
|
||||
|
||||
@verify_domain_control(DOMAIN)
|
||||
@@ -72,28 +88,6 @@ def setup_service_functions(
|
||||
}
|
||||
async_dispatcher_send(hass, DOMAIN, payload)
|
||||
|
||||
@verify_domain_control(DOMAIN)
|
||||
async def set_zone_override(call: ServiceCall) -> None:
|
||||
"""Set the zone override (setpoint)."""
|
||||
entity_id = call.data[ATTR_ENTITY_ID]
|
||||
|
||||
registry = er.async_get(hass)
|
||||
registry_entry = registry.async_get(entity_id)
|
||||
|
||||
if registry_entry is None or registry_entry.platform != DOMAIN:
|
||||
raise ValueError(f"'{entity_id}' is not a known {DOMAIN} entity")
|
||||
|
||||
if registry_entry.domain != "climate":
|
||||
raise ValueError(f"'{entity_id}' is not an {DOMAIN} controller/zone")
|
||||
|
||||
payload = {
|
||||
"unique_id": registry_entry.unique_id,
|
||||
"service": call.service,
|
||||
"data": call.data,
|
||||
}
|
||||
|
||||
async_dispatcher_send(hass, DOMAIN, payload)
|
||||
|
||||
assert coordinator.tcs is not None # mypy
|
||||
|
||||
hass.services.async_register(DOMAIN, EvoService.REFRESH_SYSTEM, force_refresh)
|
||||
@@ -156,16 +150,4 @@ def setup_service_functions(
|
||||
schema=vol.Schema(vol.Any(*system_mode_schemas)),
|
||||
)
|
||||
|
||||
# The zone modes are consistent across all systems and use the same schema
|
||||
hass.services.async_register(
|
||||
DOMAIN,
|
||||
EvoService.CLEAR_ZONE_OVERRIDE,
|
||||
set_zone_override,
|
||||
schema=CLEAR_ZONE_OVERRIDE_SCHEMA,
|
||||
)
|
||||
hass.services.async_register(
|
||||
DOMAIN,
|
||||
EvoService.SET_ZONE_OVERRIDE,
|
||||
set_zone_override,
|
||||
schema=SET_ZONE_OVERRIDE_SCHEMA,
|
||||
)
|
||||
_register_zone_entity_services(hass)
|
||||
|
||||
@@ -28,14 +28,11 @@ reset_system:
|
||||
refresh_system:
|
||||
|
||||
set_zone_override:
|
||||
target:
|
||||
entity:
|
||||
integration: evohome
|
||||
domain: climate
|
||||
fields:
|
||||
entity_id:
|
||||
required: true
|
||||
example: climate.bathroom
|
||||
selector:
|
||||
entity:
|
||||
integration: evohome
|
||||
domain: climate
|
||||
setpoint:
|
||||
required: true
|
||||
selector:
|
||||
@@ -49,10 +46,7 @@ set_zone_override:
|
||||
object:
|
||||
|
||||
clear_zone_override:
|
||||
fields:
|
||||
entity_id:
|
||||
required: true
|
||||
selector:
|
||||
entity:
|
||||
integration: evohome
|
||||
domain: climate
|
||||
target:
|
||||
entity:
|
||||
integration: evohome
|
||||
domain: climate
|
||||
|
||||
@@ -1,13 +1,12 @@
|
||||
{
|
||||
"exceptions": {
|
||||
"zone_only_service": {
|
||||
"message": "Only zones support the `{service}` action"
|
||||
}
|
||||
},
|
||||
"services": {
|
||||
"clear_zone_override": {
|
||||
"description": "Sets a zone to follow its schedule.",
|
||||
"fields": {
|
||||
"entity_id": {
|
||||
"description": "[%key:component::evohome::services::set_zone_override::fields::entity_id::description%]",
|
||||
"name": "[%key:component::evohome::services::set_zone_override::fields::entity_id::name%]"
|
||||
}
|
||||
},
|
||||
"name": "Clear zone override"
|
||||
},
|
||||
"refresh_system": {
|
||||
@@ -43,10 +42,6 @@
|
||||
"description": "The zone will revert to its schedule after this time. If 0 the change is until the next scheduled setpoint.",
|
||||
"name": "Duration"
|
||||
},
|
||||
"entity_id": {
|
||||
"description": "The entity ID of the Evohome zone.",
|
||||
"name": "Entity"
|
||||
},
|
||||
"setpoint": {
|
||||
"description": "The temperature to be used instead of the scheduled setpoint.",
|
||||
"name": "Setpoint"
|
||||
|
||||
@@ -21,5 +21,5 @@
|
||||
"integration_type": "system",
|
||||
"preview_features": { "winter_mode": {} },
|
||||
"quality_scale": "internal",
|
||||
"requirements": ["home-assistant-frontend==20260225.0"]
|
||||
"requirements": ["home-assistant-frontend==20260226.0"]
|
||||
}
|
||||
|
||||
@@ -45,6 +45,10 @@ async def async_user_store(hass: HomeAssistant, user_id: str) -> UserStore:
|
||||
except BaseException as ex:
|
||||
del stores[user_id]
|
||||
future.set_exception(ex)
|
||||
# Ensure the future is marked as retrieved
|
||||
# since if there is no concurrent call it
|
||||
# will otherwise never be retrieved.
|
||||
future.exception()
|
||||
raise
|
||||
future.set_result(store)
|
||||
|
||||
|
||||
@@ -6,6 +6,12 @@
|
||||
}
|
||||
},
|
||||
"number": {
|
||||
"audio_unmute": {
|
||||
"default": "mdi:volume-high"
|
||||
},
|
||||
"earc_unmute": {
|
||||
"default": "mdi:volume-high"
|
||||
},
|
||||
"oled_fade": {
|
||||
"default": "mdi:cellphone-information"
|
||||
},
|
||||
|
||||
@@ -31,6 +31,32 @@ class HDFuryNumberEntityDescription(NumberEntityDescription):
|
||||
|
||||
|
||||
NUMBERS: tuple[HDFuryNumberEntityDescription, ...] = (
|
||||
HDFuryNumberEntityDescription(
|
||||
key="unmutecnt",
|
||||
translation_key="audio_unmute",
|
||||
entity_registry_enabled_default=False,
|
||||
mode=NumberMode.BOX,
|
||||
native_min_value=50,
|
||||
native_max_value=1000,
|
||||
native_step=1,
|
||||
device_class=NumberDeviceClass.DURATION,
|
||||
native_unit_of_measurement=UnitOfTime.MILLISECONDS,
|
||||
entity_category=EntityCategory.CONFIG,
|
||||
set_value_fn=lambda client, value: client.set_audio_unmute(value),
|
||||
),
|
||||
HDFuryNumberEntityDescription(
|
||||
key="earcunmutecnt",
|
||||
translation_key="earc_unmute",
|
||||
entity_registry_enabled_default=False,
|
||||
mode=NumberMode.BOX,
|
||||
native_min_value=0,
|
||||
native_max_value=1000,
|
||||
native_step=1,
|
||||
device_class=NumberDeviceClass.DURATION,
|
||||
native_unit_of_measurement=UnitOfTime.MILLISECONDS,
|
||||
entity_category=EntityCategory.CONFIG,
|
||||
set_value_fn=lambda client, value: client.set_earc_unmute(value),
|
||||
),
|
||||
HDFuryNumberEntityDescription(
|
||||
key="oledfade",
|
||||
translation_key="oled_fade",
|
||||
|
||||
@@ -41,6 +41,12 @@
|
||||
}
|
||||
},
|
||||
"number": {
|
||||
"audio_unmute": {
|
||||
"name": "Unmute delay"
|
||||
},
|
||||
"earc_unmute": {
|
||||
"name": "eARC unmute delay"
|
||||
},
|
||||
"oled_fade": {
|
||||
"name": "OLED fade timer"
|
||||
},
|
||||
|
||||
@@ -57,8 +57,8 @@
|
||||
"battery_charge_discharge_state": {
|
||||
"name": "Battery charge/discharge state",
|
||||
"state": {
|
||||
"charging": "Charging",
|
||||
"discharging": "Discharging",
|
||||
"charging": "[%key:common::state::charging%]",
|
||||
"discharging": "[%key:common::state::discharging%]",
|
||||
"static": "Static"
|
||||
}
|
||||
},
|
||||
|
||||
153
homeassistant/components/infrared/__init__.py
Normal file
153
homeassistant/components/infrared/__init__.py
Normal file
@@ -0,0 +1,153 @@
|
||||
"""Provides functionality to interact with infrared devices."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from abc import abstractmethod
|
||||
from datetime import timedelta
|
||||
import logging
|
||||
from typing import final
|
||||
|
||||
from infrared_protocols import Command as InfraredCommand
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import STATE_UNAVAILABLE
|
||||
from homeassistant.core import Context, HomeAssistant, callback
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers import config_validation as cv, entity_registry as er
|
||||
from homeassistant.helpers.entity import EntityDescription
|
||||
from homeassistant.helpers.entity_component import EntityComponent
|
||||
from homeassistant.helpers.restore_state import RestoreEntity
|
||||
from homeassistant.helpers.typing import ConfigType
|
||||
from homeassistant.util import dt as dt_util
|
||||
from homeassistant.util.hass_dict import HassKey
|
||||
|
||||
from .const import DOMAIN
|
||||
|
||||
__all__ = [
|
||||
"DOMAIN",
|
||||
"InfraredEntity",
|
||||
"InfraredEntityDescription",
|
||||
"async_get_emitters",
|
||||
"async_send_command",
|
||||
]
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
DATA_COMPONENT: HassKey[EntityComponent[InfraredEntity]] = HassKey(DOMAIN)
|
||||
ENTITY_ID_FORMAT = DOMAIN + ".{}"
|
||||
PLATFORM_SCHEMA = cv.PLATFORM_SCHEMA
|
||||
PLATFORM_SCHEMA_BASE = cv.PLATFORM_SCHEMA_BASE
|
||||
SCAN_INTERVAL = timedelta(seconds=30)
|
||||
|
||||
|
||||
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
"""Set up the infrared domain."""
|
||||
component = hass.data[DATA_COMPONENT] = EntityComponent[InfraredEntity](
|
||||
_LOGGER, DOMAIN, hass, SCAN_INTERVAL
|
||||
)
|
||||
await component.async_setup(config)
|
||||
|
||||
return True
|
||||
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
"""Set up a config entry."""
|
||||
return await hass.data[DATA_COMPONENT].async_setup_entry(entry)
|
||||
|
||||
|
||||
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
"""Unload a config entry."""
|
||||
return await hass.data[DATA_COMPONENT].async_unload_entry(entry)
|
||||
|
||||
|
||||
@callback
|
||||
def async_get_emitters(hass: HomeAssistant) -> list[InfraredEntity]:
|
||||
"""Get all infrared emitters."""
|
||||
component = hass.data.get(DATA_COMPONENT)
|
||||
if component is None:
|
||||
return []
|
||||
|
||||
return list(component.entities)
|
||||
|
||||
|
||||
async def async_send_command(
|
||||
hass: HomeAssistant,
|
||||
entity_id_or_uuid: str,
|
||||
command: InfraredCommand,
|
||||
context: Context | None = None,
|
||||
) -> None:
|
||||
"""Send an IR command to the specified infrared entity.
|
||||
|
||||
Raises:
|
||||
HomeAssistantError: If the infrared entity is not found.
|
||||
"""
|
||||
component = hass.data.get(DATA_COMPONENT)
|
||||
if component is None:
|
||||
raise HomeAssistantError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="component_not_loaded",
|
||||
)
|
||||
|
||||
ent_reg = er.async_get(hass)
|
||||
entity_id = er.async_validate_entity_id(ent_reg, entity_id_or_uuid)
|
||||
entity = component.get_entity(entity_id)
|
||||
if entity is None:
|
||||
raise HomeAssistantError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="entity_not_found",
|
||||
translation_placeholders={"entity_id": entity_id},
|
||||
)
|
||||
|
||||
if context is not None:
|
||||
entity.async_set_context(context)
|
||||
|
||||
await entity.async_send_command_internal(command)
|
||||
|
||||
|
||||
class InfraredEntityDescription(EntityDescription, frozen_or_thawed=True):
|
||||
"""Describes infrared entities."""
|
||||
|
||||
|
||||
class InfraredEntity(RestoreEntity):
|
||||
"""Base class for infrared transmitter entities."""
|
||||
|
||||
entity_description: InfraredEntityDescription
|
||||
_attr_should_poll = False
|
||||
_attr_state: None = None
|
||||
|
||||
__last_command_sent: str | None = None
|
||||
|
||||
@property
|
||||
@final
|
||||
def state(self) -> str | None:
|
||||
"""Return the entity state."""
|
||||
return self.__last_command_sent
|
||||
|
||||
@final
|
||||
async def async_send_command_internal(self, command: InfraredCommand) -> None:
|
||||
"""Send an IR command and update state.
|
||||
|
||||
Should not be overridden, handles setting last sent timestamp.
|
||||
"""
|
||||
await self.async_send_command(command)
|
||||
self.__last_command_sent = dt_util.utcnow().isoformat(timespec="milliseconds")
|
||||
self.async_write_ha_state()
|
||||
|
||||
@final
|
||||
async def async_internal_added_to_hass(self) -> None:
|
||||
"""Call when the infrared entity is added to hass."""
|
||||
await super().async_internal_added_to_hass()
|
||||
state = await self.async_get_last_state()
|
||||
if state is not None and state.state not in (STATE_UNAVAILABLE, None):
|
||||
self.__last_command_sent = state.state
|
||||
|
||||
@abstractmethod
|
||||
async def async_send_command(self, command: InfraredCommand) -> None:
|
||||
"""Send an IR command.
|
||||
|
||||
Args:
|
||||
command: The IR command to send.
|
||||
|
||||
Raises:
|
||||
HomeAssistantError: If transmission fails.
|
||||
"""
|
||||
5
homeassistant/components/infrared/const.py
Normal file
5
homeassistant/components/infrared/const.py
Normal file
@@ -0,0 +1,5 @@
|
||||
"""Constants for the Infrared integration."""
|
||||
|
||||
from typing import Final
|
||||
|
||||
DOMAIN: Final = "infrared"
|
||||
7
homeassistant/components/infrared/icons.json
Normal file
7
homeassistant/components/infrared/icons.json
Normal file
@@ -0,0 +1,7 @@
|
||||
{
|
||||
"entity_component": {
|
||||
"_": {
|
||||
"default": "mdi:led-on"
|
||||
}
|
||||
}
|
||||
}
|
||||
9
homeassistant/components/infrared/manifest.json
Normal file
9
homeassistant/components/infrared/manifest.json
Normal file
@@ -0,0 +1,9 @@
|
||||
{
|
||||
"domain": "infrared",
|
||||
"name": "Infrared",
|
||||
"codeowners": ["@home-assistant/core"],
|
||||
"documentation": "https://www.home-assistant.io/integrations/infrared",
|
||||
"integration_type": "entity",
|
||||
"quality_scale": "internal",
|
||||
"requirements": ["infrared-protocols==1.0.0"]
|
||||
}
|
||||
10
homeassistant/components/infrared/strings.json
Normal file
10
homeassistant/components/infrared/strings.json
Normal file
@@ -0,0 +1,10 @@
|
||||
{
|
||||
"exceptions": {
|
||||
"component_not_loaded": {
|
||||
"message": "Infrared component not loaded"
|
||||
},
|
||||
"entity_not_found": {
|
||||
"message": "Infrared entity `{entity_id}` not found"
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -56,7 +56,9 @@ from .const import DATA_BACKUP_AGENT_LISTENERS, DOMAIN
|
||||
|
||||
COMPONENTS_WITH_DEMO_PLATFORM = [
|
||||
Platform.BUTTON,
|
||||
Platform.FAN,
|
||||
Platform.IMAGE,
|
||||
Platform.INFRARED,
|
||||
Platform.LAWN_MOWER,
|
||||
Platform.LOCK,
|
||||
Platform.NOTIFY,
|
||||
@@ -131,6 +133,9 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
# Notify backup listeners
|
||||
hass.async_create_task(_notify_backup_listeners(hass), eager_start=False)
|
||||
|
||||
# Reload config entry when subentries are added/removed/updated
|
||||
entry.async_on_unload(entry.add_update_listener(_async_update_listener))
|
||||
|
||||
# Subscribe to labs feature updates for kitchen_sink preview repair
|
||||
entry.async_on_unload(
|
||||
async_subscribe_preview_feature(
|
||||
@@ -147,6 +152,11 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
return True
|
||||
|
||||
|
||||
async def _async_update_listener(hass: HomeAssistant, entry: ConfigEntry) -> None:
|
||||
"""Reload config entry on update (e.g. subentry added/removed)."""
|
||||
await hass.config_entries.async_reload(entry.entry_id)
|
||||
|
||||
|
||||
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
"""Unload config entry."""
|
||||
# Notify backup listeners
|
||||
|
||||
@@ -8,18 +8,23 @@ from typing import Any
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant import data_entry_flow
|
||||
from homeassistant.components.infrared import (
|
||||
DOMAIN as INFRARED_DOMAIN,
|
||||
async_get_emitters,
|
||||
)
|
||||
from homeassistant.config_entries import (
|
||||
ConfigEntry,
|
||||
ConfigFlow,
|
||||
ConfigFlowResult,
|
||||
ConfigSubentryFlow,
|
||||
OptionsFlowWithReload,
|
||||
OptionsFlow,
|
||||
SubentryFlowResult,
|
||||
)
|
||||
from homeassistant.core import callback
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.helpers.selector import EntitySelector, EntitySelectorConfig
|
||||
|
||||
from . import DOMAIN
|
||||
from .const import CONF_INFRARED_ENTITY_ID, DOMAIN
|
||||
|
||||
CONF_BOOLEAN = "bool"
|
||||
CONF_INT = "int"
|
||||
@@ -44,7 +49,10 @@ class KitchenSinkConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
cls, config_entry: ConfigEntry
|
||||
) -> dict[str, type[ConfigSubentryFlow]]:
|
||||
"""Return subentries supported by this handler."""
|
||||
return {"entity": SubentryFlowHandler}
|
||||
return {
|
||||
"entity": SubentryFlowHandler,
|
||||
"infrared_fan": InfraredFanSubentryFlowHandler,
|
||||
}
|
||||
|
||||
async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult:
|
||||
"""Set the config entry up from yaml."""
|
||||
@@ -65,7 +73,7 @@ class KitchenSinkConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
return self.async_abort(reason="reauth_successful")
|
||||
|
||||
|
||||
class OptionsFlowHandler(OptionsFlowWithReload):
|
||||
class OptionsFlowHandler(OptionsFlow):
|
||||
"""Handle options."""
|
||||
|
||||
async def async_step_init(
|
||||
@@ -146,7 +154,7 @@ class SubentryFlowHandler(ConfigSubentryFlow):
|
||||
"""Reconfigure a sensor."""
|
||||
if user_input is not None:
|
||||
title = user_input.pop("name")
|
||||
return self.async_update_reload_and_abort(
|
||||
return self.async_update_and_abort(
|
||||
self._get_entry(),
|
||||
self._get_reconfigure_subentry(),
|
||||
data=user_input,
|
||||
@@ -162,3 +170,35 @@ class SubentryFlowHandler(ConfigSubentryFlow):
|
||||
}
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
class InfraredFanSubentryFlowHandler(ConfigSubentryFlow):
|
||||
"""Handle infrared fan subentry flow."""
|
||||
|
||||
async def async_step_user(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> SubentryFlowResult:
|
||||
"""User flow to add an infrared fan."""
|
||||
|
||||
entities = async_get_emitters(self.hass)
|
||||
if not entities:
|
||||
return self.async_abort(reason="no_emitters")
|
||||
|
||||
if user_input is not None:
|
||||
title = user_input.pop("name")
|
||||
return self.async_create_entry(data=user_input, title=title)
|
||||
|
||||
return self.async_show_form(
|
||||
step_id="user",
|
||||
data_schema=vol.Schema(
|
||||
{
|
||||
vol.Required("name"): str,
|
||||
vol.Required(CONF_INFRARED_ENTITY_ID): EntitySelector(
|
||||
EntitySelectorConfig(
|
||||
domain=INFRARED_DOMAIN,
|
||||
include_entities=[entity.entity_id for entity in entities],
|
||||
)
|
||||
),
|
||||
}
|
||||
),
|
||||
)
|
||||
|
||||
@@ -7,6 +7,7 @@ from collections.abc import Callable
|
||||
from homeassistant.util.hass_dict import HassKey
|
||||
|
||||
DOMAIN = "kitchen_sink"
|
||||
CONF_INFRARED_ENTITY_ID = "infrared_entity_id"
|
||||
DATA_BACKUP_AGENT_LISTENERS: HassKey[list[Callable[[], None]]] = HassKey(
|
||||
f"{DOMAIN}.backup_agent_listeners"
|
||||
)
|
||||
|
||||
150
homeassistant/components/kitchen_sink/fan.py
Normal file
150
homeassistant/components/kitchen_sink/fan.py
Normal file
@@ -0,0 +1,150 @@
|
||||
"""Demo platform that offers a fake infrared fan entity."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Any
|
||||
|
||||
import infrared_protocols
|
||||
|
||||
from homeassistant.components.fan import FanEntity, FanEntityFeature
|
||||
from homeassistant.components.infrared import async_send_command
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import STATE_UNAVAILABLE
|
||||
from homeassistant.core import Event, EventStateChangedData, HomeAssistant, callback
|
||||
from homeassistant.helpers.device_registry import DeviceInfo
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
from homeassistant.helpers.event import async_track_state_change_event
|
||||
|
||||
from .const import CONF_INFRARED_ENTITY_ID, DOMAIN
|
||||
|
||||
PARALLEL_UPDATES = 0
|
||||
|
||||
DUMMY_FAN_ADDRESS = 0x1234
|
||||
DUMMY_CMD_POWER_ON = 0x01
|
||||
DUMMY_CMD_POWER_OFF = 0x02
|
||||
DUMMY_CMD_SPEED_LOW = 0x03
|
||||
DUMMY_CMD_SPEED_MEDIUM = 0x04
|
||||
DUMMY_CMD_SPEED_HIGH = 0x05
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
config_entry: ConfigEntry,
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up the demo infrared fan platform."""
|
||||
for subentry_id, subentry in config_entry.subentries.items():
|
||||
if subentry.subentry_type != "infrared_fan":
|
||||
continue
|
||||
async_add_entities(
|
||||
[
|
||||
DemoInfraredFan(
|
||||
subentry_id=subentry_id,
|
||||
device_name=subentry.title,
|
||||
infrared_entity_id=subentry.data[CONF_INFRARED_ENTITY_ID],
|
||||
)
|
||||
],
|
||||
config_subentry_id=subentry_id,
|
||||
)
|
||||
|
||||
|
||||
class DemoInfraredFan(FanEntity):
|
||||
"""Representation of a demo infrared fan entity."""
|
||||
|
||||
_attr_has_entity_name = True
|
||||
_attr_name = None
|
||||
_attr_should_poll = False
|
||||
_attr_assumed_state = True
|
||||
_attr_speed_count = 3
|
||||
_attr_supported_features = (
|
||||
FanEntityFeature.SET_SPEED
|
||||
| FanEntityFeature.TURN_OFF
|
||||
| FanEntityFeature.TURN_ON
|
||||
)
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
subentry_id: str,
|
||||
device_name: str,
|
||||
infrared_entity_id: str,
|
||||
) -> None:
|
||||
"""Initialize the demo infrared fan entity."""
|
||||
self._infrared_entity_id = infrared_entity_id
|
||||
self._attr_unique_id = subentry_id
|
||||
self._attr_device_info = DeviceInfo(
|
||||
identifiers={(DOMAIN, subentry_id)},
|
||||
name=device_name,
|
||||
)
|
||||
self._attr_percentage = 0
|
||||
|
||||
async def async_added_to_hass(self) -> None:
|
||||
"""Subscribe to infrared entity state changes."""
|
||||
await super().async_added_to_hass()
|
||||
|
||||
@callback
|
||||
def _async_ir_state_changed(event: Event[EventStateChangedData]) -> None:
|
||||
"""Handle infrared entity state changes."""
|
||||
new_state = event.data["new_state"]
|
||||
self._attr_available = (
|
||||
new_state is not None and new_state.state != STATE_UNAVAILABLE
|
||||
)
|
||||
self.async_write_ha_state()
|
||||
|
||||
self.async_on_remove(
|
||||
async_track_state_change_event(
|
||||
self.hass, [self._infrared_entity_id], _async_ir_state_changed
|
||||
)
|
||||
)
|
||||
|
||||
# Set initial availability based on current infrared entity state
|
||||
ir_state = self.hass.states.get(self._infrared_entity_id)
|
||||
self._attr_available = (
|
||||
ir_state is not None and ir_state.state != STATE_UNAVAILABLE
|
||||
)
|
||||
|
||||
async def _send_command(self, command_code: int) -> None:
|
||||
"""Send an IR command using the NEC protocol."""
|
||||
command = infrared_protocols.NECCommand(
|
||||
address=DUMMY_FAN_ADDRESS,
|
||||
command=command_code,
|
||||
modulation=38000,
|
||||
)
|
||||
await async_send_command(
|
||||
self.hass, self._infrared_entity_id, command, context=self._context
|
||||
)
|
||||
|
||||
async def async_turn_on(
|
||||
self,
|
||||
percentage: int | None = None,
|
||||
preset_mode: str | None = None,
|
||||
**kwargs: Any,
|
||||
) -> None:
|
||||
"""Turn on the fan."""
|
||||
if percentage is not None:
|
||||
await self.async_set_percentage(percentage)
|
||||
return
|
||||
await self._send_command(DUMMY_CMD_POWER_ON)
|
||||
self._attr_percentage = 33
|
||||
self.async_write_ha_state()
|
||||
|
||||
async def async_turn_off(self, **kwargs: Any) -> None:
|
||||
"""Turn off the fan."""
|
||||
await self._send_command(DUMMY_CMD_POWER_OFF)
|
||||
self._attr_percentage = 0
|
||||
self.async_write_ha_state()
|
||||
|
||||
async def async_set_percentage(self, percentage: int) -> None:
|
||||
"""Set the speed percentage of the fan."""
|
||||
if percentage == 0:
|
||||
await self.async_turn_off()
|
||||
return
|
||||
|
||||
if percentage <= 33:
|
||||
await self._send_command(DUMMY_CMD_SPEED_LOW)
|
||||
elif percentage <= 66:
|
||||
await self._send_command(DUMMY_CMD_SPEED_MEDIUM)
|
||||
else:
|
||||
await self._send_command(DUMMY_CMD_SPEED_HIGH)
|
||||
|
||||
self._attr_percentage = percentage
|
||||
self.async_write_ha_state()
|
||||
65
homeassistant/components/kitchen_sink/infrared.py
Normal file
65
homeassistant/components/kitchen_sink/infrared.py
Normal file
@@ -0,0 +1,65 @@
|
||||
"""Demo platform that offers a fake infrared entity."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import infrared_protocols
|
||||
|
||||
from homeassistant.components import persistent_notification
|
||||
from homeassistant.components.infrared import InfraredEntity
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.device_registry import DeviceInfo
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from . import DOMAIN
|
||||
|
||||
PARALLEL_UPDATES = 0
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
config_entry: ConfigEntry,
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up the demo infrared platform."""
|
||||
async_add_entities(
|
||||
[
|
||||
DemoInfrared(
|
||||
unique_id="ir_transmitter",
|
||||
device_name="IR Blaster",
|
||||
entity_name="Infrared Transmitter",
|
||||
),
|
||||
]
|
||||
)
|
||||
|
||||
|
||||
class DemoInfrared(InfraredEntity):
|
||||
"""Representation of a demo infrared entity."""
|
||||
|
||||
_attr_has_entity_name = True
|
||||
_attr_should_poll = False
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
unique_id: str,
|
||||
device_name: str,
|
||||
entity_name: str,
|
||||
) -> None:
|
||||
"""Initialize the demo infrared entity."""
|
||||
self._attr_unique_id = unique_id
|
||||
self._attr_device_info = DeviceInfo(
|
||||
identifiers={(DOMAIN, unique_id)},
|
||||
name=device_name,
|
||||
)
|
||||
self._attr_name = entity_name
|
||||
|
||||
async def async_send_command(self, command: infrared_protocols.Command) -> None:
|
||||
"""Send an IR command."""
|
||||
timings = [
|
||||
interval
|
||||
for timing in command.get_raw_timings()
|
||||
for interval in (timing.high_us, -timing.low_us)
|
||||
]
|
||||
persistent_notification.async_create(
|
||||
self.hass, str(timings), title="Infrared Command"
|
||||
)
|
||||
@@ -101,6 +101,8 @@ async def async_setup_entry(
|
||||
)
|
||||
|
||||
for subentry_id, subentry in config_entry.subentries.items():
|
||||
if subentry.subentry_type != "entity":
|
||||
continue
|
||||
async_add_entities(
|
||||
[
|
||||
DemoSensor(
|
||||
|
||||
@@ -32,6 +32,24 @@
|
||||
"description": "Reconfigure the sensor"
|
||||
}
|
||||
}
|
||||
},
|
||||
"infrared_fan": {
|
||||
"abort": {
|
||||
"no_emitters": "No infrared transmitter entities found. Please set up an infrared device first."
|
||||
},
|
||||
"entry_type": "Infrared fan",
|
||||
"initiate_flow": {
|
||||
"user": "Add infrared fan"
|
||||
},
|
||||
"step": {
|
||||
"user": {
|
||||
"data": {
|
||||
"infrared_entity_id": "Infrared transmitter",
|
||||
"name": "[%key:common::config_flow::data::name%]"
|
||||
},
|
||||
"description": "Select an infrared transmitter to control the fan."
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"device": {
|
||||
|
||||
@@ -168,8 +168,9 @@ class MatterVacuum(MatterEntity, StateVacuumEntity):
|
||||
segments: dict[str, Segment] = {}
|
||||
for area in supported_areas:
|
||||
area_name = None
|
||||
if area.areaInfo and area.areaInfo.locationInfo:
|
||||
area_name = area.areaInfo.locationInfo.locationName
|
||||
location_info = area.areaInfo.locationInfo
|
||||
if location_info not in (None, clusters.NullValue):
|
||||
area_name = location_info.locationName
|
||||
|
||||
if area_name:
|
||||
segment_id = str(area.areaID)
|
||||
|
||||
@@ -512,6 +512,11 @@ class OpenAISubentryFlowHandler(ConfigSubentryFlow):
|
||||
options.pop(CONF_WEB_SEARCH_REGION, None)
|
||||
options.pop(CONF_WEB_SEARCH_COUNTRY, None)
|
||||
options.pop(CONF_WEB_SEARCH_TIMEZONE, None)
|
||||
if (
|
||||
user_input.get(CONF_CODE_INTERPRETER)
|
||||
and user_input.get(CONF_REASONING_EFFORT) == "minimal"
|
||||
):
|
||||
errors[CONF_CODE_INTERPRETER] = "code_interpreter_minimal_reasoning"
|
||||
|
||||
options.update(user_input)
|
||||
if not errors:
|
||||
@@ -539,15 +544,15 @@ class OpenAISubentryFlowHandler(ConfigSubentryFlow):
|
||||
if not model.startswith(("o", "gpt-5")) or model.startswith("gpt-5-pro"):
|
||||
return []
|
||||
|
||||
MODELS_REASONING_MAP = {
|
||||
models_reasoning_map: dict[str | tuple[str, ...], list[str]] = {
|
||||
"gpt-5.2-pro": ["medium", "high", "xhigh"],
|
||||
"gpt-5.2": ["none", "low", "medium", "high", "xhigh"],
|
||||
("gpt-5.2", "gpt-5.3"): ["none", "low", "medium", "high", "xhigh"],
|
||||
"gpt-5.1": ["none", "low", "medium", "high"],
|
||||
"gpt-5": ["minimal", "low", "medium", "high"],
|
||||
"": ["low", "medium", "high"], # The default case
|
||||
}
|
||||
|
||||
for prefix, options in MODELS_REASONING_MAP.items():
|
||||
for prefix, options in models_reasoning_map.items():
|
||||
if model.startswith(prefix):
|
||||
return options
|
||||
return [] # pragma: no cover
|
||||
|
||||
@@ -38,6 +38,7 @@
|
||||
},
|
||||
"entry_type": "AI task",
|
||||
"error": {
|
||||
"code_interpreter_minimal_reasoning": "[%key:component::openai_conversation::config_subentries::conversation::error::code_interpreter_minimal_reasoning%]",
|
||||
"model_not_supported": "[%key:component::openai_conversation::config_subentries::conversation::error::model_not_supported%]",
|
||||
"web_search_minimal_reasoning": "[%key:component::openai_conversation::config_subentries::conversation::error::web_search_minimal_reasoning%]"
|
||||
},
|
||||
@@ -93,6 +94,7 @@
|
||||
},
|
||||
"entry_type": "Conversation agent",
|
||||
"error": {
|
||||
"code_interpreter_minimal_reasoning": "Code interpreter is not supported with minimal reasoning effort",
|
||||
"model_not_supported": "This model is not supported, please select a different model",
|
||||
"web_search_minimal_reasoning": "Web search is currently not supported with minimal reasoning effort"
|
||||
},
|
||||
|
||||
@@ -1 +1,51 @@
|
||||
"""The orvibo component."""
|
||||
"""The orvibo integration."""
|
||||
|
||||
import logging
|
||||
|
||||
from orvibo.s20 import S20, S20Exception
|
||||
|
||||
from homeassistant import core
|
||||
from homeassistant.const import CONF_HOST, CONF_MAC, Platform
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import ConfigEntryNotReady
|
||||
|
||||
from .const import DOMAIN
|
||||
from .models import S20ConfigEntry
|
||||
|
||||
PLATFORMS = [Platform.SWITCH]
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
async def async_setup_entry(hass: core.HomeAssistant, entry: S20ConfigEntry) -> bool:
|
||||
"""Set up platform from a ConfigEntry."""
|
||||
|
||||
try:
|
||||
s20 = await hass.async_add_executor_job(
|
||||
S20,
|
||||
entry.data[CONF_HOST],
|
||||
entry.data[CONF_MAC],
|
||||
)
|
||||
_LOGGER.debug("Initialized S20 at %s", entry.data[CONF_HOST])
|
||||
except S20Exception as err:
|
||||
_LOGGER.debug("S20 at %s couldn't be initialized", entry.data[CONF_HOST])
|
||||
|
||||
raise ConfigEntryNotReady(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="init_error",
|
||||
translation_placeholders={
|
||||
"host": entry.data[CONF_HOST],
|
||||
},
|
||||
) from err
|
||||
|
||||
entry.runtime_data = s20
|
||||
|
||||
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
|
||||
|
||||
return True
|
||||
|
||||
|
||||
async def async_unload_entry(hass: HomeAssistant, entry: S20ConfigEntry) -> bool:
|
||||
"""Unload a config entry."""
|
||||
|
||||
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
|
||||
|
||||
205
homeassistant/components/orvibo/config_flow.py
Normal file
205
homeassistant/components/orvibo/config_flow.py
Normal file
@@ -0,0 +1,205 @@
|
||||
"""Config flow for the orvibo integration."""
|
||||
|
||||
import asyncio
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
from orvibo.s20 import S20, S20Exception, discover
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
|
||||
from homeassistant.const import CONF_HOST, CONF_MAC, CONF_NAME
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.helpers.device_registry import format_mac
|
||||
|
||||
from .const import CONF_SWITCH_LIST, DEFAULT_NAME, DOMAIN
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
FULL_EDIT_SCHEMA = vol.Schema(
|
||||
{
|
||||
vol.Required(CONF_HOST): cv.string,
|
||||
vol.Optional(CONF_MAC): cv.string,
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
class S20ConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
"""Handle the config flow for Orvibo S20 switches."""
|
||||
|
||||
VERSION = 1
|
||||
MINOR_VERSION = 1
|
||||
|
||||
def __init__(self) -> None:
|
||||
"""Initialize an instance of the S20 config flow."""
|
||||
self.discovery_task: asyncio.Task | None = None
|
||||
self._discovered_switches: dict[str, dict[str, Any]] = {}
|
||||
self.chosen_switch: dict[str, Any] = {}
|
||||
|
||||
async def _async_discover(self) -> None:
|
||||
def _filter_discovered_switches(
|
||||
switches: dict[str, dict[str, Any]],
|
||||
) -> dict[str, dict[str, Any]]:
|
||||
# Get existing unique_ids from config entries
|
||||
existing_ids = {entry.unique_id for entry in self._async_current_entries()}
|
||||
_LOGGER.debug("Existing unique IDs: %s", existing_ids)
|
||||
# Build a new filtered dict
|
||||
filtered = {}
|
||||
for ip, info in switches.items():
|
||||
mac_bytes = info.get("mac")
|
||||
if not mac_bytes:
|
||||
continue # skip if no MAC
|
||||
|
||||
unique_id = format_mac(mac_bytes.hex()).lower()
|
||||
if unique_id not in existing_ids:
|
||||
filtered[ip] = info
|
||||
_LOGGER.debug("New switches: %s", filtered)
|
||||
return filtered
|
||||
|
||||
# Discover S20 devices.
|
||||
_LOGGER.debug("Discovering S20 switches")
|
||||
|
||||
_unfiltered_switches = await self.hass.async_add_executor_job(discover)
|
||||
_LOGGER.debug("All discovered switches: %s", _unfiltered_switches)
|
||||
|
||||
self._discovered_switches = _filter_discovered_switches(_unfiltered_switches)
|
||||
|
||||
async def async_step_user(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle a flow initialized by the user."""
|
||||
|
||||
return self.async_show_menu(
|
||||
step_id="user", menu_options=["start_discovery", "edit"]
|
||||
)
|
||||
|
||||
async def _validate_input(self, user_input: dict[str, Any]) -> str | None:
|
||||
"""Validate user input and discover MAC if missing."""
|
||||
|
||||
if user_input.get(CONF_MAC):
|
||||
user_input[CONF_MAC] = format_mac(user_input[CONF_MAC]).lower()
|
||||
if len(user_input[CONF_MAC]) != 17 or user_input[CONF_MAC].count(":") != 5:
|
||||
return "invalid_mac"
|
||||
|
||||
try:
|
||||
device = await self.hass.async_add_executor_job(
|
||||
S20,
|
||||
user_input[CONF_HOST],
|
||||
user_input.get(CONF_MAC),
|
||||
)
|
||||
|
||||
if not user_input.get(CONF_MAC):
|
||||
# Using private attribute access here since S20 class doesn't have a public method to get the MAC without repeating discovery
|
||||
if not device._mac: # noqa: SLF001
|
||||
return "cannot_discover"
|
||||
user_input[CONF_MAC] = format_mac(device._mac.hex()).lower() # noqa: SLF001
|
||||
|
||||
except S20Exception:
|
||||
return "cannot_connect"
|
||||
|
||||
return None
|
||||
|
||||
async def async_step_edit(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Edit a discovered or manually configured server."""
|
||||
|
||||
errors = {}
|
||||
if user_input:
|
||||
error = await self._validate_input(user_input)
|
||||
if not error:
|
||||
await self.async_set_unique_id(user_input[CONF_MAC])
|
||||
self._abort_if_unique_id_configured()
|
||||
return self.async_create_entry(
|
||||
title=f"{DEFAULT_NAME} ({user_input[CONF_HOST]})", data=user_input
|
||||
)
|
||||
errors["base"] = error
|
||||
|
||||
return self.async_show_form(
|
||||
step_id="edit",
|
||||
data_schema=FULL_EDIT_SCHEMA,
|
||||
errors=errors,
|
||||
)
|
||||
|
||||
async def async_step_start_discovery(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle a flow initialized by the user."""
|
||||
if not self.discovery_task:
|
||||
self.discovery_task = self.hass.async_create_task(self._async_discover())
|
||||
return self.async_show_progress(
|
||||
step_id="start_discovery",
|
||||
progress_action="start_discovery",
|
||||
progress_task=self.discovery_task,
|
||||
)
|
||||
if self.discovery_task.done():
|
||||
try:
|
||||
self.discovery_task.result()
|
||||
except (S20Exception, OSError) as err:
|
||||
_LOGGER.debug("Discovery task failed: %s", err)
|
||||
self.discovery_task = None
|
||||
return self.async_show_progress_done(
|
||||
next_step_id=(
|
||||
"choose_switch" if self._discovered_switches else "discovery_failed"
|
||||
)
|
||||
)
|
||||
return self.async_show_progress(
|
||||
step_id="start_discovery",
|
||||
progress_action="start_discovery",
|
||||
progress_task=self.discovery_task,
|
||||
)
|
||||
|
||||
async def async_step_choose_switch(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Choose manual or discover flow."""
|
||||
_chosen_host: str
|
||||
|
||||
if user_input:
|
||||
_chosen_host = user_input[CONF_SWITCH_LIST]
|
||||
for host, data in self._discovered_switches.items():
|
||||
if _chosen_host == host:
|
||||
self.chosen_switch[CONF_HOST] = host
|
||||
self.chosen_switch[CONF_MAC] = format_mac(
|
||||
data[CONF_MAC].hex()
|
||||
).lower()
|
||||
await self.async_set_unique_id(self.chosen_switch[CONF_MAC])
|
||||
self._abort_if_unique_id_configured()
|
||||
return self.async_create_entry(
|
||||
title=f"{DEFAULT_NAME} ({host})", data=self.chosen_switch
|
||||
)
|
||||
_LOGGER.debug("discovered switches: %s", self._discovered_switches)
|
||||
|
||||
_options = {
|
||||
host: f"{host} ({format_mac(data[CONF_MAC].hex()).lower()})"
|
||||
for host, data in self._discovered_switches.items()
|
||||
}
|
||||
return self.async_show_form(
|
||||
step_id="choose_switch",
|
||||
data_schema=vol.Schema({vol.Required(CONF_SWITCH_LIST): vol.In(_options)}),
|
||||
)
|
||||
|
||||
async def async_step_discovery_failed(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle a failed discovery."""
|
||||
|
||||
return self.async_show_menu(
|
||||
step_id="discovery_failed", menu_options=["start_discovery", "edit"]
|
||||
)
|
||||
|
||||
async def async_step_import(self, user_input: dict[str, Any]) -> ConfigFlowResult:
|
||||
"""Handle import from configuration.yaml."""
|
||||
_LOGGER.debug("Importing config: %s", user_input)
|
||||
|
||||
error = await self._validate_input(user_input)
|
||||
if error:
|
||||
return self.async_abort(reason=error)
|
||||
|
||||
await self.async_set_unique_id(user_input[CONF_MAC])
|
||||
self._abort_if_unique_id_configured()
|
||||
|
||||
return self.async_create_entry(
|
||||
title=user_input.get(CONF_NAME, user_input[CONF_HOST]), data=user_input
|
||||
)
|
||||
5
homeassistant/components/orvibo/const.py
Normal file
5
homeassistant/components/orvibo/const.py
Normal file
@@ -0,0 +1,5 @@
|
||||
"""Constants for the orvibo integration."""
|
||||
|
||||
DOMAIN = "orvibo"
|
||||
DEFAULT_NAME = "S20"
|
||||
CONF_SWITCH_LIST = "switches"
|
||||
@@ -2,6 +2,7 @@
|
||||
"domain": "orvibo",
|
||||
"name": "Orvibo",
|
||||
"codeowners": [],
|
||||
"config_flow": true,
|
||||
"documentation": "https://www.home-assistant.io/integrations/orvibo",
|
||||
"iot_class": "local_push",
|
||||
"loggers": ["orvibo"],
|
||||
|
||||
7
homeassistant/components/orvibo/models.py
Normal file
7
homeassistant/components/orvibo/models.py
Normal file
@@ -0,0 +1,7 @@
|
||||
"""Data models for the Orvibo integration."""
|
||||
|
||||
from orvibo.s20 import S20
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
|
||||
type S20ConfigEntry = ConfigEntry[S20]
|
||||
71
homeassistant/components/orvibo/strings.json
Normal file
71
homeassistant/components/orvibo/strings.json
Normal file
@@ -0,0 +1,71 @@
|
||||
{
|
||||
"config": {
|
||||
"abort": {
|
||||
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
|
||||
"already_in_progress": "[%key:common::config_flow::abort::already_in_progress%]",
|
||||
"cannot_connect": "Unable to connect to the S20 switch",
|
||||
"cannot_discover": "Unable to discover MAC address of S20 switch. Please enter the MAC address.",
|
||||
"invalid_mac": "Invalid MAC address format"
|
||||
},
|
||||
"error": {
|
||||
"cannot_connect": "[%key:component::orvibo::config::abort::cannot_connect%]",
|
||||
"cannot_discover": "[%key:component::orvibo::config::abort::cannot_discover%]",
|
||||
"invalid_mac": "Invalid MAC address format"
|
||||
},
|
||||
"progress": {
|
||||
"start_discovery": "Attempting to discover new S20 switches\n\nThis will take about 3 seconds\n\nDiscovery may fail if the switch is asleep. If your switch does not appear, please power toggle your switch before re-running discovery.",
|
||||
"title": "Orvibo S20"
|
||||
},
|
||||
"step": {
|
||||
"choose_switch": {
|
||||
"data": {
|
||||
"switches": "Choose discovered switch to configure"
|
||||
},
|
||||
"title": "Discovered switches"
|
||||
},
|
||||
"discovery_failed": {
|
||||
"description": "No S20 switches were discovered on the network. Discovery may have failed if the switch is asleep. Please power toggle your switch before re-running discovery.",
|
||||
"menu_options": {
|
||||
"edit": "Enter configuration manually",
|
||||
"start_discovery": "Try discovering again"
|
||||
},
|
||||
"title": "Discovery failed"
|
||||
},
|
||||
"edit": {
|
||||
"data": {
|
||||
"host": "[%key:common::config_flow::data::host%]",
|
||||
"mac": "MAC address"
|
||||
},
|
||||
"title": "Configure Orvibo S20 switch"
|
||||
},
|
||||
"user": {
|
||||
"menu_options": {
|
||||
"edit": "Enter configuration manually",
|
||||
"start_discovery": "Discover new S20 switches"
|
||||
},
|
||||
"title": "Orvibo S20 Configuration"
|
||||
}
|
||||
}
|
||||
},
|
||||
"exceptions": {
|
||||
"init_error": {
|
||||
"message": "Error while initializing S20 {host}."
|
||||
},
|
||||
"turn_off_error": {
|
||||
"message": "Error while turning off S20 {name}."
|
||||
},
|
||||
"turn_on_error": {
|
||||
"message": "Error while turning on S20 {name}."
|
||||
}
|
||||
},
|
||||
"issues": {
|
||||
"yaml_deprecation": {
|
||||
"description": "The device (MAC: {mac}, Host: {host}) is configured in `configuration.yaml`. The Orvibo integration now supports UI-based configuration and this device has been migrated to the new UI. Please remove the YAML block from `configuration.yaml` to avoid future issues.",
|
||||
"title": "Legacy YAML configuration detected {host}"
|
||||
},
|
||||
"yaml_deprecation_import_issue": {
|
||||
"description": "Attempting to import this device (MAC: {mac}, Host: {host}) from YAML has failed for reason {reason}. 1) Remove the YAML block from `configuration.yaml`, 2) Restart Home Assistant, 3) Add the device using the UI configuration flow.",
|
||||
"title": "Legacy YAML configuration import issue for {host}"
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,13 +1,14 @@
|
||||
"""Support for Orvibo S20 Wifi Smart Switches."""
|
||||
"""Switch platform for the Orvibo integration."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
from orvibo.s20 import S20, S20Exception, discover
|
||||
from orvibo.s20 import S20, S20Exception
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant import config_entries
|
||||
from homeassistant.components.switch import (
|
||||
PLATFORM_SCHEMA as SWITCH_PLATFORM_SCHEMA,
|
||||
SwitchEntity,
|
||||
@@ -20,14 +21,25 @@ from homeassistant.const import (
|
||||
CONF_SWITCHES,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
from homeassistant.data_entry_flow import FlowResultType
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers import config_validation as cv, issue_registry as ir
|
||||
from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC, DeviceInfo
|
||||
from homeassistant.helpers.entity_platform import (
|
||||
AddConfigEntryEntitiesCallback,
|
||||
AddEntitiesCallback,
|
||||
)
|
||||
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
|
||||
|
||||
from .const import DEFAULT_NAME, DOMAIN
|
||||
from .models import S20ConfigEntry
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
DEFAULT_NAME = "Orvibo S20 Switch"
|
||||
DEFAULT_DISCOVERY = True
|
||||
DEFAULT_DISCOVERY = False
|
||||
|
||||
# Library is not thread safe and uses global variables, so we limit to 1 update at a time
|
||||
PARALLEL_UPDATES = 1
|
||||
|
||||
PLATFORM_SCHEMA = SWITCH_PLATFORM_SCHEMA.extend(
|
||||
{
|
||||
@@ -46,65 +58,138 @@ PLATFORM_SCHEMA = SWITCH_PLATFORM_SCHEMA.extend(
|
||||
)
|
||||
|
||||
|
||||
def setup_platform(
|
||||
async def async_setup_platform(
|
||||
hass: HomeAssistant,
|
||||
config: ConfigType,
|
||||
add_entities_callback: AddEntitiesCallback,
|
||||
discovery_info: DiscoveryInfoType | None = None,
|
||||
) -> None:
|
||||
"""Set up S20 switches."""
|
||||
"""Set up the integration from configuration.yaml."""
|
||||
for switch in config.get(CONF_SWITCHES, []):
|
||||
result = await hass.config_entries.flow.async_init(
|
||||
DOMAIN,
|
||||
context={"source": config_entries.SOURCE_IMPORT},
|
||||
data=switch,
|
||||
)
|
||||
|
||||
switch_data = {}
|
||||
switches = []
|
||||
switch_conf = config.get(CONF_SWITCHES, [config])
|
||||
|
||||
if config.get(CONF_DISCOVERY):
|
||||
_LOGGER.debug("Discovering S20 switches")
|
||||
switch_data.update(discover())
|
||||
|
||||
for switch in switch_conf:
|
||||
switch_data[switch.get(CONF_HOST)] = switch
|
||||
|
||||
for host, data in switch_data.items():
|
||||
try:
|
||||
switches.append(
|
||||
S20Switch(data.get(CONF_NAME), S20(host, mac=data.get(CONF_MAC)))
|
||||
if (
|
||||
result.get("type") is FlowResultType.ABORT
|
||||
and result.get("reason") != "already_configured"
|
||||
):
|
||||
ir.async_create_issue(
|
||||
hass,
|
||||
DOMAIN,
|
||||
f"yaml_deprecation_import_issue_{switch.get('host')}_{(switch.get('mac') or 'unknown_mac').replace(':', '').lower()}",
|
||||
breaks_in_ha_version="2026.9.0",
|
||||
is_fixable=False,
|
||||
is_persistent=False,
|
||||
issue_domain=DOMAIN,
|
||||
severity=ir.IssueSeverity.WARNING,
|
||||
translation_key="yaml_deprecation_import_issue",
|
||||
translation_placeholders={
|
||||
"reason": str(result.get("reason")),
|
||||
"host": switch.get("host"),
|
||||
"mac": switch.get("mac", ""),
|
||||
},
|
||||
)
|
||||
_LOGGER.debug("Initialized S20 at %s", host)
|
||||
except S20Exception:
|
||||
_LOGGER.error("S20 at %s couldn't be initialized", host)
|
||||
continue
|
||||
|
||||
add_entities_callback(switches)
|
||||
ir.async_create_issue(
|
||||
hass,
|
||||
DOMAIN,
|
||||
f"yaml_deprecation_{switch.get('host')}_{(switch.get('mac') or 'unknown_mac').replace(':', '').lower()}",
|
||||
breaks_in_ha_version="2026.9.0",
|
||||
is_fixable=False,
|
||||
is_persistent=False,
|
||||
severity=ir.IssueSeverity.WARNING,
|
||||
translation_key="yaml_deprecation",
|
||||
translation_placeholders={
|
||||
"host": switch.get("host"),
|
||||
"mac": switch.get("mac") or "Unknown MAC",
|
||||
},
|
||||
)
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
entry: S20ConfigEntry,
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up orvibo from a config entry."""
|
||||
async_add_entities(
|
||||
[
|
||||
S20Switch(
|
||||
entry.title,
|
||||
entry.data[CONF_HOST],
|
||||
entry.data[CONF_MAC],
|
||||
entry.runtime_data,
|
||||
)
|
||||
]
|
||||
)
|
||||
|
||||
|
||||
class S20Switch(SwitchEntity):
|
||||
"""Representation of an S20 switch."""
|
||||
|
||||
def __init__(self, name, s20):
|
||||
_attr_has_entity_name = True
|
||||
|
||||
def __init__(self, name: str, host: str, mac: str, s20: S20) -> None:
|
||||
"""Initialize the S20 device."""
|
||||
|
||||
self._attr_name = name
|
||||
self._s20 = s20
|
||||
self._attr_is_on = False
|
||||
self._exc = S20Exception
|
||||
|
||||
def update(self) -> None:
|
||||
"""Update device state."""
|
||||
try:
|
||||
self._attr_is_on = self._s20.on
|
||||
except self._exc:
|
||||
_LOGGER.exception("Error while fetching S20 state")
|
||||
self._host = host
|
||||
self._mac = mac
|
||||
self._s20 = s20
|
||||
self._attr_unique_id = self._mac
|
||||
self._name = name
|
||||
self._attr_name = None
|
||||
self._attr_device_info = DeviceInfo(
|
||||
identifiers={
|
||||
# MAC addresses are used as unique identifiers within this domain
|
||||
(DOMAIN, self._attr_unique_id)
|
||||
},
|
||||
name=name,
|
||||
manufacturer="Orvibo",
|
||||
model="S20",
|
||||
connections={(CONNECTION_NETWORK_MAC, self._mac)},
|
||||
)
|
||||
|
||||
def turn_on(self, **kwargs: Any) -> None:
|
||||
"""Turn the device on."""
|
||||
try:
|
||||
self._s20.on = True
|
||||
except self._exc:
|
||||
_LOGGER.exception("Error while turning on S20")
|
||||
except S20Exception as err:
|
||||
raise HomeAssistantError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="turn_on_error",
|
||||
translation_placeholders={"name": self._name},
|
||||
) from err
|
||||
|
||||
def turn_off(self, **kwargs: Any) -> None:
|
||||
"""Turn the device off."""
|
||||
try:
|
||||
self._s20.on = False
|
||||
except self._exc:
|
||||
_LOGGER.exception("Error while turning off S20")
|
||||
except S20Exception as err:
|
||||
raise HomeAssistantError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="turn_off_error",
|
||||
translation_placeholders={"name": self._name},
|
||||
) from err
|
||||
|
||||
def update(self) -> None:
|
||||
"""Update device state."""
|
||||
try:
|
||||
self._attr_is_on = self._s20.on
|
||||
|
||||
# If the device was previously offline, let the user know it's back!
|
||||
if not self._attr_available:
|
||||
_LOGGER.info("Orvibo switch %s reconnected", self._name)
|
||||
self._attr_available = True
|
||||
|
||||
except S20Exception as err:
|
||||
# Only log the error if this is the FIRST time it failed
|
||||
if self._attr_available:
|
||||
_LOGGER.info(
|
||||
"Error communicating with Orvibo switch %s: %s", self._name, err
|
||||
)
|
||||
self._attr_available = False
|
||||
|
||||
@@ -137,11 +137,10 @@ class PhilipsTVLightEntity(PhilipsJsEntity, LightEntity):
|
||||
|
||||
_attr_effect: str
|
||||
_attr_translation_key = "ambilight"
|
||||
_attr_supported_color_modes = {ColorMode.HS}
|
||||
_attr_supported_features = LightEntityFeature.EFFECT
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: PhilipsTVDataUpdateCoordinator,
|
||||
) -> None:
|
||||
def __init__(self, coordinator: PhilipsTVDataUpdateCoordinator) -> None:
|
||||
"""Initialize light."""
|
||||
self._tv = coordinator.api
|
||||
self._hs = None
|
||||
@@ -150,8 +149,6 @@ class PhilipsTVLightEntity(PhilipsJsEntity, LightEntity):
|
||||
self._last_selected_effect: AmbilightEffect | None = None
|
||||
super().__init__(coordinator)
|
||||
|
||||
self._attr_supported_color_modes = {ColorMode.HS, ColorMode.ONOFF}
|
||||
self._attr_supported_features = LightEntityFeature.EFFECT
|
||||
self._attr_unique_id = coordinator.unique_id
|
||||
|
||||
self._update_from_coordinator()
|
||||
|
||||
@@ -16,7 +16,7 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from . import PortainerConfigEntry
|
||||
from .const import CONTAINER_STATE_RUNNING, STACK_STATUS_ACTIVE
|
||||
from .coordinator import PortainerContainerData, PortainerCoordinator
|
||||
from .coordinator import PortainerContainerData
|
||||
from .entity import (
|
||||
PortainerContainerEntity,
|
||||
PortainerCoordinatorData,
|
||||
@@ -165,18 +165,6 @@ class PortainerEndpointSensor(PortainerEndpointEntity, BinarySensorEntity):
|
||||
|
||||
entity_description: PortainerEndpointBinarySensorEntityDescription
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: PortainerCoordinator,
|
||||
entity_description: PortainerEndpointBinarySensorEntityDescription,
|
||||
device_info: PortainerCoordinatorData,
|
||||
) -> None:
|
||||
"""Initialize Portainer endpoint binary sensor entity."""
|
||||
self.entity_description = entity_description
|
||||
super().__init__(device_info, coordinator)
|
||||
|
||||
self._attr_unique_id = f"{coordinator.config_entry.entry_id}_{device_info.id}_{entity_description.key}"
|
||||
|
||||
@property
|
||||
def is_on(self) -> bool | None:
|
||||
"""Return true if the binary sensor is on."""
|
||||
@@ -188,19 +176,6 @@ class PortainerContainerSensor(PortainerContainerEntity, BinarySensorEntity):
|
||||
|
||||
entity_description: PortainerContainerBinarySensorEntityDescription
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: PortainerCoordinator,
|
||||
entity_description: PortainerContainerBinarySensorEntityDescription,
|
||||
device_info: PortainerContainerData,
|
||||
via_device: PortainerCoordinatorData,
|
||||
) -> None:
|
||||
"""Initialize the Portainer container sensor."""
|
||||
self.entity_description = entity_description
|
||||
super().__init__(device_info, coordinator, via_device)
|
||||
|
||||
self._attr_unique_id = f"{coordinator.config_entry.entry_id}_{self.device_name}_{entity_description.key}"
|
||||
|
||||
@property
|
||||
def is_on(self) -> bool | None:
|
||||
"""Return true if the binary sensor is on."""
|
||||
@@ -212,19 +187,6 @@ class PortainerStackSensor(PortainerStackEntity, BinarySensorEntity):
|
||||
|
||||
entity_description: PortainerStackBinarySensorEntityDescription
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: PortainerCoordinator,
|
||||
entity_description: PortainerStackBinarySensorEntityDescription,
|
||||
device_info: PortainerStackData,
|
||||
via_device: PortainerCoordinatorData,
|
||||
) -> None:
|
||||
"""Initialize the Portainer stack sensor."""
|
||||
self.entity_description = entity_description
|
||||
super().__init__(device_info, coordinator, via_device)
|
||||
|
||||
self._attr_unique_id = f"{coordinator.config_entry.entry_id}_{device_info.stack.id}_{entity_description.key}"
|
||||
|
||||
@property
|
||||
def is_on(self) -> bool | None:
|
||||
"""Return true if the binary sensor is on."""
|
||||
|
||||
@@ -167,18 +167,6 @@ class PortainerEndpointButton(PortainerEndpointEntity, PortainerBaseButton):
|
||||
|
||||
entity_description: PortainerButtonDescription
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: PortainerCoordinator,
|
||||
entity_description: PortainerButtonDescription,
|
||||
device_info: PortainerCoordinatorData,
|
||||
) -> None:
|
||||
"""Initialize the Portainer endpoint button entity."""
|
||||
self.entity_description = entity_description
|
||||
super().__init__(device_info, coordinator)
|
||||
|
||||
self._attr_unique_id = f"{coordinator.config_entry.entry_id}_{device_info.id}_{entity_description.key}"
|
||||
|
||||
async def _async_press_call(self) -> None:
|
||||
"""Call the endpoint button press action."""
|
||||
await self.entity_description.press_action(
|
||||
@@ -191,19 +179,6 @@ class PortainerContainerButton(PortainerContainerEntity, PortainerBaseButton):
|
||||
|
||||
entity_description: PortainerButtonDescription
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: PortainerCoordinator,
|
||||
entity_description: PortainerButtonDescription,
|
||||
device_info: PortainerContainerData,
|
||||
via_device: PortainerCoordinatorData,
|
||||
) -> None:
|
||||
"""Initialize the Portainer button entity."""
|
||||
self.entity_description = entity_description
|
||||
super().__init__(device_info, coordinator, via_device)
|
||||
|
||||
self._attr_unique_id = f"{coordinator.config_entry.entry_id}_{self.device_name}_{entity_description.key}"
|
||||
|
||||
async def _async_press_call(self) -> None:
|
||||
"""Call the container button press action."""
|
||||
await self.entity_description.press_action(
|
||||
|
||||
@@ -170,11 +170,11 @@ class PortainerCoordinator(DataUpdateCoordinator[dict[int, PortainerCoordinatorD
|
||||
docker_system_df,
|
||||
stacks,
|
||||
) = await asyncio.gather(
|
||||
self.portainer.get_containers(endpoint_id=endpoint.id),
|
||||
self.portainer.docker_version(endpoint_id=endpoint.id),
|
||||
self.portainer.docker_info(endpoint_id=endpoint.id),
|
||||
self.portainer.get_containers(endpoint.id),
|
||||
self.portainer.docker_version(endpoint.id),
|
||||
self.portainer.docker_info(endpoint.id),
|
||||
self.portainer.docker_system_df(endpoint.id),
|
||||
self.portainer.get_stacks(endpoint_id=endpoint.id),
|
||||
self.portainer.get_stacks(endpoint.id),
|
||||
)
|
||||
|
||||
prev_endpoint = self.data.get(endpoint.id) if self.data else None
|
||||
|
||||
@@ -5,13 +5,13 @@ from __future__ import annotations
|
||||
from typing import Any
|
||||
|
||||
from homeassistant.components.diagnostics import async_redact_data
|
||||
from homeassistant.const import CONF_API_TOKEN
|
||||
from homeassistant.const import CONF_API_TOKEN, CONF_URL
|
||||
from homeassistant.core import HomeAssistant
|
||||
|
||||
from . import PortainerConfigEntry
|
||||
from .coordinator import PortainerCoordinator
|
||||
|
||||
TO_REDACT = [CONF_API_TOKEN]
|
||||
TO_REDACT = [CONF_API_TOKEN, CONF_URL]
|
||||
|
||||
|
||||
def _serialize_coordinator(coordinator: PortainerCoordinator) -> dict[str, Any]:
|
||||
|
||||
@@ -4,6 +4,7 @@ from yarl import URL
|
||||
|
||||
from homeassistant.const import CONF_URL
|
||||
from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo
|
||||
from homeassistant.helpers.entity import EntityDescription
|
||||
from homeassistant.helpers.update_coordinator import CoordinatorEntity
|
||||
|
||||
from .const import DEFAULT_NAME, DOMAIN
|
||||
@@ -26,11 +27,13 @@ class PortainerEndpointEntity(PortainerCoordinatorEntity):
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
device_info: PortainerCoordinatorData,
|
||||
coordinator: PortainerCoordinator,
|
||||
entity_description: EntityDescription,
|
||||
device_info: PortainerCoordinatorData,
|
||||
) -> None:
|
||||
"""Initialize a Portainer endpoint."""
|
||||
super().__init__(coordinator)
|
||||
self.entity_description = entity_description
|
||||
self._device_info = device_info
|
||||
self.device_id = device_info.endpoint.id
|
||||
self._attr_device_info = DeviceInfo(
|
||||
@@ -45,6 +48,7 @@ class PortainerEndpointEntity(PortainerCoordinatorEntity):
|
||||
name=device_info.endpoint.name,
|
||||
entry_type=DeviceEntryType.SERVICE,
|
||||
)
|
||||
self._attr_unique_id = f"{coordinator.config_entry.entry_id}_{device_info.id}_{entity_description.key}"
|
||||
|
||||
@property
|
||||
def available(self) -> bool:
|
||||
@@ -57,12 +61,14 @@ class PortainerContainerEntity(PortainerCoordinatorEntity):
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
device_info: PortainerContainerData,
|
||||
coordinator: PortainerCoordinator,
|
||||
entity_description: EntityDescription,
|
||||
device_info: PortainerContainerData,
|
||||
via_device: PortainerCoordinatorData,
|
||||
) -> None:
|
||||
"""Initialize a Portainer container."""
|
||||
super().__init__(coordinator)
|
||||
self.entity_description = entity_description
|
||||
self._device_info = device_info
|
||||
self.device_id = self._device_info.container.id
|
||||
self.endpoint_id = via_device.endpoint.id
|
||||
@@ -91,13 +97,14 @@ class PortainerContainerEntity(PortainerCoordinatorEntity):
|
||||
# else it's the endpoint
|
||||
via_device=(
|
||||
DOMAIN,
|
||||
f"{coordinator.config_entry.entry_id}_{self.endpoint_id}_{device_info.stack.name}"
|
||||
f"{coordinator.config_entry.entry_id}_{self.endpoint_id}_stack_{device_info.stack.id}"
|
||||
if device_info.stack
|
||||
else f"{coordinator.config_entry.entry_id}_{self.endpoint_id}",
|
||||
),
|
||||
translation_key=None if self.device_name else "unknown_container",
|
||||
entry_type=DeviceEntryType.SERVICE,
|
||||
)
|
||||
self._attr_unique_id = f"{coordinator.config_entry.entry_id}_{self.device_name}_{entity_description.key}"
|
||||
|
||||
@property
|
||||
def available(self) -> bool:
|
||||
@@ -119,12 +126,14 @@ class PortainerStackEntity(PortainerCoordinatorEntity):
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
device_info: PortainerStackData,
|
||||
coordinator: PortainerCoordinator,
|
||||
entity_description: EntityDescription,
|
||||
device_info: PortainerStackData,
|
||||
via_device: PortainerCoordinatorData,
|
||||
) -> None:
|
||||
"""Initialize a Portainer stack."""
|
||||
super().__init__(coordinator)
|
||||
self.entity_description = entity_description
|
||||
self._device_info = device_info
|
||||
self.stack_id = device_info.stack.id
|
||||
self.device_name = device_info.stack.name
|
||||
@@ -135,7 +144,7 @@ class PortainerStackEntity(PortainerCoordinatorEntity):
|
||||
identifiers={
|
||||
(
|
||||
DOMAIN,
|
||||
f"{coordinator.config_entry.entry_id}_{self.endpoint_id}_{self.device_name}",
|
||||
f"{coordinator.config_entry.entry_id}_{self.endpoint_id}_stack_{self.stack_id}",
|
||||
)
|
||||
},
|
||||
manufacturer=DEFAULT_NAME,
|
||||
@@ -149,6 +158,7 @@ class PortainerStackEntity(PortainerCoordinatorEntity):
|
||||
f"{coordinator.config_entry.entry_id}_{self.endpoint_id}",
|
||||
),
|
||||
)
|
||||
self._attr_unique_id = f"{coordinator.config_entry.entry_id}_{self.stack_id}_{entity_description.key}"
|
||||
|
||||
@property
|
||||
def available(self) -> bool:
|
||||
|
||||
@@ -21,7 +21,6 @@ from .const import STACK_TYPE_COMPOSE, STACK_TYPE_KUBERNETES, STACK_TYPE_SWARM
|
||||
from .coordinator import (
|
||||
PortainerConfigEntry,
|
||||
PortainerContainerData,
|
||||
PortainerCoordinator,
|
||||
PortainerStackData,
|
||||
)
|
||||
from .entity import (
|
||||
@@ -398,19 +397,6 @@ class PortainerContainerSensor(PortainerContainerEntity, SensorEntity):
|
||||
|
||||
entity_description: PortainerContainerSensorEntityDescription
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: PortainerCoordinator,
|
||||
entity_description: PortainerContainerSensorEntityDescription,
|
||||
device_info: PortainerContainerData,
|
||||
via_device: PortainerCoordinatorData,
|
||||
) -> None:
|
||||
"""Initialize the Portainer container sensor."""
|
||||
self.entity_description = entity_description
|
||||
super().__init__(device_info, coordinator, via_device)
|
||||
|
||||
self._attr_unique_id = f"{coordinator.config_entry.entry_id}_{self.device_name}_{entity_description.key}"
|
||||
|
||||
@property
|
||||
def native_value(self) -> StateType:
|
||||
"""Return the state of the sensor."""
|
||||
@@ -422,18 +408,6 @@ class PortainerEndpointSensor(PortainerEndpointEntity, SensorEntity):
|
||||
|
||||
entity_description: PortainerEndpointSensorEntityDescription
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: PortainerCoordinator,
|
||||
entity_description: PortainerEndpointSensorEntityDescription,
|
||||
device_info: PortainerCoordinatorData,
|
||||
) -> None:
|
||||
"""Initialize the Portainer endpoint sensor."""
|
||||
self.entity_description = entity_description
|
||||
super().__init__(device_info, coordinator)
|
||||
|
||||
self._attr_unique_id = f"{coordinator.config_entry.entry_id}_{device_info.id}_{entity_description.key}"
|
||||
|
||||
@property
|
||||
def native_value(self) -> StateType:
|
||||
"""Return the state of the sensor."""
|
||||
@@ -446,19 +420,6 @@ class PortainerStackSensor(PortainerStackEntity, SensorEntity):
|
||||
|
||||
entity_description: PortainerStackSensorEntityDescription
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: PortainerCoordinator,
|
||||
entity_description: PortainerStackSensorEntityDescription,
|
||||
device_info: PortainerStackData,
|
||||
via_device: PortainerCoordinatorData,
|
||||
) -> None:
|
||||
"""Initialize the Portainer stack sensor."""
|
||||
self.entity_description = entity_description
|
||||
super().__init__(device_info, coordinator, via_device)
|
||||
|
||||
self._attr_unique_id = f"{coordinator.config_entry.entry_id}_{device_info.stack.id}_{entity_description.key}"
|
||||
|
||||
@property
|
||||
def native_value(self) -> StateType:
|
||||
"""Return the state of the sensor."""
|
||||
|
||||
@@ -167,19 +167,6 @@ class PortainerContainerSwitch(PortainerContainerEntity, SwitchEntity):
|
||||
|
||||
entity_description: PortainerSwitchEntityDescription
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: PortainerCoordinator,
|
||||
entity_description: PortainerSwitchEntityDescription,
|
||||
device_info: PortainerContainerData,
|
||||
via_device: PortainerCoordinatorData,
|
||||
) -> None:
|
||||
"""Initialize the Portainer container switch."""
|
||||
self.entity_description = entity_description
|
||||
super().__init__(device_info, coordinator, via_device)
|
||||
|
||||
self._attr_unique_id = f"{coordinator.config_entry.entry_id}_{self.device_name}_{entity_description.key}"
|
||||
|
||||
@property
|
||||
def is_on(self) -> bool | None:
|
||||
"""Return the state of the device."""
|
||||
@@ -209,19 +196,6 @@ class PortainerStackSwitch(PortainerStackEntity, SwitchEntity):
|
||||
|
||||
entity_description: PortainerStackSwitchEntityDescription
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: PortainerCoordinator,
|
||||
entity_description: PortainerStackSwitchEntityDescription,
|
||||
device_info: PortainerStackData,
|
||||
via_device: PortainerCoordinatorData,
|
||||
) -> None:
|
||||
"""Initialize the Portainer stack switch."""
|
||||
self.entity_description = entity_description
|
||||
super().__init__(device_info, coordinator, via_device)
|
||||
|
||||
self._attr_unique_id = f"{coordinator.config_entry.entry_id}_{device_info.stack.id}_{entity_description.key}"
|
||||
|
||||
@property
|
||||
def is_on(self) -> bool | None:
|
||||
"""Return the state of the device."""
|
||||
|
||||
@@ -4,13 +4,19 @@ from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
|
||||
from powerfox import DeviceType, Powerfox, PowerfoxConnectionError
|
||||
from powerfox import (
|
||||
DeviceType,
|
||||
Powerfox,
|
||||
PowerfoxAuthenticationError,
|
||||
PowerfoxConnectionError,
|
||||
)
|
||||
|
||||
from homeassistant.const import CONF_EMAIL, CONF_PASSWORD, Platform
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import ConfigEntryNotReady
|
||||
from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
|
||||
from .const import DOMAIN
|
||||
from .coordinator import (
|
||||
PowerfoxConfigEntry,
|
||||
PowerfoxDataUpdateCoordinator,
|
||||
@@ -30,9 +36,18 @@ async def async_setup_entry(hass: HomeAssistant, entry: PowerfoxConfigEntry) ->
|
||||
|
||||
try:
|
||||
devices = await client.all_devices()
|
||||
except PowerfoxAuthenticationError as err:
|
||||
await client.close()
|
||||
raise ConfigEntryAuthFailed(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="auth_failed",
|
||||
) from err
|
||||
except PowerfoxConnectionError as err:
|
||||
await client.close()
|
||||
raise ConfigEntryNotReady from err
|
||||
raise ConfigEntryNotReady(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="connection_error",
|
||||
) from err
|
||||
|
||||
coordinators: list[
|
||||
PowerfoxDataUpdateCoordinator | PowerfoxReportDataUpdateCoordinator
|
||||
|
||||
@@ -59,18 +59,24 @@ class PowerfoxBaseCoordinator[T](DataUpdateCoordinator[T]):
|
||||
except PowerfoxAuthenticationError as err:
|
||||
raise ConfigEntryAuthFailed(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="invalid_auth",
|
||||
translation_placeholders={"error": str(err)},
|
||||
translation_key="auth_failed",
|
||||
) from err
|
||||
except (
|
||||
PowerfoxConnectionError,
|
||||
PowerfoxNoDataError,
|
||||
PowerfoxPrivacyError,
|
||||
) as err:
|
||||
except PowerfoxConnectionError as err:
|
||||
raise UpdateFailed(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="update_failed",
|
||||
translation_placeholders={"error": str(err)},
|
||||
translation_key="connection_error",
|
||||
) from err
|
||||
except PowerfoxNoDataError as err:
|
||||
raise UpdateFailed(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="no_data_error",
|
||||
translation_placeholders={"device_name": self.device.name},
|
||||
) from err
|
||||
except PowerfoxPrivacyError as err:
|
||||
raise UpdateFailed(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="privacy_error",
|
||||
translation_placeholders={"device_name": self.device.name},
|
||||
) from err
|
||||
|
||||
async def _async_fetch_data(self) -> T:
|
||||
|
||||
@@ -116,11 +116,17 @@
|
||||
}
|
||||
},
|
||||
"exceptions": {
|
||||
"invalid_auth": {
|
||||
"message": "Error while authenticating with the Powerfox service: {error}"
|
||||
"auth_failed": {
|
||||
"message": "Authentication with the Powerfox service failed. Please re-authenticate your account."
|
||||
},
|
||||
"update_failed": {
|
||||
"message": "Error while updating the Powerfox service: {error}"
|
||||
"connection_error": {
|
||||
"message": "Could not connect to the Powerfox service. Please check your network connection."
|
||||
},
|
||||
"no_data_error": {
|
||||
"message": "No data available for device \"{device_name}\". The device may not have reported data yet."
|
||||
},
|
||||
"privacy_error": {
|
||||
"message": "Data for device \"{device_name}\" is restricted due to privacy settings in the Powerfox app."
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -17,7 +17,7 @@ from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from .const import NODE_ONLINE, VM_CONTAINER_RUNNING
|
||||
from .coordinator import ProxmoxConfigEntry, ProxmoxCoordinator, ProxmoxNodeData
|
||||
from .coordinator import ProxmoxConfigEntry, ProxmoxNodeData
|
||||
from .entity import ProxmoxContainerEntity, ProxmoxNodeEntity, ProxmoxVMEntity
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
@@ -147,18 +147,6 @@ class ProxmoxNodeBinarySensor(ProxmoxNodeEntity, BinarySensorEntity):
|
||||
|
||||
entity_description: ProxmoxNodeBinarySensorEntityDescription
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: ProxmoxCoordinator,
|
||||
entity_description: ProxmoxNodeBinarySensorEntityDescription,
|
||||
node_data: ProxmoxNodeData,
|
||||
) -> None:
|
||||
"""Initialize Proxmox node binary sensor entity."""
|
||||
self.entity_description = entity_description
|
||||
super().__init__(coordinator, node_data)
|
||||
|
||||
self._attr_unique_id = f"{coordinator.config_entry.entry_id}_{node_data.node['id']}_{entity_description.key}"
|
||||
|
||||
@property
|
||||
def is_on(self) -> bool | None:
|
||||
"""Return true if the binary sensor is on."""
|
||||
@@ -170,19 +158,6 @@ class ProxmoxVMBinarySensor(ProxmoxVMEntity, BinarySensorEntity):
|
||||
|
||||
entity_description: ProxmoxVMBinarySensorEntityDescription
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: ProxmoxCoordinator,
|
||||
entity_description: ProxmoxVMBinarySensorEntityDescription,
|
||||
vm_data: dict[str, Any],
|
||||
node_data: ProxmoxNodeData,
|
||||
) -> None:
|
||||
"""Initialize the Proxmox VM binary sensor."""
|
||||
self.entity_description = entity_description
|
||||
super().__init__(coordinator, vm_data, node_data)
|
||||
|
||||
self._attr_unique_id = f"{coordinator.config_entry.entry_id}_{self.device_id}_{entity_description.key}"
|
||||
|
||||
@property
|
||||
def is_on(self) -> bool | None:
|
||||
"""Return true if the binary sensor is on."""
|
||||
@@ -194,19 +169,6 @@ class ProxmoxContainerBinarySensor(ProxmoxContainerEntity, BinarySensorEntity):
|
||||
|
||||
entity_description: ProxmoxContainerBinarySensorEntityDescription
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: ProxmoxCoordinator,
|
||||
entity_description: ProxmoxContainerBinarySensorEntityDescription,
|
||||
container_data: dict[str, Any],
|
||||
node_data: ProxmoxNodeData,
|
||||
) -> None:
|
||||
"""Initialize the Proxmox Container binary sensor."""
|
||||
self.entity_description = entity_description
|
||||
super().__init__(coordinator, container_data, node_data)
|
||||
|
||||
self._attr_unique_id = f"{coordinator.config_entry.entry_id}_{self.device_id}_{entity_description.key}"
|
||||
|
||||
@property
|
||||
def is_on(self) -> bool | None:
|
||||
"""Return true if the binary sensor is on."""
|
||||
|
||||
@@ -262,18 +262,6 @@ class ProxmoxNodeButtonEntity(ProxmoxNodeEntity, ProxmoxBaseButton):
|
||||
|
||||
entity_description: ProxmoxNodeButtonNodeEntityDescription
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: ProxmoxCoordinator,
|
||||
entity_description: ProxmoxNodeButtonNodeEntityDescription,
|
||||
node_data: ProxmoxNodeData,
|
||||
) -> None:
|
||||
"""Initialize the Proxmox Node button entity."""
|
||||
self.entity_description = entity_description
|
||||
super().__init__(coordinator, node_data)
|
||||
|
||||
self._attr_unique_id = f"{coordinator.config_entry.entry_id}_{node_data.node['id']}_{entity_description.key}"
|
||||
|
||||
async def _async_press_call(self) -> None:
|
||||
"""Execute the node button action via executor."""
|
||||
await self.hass.async_add_executor_job(
|
||||
@@ -288,19 +276,6 @@ class ProxmoxVMButtonEntity(ProxmoxVMEntity, ProxmoxBaseButton):
|
||||
|
||||
entity_description: ProxmoxVMButtonEntityDescription
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: ProxmoxCoordinator,
|
||||
entity_description: ProxmoxVMButtonEntityDescription,
|
||||
vm_data: dict[str, Any],
|
||||
node_data: ProxmoxNodeData,
|
||||
) -> None:
|
||||
"""Initialize the Proxmox VM button entity."""
|
||||
self.entity_description = entity_description
|
||||
super().__init__(coordinator, vm_data, node_data)
|
||||
|
||||
self._attr_unique_id = f"{coordinator.config_entry.entry_id}_{self.device_id}_{entity_description.key}"
|
||||
|
||||
async def _async_press_call(self) -> None:
|
||||
"""Execute the VM button action via executor."""
|
||||
await self.hass.async_add_executor_job(
|
||||
@@ -316,19 +291,6 @@ class ProxmoxContainerButtonEntity(ProxmoxContainerEntity, ProxmoxBaseButton):
|
||||
|
||||
entity_description: ProxmoxContainerButtonEntityDescription
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: ProxmoxCoordinator,
|
||||
entity_description: ProxmoxContainerButtonEntityDescription,
|
||||
container_data: dict[str, Any],
|
||||
node_data: ProxmoxNodeData,
|
||||
) -> None:
|
||||
"""Initialize the Proxmox Container button entity."""
|
||||
self.entity_description = entity_description
|
||||
super().__init__(coordinator, container_data, node_data)
|
||||
|
||||
self._attr_unique_id = f"{coordinator.config_entry.entry_id}_{self.device_id}_{entity_description.key}"
|
||||
|
||||
async def _async_press_call(self) -> None:
|
||||
"""Execute the container button action via executor."""
|
||||
await self.hass.async_add_executor_job(
|
||||
|
||||
@@ -8,6 +8,7 @@ from yarl import URL
|
||||
|
||||
from homeassistant.const import CONF_HOST, CONF_PORT
|
||||
from homeassistant.helpers.device_registry import DeviceInfo
|
||||
from homeassistant.helpers.entity import EntityDescription
|
||||
from homeassistant.helpers.update_coordinator import CoordinatorEntity
|
||||
|
||||
from .const import DOMAIN
|
||||
@@ -36,6 +37,7 @@ class ProxmoxNodeEntity(ProxmoxCoordinatorEntity):
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: ProxmoxCoordinator,
|
||||
entity_description: EntityDescription,
|
||||
node_data: ProxmoxNodeData,
|
||||
) -> None:
|
||||
"""Initialize the Proxmox node entity."""
|
||||
@@ -43,6 +45,7 @@ class ProxmoxNodeEntity(ProxmoxCoordinatorEntity):
|
||||
self._node_data = node_data
|
||||
self.device_id = node_data.node["id"]
|
||||
self.device_name = node_data.node["node"]
|
||||
self.entity_description = entity_description
|
||||
self._attr_device_info = DeviceInfo(
|
||||
identifiers={
|
||||
(DOMAIN, f"{coordinator.config_entry.entry_id}_node_{self.device_id}")
|
||||
@@ -54,6 +57,8 @@ class ProxmoxNodeEntity(ProxmoxCoordinatorEntity):
|
||||
),
|
||||
)
|
||||
|
||||
self._attr_unique_id = f"{coordinator.config_entry.entry_id}_{node_data.node['id']}_{entity_description.key}"
|
||||
|
||||
@property
|
||||
def available(self) -> bool:
|
||||
"""Return if the device is available."""
|
||||
@@ -66,11 +71,13 @@ class ProxmoxVMEntity(ProxmoxCoordinatorEntity):
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: ProxmoxCoordinator,
|
||||
entity_description: EntityDescription,
|
||||
vm_data: dict[str, Any],
|
||||
node_data: ProxmoxNodeData,
|
||||
) -> None:
|
||||
"""Initialize the Proxmox VM entity."""
|
||||
super().__init__(coordinator)
|
||||
self.entity_description = entity_description
|
||||
self._vm_data = vm_data
|
||||
self._node_name = node_data.node["node"]
|
||||
self.device_id = vm_data["vmid"]
|
||||
@@ -91,6 +98,8 @@ class ProxmoxVMEntity(ProxmoxCoordinatorEntity):
|
||||
),
|
||||
)
|
||||
|
||||
self._attr_unique_id = f"{coordinator.config_entry.entry_id}_{self.device_id}_{entity_description.key}"
|
||||
|
||||
@property
|
||||
def available(self) -> bool:
|
||||
"""Return if the device is available."""
|
||||
@@ -112,11 +121,13 @@ class ProxmoxContainerEntity(ProxmoxCoordinatorEntity):
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: ProxmoxCoordinator,
|
||||
entity_description: EntityDescription,
|
||||
container_data: dict[str, Any],
|
||||
node_data: ProxmoxNodeData,
|
||||
) -> None:
|
||||
"""Initialize the Proxmox Container entity."""
|
||||
super().__init__(coordinator)
|
||||
self.entity_description = entity_description
|
||||
self._container_data = container_data
|
||||
self._node_name = node_data.node["node"]
|
||||
self.device_id = container_data["vmid"]
|
||||
@@ -140,6 +151,8 @@ class ProxmoxContainerEntity(ProxmoxCoordinatorEntity):
|
||||
),
|
||||
)
|
||||
|
||||
self._attr_unique_id = f"{coordinator.config_entry.entry_id}_{self.device_id}_{entity_description.key}"
|
||||
|
||||
@property
|
||||
def available(self) -> bool:
|
||||
"""Return if the device is available."""
|
||||
|
||||
@@ -18,7 +18,7 @@ from homeassistant.const import PERCENTAGE, UnitOfInformation
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from .coordinator import ProxmoxConfigEntry, ProxmoxCoordinator, ProxmoxNodeData
|
||||
from .coordinator import ProxmoxConfigEntry, ProxmoxNodeData
|
||||
from .entity import ProxmoxContainerEntity, ProxmoxNodeEntity, ProxmoxVMEntity
|
||||
|
||||
|
||||
@@ -320,18 +320,6 @@ class ProxmoxNodeSensor(ProxmoxNodeEntity, SensorEntity):
|
||||
|
||||
entity_description: ProxmoxNodeSensorEntityDescription
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: ProxmoxCoordinator,
|
||||
entity_description: ProxmoxNodeSensorEntityDescription,
|
||||
node_data: ProxmoxNodeData,
|
||||
) -> None:
|
||||
"""Initialize the sensor."""
|
||||
super().__init__(coordinator, node_data)
|
||||
self.entity_description = entity_description
|
||||
|
||||
self._attr_unique_id = f"{coordinator.config_entry.entry_id}_{node_data.node['id']}_{entity_description.key}"
|
||||
|
||||
@property
|
||||
def native_value(self) -> StateType:
|
||||
"""Return the native value of the sensor."""
|
||||
@@ -343,19 +331,6 @@ class ProxmoxVMSensor(ProxmoxVMEntity, SensorEntity):
|
||||
|
||||
entity_description: ProxmoxVMSensorEntityDescription
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: ProxmoxCoordinator,
|
||||
entity_description: ProxmoxVMSensorEntityDescription,
|
||||
vm_data: dict[str, Any],
|
||||
node_data: ProxmoxNodeData,
|
||||
) -> None:
|
||||
"""Initialize the Proxmox VM sensor."""
|
||||
self.entity_description = entity_description
|
||||
super().__init__(coordinator, vm_data, node_data)
|
||||
|
||||
self._attr_unique_id = f"{coordinator.config_entry.entry_id}_{self.device_id}_{entity_description.key}"
|
||||
|
||||
@property
|
||||
def native_value(self) -> StateType:
|
||||
"""Return the native value of the sensor."""
|
||||
@@ -367,19 +342,6 @@ class ProxmoxContainerSensor(ProxmoxContainerEntity, SensorEntity):
|
||||
|
||||
entity_description: ProxmoxContainerSensorEntityDescription
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: ProxmoxCoordinator,
|
||||
entity_description: ProxmoxContainerSensorEntityDescription,
|
||||
container_data: dict[str, Any],
|
||||
node_data: ProxmoxNodeData,
|
||||
) -> None:
|
||||
"""Initialize the Proxmox container sensor."""
|
||||
self.entity_description = entity_description
|
||||
super().__init__(coordinator, container_data, node_data)
|
||||
|
||||
self._attr_unique_id = f"{coordinator.config_entry.entry_id}_{self.device_id}_{entity_description.key}"
|
||||
|
||||
@property
|
||||
def native_value(self) -> StateType:
|
||||
"""Return the native value of the sensor."""
|
||||
|
||||
@@ -32,6 +32,14 @@
|
||||
"username": "[%key:common::config_flow::data::username%]",
|
||||
"verify_ssl": "[%key:common::config_flow::data::verify_ssl%]"
|
||||
},
|
||||
"data_description": {
|
||||
"host": "[%key:component::proxmoxve::config::step::user::data_description::host%]",
|
||||
"password": "[%key:component::proxmoxve::config::step::user::data_description::password%]",
|
||||
"port": "[%key:component::proxmoxve::config::step::user::data_description::port%]",
|
||||
"realm": "[%key:component::proxmoxve::config::step::user::data_description::realm%]",
|
||||
"username": "[%key:component::proxmoxve::config::step::user::data_description::username%]",
|
||||
"verify_ssl": "[%key:component::proxmoxve::config::step::user::data_description::verify_ssl%]"
|
||||
},
|
||||
"description": "Use the following form to reconfigure your Proxmox VE server connection.",
|
||||
"title": "Reconfigure Proxmox VE integration"
|
||||
},
|
||||
@@ -44,6 +52,14 @@
|
||||
"username": "[%key:common::config_flow::data::username%]",
|
||||
"verify_ssl": "[%key:common::config_flow::data::verify_ssl%]"
|
||||
},
|
||||
"data_description": {
|
||||
"host": "The hostname or IP address of your Proxmox VE server",
|
||||
"password": "The password for the Proxmox VE server",
|
||||
"port": "The port of your Proxmox VE server (default: 8006)",
|
||||
"realm": "The authentication realm for the Proxmox VE server (default: 'pam')",
|
||||
"username": "The username for the Proxmox VE server",
|
||||
"verify_ssl": "Whether to verify SSL certificates. Disable only if you have a self-signed certificate"
|
||||
},
|
||||
"description": "Enter your Proxmox VE server details to set up the integration.",
|
||||
"title": "Connect to Proxmox VE"
|
||||
}
|
||||
|
||||
@@ -4,11 +4,16 @@ from __future__ import annotations
|
||||
|
||||
from pycognito.exceptions import WarrantException
|
||||
import pyschlage
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components.lock import DOMAIN as LOCK_DOMAIN
|
||||
from homeassistant.const import CONF_PASSWORD, CONF_USERNAME, Platform
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.core import HomeAssistant, SupportsResponse
|
||||
from homeassistant.exceptions import ConfigEntryAuthFailed
|
||||
from homeassistant.helpers import config_validation as cv, service
|
||||
from homeassistant.helpers.typing import ConfigType
|
||||
|
||||
from .const import DOMAIN, SERVICE_ADD_CODE, SERVICE_DELETE_CODE, SERVICE_GET_CODES
|
||||
from .coordinator import SchlageConfigEntry, SchlageDataUpdateCoordinator
|
||||
|
||||
PLATFORMS: list[Platform] = [
|
||||
@@ -19,6 +24,46 @@ PLATFORMS: list[Platform] = [
|
||||
Platform.SWITCH,
|
||||
]
|
||||
|
||||
CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)
|
||||
|
||||
|
||||
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
"""Set up the Schlage component."""
|
||||
service.async_register_platform_entity_service(
|
||||
hass,
|
||||
DOMAIN,
|
||||
SERVICE_ADD_CODE,
|
||||
entity_domain=LOCK_DOMAIN,
|
||||
schema={
|
||||
vol.Required("name"): cv.string,
|
||||
vol.Required("code"): cv.matches_regex(r"^\d{4,8}$"),
|
||||
},
|
||||
func=SERVICE_ADD_CODE,
|
||||
)
|
||||
|
||||
service.async_register_platform_entity_service(
|
||||
hass,
|
||||
DOMAIN,
|
||||
SERVICE_DELETE_CODE,
|
||||
entity_domain=LOCK_DOMAIN,
|
||||
schema={
|
||||
vol.Required("name"): cv.string,
|
||||
},
|
||||
func=SERVICE_DELETE_CODE,
|
||||
)
|
||||
|
||||
service.async_register_platform_entity_service(
|
||||
hass,
|
||||
DOMAIN,
|
||||
SERVICE_GET_CODES,
|
||||
entity_domain=LOCK_DOMAIN,
|
||||
schema=None,
|
||||
func=SERVICE_GET_CODES,
|
||||
supports_response=SupportsResponse.ONLY,
|
||||
)
|
||||
|
||||
return True
|
||||
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: SchlageConfigEntry) -> bool:
|
||||
"""Set up Schlage from a config entry."""
|
||||
|
||||
@@ -7,3 +7,7 @@ DOMAIN = "schlage"
|
||||
LOGGER = logging.getLogger(__package__)
|
||||
MANUFACTURER = "Schlage"
|
||||
UPDATE_INTERVAL = timedelta(seconds=30)
|
||||
|
||||
SERVICE_ADD_CODE = "add_code"
|
||||
SERVICE_DELETE_CODE = "delete_code"
|
||||
SERVICE_GET_CODES = "get_codes"
|
||||
|
||||
13
homeassistant/components/schlage/icons.json
Normal file
13
homeassistant/components/schlage/icons.json
Normal file
@@ -0,0 +1,13 @@
|
||||
{
|
||||
"services": {
|
||||
"add_code": {
|
||||
"service": "mdi:key-plus"
|
||||
},
|
||||
"delete_code": {
|
||||
"service": "mdi:key-minus"
|
||||
},
|
||||
"get_codes": {
|
||||
"service": "mdi:table-key"
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -4,10 +4,15 @@ from __future__ import annotations
|
||||
|
||||
from typing import Any
|
||||
|
||||
from pyschlage.code import AccessCode
|
||||
from pyschlage.exceptions import Error as SchlageError
|
||||
|
||||
from homeassistant.components.lock import LockEntity
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.core import HomeAssistant, ServiceResponse, callback
|
||||
from homeassistant.exceptions import HomeAssistantError, ServiceValidationError
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from .const import DOMAIN
|
||||
from .coordinator import LockData, SchlageConfigEntry, SchlageDataUpdateCoordinator
|
||||
from .entity import SchlageEntity
|
||||
|
||||
@@ -64,3 +69,108 @@ class SchlageLockEntity(SchlageEntity, LockEntity):
|
||||
"""Unlock the device."""
|
||||
await self.hass.async_add_executor_job(self._lock.unlock)
|
||||
await self.coordinator.async_request_refresh()
|
||||
|
||||
@staticmethod
|
||||
def _normalize_code_name(name: str) -> str:
|
||||
"""Normalize a code name for comparison."""
|
||||
return name.lower().strip()
|
||||
|
||||
def _validate_code_name(
|
||||
self, codes: dict[str, AccessCode] | None, name: str
|
||||
) -> None:
|
||||
"""Validate that the code name doesn't already exist."""
|
||||
normalized = self._normalize_code_name(name)
|
||||
if codes and any(
|
||||
self._normalize_code_name(code.name) == normalized
|
||||
for code in codes.values()
|
||||
):
|
||||
raise ServiceValidationError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="schlage_name_exists",
|
||||
translation_placeholders={"name": name},
|
||||
)
|
||||
|
||||
def _validate_code_value(
|
||||
self, codes: dict[str, AccessCode] | None, code: str
|
||||
) -> None:
|
||||
"""Validate that the code value doesn't already exist."""
|
||||
if codes and any(
|
||||
existing_code.code == code for existing_code in codes.values()
|
||||
):
|
||||
raise ServiceValidationError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="schlage_code_exists",
|
||||
)
|
||||
|
||||
async def _async_fetch_access_codes(self) -> dict[str, AccessCode] | None:
|
||||
"""Fetch access codes from the lock on demand."""
|
||||
try:
|
||||
await self.hass.async_add_executor_job(self._lock.refresh_access_codes)
|
||||
except SchlageError as ex:
|
||||
raise HomeAssistantError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="schlage_refresh_failed",
|
||||
) from ex
|
||||
return self._lock.access_codes
|
||||
|
||||
async def add_code(self, name: str, code: str) -> None:
|
||||
"""Add a lock code."""
|
||||
|
||||
codes = await self._async_fetch_access_codes()
|
||||
self._validate_code_name(codes, name)
|
||||
self._validate_code_value(codes, code)
|
||||
|
||||
access_code = AccessCode(name=name, code=code)
|
||||
try:
|
||||
await self.hass.async_add_executor_job(
|
||||
self._lock.add_access_code, access_code
|
||||
)
|
||||
except SchlageError as ex:
|
||||
raise HomeAssistantError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="schlage_add_code_failed",
|
||||
) from ex
|
||||
await self.coordinator.async_request_refresh()
|
||||
|
||||
async def delete_code(self, name: str) -> None:
|
||||
"""Delete a lock code."""
|
||||
codes = await self._async_fetch_access_codes()
|
||||
if not codes:
|
||||
return
|
||||
|
||||
normalized = self._normalize_code_name(name)
|
||||
code_id_to_delete = next(
|
||||
(
|
||||
code_id
|
||||
for code_id, code_data in codes.items()
|
||||
if self._normalize_code_name(code_data.name) == normalized
|
||||
),
|
||||
None,
|
||||
)
|
||||
|
||||
if not code_id_to_delete:
|
||||
# Code not found in defined codes, operation successful
|
||||
return
|
||||
|
||||
try:
|
||||
await self.hass.async_add_executor_job(codes[code_id_to_delete].delete)
|
||||
except SchlageError as ex:
|
||||
raise HomeAssistantError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="schlage_delete_code_failed",
|
||||
) from ex
|
||||
await self.coordinator.async_request_refresh()
|
||||
|
||||
async def get_codes(self) -> ServiceResponse:
|
||||
"""Get lock codes."""
|
||||
await self._async_fetch_access_codes()
|
||||
|
||||
if self._lock.access_codes:
|
||||
return {
|
||||
code: {
|
||||
"name": self._lock.access_codes[code].name,
|
||||
"code": self._lock.access_codes[code].code,
|
||||
}
|
||||
for code in self._lock.access_codes
|
||||
}
|
||||
return {}
|
||||
|
||||
38
homeassistant/components/schlage/services.yaml
Normal file
38
homeassistant/components/schlage/services.yaml
Normal file
@@ -0,0 +1,38 @@
|
||||
get_codes:
|
||||
target:
|
||||
entity:
|
||||
domain: lock
|
||||
integration: schlage
|
||||
|
||||
add_code:
|
||||
target:
|
||||
entity:
|
||||
domain: lock
|
||||
integration: schlage
|
||||
fields:
|
||||
name:
|
||||
required: true
|
||||
example: "Example Person"
|
||||
selector:
|
||||
text:
|
||||
multiline: false
|
||||
code:
|
||||
required: true
|
||||
example: "1111"
|
||||
selector:
|
||||
text:
|
||||
multiline: false
|
||||
type: password
|
||||
|
||||
delete_code:
|
||||
target:
|
||||
entity:
|
||||
domain: lock
|
||||
integration: schlage
|
||||
fields:
|
||||
name:
|
||||
required: true
|
||||
example: "Example Person"
|
||||
selector:
|
||||
text:
|
||||
multiline: false
|
||||
@@ -56,8 +56,50 @@
|
||||
}
|
||||
},
|
||||
"exceptions": {
|
||||
"schlage_add_code_failed": {
|
||||
"message": "Failed to add PIN code to the lock."
|
||||
},
|
||||
"schlage_code_exists": {
|
||||
"message": "A PIN code with this value already exists on the lock."
|
||||
},
|
||||
"schlage_delete_code_failed": {
|
||||
"message": "Failed to delete PIN code from the lock."
|
||||
},
|
||||
"schlage_name_exists": {
|
||||
"message": "A PIN code with the name \"{name}\" already exists on the lock."
|
||||
},
|
||||
"schlage_refresh_failed": {
|
||||
"message": "Failed to refresh Schlage data"
|
||||
"message": "Failed to refresh Schlage data."
|
||||
}
|
||||
},
|
||||
"services": {
|
||||
"add_code": {
|
||||
"description": "Adds a PIN code to a lock.",
|
||||
"fields": {
|
||||
"code": {
|
||||
"description": "The PIN code to add. Must be unique to the lock and be between 4 and 8 digits long.",
|
||||
"name": "PIN code"
|
||||
},
|
||||
"name": {
|
||||
"description": "Name for PIN code. Must be case insensitively unique to the lock.",
|
||||
"name": "PIN name"
|
||||
}
|
||||
},
|
||||
"name": "Add PIN code"
|
||||
},
|
||||
"delete_code": {
|
||||
"description": "Deletes a PIN code from a lock.",
|
||||
"fields": {
|
||||
"name": {
|
||||
"description": "Name of PIN code to delete.",
|
||||
"name": "[%key:component::schlage::services::add_code::fields::name::name%]"
|
||||
}
|
||||
},
|
||||
"name": "Delete PIN code"
|
||||
},
|
||||
"get_codes": {
|
||||
"description": "Retrieves all PIN codes from a lock.",
|
||||
"name": "Get PIN codes"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -124,6 +124,17 @@ class SFTPFlowHandler(ConfigFlow, domain=DOMAIN):
|
||||
}
|
||||
)
|
||||
|
||||
if not user_input[CONF_BACKUP_LOCATION].startswith("/"):
|
||||
errors[CONF_BACKUP_LOCATION] = "backup_location_relative"
|
||||
return self.async_show_form(
|
||||
step_id=step_id,
|
||||
data_schema=self.add_suggested_values_to_schema(
|
||||
DATA_SCHEMA, user_input
|
||||
),
|
||||
description_placeholders=placeholders,
|
||||
errors=errors,
|
||||
)
|
||||
|
||||
try:
|
||||
# Validate auth input and save uploaded key file if provided
|
||||
user_input = await self._validate_auth_and_save_keyfile(user_input)
|
||||
|
||||
@@ -4,6 +4,7 @@
|
||||
"already_configured": "Integration already configured. Host with same address, port and backup location already exists."
|
||||
},
|
||||
"error": {
|
||||
"backup_location_relative": "The remote path must be an absolute path (starting with `/`).",
|
||||
"invalid_key": "Invalid key uploaded. Please make sure key corresponds to valid SSH key algorithm.",
|
||||
"key_or_password_needed": "Please configure password or private key file location for SFTP Storage.",
|
||||
"os_error": "{error_message}. Please check if host and/or port are correct.",
|
||||
|
||||
@@ -38,6 +38,12 @@
|
||||
"ssl": "[%key:common::config_flow::data::ssl%]",
|
||||
"verify_ssl": "[%key:common::config_flow::data::verify_ssl%]"
|
||||
},
|
||||
"data_description": {
|
||||
"group": "[%key:component::sma::config::step::user::data_description::group%]",
|
||||
"host": "[%key:component::sma::config::step::user::data_description::host%]",
|
||||
"ssl": "[%key:component::sma::config::step::user::data_description::ssl%]",
|
||||
"verify_ssl": "[%key:component::sma::config::step::user::data_description::verify_ssl%]"
|
||||
},
|
||||
"description": "Use the following form to reconfigure your SMA device.",
|
||||
"title": "Reconfigure SMA Solar Integration"
|
||||
},
|
||||
@@ -50,7 +56,11 @@
|
||||
"verify_ssl": "[%key:common::config_flow::data::verify_ssl%]"
|
||||
},
|
||||
"data_description": {
|
||||
"host": "The hostname or IP address of your SMA device."
|
||||
"group": "The group of your SMA device, where the Modbus connection is configured",
|
||||
"host": "The hostname or IP address of your SMA device",
|
||||
"password": "The password for your SMA device",
|
||||
"ssl": "Whether to use SSL to connect to your SMA device. This is required for newer SMA devices, but older devices do not support SSL",
|
||||
"verify_ssl": "Whether to verify SSL certificates. Disable only if you have a self-signed certificate"
|
||||
},
|
||||
"description": "Enter your SMA device information.",
|
||||
"title": "Set up SMA Solar"
|
||||
|
||||
@@ -177,6 +177,12 @@
|
||||
"on": "mdi:lightbulb-on"
|
||||
}
|
||||
},
|
||||
"do_not_disturb": {
|
||||
"default": "mdi:minus-circle-off",
|
||||
"state": {
|
||||
"on": "mdi:minus-circle"
|
||||
}
|
||||
},
|
||||
"dry_plus": {
|
||||
"default": "mdi:heat-wave"
|
||||
},
|
||||
|
||||
@@ -34,5 +34,5 @@
|
||||
"iot_class": "cloud_push",
|
||||
"loggers": ["pysmartthings"],
|
||||
"quality_scale": "bronze",
|
||||
"requirements": ["pysmartthings==3.5.3"]
|
||||
"requirements": ["pysmartthings==3.6.0"]
|
||||
}
|
||||
|
||||
@@ -95,6 +95,7 @@ ROBOT_CLEANER_TURBO_MODE_STATE_MAP = {
|
||||
|
||||
ROBOT_CLEANER_MOVEMENT_MAP = {
|
||||
"powerOff": "off",
|
||||
"washingMop": "washing_mop",
|
||||
}
|
||||
|
||||
OVEN_MODE = {
|
||||
@@ -880,6 +881,7 @@ CAPABILITY_TO_SENSORS: dict[
|
||||
"after",
|
||||
"cleaning",
|
||||
"pause",
|
||||
"washing_mop",
|
||||
],
|
||||
device_class=SensorDeviceClass.ENUM,
|
||||
value_fn=lambda value: ROBOT_CLEANER_MOVEMENT_MAP.get(value, value),
|
||||
|
||||
@@ -718,7 +718,8 @@
|
||||
"off": "[%key:common::state::off%]",
|
||||
"pause": "[%key:common::state::paused%]",
|
||||
"point": "Point",
|
||||
"reserve": "Reserve"
|
||||
"reserve": "Reserve",
|
||||
"washing_mop": "Washing mop"
|
||||
}
|
||||
},
|
||||
"robot_cleaner_turbo_mode": {
|
||||
@@ -858,6 +859,9 @@
|
||||
"display_lighting": {
|
||||
"name": "Display lighting"
|
||||
},
|
||||
"do_not_disturb": {
|
||||
"name": "Do not disturb"
|
||||
},
|
||||
"dry_plus": {
|
||||
"name": "Dry plus"
|
||||
},
|
||||
|
||||
@@ -162,6 +162,14 @@ CAPABILITY_TO_SWITCHES: dict[Capability | str, SmartThingsSwitchEntityDescriptio
|
||||
status_attribute=Attribute.STATUS,
|
||||
entity_category=EntityCategory.CONFIG,
|
||||
),
|
||||
Capability.CUSTOM_DO_NOT_DISTURB_MODE: SmartThingsSwitchEntityDescription(
|
||||
key=Capability.CUSTOM_DO_NOT_DISTURB_MODE,
|
||||
translation_key="do_not_disturb",
|
||||
status_attribute=Attribute.DO_NOT_DISTURB,
|
||||
entity_category=EntityCategory.CONFIG,
|
||||
on_command=Command.DO_NOT_DISTURB_ON,
|
||||
off_command=Command.DO_NOT_DISTURB_OFF,
|
||||
),
|
||||
}
|
||||
DISHWASHER_WASHING_OPTIONS_TO_SWITCHES: dict[
|
||||
Attribute | str, SmartThingsDishwasherWashingOptionSwitchEntityDescription
|
||||
|
||||
@@ -42,5 +42,5 @@
|
||||
"iot_class": "local_push",
|
||||
"loggers": ["switchbot"],
|
||||
"quality_scale": "gold",
|
||||
"requirements": ["PySwitchbot==1.0.0"]
|
||||
"requirements": ["PySwitchbot==1.1.0"]
|
||||
}
|
||||
|
||||
@@ -1080,7 +1080,6 @@ async def load_data(
|
||||
req = await client.get(url)
|
||||
except (httpx.HTTPError, httpx.InvalidURL) as err:
|
||||
raise HomeAssistantError(
|
||||
f"Failed to load URL: {err!s}",
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="failed_to_load_url",
|
||||
translation_placeholders={"error": str(err)},
|
||||
@@ -1107,7 +1106,6 @@ async def load_data(
|
||||
1
|
||||
) # Add a sleep to allow other async operations to proceed
|
||||
raise HomeAssistantError(
|
||||
f"Failed to load URL: {req.status_code}",
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="failed_to_load_url",
|
||||
translation_placeholders={"error": str(req.status_code)},
|
||||
@@ -1117,13 +1115,11 @@ async def load_data(
|
||||
return await hass.async_add_executor_job(_read_file_as_bytesio, filepath)
|
||||
|
||||
raise ServiceValidationError(
|
||||
"File path has not been configured in allowlist_external_dirs.",
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="allowlist_external_dirs_error",
|
||||
)
|
||||
else:
|
||||
raise ServiceValidationError(
|
||||
"URL or File is required.",
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="missing_input",
|
||||
translation_placeholders={"field": "URL or File"},
|
||||
@@ -1138,7 +1134,6 @@ def _validate_credentials_input(
|
||||
and not username
|
||||
):
|
||||
raise ServiceValidationError(
|
||||
"Username is required.",
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="missing_input",
|
||||
translation_placeholders={"field": "Username"},
|
||||
@@ -1154,7 +1149,6 @@ def _validate_credentials_input(
|
||||
and not password
|
||||
):
|
||||
raise ServiceValidationError(
|
||||
"Password is required.",
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="missing_input",
|
||||
translation_placeholders={"field": "Password"},
|
||||
@@ -1170,7 +1164,6 @@ def _read_file_as_bytesio(file_path: str) -> io.BytesIO:
|
||||
return data
|
||||
except OSError as err:
|
||||
raise HomeAssistantError(
|
||||
f"Failed to load file: {err!s}",
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="failed_to_load_file",
|
||||
translation_placeholders={"error": str(err)},
|
||||
|
||||
@@ -257,6 +257,9 @@ class AbstractTemplateSensor(AbstractTemplateEntity, RestoreSensor):
|
||||
) -> StateType | date | datetime | Decimal | None:
|
||||
"""Validate the state."""
|
||||
if self._numeric_state_expected:
|
||||
if not isinstance(result, bool) and isinstance(result, (int, float)):
|
||||
return result
|
||||
|
||||
return template_validators.number(self, CONF_STATE)(result)
|
||||
|
||||
if result is None or self.device_class not in (
|
||||
|
||||
@@ -48,6 +48,7 @@ from .services import async_setup_services
|
||||
PLATFORMS: Final = [
|
||||
Platform.BINARY_SENSOR,
|
||||
Platform.BUTTON,
|
||||
Platform.CALENDAR,
|
||||
Platform.CLIMATE,
|
||||
Platform.COVER,
|
||||
Platform.DEVICE_TRACKER,
|
||||
|
||||
282
homeassistant/components/teslemetry/calendar.py
Normal file
282
homeassistant/components/teslemetry/calendar.py
Normal file
@@ -0,0 +1,282 @@
|
||||
"""Calendar platform for Teslemetry integration."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import datetime, timedelta
|
||||
from typing import Any
|
||||
|
||||
from homeassistant.components.calendar import CalendarEntity, CalendarEvent
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
from homeassistant.util import dt as dt_util
|
||||
|
||||
from . import TeslemetryConfigEntry
|
||||
from .entity import TeslemetryEnergyInfoEntity
|
||||
|
||||
PARALLEL_UPDATES = 0
|
||||
|
||||
|
||||
async def async_setup_entry(
    hass: HomeAssistant,
    entry: TeslemetryConfigEntry,
    async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
    """Set up the Teslemetry Calendar platform from a config entry."""

    calendars: list[CalendarEntity] = []

    # A calendar is created per energy site and tariff kind (buy/sell), but
    # only when the site actually reports season data for that tariff.
    for key_base, seasons_key in (
        ("tariff_content_v2", "tariff_content_v2_seasons"),
        ("tariff_content_v2_sell_tariff", "tariff_content_v2_sell_tariff_seasons"),
    ):
        for site in entry.runtime_data.energysites:
            if site.info_coordinator.data.get(seasons_key):
                calendars.append(TeslemetryTariffSchedule(site, key_base))

    async_add_entities(calendars)
|
||||
|
||||
|
||||
def _is_day_in_range(day_of_week: int, from_day: int, to_day: int) -> bool:
|
||||
"""Check if a day of week falls within a range, handling week crossing."""
|
||||
if from_day <= to_day:
|
||||
return from_day <= day_of_week <= to_day
|
||||
# Week crossing (e.g., Fri=4 to Mon=0)
|
||||
return day_of_week >= from_day or day_of_week <= to_day
|
||||
|
||||
|
||||
def _parse_period_times(
|
||||
period_def: dict[str, Any],
|
||||
base_day: datetime,
|
||||
) -> tuple[datetime, datetime] | None:
|
||||
"""Parse a TOU period definition into start and end times.
|
||||
|
||||
Returns None if the base_day's weekday doesn't match the period's day range.
|
||||
For periods crossing midnight, end_time will be on the following day.
|
||||
"""
|
||||
# DaysOfWeek are from 0-6 (Monday-Sunday)
|
||||
from_day = period_def.get("fromDayOfWeek", 0)
|
||||
to_day = period_def.get("toDayOfWeek", 6)
|
||||
|
||||
if not _is_day_in_range(base_day.weekday(), from_day, to_day):
|
||||
return None
|
||||
|
||||
# Hours are from 0-23, so 24 hours is 0-0
|
||||
from_hour = period_def.get("fromHour", 0)
|
||||
to_hour = period_def.get("toHour", 0)
|
||||
|
||||
# Minutes are from 0-59, so 60 minutes is 0-0
|
||||
from_minute = period_def.get("fromMinute", 0)
|
||||
to_minute = period_def.get("toMinute", 0)
|
||||
|
||||
start_time = base_day.replace(
|
||||
hour=from_hour, minute=from_minute, second=0, microsecond=0
|
||||
)
|
||||
end_time = base_day.replace(hour=to_hour, minute=to_minute, second=0, microsecond=0)
|
||||
|
||||
if end_time <= start_time:
|
||||
end_time += timedelta(days=1)
|
||||
|
||||
return start_time, end_time
|
||||
|
||||
|
||||
def _build_event(
    key_base: str,
    season_name: str,
    period_name: str,
    price: float | None,
    start_time: datetime,
    end_time: datetime,
) -> CalendarEvent:
    """Build a CalendarEvent for a tariff period."""
    pretty_period = period_name.capitalize().replace("_", " ")
    price_str = "Unknown Price" if price is None else f"{price:.2f}/kWh"
    description = (
        f"Season: {season_name.capitalize()}\n"
        f"Period: {pretty_period}\n"
        f"Price: {price_str}"
    )
    # The uid embeds the start timestamp so each occurrence is unique.
    return CalendarEvent(
        start=start_time,
        end=end_time,
        summary=f"{pretty_period}: {price_str}",
        description=description,
        uid=f"{key_base}_{season_name}_{period_name}_{start_time.isoformat()}",
    )
|
||||
|
||||
|
||||
class TeslemetryTariffSchedule(TeslemetryEnergyInfoEntity, CalendarEntity):
    """Energy Site Tariff Schedule Calendar."""

    def __init__(
        self,
        data: Any,
        key_base: str,
    ) -> None:
        """Initialize the tariff schedule calendar.

        data: the energy site data object handed to the base entity.
        key_base: coordinator key prefix (e.g. "tariff_content_v2" or
            "tariff_content_v2_sell_tariff") used to look up seasons/charges.
        """
        self.key_base: str = key_base
        # Season definitions keyed by season name; filled by _async_update_attrs.
        self.seasons: dict[str, dict[str, Any]] = {}
        # Energy charges (rates) keyed by season name; filled by _async_update_attrs.
        self.charges: dict[str, dict[str, Any]] = {}
        super().__init__(data, key_base)

    @property
    def event(self) -> CalendarEvent | None:
        """Return the current active tariff event."""
        now = dt_util.now()
        current_season_name = self._get_current_season(now)

        if not current_season_name or not self.seasons.get(current_season_name):
            return None

        # Time of use (TOU) periods define the tariff schedule within a season
        tou_periods = self.seasons[current_season_name].get("tou_periods", {})

        for period_name, period_group in tou_periods.items():
            for period_def in period_group.get("periods", []):
                result = _parse_period_times(period_def, now)
                if result is None:
                    continue

                start_time, end_time = result

                # Check if now falls within this period
                if not (start_time <= now < end_time):
                    # For cross-midnight periods, check yesterday's instance
                    start_time -= timedelta(days=1)
                    end_time -= timedelta(days=1)
                    if not (start_time <= now < end_time):
                        continue

                price = self._get_price_for_period(current_season_name, period_name)
                return _build_event(
                    self.key_base,
                    current_season_name,
                    period_name,
                    price,
                    start_time,
                    end_time,
                )

        return None

    async def async_get_events(
        self,
        hass: HomeAssistant,
        start_date: datetime,
        end_date: datetime,
    ) -> list[CalendarEvent]:
        """Return calendar events (tariff periods) within a datetime range."""
        events: list[CalendarEvent] = []

        start_date = dt_util.as_local(start_date)
        end_date = dt_util.as_local(end_date)

        # Start one day earlier to catch TOU periods that cross midnight
        # from the previous evening into the query range.
        current_day = dt_util.start_of_local_day(start_date) - timedelta(days=1)
        while current_day < end_date:
            season_name = self._get_current_season(current_day)
            if not season_name or not self.seasons.get(season_name):
                current_day += timedelta(days=1)
                continue

            tou_periods = self.seasons[season_name].get("tou_periods", {})

            for period_name, period_group in tou_periods.items():
                for period_def in period_group.get("periods", []):
                    result = _parse_period_times(period_def, current_day)
                    if result is None:
                        continue

                    start_time, end_time = result

                    # Only include periods that overlap the requested range.
                    if start_time < end_date and end_time > start_date:
                        price = self._get_price_for_period(season_name, period_name)
                        events.append(
                            _build_event(
                                self.key_base,
                                season_name,
                                period_name,
                                price,
                                start_time,
                                end_time,
                            )
                        )

            current_day += timedelta(days=1)

        events.sort(key=lambda x: x.start)
        return events

    def _get_current_season(self, date_to_check: datetime) -> str | None:
        """Determine the active season for a given date.

        Returns the matching season name, or None when no season covers the
        date or the season data is malformed/incomplete.
        """
        local_date = dt_util.as_local(date_to_check)
        year = local_date.year

        for season_name, season_data in self.seasons.items():
            if not season_data:
                continue

            try:
                from_month = season_data["fromMonth"]
                from_day = season_data["fromDay"]
                to_month = season_data["toMonth"]
                to_day = season_data["toDay"]

                # Handle seasons that cross year boundaries
                start_year = year
                end_year = year

                # Season crosses year boundary (e.g., Oct-Mar)
                if from_month > to_month or (
                    from_month == to_month and from_day > to_day
                ):
                    if local_date.month > from_month or (
                        local_date.month == from_month and local_date.day >= from_day
                    ):
                        end_year = year + 1
                    else:
                        start_year = year - 1

                season_start = local_date.replace(
                    year=start_year,
                    month=from_month,
                    day=from_day,
                    hour=0,
                    minute=0,
                    second=0,
                    microsecond=0,
                )
                # End is exclusive: midnight after the season's last day.
                season_end = local_date.replace(
                    year=end_year,
                    month=to_month,
                    day=to_day,
                    hour=0,
                    minute=0,
                    second=0,
                    microsecond=0,
                ) + timedelta(days=1)

                if season_start <= local_date < season_end:
                    return season_name
            # KeyError: missing month/day fields; ValueError: invalid
            # month/day combination passed to datetime.replace().
            # (Fixed: `except KeyError, ValueError:` is Python 2 syntax and a
            # SyntaxError in Python 3 — exceptions must be a tuple.)
            except (KeyError, ValueError):
                continue

        return None

    def _get_price_for_period(self, season_name: str, period_name: str) -> float | None:
        """Get the price for a specific season and period name.

        Falls back to the "ALL" season/rate entries; returns None when no
        price is defined or the data cannot be converted to float.
        """
        try:
            season_charges = self.charges.get(season_name, self.charges.get("ALL", {}))
            rates = season_charges.get("rates", {})
            price = rates.get(period_name, rates.get("ALL"))
            return float(price) if price is not None else None
        # TypeError: float() on a non-numeric value (e.g. dict/list).
        # (Fixed: Python 2 style `except A, B, C:` replaced with a tuple.)
        except (KeyError, ValueError, TypeError):
            return None

    def _async_update_attrs(self) -> None:
        """Update the Calendar attributes from coordinator data."""
        self.seasons = self.coordinator.data.get(f"{self.key_base}_seasons", {})
        self.charges = self.coordinator.data.get(f"{self.key_base}_energy_charges", {})
        # Entity is only available when both seasons and charges are present.
        self._attr_available = bool(self.seasons and self.charges)
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user