mirror of
https://github.com/home-assistant/core.git
synced 2026-02-27 20:41:44 +01:00
Compare commits
1 Commits
ubisys_vir
...
debounce-m
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
e243840558 |
@@ -34,7 +34,6 @@ base_platforms: &base_platforms
|
||||
- homeassistant/components/humidifier/**
|
||||
- homeassistant/components/image/**
|
||||
- homeassistant/components/image_processing/**
|
||||
- homeassistant/components/infrared/**
|
||||
- homeassistant/components/lawn_mower/**
|
||||
- homeassistant/components/light/**
|
||||
- homeassistant/components/lock/**
|
||||
|
||||
@@ -289,7 +289,6 @@ homeassistant.components.imgw_pib.*
|
||||
homeassistant.components.immich.*
|
||||
homeassistant.components.incomfort.*
|
||||
homeassistant.components.inels.*
|
||||
homeassistant.components.infrared.*
|
||||
homeassistant.components.input_button.*
|
||||
homeassistant.components.input_select.*
|
||||
homeassistant.components.input_text.*
|
||||
|
||||
2
CODEOWNERS
generated
2
CODEOWNERS
generated
@@ -794,8 +794,6 @@ build.json @home-assistant/supervisor
|
||||
/tests/components/inels/ @epdevlab
|
||||
/homeassistant/components/influxdb/ @mdegat01 @Robbie1221
|
||||
/tests/components/influxdb/ @mdegat01 @Robbie1221
|
||||
/homeassistant/components/infrared/ @home-assistant/core
|
||||
/tests/components/infrared/ @home-assistant/core
|
||||
/homeassistant/components/inkbird/ @bdraco
|
||||
/tests/components/inkbird/ @bdraco
|
||||
/homeassistant/components/input_boolean/ @home-assistant/core
|
||||
|
||||
@@ -1,5 +0,0 @@
|
||||
{
|
||||
"domain": "ubisys",
|
||||
"name": "Ubisys",
|
||||
"iot_standards": ["zigbee"]
|
||||
}
|
||||
@@ -12,6 +12,10 @@ from homeassistant.helpers.dispatcher import dispatcher_send
|
||||
|
||||
from .const import DOMAIN, DOMAIN_DATA, LOGGER
|
||||
|
||||
SERVICE_SETTINGS = "change_setting"
|
||||
SERVICE_CAPTURE_IMAGE = "capture_image"
|
||||
SERVICE_TRIGGER_AUTOMATION = "trigger_automation"
|
||||
|
||||
ATTR_SETTING = "setting"
|
||||
ATTR_VALUE = "value"
|
||||
|
||||
@@ -71,13 +75,16 @@ def async_setup_services(hass: HomeAssistant) -> None:
|
||||
"""Home Assistant services."""
|
||||
|
||||
hass.services.async_register(
|
||||
DOMAIN, "change_setting", _change_setting, schema=CHANGE_SETTING_SCHEMA
|
||||
DOMAIN, SERVICE_SETTINGS, _change_setting, schema=CHANGE_SETTING_SCHEMA
|
||||
)
|
||||
|
||||
hass.services.async_register(
|
||||
DOMAIN, "capture_image", _capture_image, schema=CAPTURE_IMAGE_SCHEMA
|
||||
DOMAIN, SERVICE_CAPTURE_IMAGE, _capture_image, schema=CAPTURE_IMAGE_SCHEMA
|
||||
)
|
||||
|
||||
hass.services.async_register(
|
||||
DOMAIN, "trigger_automation", _trigger_automation, schema=AUTOMATION_SCHEMA
|
||||
DOMAIN,
|
||||
SERVICE_TRIGGER_AUTOMATION,
|
||||
_trigger_automation,
|
||||
schema=AUTOMATION_SCHEMA,
|
||||
)
|
||||
|
||||
@@ -10,6 +10,8 @@ from homeassistant.helpers import config_validation as cv, service
|
||||
|
||||
from .const import DOMAIN
|
||||
|
||||
ADVANTAGE_AIR_SERVICE_SET_TIME_TO = "set_time_to"
|
||||
|
||||
|
||||
@callback
|
||||
def async_setup_services(hass: HomeAssistant) -> None:
|
||||
@@ -18,7 +20,7 @@ def async_setup_services(hass: HomeAssistant) -> None:
|
||||
service.async_register_platform_entity_service(
|
||||
hass,
|
||||
DOMAIN,
|
||||
"set_time_to",
|
||||
ADVANTAGE_AIR_SERVICE_SET_TIME_TO,
|
||||
entity_domain=SENSOR_DOMAIN,
|
||||
schema={vol.Required("minutes"): cv.positive_int},
|
||||
func="set_time_to",
|
||||
|
||||
@@ -8,12 +8,18 @@ from homeassistant.helpers import service
|
||||
|
||||
from .const import DOMAIN
|
||||
|
||||
_DEV_EN_ALT = "enable_alerts"
|
||||
_DEV_DS_ALT = "disable_alerts"
|
||||
_DEV_EN_REC = "start_recording"
|
||||
_DEV_DS_REC = "stop_recording"
|
||||
_DEV_SNAP = "snapshot"
|
||||
|
||||
CAMERA_SERVICES = {
|
||||
"enable_alerts": "async_enable_alerts",
|
||||
"disable_alerts": "async_disable_alerts",
|
||||
"start_recording": "async_start_recording",
|
||||
"stop_recording": "async_stop_recording",
|
||||
"snapshot": "async_snapshot",
|
||||
_DEV_EN_ALT: "async_enable_alerts",
|
||||
_DEV_DS_ALT: "async_disable_alerts",
|
||||
_DEV_EN_REC: "async_start_recording",
|
||||
_DEV_DS_REC: "async_stop_recording",
|
||||
_DEV_SNAP: "async_snapshot",
|
||||
}
|
||||
|
||||
|
||||
|
||||
@@ -13,6 +13,9 @@ from homeassistant.helpers import config_validation as cv, service
|
||||
|
||||
from .const import DOMAIN
|
||||
|
||||
SERVICE_ALARM_TOGGLE_CHIME = "alarm_toggle_chime"
|
||||
|
||||
SERVICE_ALARM_KEYPRESS = "alarm_keypress"
|
||||
ATTR_KEYPRESS = "keypress"
|
||||
|
||||
|
||||
@@ -23,7 +26,7 @@ def async_setup_services(hass: HomeAssistant) -> None:
|
||||
service.async_register_platform_entity_service(
|
||||
hass,
|
||||
DOMAIN,
|
||||
"alarm_toggle_chime",
|
||||
SERVICE_ALARM_TOGGLE_CHIME,
|
||||
entity_domain=ALARM_CONTROL_PANEL_DOMAIN,
|
||||
schema={
|
||||
vol.Required(ATTR_CODE): cv.string,
|
||||
@@ -34,7 +37,7 @@ def async_setup_services(hass: HomeAssistant) -> None:
|
||||
service.async_register_platform_entity_service(
|
||||
hass,
|
||||
DOMAIN,
|
||||
"alarm_keypress",
|
||||
SERVICE_ALARM_KEYPRESS,
|
||||
entity_domain=ALARM_CONTROL_PANEL_DOMAIN,
|
||||
schema={
|
||||
vol.Required(ATTR_KEYPRESS): cv.string,
|
||||
|
||||
@@ -16,6 +16,9 @@ from .coordinator import AmazonConfigEntry
|
||||
ATTR_TEXT_COMMAND = "text_command"
|
||||
ATTR_SOUND = "sound"
|
||||
ATTR_INFO_SKILL = "info_skill"
|
||||
SERVICE_TEXT_COMMAND = "send_text_command"
|
||||
SERVICE_SOUND_NOTIFICATION = "send_sound"
|
||||
SERVICE_INFO_SKILL = "send_info_skill"
|
||||
|
||||
SCHEMA_SOUND_SERVICE = vol.Schema(
|
||||
{
|
||||
@@ -125,17 +128,17 @@ def async_setup_services(hass: HomeAssistant) -> None:
|
||||
"""Set up the services for the Amazon Devices integration."""
|
||||
for service_name, method, schema in (
|
||||
(
|
||||
"send_sound",
|
||||
SERVICE_SOUND_NOTIFICATION,
|
||||
async_send_sound_notification,
|
||||
SCHEMA_SOUND_SERVICE,
|
||||
),
|
||||
(
|
||||
"send_text_command",
|
||||
SERVICE_TEXT_COMMAND,
|
||||
async_send_text_command,
|
||||
SCHEMA_CUSTOM_COMMAND,
|
||||
),
|
||||
(
|
||||
"send_info_skill",
|
||||
SERVICE_INFO_SKILL,
|
||||
async_send_info_skill,
|
||||
SCHEMA_INFO_SKILL,
|
||||
),
|
||||
|
||||
@@ -16,6 +16,8 @@ ATTRIBUTION = "Data provided by Amber Electric"
|
||||
LOGGER = logging.getLogger(__package__)
|
||||
PLATFORMS = [Platform.BINARY_SENSOR, Platform.SENSOR]
|
||||
|
||||
SERVICE_GET_FORECASTS = "get_forecasts"
|
||||
|
||||
GENERAL_CHANNEL = "general"
|
||||
CONTROLLED_LOAD_CHANNEL = "controlled_load"
|
||||
FEED_IN_CHANNEL = "feed_in"
|
||||
|
||||
@@ -22,6 +22,7 @@ from .const import (
|
||||
DOMAIN,
|
||||
FEED_IN_CHANNEL,
|
||||
GENERAL_CHANNEL,
|
||||
SERVICE_GET_FORECASTS,
|
||||
)
|
||||
from .coordinator import AmberConfigEntry
|
||||
from .helpers import format_cents_to_dollars, normalize_descriptor
|
||||
@@ -100,7 +101,7 @@ def async_setup_services(hass: HomeAssistant) -> None:
|
||||
|
||||
hass.services.async_register(
|
||||
DOMAIN,
|
||||
"get_forecasts",
|
||||
SERVICE_GET_FORECASTS,
|
||||
handle_get_forecasts,
|
||||
GET_FORECASTS_SCHEMA,
|
||||
supports_response=SupportsResponse.ONLY,
|
||||
|
||||
@@ -49,6 +49,18 @@ SCAN_INTERVAL = timedelta(seconds=15)
|
||||
|
||||
STREAM_SOURCE_LIST = ["snapshot", "mjpeg", "rtsp"]
|
||||
|
||||
_SRV_EN_REC = "enable_recording"
|
||||
_SRV_DS_REC = "disable_recording"
|
||||
_SRV_EN_AUD = "enable_audio"
|
||||
_SRV_DS_AUD = "disable_audio"
|
||||
_SRV_EN_MOT_REC = "enable_motion_recording"
|
||||
_SRV_DS_MOT_REC = "disable_motion_recording"
|
||||
_SRV_GOTO = "goto_preset"
|
||||
_SRV_CBW = "set_color_bw"
|
||||
_SRV_TOUR_ON = "start_tour"
|
||||
_SRV_TOUR_OFF = "stop_tour"
|
||||
|
||||
_SRV_PTZ_CTRL = "ptz_control"
|
||||
_ATTR_PTZ_TT = "travel_time"
|
||||
_ATTR_PTZ_MOV = "movement"
|
||||
_MOV = [
|
||||
@@ -91,17 +103,17 @@ _SRV_PTZ_SCHEMA = _SRV_SCHEMA.extend(
|
||||
)
|
||||
|
||||
CAMERA_SERVICES = {
|
||||
"enable_recording": (_SRV_SCHEMA, "async_enable_recording", ()),
|
||||
"disable_recording": (_SRV_SCHEMA, "async_disable_recording", ()),
|
||||
"enable_audio": (_SRV_SCHEMA, "async_enable_audio", ()),
|
||||
"disable_audio": (_SRV_SCHEMA, "async_disable_audio", ()),
|
||||
"enable_motion_recording": (_SRV_SCHEMA, "async_enable_motion_recording", ()),
|
||||
"disable_motion_recording": (_SRV_SCHEMA, "async_disable_motion_recording", ()),
|
||||
"goto_preset": (_SRV_GOTO_SCHEMA, "async_goto_preset", (_ATTR_PRESET,)),
|
||||
"set_color_bw": (_SRV_CBW_SCHEMA, "async_set_color_bw", (_ATTR_COLOR_BW,)),
|
||||
"start_tour": (_SRV_SCHEMA, "async_start_tour", ()),
|
||||
"stop_tour": (_SRV_SCHEMA, "async_stop_tour", ()),
|
||||
"ptz_control": (
|
||||
_SRV_EN_REC: (_SRV_SCHEMA, "async_enable_recording", ()),
|
||||
_SRV_DS_REC: (_SRV_SCHEMA, "async_disable_recording", ()),
|
||||
_SRV_EN_AUD: (_SRV_SCHEMA, "async_enable_audio", ()),
|
||||
_SRV_DS_AUD: (_SRV_SCHEMA, "async_disable_audio", ()),
|
||||
_SRV_EN_MOT_REC: (_SRV_SCHEMA, "async_enable_motion_recording", ()),
|
||||
_SRV_DS_MOT_REC: (_SRV_SCHEMA, "async_disable_motion_recording", ()),
|
||||
_SRV_GOTO: (_SRV_GOTO_SCHEMA, "async_goto_preset", (_ATTR_PRESET,)),
|
||||
_SRV_CBW: (_SRV_CBW_SCHEMA, "async_set_color_bw", (_ATTR_COLOR_BW,)),
|
||||
_SRV_TOUR_ON: (_SRV_SCHEMA, "async_start_tour", ()),
|
||||
_SRV_TOUR_OFF: (_SRV_SCHEMA, "async_stop_tour", ()),
|
||||
_SRV_PTZ_CTRL: (
|
||||
_SRV_PTZ_SCHEMA,
|
||||
"async_ptz_control",
|
||||
(_ATTR_PTZ_MOV, _ATTR_PTZ_TT),
|
||||
|
||||
@@ -36,7 +36,7 @@ from .const import (
|
||||
SIGNAL_CONFIG_ENTITY,
|
||||
)
|
||||
from .entity import AndroidTVEntity, adb_decorator
|
||||
from .services import ATTR_ADB_RESPONSE, ATTR_HDMI_INPUT
|
||||
from .services import ATTR_ADB_RESPONSE, ATTR_HDMI_INPUT, SERVICE_LEARN_SENDEVENT
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
@@ -271,7 +271,7 @@ class ADBDevice(AndroidTVEntity, MediaPlayerEntity):
|
||||
self.async_write_ha_state()
|
||||
|
||||
msg = (
|
||||
f"Output from service 'learn_sendevent' from"
|
||||
f"Output from service '{SERVICE_LEARN_SENDEVENT}' from"
|
||||
f" {self.entity_id}: '{output}'"
|
||||
)
|
||||
persistent_notification.async_create(
|
||||
|
||||
@@ -16,6 +16,11 @@ ATTR_DEVICE_PATH = "device_path"
|
||||
ATTR_HDMI_INPUT = "hdmi_input"
|
||||
ATTR_LOCAL_PATH = "local_path"
|
||||
|
||||
SERVICE_ADB_COMMAND = "adb_command"
|
||||
SERVICE_DOWNLOAD = "download"
|
||||
SERVICE_LEARN_SENDEVENT = "learn_sendevent"
|
||||
SERVICE_UPLOAD = "upload"
|
||||
|
||||
|
||||
@callback
|
||||
def async_setup_services(hass: HomeAssistant) -> None:
|
||||
@@ -24,7 +29,7 @@ def async_setup_services(hass: HomeAssistant) -> None:
|
||||
service.async_register_platform_entity_service(
|
||||
hass,
|
||||
DOMAIN,
|
||||
"adb_command",
|
||||
SERVICE_ADB_COMMAND,
|
||||
entity_domain=MEDIA_PLAYER_DOMAIN,
|
||||
schema={vol.Required(ATTR_COMMAND): cv.string},
|
||||
func="adb_command",
|
||||
@@ -32,7 +37,7 @@ def async_setup_services(hass: HomeAssistant) -> None:
|
||||
service.async_register_platform_entity_service(
|
||||
hass,
|
||||
DOMAIN,
|
||||
"learn_sendevent",
|
||||
SERVICE_LEARN_SENDEVENT,
|
||||
entity_domain=MEDIA_PLAYER_DOMAIN,
|
||||
schema=None,
|
||||
func="learn_sendevent",
|
||||
@@ -40,7 +45,7 @@ def async_setup_services(hass: HomeAssistant) -> None:
|
||||
service.async_register_platform_entity_service(
|
||||
hass,
|
||||
DOMAIN,
|
||||
"download",
|
||||
SERVICE_DOWNLOAD,
|
||||
entity_domain=MEDIA_PLAYER_DOMAIN,
|
||||
schema={
|
||||
vol.Required(ATTR_DEVICE_PATH): cv.string,
|
||||
@@ -51,7 +56,7 @@ def async_setup_services(hass: HomeAssistant) -> None:
|
||||
service.async_register_platform_entity_service(
|
||||
hass,
|
||||
DOMAIN,
|
||||
"upload",
|
||||
SERVICE_UPLOAD,
|
||||
entity_domain=MEDIA_PLAYER_DOMAIN,
|
||||
schema={
|
||||
vol.Required(ATTR_DEVICE_PATH): cv.string,
|
||||
|
||||
@@ -858,11 +858,6 @@ class AnthropicBaseLLMEntity(Entity):
|
||||
]
|
||||
)
|
||||
messages.extend(new_messages)
|
||||
except anthropic.AuthenticationError as err:
|
||||
self.entry.async_start_reauth(self.hass)
|
||||
raise HomeAssistantError(
|
||||
"Authentication error with Anthropic API, reauthentication required"
|
||||
) from err
|
||||
except anthropic.AnthropicError as err:
|
||||
raise HomeAssistantError(
|
||||
f"Sorry, I had a problem talking to Anthropic: {err}"
|
||||
|
||||
@@ -1,55 +0,0 @@
|
||||
"""Diagnostics support for AWS S3."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import dataclasses
|
||||
from typing import Any
|
||||
|
||||
from homeassistant.components.backup import (
|
||||
DATA_MANAGER as BACKUP_DATA_MANAGER,
|
||||
BackupManager,
|
||||
)
|
||||
from homeassistant.components.diagnostics import async_redact_data
|
||||
from homeassistant.core import HomeAssistant
|
||||
|
||||
from .const import (
|
||||
CONF_ACCESS_KEY_ID,
|
||||
CONF_BUCKET,
|
||||
CONF_PREFIX,
|
||||
CONF_SECRET_ACCESS_KEY,
|
||||
DOMAIN,
|
||||
)
|
||||
from .coordinator import S3ConfigEntry
|
||||
from .helpers import async_list_backups_from_s3
|
||||
|
||||
TO_REDACT = (CONF_ACCESS_KEY_ID, CONF_SECRET_ACCESS_KEY)
|
||||
|
||||
|
||||
async def async_get_config_entry_diagnostics(
|
||||
hass: HomeAssistant,
|
||||
entry: S3ConfigEntry,
|
||||
) -> dict[str, Any]:
|
||||
"""Return diagnostics for a config entry."""
|
||||
coordinator = entry.runtime_data
|
||||
backup_manager: BackupManager = hass.data[BACKUP_DATA_MANAGER]
|
||||
backups = await async_list_backups_from_s3(
|
||||
coordinator.client,
|
||||
bucket=entry.data[CONF_BUCKET],
|
||||
prefix=entry.data.get(CONF_PREFIX, ""),
|
||||
)
|
||||
|
||||
data = {
|
||||
"coordinator_data": dataclasses.asdict(coordinator.data),
|
||||
"config": {
|
||||
**entry.data,
|
||||
**entry.options,
|
||||
},
|
||||
"backup_agents": [
|
||||
{"name": agent.name}
|
||||
for agent in backup_manager.backup_agents.values()
|
||||
if agent.domain == DOMAIN
|
||||
],
|
||||
"backup": [backup.as_dict() for backup in backups],
|
||||
}
|
||||
|
||||
return async_redact_data(data, TO_REDACT)
|
||||
@@ -38,14 +38,14 @@ rules:
|
||||
docs-installation-parameters: done
|
||||
entity-unavailable: done
|
||||
integration-owner: done
|
||||
log-when-unavailable: done
|
||||
log-when-unavailable: todo
|
||||
parallel-updates: done
|
||||
reauthentication-flow: todo
|
||||
test-coverage: done
|
||||
|
||||
# Gold
|
||||
devices: done
|
||||
diagnostics: done
|
||||
diagnostics: todo
|
||||
discovery-update-info:
|
||||
status: exempt
|
||||
comment: S3 is a cloud service that is not discovered on the network.
|
||||
|
||||
@@ -43,11 +43,11 @@
|
||||
"title": "The backup location {agent_id} is unavailable"
|
||||
},
|
||||
"automatic_backup_failed_addons": {
|
||||
"description": "Apps {failed_addons} could not be included in automatic backup. Please check the Supervisor logs for more information. Another attempt will be made at the next scheduled time if a backup schedule is configured.",
|
||||
"title": "Not all apps could be included in automatic backup"
|
||||
"description": "Add-ons {failed_addons} could not be included in automatic backup. Please check the Supervisor logs for more information. Another attempt will be made at the next scheduled time if a backup schedule is configured.",
|
||||
"title": "Not all add-ons could be included in automatic backup"
|
||||
},
|
||||
"automatic_backup_failed_agents_addons_folders": {
|
||||
"description": "The automatic backup was created with errors:\n* Locations which the backup could not be uploaded to: {failed_agents}\n* Apps which could not be backed up: {failed_addons}\n* Folders which could not be backed up: {failed_folders}\n\nPlease check the Core and Supervisor logs for more information. Another attempt will be made at the next scheduled time if a backup schedule is configured.",
|
||||
"description": "The automatic backup was created with errors:\n* Locations which the backup could not be uploaded to: {failed_agents}\n* Add-ons which could not be backed up: {failed_addons}\n* Folders which could not be backed up: {failed_folders}\n\nPlease check the Core and Supervisor logs for more information. Another attempt will be made at the next scheduled time if a backup schedule is configured.",
|
||||
"title": "Automatic backup was created with errors"
|
||||
},
|
||||
"automatic_backup_failed_create": {
|
||||
|
||||
@@ -31,6 +31,10 @@ ATTR_FRIDAY_SLOTS = "friday_slots"
|
||||
ATTR_SATURDAY_SLOTS = "saturday_slots"
|
||||
ATTR_SUNDAY_SLOTS = "sunday_slots"
|
||||
|
||||
# Service names
|
||||
SERVICE_SET_HOT_WATER_SCHEDULE = "set_hot_water_schedule"
|
||||
SERVICE_SYNC_TIME = "sync_time"
|
||||
|
||||
|
||||
# Schema for a single time slot
|
||||
_SLOT_SCHEMA = vol.Schema(
|
||||
@@ -256,14 +260,14 @@ def async_setup_services(hass: HomeAssistant) -> None:
|
||||
"""Register the BSB-LAN services."""
|
||||
hass.services.async_register(
|
||||
DOMAIN,
|
||||
"set_hot_water_schedule",
|
||||
SERVICE_SET_HOT_WATER_SCHEDULE,
|
||||
set_hot_water_schedule,
|
||||
schema=SERVICE_SET_HOT_WATER_SCHEDULE_SCHEMA,
|
||||
)
|
||||
|
||||
hass.services.async_register(
|
||||
DOMAIN,
|
||||
"sync_time",
|
||||
SERVICE_SYNC_TIME,
|
||||
async_sync_time,
|
||||
schema=SYNC_TIME_SCHEMA,
|
||||
)
|
||||
|
||||
@@ -329,8 +329,8 @@
|
||||
"nano_nr_3": "Nano 3",
|
||||
"nano_nr_4": "Nano 4",
|
||||
"nano_nr_5": "Nano 5",
|
||||
"off": "[%key:common::state::off%]",
|
||||
"on": "[%key:common::state::on%]",
|
||||
"off": "Off",
|
||||
"on": "On",
|
||||
"summer": "Summer",
|
||||
"winter": "Winter"
|
||||
}
|
||||
@@ -368,8 +368,8 @@
|
||||
"pump_status": {
|
||||
"name": "Pump status",
|
||||
"state": {
|
||||
"off": "[%key:common::state::off%]",
|
||||
"on": "[%key:common::state::on%]"
|
||||
"off": "Off",
|
||||
"on": "On"
|
||||
}
|
||||
},
|
||||
"return_circuit_temperature": {
|
||||
|
||||
@@ -115,7 +115,7 @@ def _zone_temperature_lists(device: Appliance) -> tuple[list[str], list[str]]:
|
||||
try:
|
||||
heating = device.represent(DAIKIN_ZONE_TEMP_HEAT)[1]
|
||||
cooling = device.represent(DAIKIN_ZONE_TEMP_COOL)[1]
|
||||
except AttributeError, KeyError:
|
||||
except AttributeError:
|
||||
return ([], [])
|
||||
return (list(heating or []), list(cooling or []))
|
||||
|
||||
|
||||
@@ -36,12 +36,12 @@ from homeassistant.const import (
|
||||
UnitOfTemperature,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.exceptions import HomeAssistantError, ServiceValidationError
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
|
||||
from homeassistant.util import dt as dt_util
|
||||
|
||||
from .const import ATTR_DURATION, ATTR_PERIOD, DOMAIN, EVOHOME_DATA, EvoService
|
||||
from .const import ATTR_DURATION, ATTR_PERIOD, ATTR_SETPOINT, EVOHOME_DATA, EvoService
|
||||
from .coordinator import EvoDataUpdateCoordinator
|
||||
from .entity import EvoChild, EvoEntity
|
||||
|
||||
@@ -132,24 +132,6 @@ class EvoClimateEntity(EvoEntity, ClimateEntity):
|
||||
_attr_hvac_modes = [HVACMode.OFF, HVACMode.HEAT]
|
||||
_attr_temperature_unit = UnitOfTemperature.CELSIUS
|
||||
|
||||
async def async_clear_zone_override(self) -> None:
|
||||
"""Clear the zone override; only supported by zones."""
|
||||
raise ServiceValidationError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="zone_only_service",
|
||||
translation_placeholders={"service": EvoService.CLEAR_ZONE_OVERRIDE},
|
||||
)
|
||||
|
||||
async def async_set_zone_override(
|
||||
self, setpoint: float, duration: timedelta | None = None
|
||||
) -> None:
|
||||
"""Set the zone override; only supported by zones."""
|
||||
raise ServiceValidationError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="zone_only_service",
|
||||
translation_placeholders={"service": EvoService.SET_ZONE_OVERRIDE},
|
||||
)
|
||||
|
||||
|
||||
class EvoZone(EvoChild, EvoClimateEntity):
|
||||
"""Base for any evohome-compatible heating zone."""
|
||||
@@ -188,22 +170,22 @@ class EvoZone(EvoChild, EvoClimateEntity):
|
||||
| ClimateEntityFeature.TURN_ON
|
||||
)
|
||||
|
||||
async def async_clear_zone_override(self) -> None:
|
||||
"""Clear the zone's override, if any."""
|
||||
await self.coordinator.call_client_api(self._evo_device.reset())
|
||||
async def async_zone_svc_request(self, service: str, data: dict[str, Any]) -> None:
|
||||
"""Process a service request (setpoint override) for a zone."""
|
||||
if service == EvoService.CLEAR_ZONE_OVERRIDE:
|
||||
await self.coordinator.call_client_api(self._evo_device.reset())
|
||||
return
|
||||
|
||||
async def async_set_zone_override(
|
||||
self, setpoint: float, duration: timedelta | None = None
|
||||
) -> None:
|
||||
"""Set the zone's override (mode/setpoint)."""
|
||||
temperature = max(min(setpoint, self.max_temp), self.min_temp)
|
||||
# otherwise it is EvoService.SET_ZONE_OVERRIDE
|
||||
temperature = max(min(data[ATTR_SETPOINT], self.max_temp), self.min_temp)
|
||||
|
||||
if duration is not None:
|
||||
if ATTR_DURATION in data:
|
||||
duration: timedelta = data[ATTR_DURATION]
|
||||
if duration.total_seconds() == 0:
|
||||
await self._update_schedule()
|
||||
until = self.setpoints.get("next_sp_from")
|
||||
else:
|
||||
until = dt_util.now() + duration
|
||||
until = dt_util.now() + data[ATTR_DURATION]
|
||||
else:
|
||||
until = None # indefinitely
|
||||
|
||||
|
||||
@@ -12,7 +12,7 @@ from homeassistant.core import callback
|
||||
from homeassistant.helpers.dispatcher import async_dispatcher_connect
|
||||
from homeassistant.helpers.update_coordinator import CoordinatorEntity
|
||||
|
||||
from .const import DOMAIN
|
||||
from .const import DOMAIN, EvoService
|
||||
from .coordinator import EvoDataUpdateCoordinator
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
@@ -47,12 +47,22 @@ class EvoEntity(CoordinatorEntity[EvoDataUpdateCoordinator]):
|
||||
raise NotImplementedError
|
||||
if payload["unique_id"] != self._attr_unique_id:
|
||||
return
|
||||
if payload["service"] in (
|
||||
EvoService.SET_ZONE_OVERRIDE,
|
||||
EvoService.CLEAR_ZONE_OVERRIDE,
|
||||
):
|
||||
await self.async_zone_svc_request(payload["service"], payload["data"])
|
||||
return
|
||||
await self.async_tcs_svc_request(payload["service"], payload["data"])
|
||||
|
||||
async def async_tcs_svc_request(self, service: str, data: dict[str, Any]) -> None:
|
||||
"""Process a service request (system mode) for a controller."""
|
||||
raise NotImplementedError
|
||||
|
||||
async def async_zone_svc_request(self, service: str, data: dict[str, Any]) -> None:
|
||||
"""Process a service request (setpoint override) for a zone."""
|
||||
raise NotImplementedError
|
||||
|
||||
@property
|
||||
def extra_state_attributes(self) -> Mapping[str, Any]:
|
||||
"""Return the evohome-specific state attributes."""
|
||||
|
||||
@@ -3,7 +3,7 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import timedelta
|
||||
from typing import Any, Final
|
||||
from typing import Final
|
||||
|
||||
from evohomeasync2.const import SZ_CAN_BE_TEMPORARY, SZ_SYSTEM_MODE, SZ_TIMING_MODE
|
||||
from evohomeasync2.schemas.const import (
|
||||
@@ -13,10 +13,9 @@ from evohomeasync2.schemas.const import (
|
||||
)
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components.climate import DOMAIN as CLIMATE_DOMAIN
|
||||
from homeassistant.const import ATTR_MODE
|
||||
from homeassistant.const import ATTR_ENTITY_ID, ATTR_MODE
|
||||
from homeassistant.core import HomeAssistant, ServiceCall, callback
|
||||
from homeassistant.helpers import config_validation as cv, service
|
||||
from homeassistant.helpers import config_validation as cv, entity_registry as er
|
||||
from homeassistant.helpers.dispatcher import async_dispatcher_send
|
||||
from homeassistant.helpers.service import verify_domain_control
|
||||
|
||||
@@ -26,38 +25,21 @@ from .coordinator import EvoDataUpdateCoordinator
|
||||
# system mode schemas are built dynamically when the services are registered
|
||||
# because supported modes can vary for edge-case systems
|
||||
|
||||
# Zone service schemas (registered as entity services)
|
||||
CLEAR_ZONE_OVERRIDE_SCHEMA: Final[dict[str | vol.Marker, Any]] = {}
|
||||
SET_ZONE_OVERRIDE_SCHEMA: Final[dict[str | vol.Marker, Any]] = {
|
||||
vol.Required(ATTR_SETPOINT): vol.All(
|
||||
vol.Coerce(float), vol.Range(min=4.0, max=35.0)
|
||||
),
|
||||
vol.Optional(ATTR_DURATION): vol.All(
|
||||
cv.time_period,
|
||||
vol.Range(min=timedelta(days=0), max=timedelta(days=1)),
|
||||
),
|
||||
}
|
||||
|
||||
|
||||
def _register_zone_entity_services(hass: HomeAssistant) -> None:
|
||||
"""Register entity-level services for zones."""
|
||||
|
||||
service.async_register_platform_entity_service(
|
||||
hass,
|
||||
DOMAIN,
|
||||
EvoService.CLEAR_ZONE_OVERRIDE,
|
||||
entity_domain=CLIMATE_DOMAIN,
|
||||
schema=CLEAR_ZONE_OVERRIDE_SCHEMA,
|
||||
func="async_clear_zone_override",
|
||||
)
|
||||
service.async_register_platform_entity_service(
|
||||
hass,
|
||||
DOMAIN,
|
||||
EvoService.SET_ZONE_OVERRIDE,
|
||||
entity_domain=CLIMATE_DOMAIN,
|
||||
schema=SET_ZONE_OVERRIDE_SCHEMA,
|
||||
func="async_set_zone_override",
|
||||
)
|
||||
CLEAR_ZONE_OVERRIDE_SCHEMA: Final = vol.Schema(
|
||||
{vol.Required(ATTR_ENTITY_ID): cv.entity_id}
|
||||
)
|
||||
SET_ZONE_OVERRIDE_SCHEMA: Final = vol.Schema(
|
||||
{
|
||||
vol.Required(ATTR_ENTITY_ID): cv.entity_id,
|
||||
vol.Required(ATTR_SETPOINT): vol.All(
|
||||
vol.Coerce(float), vol.Range(min=4.0, max=35.0)
|
||||
),
|
||||
vol.Optional(ATTR_DURATION): vol.All(
|
||||
cv.time_period,
|
||||
vol.Range(min=timedelta(days=0), max=timedelta(days=1)),
|
||||
),
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
@callback
|
||||
@@ -69,6 +51,8 @@ def setup_service_functions(
|
||||
Not all Honeywell TCC-compatible systems support all operating modes. In addition,
|
||||
each mode will require any of four distinct service schemas. This has to be
|
||||
enumerated before registering the appropriate handlers.
|
||||
|
||||
It appears that all TCC-compatible systems support the same three zones modes.
|
||||
"""
|
||||
|
||||
@verify_domain_control(DOMAIN)
|
||||
@@ -88,6 +72,28 @@ def setup_service_functions(
|
||||
}
|
||||
async_dispatcher_send(hass, DOMAIN, payload)
|
||||
|
||||
@verify_domain_control(DOMAIN)
|
||||
async def set_zone_override(call: ServiceCall) -> None:
|
||||
"""Set the zone override (setpoint)."""
|
||||
entity_id = call.data[ATTR_ENTITY_ID]
|
||||
|
||||
registry = er.async_get(hass)
|
||||
registry_entry = registry.async_get(entity_id)
|
||||
|
||||
if registry_entry is None or registry_entry.platform != DOMAIN:
|
||||
raise ValueError(f"'{entity_id}' is not a known {DOMAIN} entity")
|
||||
|
||||
if registry_entry.domain != "climate":
|
||||
raise ValueError(f"'{entity_id}' is not an {DOMAIN} controller/zone")
|
||||
|
||||
payload = {
|
||||
"unique_id": registry_entry.unique_id,
|
||||
"service": call.service,
|
||||
"data": call.data,
|
||||
}
|
||||
|
||||
async_dispatcher_send(hass, DOMAIN, payload)
|
||||
|
||||
assert coordinator.tcs is not None # mypy
|
||||
|
||||
hass.services.async_register(DOMAIN, EvoService.REFRESH_SYSTEM, force_refresh)
|
||||
@@ -150,4 +156,16 @@ def setup_service_functions(
|
||||
schema=vol.Schema(vol.Any(*system_mode_schemas)),
|
||||
)
|
||||
|
||||
_register_zone_entity_services(hass)
|
||||
# The zone modes are consistent across all systems and use the same schema
|
||||
hass.services.async_register(
|
||||
DOMAIN,
|
||||
EvoService.CLEAR_ZONE_OVERRIDE,
|
||||
set_zone_override,
|
||||
schema=CLEAR_ZONE_OVERRIDE_SCHEMA,
|
||||
)
|
||||
hass.services.async_register(
|
||||
DOMAIN,
|
||||
EvoService.SET_ZONE_OVERRIDE,
|
||||
set_zone_override,
|
||||
schema=SET_ZONE_OVERRIDE_SCHEMA,
|
||||
)
|
||||
|
||||
@@ -28,11 +28,14 @@ reset_system:
|
||||
refresh_system:
|
||||
|
||||
set_zone_override:
|
||||
target:
|
||||
entity:
|
||||
integration: evohome
|
||||
domain: climate
|
||||
fields:
|
||||
entity_id:
|
||||
required: true
|
||||
example: climate.bathroom
|
||||
selector:
|
||||
entity:
|
||||
integration: evohome
|
||||
domain: climate
|
||||
setpoint:
|
||||
required: true
|
||||
selector:
|
||||
@@ -46,7 +49,10 @@ set_zone_override:
|
||||
object:
|
||||
|
||||
clear_zone_override:
|
||||
target:
|
||||
entity:
|
||||
integration: evohome
|
||||
domain: climate
|
||||
fields:
|
||||
entity_id:
|
||||
required: true
|
||||
selector:
|
||||
entity:
|
||||
integration: evohome
|
||||
domain: climate
|
||||
|
||||
@@ -1,12 +1,13 @@
|
||||
{
|
||||
"exceptions": {
|
||||
"zone_only_service": {
|
||||
"message": "Only zones support the `{service}` service"
|
||||
}
|
||||
},
|
||||
"services": {
|
||||
"clear_zone_override": {
|
||||
"description": "Sets a zone to follow its schedule.",
|
||||
"fields": {
|
||||
"entity_id": {
|
||||
"description": "[%key:component::evohome::services::set_zone_override::fields::entity_id::description%]",
|
||||
"name": "[%key:component::evohome::services::set_zone_override::fields::entity_id::name%]"
|
||||
}
|
||||
},
|
||||
"name": "Clear zone override"
|
||||
},
|
||||
"refresh_system": {
|
||||
@@ -42,6 +43,10 @@
|
||||
"description": "The zone will revert to its schedule after this time. If 0 the change is until the next scheduled setpoint.",
|
||||
"name": "Duration"
|
||||
},
|
||||
"entity_id": {
|
||||
"description": "The entity ID of the Evohome zone.",
|
||||
"name": "Entity"
|
||||
},
|
||||
"setpoint": {
|
||||
"description": "The temperature to be used instead of the scheduled setpoint.",
|
||||
"name": "Setpoint"
|
||||
|
||||
@@ -21,5 +21,5 @@
|
||||
"integration_type": "system",
|
||||
"preview_features": { "winter_mode": {} },
|
||||
"quality_scale": "internal",
|
||||
"requirements": ["home-assistant-frontend==20260226.0"]
|
||||
"requirements": ["home-assistant-frontend==20260225.0"]
|
||||
}
|
||||
|
||||
@@ -1,153 +0,0 @@
|
||||
"""Provides functionality to interact with infrared devices."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from abc import abstractmethod
|
||||
from datetime import timedelta
|
||||
import logging
|
||||
from typing import final
|
||||
|
||||
from infrared_protocols import Command as InfraredCommand
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import STATE_UNAVAILABLE
|
||||
from homeassistant.core import Context, HomeAssistant, callback
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers import config_validation as cv, entity_registry as er
|
||||
from homeassistant.helpers.entity import EntityDescription
|
||||
from homeassistant.helpers.entity_component import EntityComponent
|
||||
from homeassistant.helpers.restore_state import RestoreEntity
|
||||
from homeassistant.helpers.typing import ConfigType
|
||||
from homeassistant.util import dt as dt_util
|
||||
from homeassistant.util.hass_dict import HassKey
|
||||
|
||||
from .const import DOMAIN
|
||||
|
||||
__all__ = [
|
||||
"DOMAIN",
|
||||
"InfraredEntity",
|
||||
"InfraredEntityDescription",
|
||||
"async_get_emitters",
|
||||
"async_send_command",
|
||||
]
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
DATA_COMPONENT: HassKey[EntityComponent[InfraredEntity]] = HassKey(DOMAIN)
|
||||
ENTITY_ID_FORMAT = DOMAIN + ".{}"
|
||||
PLATFORM_SCHEMA = cv.PLATFORM_SCHEMA
|
||||
PLATFORM_SCHEMA_BASE = cv.PLATFORM_SCHEMA_BASE
|
||||
SCAN_INTERVAL = timedelta(seconds=30)
|
||||
|
||||
|
||||
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
"""Set up the infrared domain."""
|
||||
component = hass.data[DATA_COMPONENT] = EntityComponent[InfraredEntity](
|
||||
_LOGGER, DOMAIN, hass, SCAN_INTERVAL
|
||||
)
|
||||
await component.async_setup(config)
|
||||
|
||||
return True
|
||||
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
"""Set up a config entry."""
|
||||
return await hass.data[DATA_COMPONENT].async_setup_entry(entry)
|
||||
|
||||
|
||||
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
"""Unload a config entry."""
|
||||
return await hass.data[DATA_COMPONENT].async_unload_entry(entry)
|
||||
|
||||
|
||||
@callback
|
||||
def async_get_emitters(hass: HomeAssistant) -> list[InfraredEntity]:
|
||||
"""Get all infrared emitters."""
|
||||
component = hass.data.get(DATA_COMPONENT)
|
||||
if component is None:
|
||||
return []
|
||||
|
||||
return list(component.entities)
|
||||
|
||||
|
||||
async def async_send_command(
|
||||
hass: HomeAssistant,
|
||||
entity_id_or_uuid: str,
|
||||
command: InfraredCommand,
|
||||
context: Context | None = None,
|
||||
) -> None:
|
||||
"""Send an IR command to the specified infrared entity.
|
||||
|
||||
Raises:
|
||||
HomeAssistantError: If the infrared entity is not found.
|
||||
"""
|
||||
component = hass.data.get(DATA_COMPONENT)
|
||||
if component is None:
|
||||
raise HomeAssistantError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="component_not_loaded",
|
||||
)
|
||||
|
||||
ent_reg = er.async_get(hass)
|
||||
entity_id = er.async_validate_entity_id(ent_reg, entity_id_or_uuid)
|
||||
entity = component.get_entity(entity_id)
|
||||
if entity is None:
|
||||
raise HomeAssistantError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="entity_not_found",
|
||||
translation_placeholders={"entity_id": entity_id},
|
||||
)
|
||||
|
||||
if context is not None:
|
||||
entity.async_set_context(context)
|
||||
|
||||
await entity.async_send_command_internal(command)
|
||||
|
||||
|
||||
class InfraredEntityDescription(EntityDescription, frozen_or_thawed=True):
|
||||
"""Describes infrared entities."""
|
||||
|
||||
|
||||
class InfraredEntity(RestoreEntity):
|
||||
"""Base class for infrared transmitter entities."""
|
||||
|
||||
entity_description: InfraredEntityDescription
|
||||
_attr_should_poll = False
|
||||
_attr_state: None = None
|
||||
|
||||
__last_command_sent: str | None = None
|
||||
|
||||
@property
|
||||
@final
|
||||
def state(self) -> str | None:
|
||||
"""Return the entity state."""
|
||||
return self.__last_command_sent
|
||||
|
||||
@final
|
||||
async def async_send_command_internal(self, command: InfraredCommand) -> None:
|
||||
"""Send an IR command and update state.
|
||||
|
||||
Should not be overridden, handles setting last sent timestamp.
|
||||
"""
|
||||
await self.async_send_command(command)
|
||||
self.__last_command_sent = dt_util.utcnow().isoformat(timespec="milliseconds")
|
||||
self.async_write_ha_state()
|
||||
|
||||
@final
|
||||
async def async_internal_added_to_hass(self) -> None:
|
||||
"""Call when the infrared entity is added to hass."""
|
||||
await super().async_internal_added_to_hass()
|
||||
state = await self.async_get_last_state()
|
||||
if state is not None and state.state not in (STATE_UNAVAILABLE, None):
|
||||
self.__last_command_sent = state.state
|
||||
|
||||
@abstractmethod
|
||||
async def async_send_command(self, command: InfraredCommand) -> None:
|
||||
"""Send an IR command.
|
||||
|
||||
Args:
|
||||
command: The IR command to send.
|
||||
|
||||
Raises:
|
||||
HomeAssistantError: If transmission fails.
|
||||
"""
|
||||
@@ -1,5 +0,0 @@
|
||||
"""Constants for the Infrared integration."""
|
||||
|
||||
from typing import Final
|
||||
|
||||
DOMAIN: Final = "infrared"
|
||||
@@ -1,7 +0,0 @@
|
||||
{
|
||||
"entity_component": {
|
||||
"_": {
|
||||
"default": "mdi:led-on"
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,9 +0,0 @@
|
||||
{
|
||||
"domain": "infrared",
|
||||
"name": "Infrared",
|
||||
"codeowners": ["@home-assistant/core"],
|
||||
"documentation": "https://www.home-assistant.io/integrations/infrared",
|
||||
"integration_type": "entity",
|
||||
"quality_scale": "internal",
|
||||
"requirements": ["infrared-protocols==1.0.0"]
|
||||
}
|
||||
@@ -1,10 +0,0 @@
|
||||
{
|
||||
"exceptions": {
|
||||
"component_not_loaded": {
|
||||
"message": "Infrared component not loaded"
|
||||
},
|
||||
"entity_not_found": {
|
||||
"message": "Infrared entity `{entity_id}` not found"
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -56,9 +56,7 @@ from .const import DATA_BACKUP_AGENT_LISTENERS, DOMAIN
|
||||
|
||||
COMPONENTS_WITH_DEMO_PLATFORM = [
|
||||
Platform.BUTTON,
|
||||
Platform.FAN,
|
||||
Platform.IMAGE,
|
||||
Platform.INFRARED,
|
||||
Platform.LAWN_MOWER,
|
||||
Platform.LOCK,
|
||||
Platform.NOTIFY,
|
||||
@@ -133,9 +131,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
# Notify backup listeners
|
||||
hass.async_create_task(_notify_backup_listeners(hass), eager_start=False)
|
||||
|
||||
# Reload config entry when subentries are added/removed/updated
|
||||
entry.async_on_unload(entry.add_update_listener(_async_update_listener))
|
||||
|
||||
# Subscribe to labs feature updates for kitchen_sink preview repair
|
||||
entry.async_on_unload(
|
||||
async_subscribe_preview_feature(
|
||||
@@ -152,11 +147,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
return True
|
||||
|
||||
|
||||
async def _async_update_listener(hass: HomeAssistant, entry: ConfigEntry) -> None:
|
||||
"""Reload config entry on update (e.g. subentry added/removed)."""
|
||||
await hass.config_entries.async_reload(entry.entry_id)
|
||||
|
||||
|
||||
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
"""Unload config entry."""
|
||||
# Notify backup listeners
|
||||
|
||||
@@ -8,23 +8,18 @@ from typing import Any
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant import data_entry_flow
|
||||
from homeassistant.components.infrared import (
|
||||
DOMAIN as INFRARED_DOMAIN,
|
||||
async_get_emitters,
|
||||
)
|
||||
from homeassistant.config_entries import (
|
||||
ConfigEntry,
|
||||
ConfigFlow,
|
||||
ConfigFlowResult,
|
||||
ConfigSubentryFlow,
|
||||
OptionsFlow,
|
||||
OptionsFlowWithReload,
|
||||
SubentryFlowResult,
|
||||
)
|
||||
from homeassistant.core import callback
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.helpers.selector import EntitySelector, EntitySelectorConfig
|
||||
|
||||
from .const import CONF_INFRARED_ENTITY_ID, DOMAIN
|
||||
from . import DOMAIN
|
||||
|
||||
CONF_BOOLEAN = "bool"
|
||||
CONF_INT = "int"
|
||||
@@ -49,10 +44,7 @@ class KitchenSinkConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
cls, config_entry: ConfigEntry
|
||||
) -> dict[str, type[ConfigSubentryFlow]]:
|
||||
"""Return subentries supported by this handler."""
|
||||
return {
|
||||
"entity": SubentryFlowHandler,
|
||||
"infrared_fan": InfraredFanSubentryFlowHandler,
|
||||
}
|
||||
return {"entity": SubentryFlowHandler}
|
||||
|
||||
async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult:
|
||||
"""Set the config entry up from yaml."""
|
||||
@@ -73,7 +65,7 @@ class KitchenSinkConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
return self.async_abort(reason="reauth_successful")
|
||||
|
||||
|
||||
class OptionsFlowHandler(OptionsFlow):
|
||||
class OptionsFlowHandler(OptionsFlowWithReload):
|
||||
"""Handle options."""
|
||||
|
||||
async def async_step_init(
|
||||
@@ -154,7 +146,7 @@ class SubentryFlowHandler(ConfigSubentryFlow):
|
||||
"""Reconfigure a sensor."""
|
||||
if user_input is not None:
|
||||
title = user_input.pop("name")
|
||||
return self.async_update_and_abort(
|
||||
return self.async_update_reload_and_abort(
|
||||
self._get_entry(),
|
||||
self._get_reconfigure_subentry(),
|
||||
data=user_input,
|
||||
@@ -170,35 +162,3 @@ class SubentryFlowHandler(ConfigSubentryFlow):
|
||||
}
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
class InfraredFanSubentryFlowHandler(ConfigSubentryFlow):
|
||||
"""Handle infrared fan subentry flow."""
|
||||
|
||||
async def async_step_user(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> SubentryFlowResult:
|
||||
"""User flow to add an infrared fan."""
|
||||
|
||||
entities = async_get_emitters(self.hass)
|
||||
if not entities:
|
||||
return self.async_abort(reason="no_emitters")
|
||||
|
||||
if user_input is not None:
|
||||
title = user_input.pop("name")
|
||||
return self.async_create_entry(data=user_input, title=title)
|
||||
|
||||
return self.async_show_form(
|
||||
step_id="user",
|
||||
data_schema=vol.Schema(
|
||||
{
|
||||
vol.Required("name"): str,
|
||||
vol.Required(CONF_INFRARED_ENTITY_ID): EntitySelector(
|
||||
EntitySelectorConfig(
|
||||
domain=INFRARED_DOMAIN,
|
||||
include_entities=[entity.entity_id for entity in entities],
|
||||
)
|
||||
),
|
||||
}
|
||||
),
|
||||
)
|
||||
|
||||
@@ -7,7 +7,6 @@ from collections.abc import Callable
|
||||
from homeassistant.util.hass_dict import HassKey
|
||||
|
||||
DOMAIN = "kitchen_sink"
|
||||
CONF_INFRARED_ENTITY_ID = "infrared_entity_id"
|
||||
DATA_BACKUP_AGENT_LISTENERS: HassKey[list[Callable[[], None]]] = HassKey(
|
||||
f"{DOMAIN}.backup_agent_listeners"
|
||||
)
|
||||
|
||||
@@ -1,150 +0,0 @@
|
||||
"""Demo platform that offers a fake infrared fan entity."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Any
|
||||
|
||||
import infrared_protocols
|
||||
|
||||
from homeassistant.components.fan import FanEntity, FanEntityFeature
|
||||
from homeassistant.components.infrared import async_send_command
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import STATE_UNAVAILABLE
|
||||
from homeassistant.core import Event, EventStateChangedData, HomeAssistant, callback
|
||||
from homeassistant.helpers.device_registry import DeviceInfo
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
from homeassistant.helpers.event import async_track_state_change_event
|
||||
|
||||
from .const import CONF_INFRARED_ENTITY_ID, DOMAIN
|
||||
|
||||
PARALLEL_UPDATES = 0
|
||||
|
||||
DUMMY_FAN_ADDRESS = 0x1234
|
||||
DUMMY_CMD_POWER_ON = 0x01
|
||||
DUMMY_CMD_POWER_OFF = 0x02
|
||||
DUMMY_CMD_SPEED_LOW = 0x03
|
||||
DUMMY_CMD_SPEED_MEDIUM = 0x04
|
||||
DUMMY_CMD_SPEED_HIGH = 0x05
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
config_entry: ConfigEntry,
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up the demo infrared fan platform."""
|
||||
for subentry_id, subentry in config_entry.subentries.items():
|
||||
if subentry.subentry_type != "infrared_fan":
|
||||
continue
|
||||
async_add_entities(
|
||||
[
|
||||
DemoInfraredFan(
|
||||
subentry_id=subentry_id,
|
||||
device_name=subentry.title,
|
||||
infrared_entity_id=subentry.data[CONF_INFRARED_ENTITY_ID],
|
||||
)
|
||||
],
|
||||
config_subentry_id=subentry_id,
|
||||
)
|
||||
|
||||
|
||||
class DemoInfraredFan(FanEntity):
|
||||
"""Representation of a demo infrared fan entity."""
|
||||
|
||||
_attr_has_entity_name = True
|
||||
_attr_name = None
|
||||
_attr_should_poll = False
|
||||
_attr_assumed_state = True
|
||||
_attr_speed_count = 3
|
||||
_attr_supported_features = (
|
||||
FanEntityFeature.SET_SPEED
|
||||
| FanEntityFeature.TURN_OFF
|
||||
| FanEntityFeature.TURN_ON
|
||||
)
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
subentry_id: str,
|
||||
device_name: str,
|
||||
infrared_entity_id: str,
|
||||
) -> None:
|
||||
"""Initialize the demo infrared fan entity."""
|
||||
self._infrared_entity_id = infrared_entity_id
|
||||
self._attr_unique_id = subentry_id
|
||||
self._attr_device_info = DeviceInfo(
|
||||
identifiers={(DOMAIN, subentry_id)},
|
||||
name=device_name,
|
||||
)
|
||||
self._attr_percentage = 0
|
||||
|
||||
async def async_added_to_hass(self) -> None:
|
||||
"""Subscribe to infrared entity state changes."""
|
||||
await super().async_added_to_hass()
|
||||
|
||||
@callback
|
||||
def _async_ir_state_changed(event: Event[EventStateChangedData]) -> None:
|
||||
"""Handle infrared entity state changes."""
|
||||
new_state = event.data["new_state"]
|
||||
self._attr_available = (
|
||||
new_state is not None and new_state.state != STATE_UNAVAILABLE
|
||||
)
|
||||
self.async_write_ha_state()
|
||||
|
||||
self.async_on_remove(
|
||||
async_track_state_change_event(
|
||||
self.hass, [self._infrared_entity_id], _async_ir_state_changed
|
||||
)
|
||||
)
|
||||
|
||||
# Set initial availability based on current infrared entity state
|
||||
ir_state = self.hass.states.get(self._infrared_entity_id)
|
||||
self._attr_available = (
|
||||
ir_state is not None and ir_state.state != STATE_UNAVAILABLE
|
||||
)
|
||||
|
||||
async def _send_command(self, command_code: int) -> None:
|
||||
"""Send an IR command using the NEC protocol."""
|
||||
command = infrared_protocols.NECCommand(
|
||||
address=DUMMY_FAN_ADDRESS,
|
||||
command=command_code,
|
||||
modulation=38000,
|
||||
)
|
||||
await async_send_command(
|
||||
self.hass, self._infrared_entity_id, command, context=self._context
|
||||
)
|
||||
|
||||
async def async_turn_on(
|
||||
self,
|
||||
percentage: int | None = None,
|
||||
preset_mode: str | None = None,
|
||||
**kwargs: Any,
|
||||
) -> None:
|
||||
"""Turn on the fan."""
|
||||
if percentage is not None:
|
||||
await self.async_set_percentage(percentage)
|
||||
return
|
||||
await self._send_command(DUMMY_CMD_POWER_ON)
|
||||
self._attr_percentage = 33
|
||||
self.async_write_ha_state()
|
||||
|
||||
async def async_turn_off(self, **kwargs: Any) -> None:
|
||||
"""Turn off the fan."""
|
||||
await self._send_command(DUMMY_CMD_POWER_OFF)
|
||||
self._attr_percentage = 0
|
||||
self.async_write_ha_state()
|
||||
|
||||
async def async_set_percentage(self, percentage: int) -> None:
|
||||
"""Set the speed percentage of the fan."""
|
||||
if percentage == 0:
|
||||
await self.async_turn_off()
|
||||
return
|
||||
|
||||
if percentage <= 33:
|
||||
await self._send_command(DUMMY_CMD_SPEED_LOW)
|
||||
elif percentage <= 66:
|
||||
await self._send_command(DUMMY_CMD_SPEED_MEDIUM)
|
||||
else:
|
||||
await self._send_command(DUMMY_CMD_SPEED_HIGH)
|
||||
|
||||
self._attr_percentage = percentage
|
||||
self.async_write_ha_state()
|
||||
@@ -1,65 +0,0 @@
|
||||
"""Demo platform that offers a fake infrared entity."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import infrared_protocols
|
||||
|
||||
from homeassistant.components import persistent_notification
|
||||
from homeassistant.components.infrared import InfraredEntity
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.device_registry import DeviceInfo
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from . import DOMAIN
|
||||
|
||||
PARALLEL_UPDATES = 0
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
config_entry: ConfigEntry,
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up the demo infrared platform."""
|
||||
async_add_entities(
|
||||
[
|
||||
DemoInfrared(
|
||||
unique_id="ir_transmitter",
|
||||
device_name="IR Blaster",
|
||||
entity_name="Infrared Transmitter",
|
||||
),
|
||||
]
|
||||
)
|
||||
|
||||
|
||||
class DemoInfrared(InfraredEntity):
|
||||
"""Representation of a demo infrared entity."""
|
||||
|
||||
_attr_has_entity_name = True
|
||||
_attr_should_poll = False
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
unique_id: str,
|
||||
device_name: str,
|
||||
entity_name: str,
|
||||
) -> None:
|
||||
"""Initialize the demo infrared entity."""
|
||||
self._attr_unique_id = unique_id
|
||||
self._attr_device_info = DeviceInfo(
|
||||
identifiers={(DOMAIN, unique_id)},
|
||||
name=device_name,
|
||||
)
|
||||
self._attr_name = entity_name
|
||||
|
||||
async def async_send_command(self, command: infrared_protocols.Command) -> None:
|
||||
"""Send an IR command."""
|
||||
timings = [
|
||||
interval
|
||||
for timing in command.get_raw_timings()
|
||||
for interval in (timing.high_us, -timing.low_us)
|
||||
]
|
||||
persistent_notification.async_create(
|
||||
self.hass, str(timings), title="Infrared Command"
|
||||
)
|
||||
@@ -101,8 +101,6 @@ async def async_setup_entry(
|
||||
)
|
||||
|
||||
for subentry_id, subentry in config_entry.subentries.items():
|
||||
if subentry.subentry_type != "entity":
|
||||
continue
|
||||
async_add_entities(
|
||||
[
|
||||
DemoSensor(
|
||||
|
||||
@@ -32,24 +32,6 @@
|
||||
"description": "Reconfigure the sensor"
|
||||
}
|
||||
}
|
||||
},
|
||||
"infrared_fan": {
|
||||
"abort": {
|
||||
"no_emitters": "No infrared transmitter entities found. Please set up an infrared device first."
|
||||
},
|
||||
"entry_type": "Infrared fan",
|
||||
"initiate_flow": {
|
||||
"user": "Add infrared fan"
|
||||
},
|
||||
"step": {
|
||||
"user": {
|
||||
"data": {
|
||||
"infrared_entity_id": "Infrared transmitter",
|
||||
"name": "[%key:common::config_flow::data::name%]"
|
||||
},
|
||||
"description": "Select an infrared transmitter to control the fan."
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"device": {
|
||||
|
||||
@@ -2,7 +2,9 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Callable
|
||||
from dataclasses import dataclass
|
||||
from datetime import datetime
|
||||
from enum import IntEnum
|
||||
from math import floor
|
||||
from typing import Any
|
||||
@@ -21,6 +23,7 @@ from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import Platform
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
from homeassistant.helpers.event import async_call_later
|
||||
|
||||
from .const import LOGGER
|
||||
from .entity import MatterEntity, MatterEntityDescription
|
||||
@@ -70,6 +73,7 @@ class MatterCoverEntityDescription(CoverEntityDescription, MatterEntityDescripti
|
||||
class MatterCover(MatterEntity, CoverEntity):
|
||||
"""Representation of a Matter Cover."""
|
||||
|
||||
_cancel_write_state: Callable[[], None] | None = None
|
||||
entity_description: MatterCoverEntityDescription
|
||||
|
||||
@property
|
||||
@@ -114,6 +118,30 @@ class MatterCover(MatterEntity, CoverEntity):
|
||||
clusters.WindowCovering.Commands.GoToTiltPercentage((100 - position) * 100)
|
||||
)
|
||||
|
||||
@callback
|
||||
def _on_matter_event(self, event: Any, data: Any = None) -> None:
|
||||
"""Handle updates from the device."""
|
||||
self._attr_available = self._endpoint.node.available
|
||||
self._update_from_device()
|
||||
if self._cancel_write_state is not None:
|
||||
self._cancel_write_state()
|
||||
self._cancel_write_state = async_call_later(
|
||||
self.hass, 0.1, self._async_write_state_later
|
||||
)
|
||||
|
||||
@callback
|
||||
def _async_write_state_later(self, now: datetime) -> None:
|
||||
"""Write the Home Assistant state after debouncing attribute updates."""
|
||||
self._cancel_write_state = None
|
||||
self.async_write_ha_state()
|
||||
|
||||
async def async_will_remove_from_hass(self) -> None:
|
||||
"""Run when entity will be removed from Home Assistant."""
|
||||
await super().async_will_remove_from_hass()
|
||||
if self._cancel_write_state is not None:
|
||||
self._cancel_write_state()
|
||||
self._cancel_write_state = None
|
||||
|
||||
@callback
|
||||
def _update_from_device(self) -> None:
|
||||
"""Update from device."""
|
||||
|
||||
@@ -512,11 +512,6 @@ class OpenAISubentryFlowHandler(ConfigSubentryFlow):
|
||||
options.pop(CONF_WEB_SEARCH_REGION, None)
|
||||
options.pop(CONF_WEB_SEARCH_COUNTRY, None)
|
||||
options.pop(CONF_WEB_SEARCH_TIMEZONE, None)
|
||||
if (
|
||||
user_input.get(CONF_CODE_INTERPRETER)
|
||||
and user_input.get(CONF_REASONING_EFFORT) == "minimal"
|
||||
):
|
||||
errors[CONF_CODE_INTERPRETER] = "code_interpreter_minimal_reasoning"
|
||||
|
||||
options.update(user_input)
|
||||
if not errors:
|
||||
@@ -544,15 +539,15 @@ class OpenAISubentryFlowHandler(ConfigSubentryFlow):
|
||||
if not model.startswith(("o", "gpt-5")) or model.startswith("gpt-5-pro"):
|
||||
return []
|
||||
|
||||
models_reasoning_map: dict[str | tuple[str, ...], list[str]] = {
|
||||
MODELS_REASONING_MAP = {
|
||||
"gpt-5.2-pro": ["medium", "high", "xhigh"],
|
||||
("gpt-5.2", "gpt-5.3"): ["none", "low", "medium", "high", "xhigh"],
|
||||
"gpt-5.2": ["none", "low", "medium", "high", "xhigh"],
|
||||
"gpt-5.1": ["none", "low", "medium", "high"],
|
||||
"gpt-5": ["minimal", "low", "medium", "high"],
|
||||
"": ["low", "medium", "high"], # The default case
|
||||
}
|
||||
|
||||
for prefix, options in models_reasoning_map.items():
|
||||
for prefix, options in MODELS_REASONING_MAP.items():
|
||||
if model.startswith(prefix):
|
||||
return options
|
||||
return [] # pragma: no cover
|
||||
|
||||
@@ -38,7 +38,6 @@
|
||||
},
|
||||
"entry_type": "AI task",
|
||||
"error": {
|
||||
"code_interpreter_minimal_reasoning": "[%key:component::openai_conversation::config_subentries::conversation::error::code_interpreter_minimal_reasoning%]",
|
||||
"model_not_supported": "[%key:component::openai_conversation::config_subentries::conversation::error::model_not_supported%]",
|
||||
"web_search_minimal_reasoning": "[%key:component::openai_conversation::config_subentries::conversation::error::web_search_minimal_reasoning%]"
|
||||
},
|
||||
@@ -94,7 +93,6 @@
|
||||
},
|
||||
"entry_type": "Conversation agent",
|
||||
"error": {
|
||||
"code_interpreter_minimal_reasoning": "Code interpreter is not supported with minimal reasoning effort",
|
||||
"model_not_supported": "This model is not supported, please select a different model",
|
||||
"web_search_minimal_reasoning": "Web search is currently not supported with minimal reasoning effort"
|
||||
},
|
||||
|
||||
@@ -1,51 +1 @@
|
||||
"""The orvibo integration."""
|
||||
|
||||
import logging
|
||||
|
||||
from orvibo.s20 import S20, S20Exception
|
||||
|
||||
from homeassistant import core
|
||||
from homeassistant.const import CONF_HOST, CONF_MAC, Platform
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import ConfigEntryNotReady
|
||||
|
||||
from .const import DOMAIN
|
||||
from .models import S20ConfigEntry
|
||||
|
||||
PLATFORMS = [Platform.SWITCH]
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
async def async_setup_entry(hass: core.HomeAssistant, entry: S20ConfigEntry) -> bool:
|
||||
"""Set up platform from a ConfigEntry."""
|
||||
|
||||
try:
|
||||
s20 = await hass.async_add_executor_job(
|
||||
S20,
|
||||
entry.data[CONF_HOST],
|
||||
entry.data[CONF_MAC],
|
||||
)
|
||||
_LOGGER.debug("Initialized S20 at %s", entry.data[CONF_HOST])
|
||||
except S20Exception as err:
|
||||
_LOGGER.debug("S20 at %s couldn't be initialized", entry.data[CONF_HOST])
|
||||
|
||||
raise ConfigEntryNotReady(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="init_error",
|
||||
translation_placeholders={
|
||||
"host": entry.data[CONF_HOST],
|
||||
},
|
||||
) from err
|
||||
|
||||
entry.runtime_data = s20
|
||||
|
||||
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
|
||||
|
||||
return True
|
||||
|
||||
|
||||
async def async_unload_entry(hass: HomeAssistant, entry: S20ConfigEntry) -> bool:
|
||||
"""Unload a config entry."""
|
||||
|
||||
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
|
||||
"""The orvibo component."""
|
||||
|
||||
@@ -1,205 +0,0 @@
|
||||
"""Config flow for the orvibo integration."""
|
||||
|
||||
import asyncio
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
from orvibo.s20 import S20, S20Exception, discover
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
|
||||
from homeassistant.const import CONF_HOST, CONF_MAC, CONF_NAME
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.helpers.device_registry import format_mac
|
||||
|
||||
from .const import CONF_SWITCH_LIST, DEFAULT_NAME, DOMAIN
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
FULL_EDIT_SCHEMA = vol.Schema(
|
||||
{
|
||||
vol.Required(CONF_HOST): cv.string,
|
||||
vol.Optional(CONF_MAC): cv.string,
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
class S20ConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
"""Handle the config flow for Orvibo S20 switches."""
|
||||
|
||||
VERSION = 1
|
||||
MINOR_VERSION = 1
|
||||
|
||||
def __init__(self) -> None:
|
||||
"""Initialize an instance of the S20 config flow."""
|
||||
self.discovery_task: asyncio.Task | None = None
|
||||
self._discovered_switches: dict[str, dict[str, Any]] = {}
|
||||
self.chosen_switch: dict[str, Any] = {}
|
||||
|
||||
async def _async_discover(self) -> None:
|
||||
def _filter_discovered_switches(
|
||||
switches: dict[str, dict[str, Any]],
|
||||
) -> dict[str, dict[str, Any]]:
|
||||
# Get existing unique_ids from config entries
|
||||
existing_ids = {entry.unique_id for entry in self._async_current_entries()}
|
||||
_LOGGER.debug("Existing unique IDs: %s", existing_ids)
|
||||
# Build a new filtered dict
|
||||
filtered = {}
|
||||
for ip, info in switches.items():
|
||||
mac_bytes = info.get("mac")
|
||||
if not mac_bytes:
|
||||
continue # skip if no MAC
|
||||
|
||||
unique_id = format_mac(mac_bytes.hex()).lower()
|
||||
if unique_id not in existing_ids:
|
||||
filtered[ip] = info
|
||||
_LOGGER.debug("New switches: %s", filtered)
|
||||
return filtered
|
||||
|
||||
# Discover S20 devices.
|
||||
_LOGGER.debug("Discovering S20 switches")
|
||||
|
||||
_unfiltered_switches = await self.hass.async_add_executor_job(discover)
|
||||
_LOGGER.debug("All discovered switches: %s", _unfiltered_switches)
|
||||
|
||||
self._discovered_switches = _filter_discovered_switches(_unfiltered_switches)
|
||||
|
||||
async def async_step_user(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle a flow initialized by the user."""
|
||||
|
||||
return self.async_show_menu(
|
||||
step_id="user", menu_options=["start_discovery", "edit"]
|
||||
)
|
||||
|
||||
async def _validate_input(self, user_input: dict[str, Any]) -> str | None:
|
||||
"""Validate user input and discover MAC if missing."""
|
||||
|
||||
if user_input.get(CONF_MAC):
|
||||
user_input[CONF_MAC] = format_mac(user_input[CONF_MAC]).lower()
|
||||
if len(user_input[CONF_MAC]) != 17 or user_input[CONF_MAC].count(":") != 5:
|
||||
return "invalid_mac"
|
||||
|
||||
try:
|
||||
device = await self.hass.async_add_executor_job(
|
||||
S20,
|
||||
user_input[CONF_HOST],
|
||||
user_input.get(CONF_MAC),
|
||||
)
|
||||
|
||||
if not user_input.get(CONF_MAC):
|
||||
# Using private attribute access here since S20 class doesn't have a public method to get the MAC without repeating discovery
|
||||
if not device._mac: # noqa: SLF001
|
||||
return "cannot_discover"
|
||||
user_input[CONF_MAC] = format_mac(device._mac.hex()).lower() # noqa: SLF001
|
||||
|
||||
except S20Exception:
|
||||
return "cannot_connect"
|
||||
|
||||
return None
|
||||
|
||||
async def async_step_edit(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Edit a discovered or manually configured server."""
|
||||
|
||||
errors = {}
|
||||
if user_input:
|
||||
error = await self._validate_input(user_input)
|
||||
if not error:
|
||||
await self.async_set_unique_id(user_input[CONF_MAC])
|
||||
self._abort_if_unique_id_configured()
|
||||
return self.async_create_entry(
|
||||
title=f"{DEFAULT_NAME} ({user_input[CONF_HOST]})", data=user_input
|
||||
)
|
||||
errors["base"] = error
|
||||
|
||||
return self.async_show_form(
|
||||
step_id="edit",
|
||||
data_schema=FULL_EDIT_SCHEMA,
|
||||
errors=errors,
|
||||
)
|
||||
|
||||
async def async_step_start_discovery(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle a flow initialized by the user."""
|
||||
if not self.discovery_task:
|
||||
self.discovery_task = self.hass.async_create_task(self._async_discover())
|
||||
return self.async_show_progress(
|
||||
step_id="start_discovery",
|
||||
progress_action="start_discovery",
|
||||
progress_task=self.discovery_task,
|
||||
)
|
||||
if self.discovery_task.done():
|
||||
try:
|
||||
self.discovery_task.result()
|
||||
except (S20Exception, OSError) as err:
|
||||
_LOGGER.debug("Discovery task failed: %s", err)
|
||||
self.discovery_task = None
|
||||
return self.async_show_progress_done(
|
||||
next_step_id=(
|
||||
"choose_switch" if self._discovered_switches else "discovery_failed"
|
||||
)
|
||||
)
|
||||
return self.async_show_progress(
|
||||
step_id="start_discovery",
|
||||
progress_action="start_discovery",
|
||||
progress_task=self.discovery_task,
|
||||
)
|
||||
|
||||
async def async_step_choose_switch(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Choose manual or discover flow."""
|
||||
_chosen_host: str
|
||||
|
||||
if user_input:
|
||||
_chosen_host = user_input[CONF_SWITCH_LIST]
|
||||
for host, data in self._discovered_switches.items():
|
||||
if _chosen_host == host:
|
||||
self.chosen_switch[CONF_HOST] = host
|
||||
self.chosen_switch[CONF_MAC] = format_mac(
|
||||
data[CONF_MAC].hex()
|
||||
).lower()
|
||||
await self.async_set_unique_id(self.chosen_switch[CONF_MAC])
|
||||
self._abort_if_unique_id_configured()
|
||||
return self.async_create_entry(
|
||||
title=f"{DEFAULT_NAME} ({host})", data=self.chosen_switch
|
||||
)
|
||||
_LOGGER.debug("discovered switches: %s", self._discovered_switches)
|
||||
|
||||
_options = {
|
||||
host: f"{host} ({format_mac(data[CONF_MAC].hex()).lower()})"
|
||||
for host, data in self._discovered_switches.items()
|
||||
}
|
||||
return self.async_show_form(
|
||||
step_id="choose_switch",
|
||||
data_schema=vol.Schema({vol.Required(CONF_SWITCH_LIST): vol.In(_options)}),
|
||||
)
|
||||
|
||||
async def async_step_discovery_failed(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle a failed discovery."""
|
||||
|
||||
return self.async_show_menu(
|
||||
step_id="discovery_failed", menu_options=["start_discovery", "edit"]
|
||||
)
|
||||
|
||||
async def async_step_import(self, user_input: dict[str, Any]) -> ConfigFlowResult:
|
||||
"""Handle import from configuration.yaml."""
|
||||
_LOGGER.debug("Importing config: %s", user_input)
|
||||
|
||||
error = await self._validate_input(user_input)
|
||||
if error:
|
||||
return self.async_abort(reason=error)
|
||||
|
||||
await self.async_set_unique_id(user_input[CONF_MAC])
|
||||
self._abort_if_unique_id_configured()
|
||||
|
||||
return self.async_create_entry(
|
||||
title=user_input.get(CONF_NAME, user_input[CONF_HOST]), data=user_input
|
||||
)
|
||||
@@ -1,5 +0,0 @@
|
||||
"""Constants for the orvibo integration."""
|
||||
|
||||
DOMAIN = "orvibo"
|
||||
DEFAULT_NAME = "S20"
|
||||
CONF_SWITCH_LIST = "switches"
|
||||
@@ -2,7 +2,6 @@
|
||||
"domain": "orvibo",
|
||||
"name": "Orvibo",
|
||||
"codeowners": [],
|
||||
"config_flow": true,
|
||||
"documentation": "https://www.home-assistant.io/integrations/orvibo",
|
||||
"iot_class": "local_push",
|
||||
"loggers": ["orvibo"],
|
||||
|
||||
@@ -1,7 +0,0 @@
|
||||
"""Data models for the Orvibo integration."""
|
||||
|
||||
from orvibo.s20 import S20
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
|
||||
type S20ConfigEntry = ConfigEntry[S20]
|
||||
@@ -1,71 +0,0 @@
|
||||
{
|
||||
"config": {
|
||||
"abort": {
|
||||
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
|
||||
"already_in_progress": "[%key:common::config_flow::abort::already_in_progress%]",
|
||||
"cannot_connect": "Unable to connect to the S20 switch",
|
||||
"cannot_discover": "Unable to discover MAC address of S20 switch. Please enter the MAC address.",
|
||||
"invalid_mac": "Invalid MAC address format"
|
||||
},
|
||||
"error": {
|
||||
"cannot_connect": "[%key:component::orvibo::config::abort::cannot_connect%]",
|
||||
"cannot_discover": "[%key:component::orvibo::config::abort::cannot_discover%]",
|
||||
"invalid_mac": "Invalid MAC address format"
|
||||
},
|
||||
"progress": {
|
||||
"start_discovery": "Attempting to discover new S20 switches\n\nThis will take about 3 seconds\n\nDiscovery may fail if the switch is asleep. If your switch does not appear, please power toggle your switch before re-running discovery.",
|
||||
"title": "Orvibo S20"
|
||||
},
|
||||
"step": {
|
||||
"choose_switch": {
|
||||
"data": {
|
||||
"switches": "Choose discovered switch to configure"
|
||||
},
|
||||
"title": "Discovered switches"
|
||||
},
|
||||
"discovery_failed": {
|
||||
"description": "No S20 switches were discovered on the network. Discovery may have failed if the switch is asleep. Please power toggle your switch before re-running discovery.",
|
||||
"menu_options": {
|
||||
"edit": "Enter configuration manually",
|
||||
"start_discovery": "Try discovering again"
|
||||
},
|
||||
"title": "Discovery failed"
|
||||
},
|
||||
"edit": {
|
||||
"data": {
|
||||
"host": "[%key:common::config_flow::data::host%]",
|
||||
"mac": "MAC address"
|
||||
},
|
||||
"title": "Configure Orvibo S20 switch"
|
||||
},
|
||||
"user": {
|
||||
"menu_options": {
|
||||
"edit": "Enter configuration manually",
|
||||
"start_discovery": "Discover new S20 switches"
|
||||
},
|
||||
"title": "Orvibo S20 Configuration"
|
||||
}
|
||||
}
|
||||
},
|
||||
"exceptions": {
|
||||
"init_error": {
|
||||
"message": "Error while initializing S20 {host}."
|
||||
},
|
||||
"turn_off_error": {
|
||||
"message": "Error while turning off S20 {name}."
|
||||
},
|
||||
"turn_on_error": {
|
||||
"message": "Error while turning on S20 {name}."
|
||||
}
|
||||
},
|
||||
"issues": {
|
||||
"yaml_deprecation": {
|
||||
"description": "The device (MAC: {mac}, Host: {host}) is configured in `configuration.yaml`. The Orvibo integration now supports UI-based configuration and this device has been migrated to the new UI. Please remove the YAML block from `configuration.yaml` to avoid future issues.",
|
||||
"title": "Legacy YAML configuration detected {host}"
|
||||
},
|
||||
"yaml_deprecation_import_issue": {
|
||||
"description": "Attempting to import this device (MAC: {mac}, Host: {host}) from YAML has failed for reason {reason}. 1) Remove the YAML block from `configuration.yaml`, 2) Restart Home Assistant, 3) Add the device using the UI configuration flow.",
|
||||
"title": "Legacy YAML configuration import issue for {host}"
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,14 +1,13 @@
|
||||
"""Switch platform for the Orvibo integration."""
|
||||
"""Support for Orvibo S20 Wifi Smart Switches."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
from orvibo.s20 import S20, S20Exception
|
||||
from orvibo.s20 import S20, S20Exception, discover
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant import config_entries
|
||||
from homeassistant.components.switch import (
|
||||
PLATFORM_SCHEMA as SWITCH_PLATFORM_SCHEMA,
|
||||
SwitchEntity,
|
||||
@@ -21,25 +20,14 @@ from homeassistant.const import (
|
||||
CONF_SWITCHES,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.data_entry_flow import FlowResultType
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers import config_validation as cv, issue_registry as ir
|
||||
from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC, DeviceInfo
|
||||
from homeassistant.helpers.entity_platform import (
|
||||
AddConfigEntryEntitiesCallback,
|
||||
AddEntitiesCallback,
|
||||
)
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
|
||||
|
||||
from .const import DEFAULT_NAME, DOMAIN
|
||||
from .models import S20ConfigEntry
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
DEFAULT_DISCOVERY = False
|
||||
|
||||
# Library is not thread safe and uses global variables, so we limit to 1 update at a time
|
||||
PARALLEL_UPDATES = 1
|
||||
DEFAULT_NAME = "Orvibo S20 Switch"
|
||||
DEFAULT_DISCOVERY = True
|
||||
|
||||
PLATFORM_SCHEMA = SWITCH_PLATFORM_SCHEMA.extend(
|
||||
{
|
||||
@@ -58,138 +46,65 @@ PLATFORM_SCHEMA = SWITCH_PLATFORM_SCHEMA.extend(
|
||||
)
|
||||
|
||||
|
||||
async def async_setup_platform(
|
||||
def setup_platform(
|
||||
hass: HomeAssistant,
|
||||
config: ConfigType,
|
||||
add_entities_callback: AddEntitiesCallback,
|
||||
discovery_info: DiscoveryInfoType | None = None,
|
||||
) -> None:
|
||||
"""Set up the integration from configuration.yaml."""
|
||||
for switch in config.get(CONF_SWITCHES, []):
|
||||
result = await hass.config_entries.flow.async_init(
|
||||
DOMAIN,
|
||||
context={"source": config_entries.SOURCE_IMPORT},
|
||||
data=switch,
|
||||
)
|
||||
"""Set up S20 switches."""
|
||||
|
||||
if (
|
||||
result.get("type") is FlowResultType.ABORT
|
||||
and result.get("reason") != "already_configured"
|
||||
):
|
||||
ir.async_create_issue(
|
||||
hass,
|
||||
DOMAIN,
|
||||
f"yaml_deprecation_import_issue_{switch.get('host')}_{(switch.get('mac') or 'unknown_mac').replace(':', '').lower()}",
|
||||
breaks_in_ha_version="2026.9.0",
|
||||
is_fixable=False,
|
||||
is_persistent=False,
|
||||
issue_domain=DOMAIN,
|
||||
severity=ir.IssueSeverity.WARNING,
|
||||
translation_key="yaml_deprecation_import_issue",
|
||||
translation_placeholders={
|
||||
"reason": str(result.get("reason")),
|
||||
"host": switch.get("host"),
|
||||
"mac": switch.get("mac", ""),
|
||||
},
|
||||
switch_data = {}
|
||||
switches = []
|
||||
switch_conf = config.get(CONF_SWITCHES, [config])
|
||||
|
||||
if config.get(CONF_DISCOVERY):
|
||||
_LOGGER.debug("Discovering S20 switches")
|
||||
switch_data.update(discover())
|
||||
|
||||
for switch in switch_conf:
|
||||
switch_data[switch.get(CONF_HOST)] = switch
|
||||
|
||||
for host, data in switch_data.items():
|
||||
try:
|
||||
switches.append(
|
||||
S20Switch(data.get(CONF_NAME), S20(host, mac=data.get(CONF_MAC)))
|
||||
)
|
||||
continue
|
||||
_LOGGER.debug("Initialized S20 at %s", host)
|
||||
except S20Exception:
|
||||
_LOGGER.error("S20 at %s couldn't be initialized", host)
|
||||
|
||||
ir.async_create_issue(
|
||||
hass,
|
||||
DOMAIN,
|
||||
f"yaml_deprecation_{switch.get('host')}_{(switch.get('mac') or 'unknown_mac').replace(':', '').lower()}",
|
||||
breaks_in_ha_version="2026.9.0",
|
||||
is_fixable=False,
|
||||
is_persistent=False,
|
||||
severity=ir.IssueSeverity.WARNING,
|
||||
translation_key="yaml_deprecation",
|
||||
translation_placeholders={
|
||||
"host": switch.get("host"),
|
||||
"mac": switch.get("mac") or "Unknown MAC",
|
||||
},
|
||||
)
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
entry: S20ConfigEntry,
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up orvibo from a config entry."""
|
||||
async_add_entities(
|
||||
[
|
||||
S20Switch(
|
||||
entry.title,
|
||||
entry.data[CONF_HOST],
|
||||
entry.data[CONF_MAC],
|
||||
entry.runtime_data,
|
||||
)
|
||||
]
|
||||
)
|
||||
add_entities_callback(switches)
|
||||
|
||||
|
||||
class S20Switch(SwitchEntity):
|
||||
"""Representation of an S20 switch."""
|
||||
|
||||
_attr_has_entity_name = True
|
||||
|
||||
def __init__(self, name: str, host: str, mac: str, s20: S20) -> None:
|
||||
def __init__(self, name, s20):
|
||||
"""Initialize the S20 device."""
|
||||
|
||||
self._attr_is_on = False
|
||||
self._host = host
|
||||
self._mac = mac
|
||||
self._attr_name = name
|
||||
self._s20 = s20
|
||||
self._attr_unique_id = self._mac
|
||||
self._name = name
|
||||
self._attr_name = None
|
||||
self._attr_device_info = DeviceInfo(
|
||||
identifiers={
|
||||
# MAC addresses are used as unique identifiers within this domain
|
||||
(DOMAIN, self._attr_unique_id)
|
||||
},
|
||||
name=name,
|
||||
manufacturer="Orvibo",
|
||||
model="S20",
|
||||
connections={(CONNECTION_NETWORK_MAC, self._mac)},
|
||||
)
|
||||
|
||||
def turn_on(self, **kwargs: Any) -> None:
|
||||
"""Turn the device on."""
|
||||
try:
|
||||
self._s20.on = True
|
||||
except S20Exception as err:
|
||||
raise HomeAssistantError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="turn_on_error",
|
||||
translation_placeholders={"name": self._name},
|
||||
) from err
|
||||
|
||||
def turn_off(self, **kwargs: Any) -> None:
|
||||
"""Turn the device off."""
|
||||
try:
|
||||
self._s20.on = False
|
||||
except S20Exception as err:
|
||||
raise HomeAssistantError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="turn_off_error",
|
||||
translation_placeholders={"name": self._name},
|
||||
) from err
|
||||
self._attr_is_on = False
|
||||
self._exc = S20Exception
|
||||
|
||||
def update(self) -> None:
|
||||
"""Update device state."""
|
||||
try:
|
||||
self._attr_is_on = self._s20.on
|
||||
except self._exc:
|
||||
_LOGGER.exception("Error while fetching S20 state")
|
||||
|
||||
# If the device was previously offline, let the user know it's back!
|
||||
if not self._attr_available:
|
||||
_LOGGER.info("Orvibo switch %s reconnected", self._name)
|
||||
self._attr_available = True
|
||||
def turn_on(self, **kwargs: Any) -> None:
|
||||
"""Turn the device on."""
|
||||
try:
|
||||
self._s20.on = True
|
||||
except self._exc:
|
||||
_LOGGER.exception("Error while turning on S20")
|
||||
|
||||
except S20Exception as err:
|
||||
# Only log the error if this is the FIRST time it failed
|
||||
if self._attr_available:
|
||||
_LOGGER.info(
|
||||
"Error communicating with Orvibo switch %s: %s", self._name, err
|
||||
)
|
||||
self._attr_available = False
|
||||
def turn_off(self, **kwargs: Any) -> None:
|
||||
"""Turn the device off."""
|
||||
try:
|
||||
self._s20.on = False
|
||||
except self._exc:
|
||||
_LOGGER.exception("Error while turning off S20")
|
||||
|
||||
@@ -137,10 +137,11 @@ class PhilipsTVLightEntity(PhilipsJsEntity, LightEntity):
|
||||
|
||||
_attr_effect: str
|
||||
_attr_translation_key = "ambilight"
|
||||
_attr_supported_color_modes = {ColorMode.HS}
|
||||
_attr_supported_features = LightEntityFeature.EFFECT
|
||||
|
||||
def __init__(self, coordinator: PhilipsTVDataUpdateCoordinator) -> None:
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: PhilipsTVDataUpdateCoordinator,
|
||||
) -> None:
|
||||
"""Initialize light."""
|
||||
self._tv = coordinator.api
|
||||
self._hs = None
|
||||
@@ -149,6 +150,8 @@ class PhilipsTVLightEntity(PhilipsJsEntity, LightEntity):
|
||||
self._last_selected_effect: AmbilightEffect | None = None
|
||||
super().__init__(coordinator)
|
||||
|
||||
self._attr_supported_color_modes = {ColorMode.HS, ColorMode.ONOFF}
|
||||
self._attr_supported_features = LightEntityFeature.EFFECT
|
||||
self._attr_unique_id = coordinator.unique_id
|
||||
|
||||
self._update_from_coordinator()
|
||||
|
||||
@@ -16,7 +16,7 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from . import PortainerConfigEntry
|
||||
from .const import CONTAINER_STATE_RUNNING, STACK_STATUS_ACTIVE
|
||||
from .coordinator import PortainerContainerData
|
||||
from .coordinator import PortainerContainerData, PortainerCoordinator
|
||||
from .entity import (
|
||||
PortainerContainerEntity,
|
||||
PortainerCoordinatorData,
|
||||
@@ -165,6 +165,18 @@ class PortainerEndpointSensor(PortainerEndpointEntity, BinarySensorEntity):
|
||||
|
||||
entity_description: PortainerEndpointBinarySensorEntityDescription
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: PortainerCoordinator,
|
||||
entity_description: PortainerEndpointBinarySensorEntityDescription,
|
||||
device_info: PortainerCoordinatorData,
|
||||
) -> None:
|
||||
"""Initialize Portainer endpoint binary sensor entity."""
|
||||
self.entity_description = entity_description
|
||||
super().__init__(device_info, coordinator)
|
||||
|
||||
self._attr_unique_id = f"{coordinator.config_entry.entry_id}_{device_info.id}_{entity_description.key}"
|
||||
|
||||
@property
|
||||
def is_on(self) -> bool | None:
|
||||
"""Return true if the binary sensor is on."""
|
||||
@@ -176,6 +188,19 @@ class PortainerContainerSensor(PortainerContainerEntity, BinarySensorEntity):
|
||||
|
||||
entity_description: PortainerContainerBinarySensorEntityDescription
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: PortainerCoordinator,
|
||||
entity_description: PortainerContainerBinarySensorEntityDescription,
|
||||
device_info: PortainerContainerData,
|
||||
via_device: PortainerCoordinatorData,
|
||||
) -> None:
|
||||
"""Initialize the Portainer container sensor."""
|
||||
self.entity_description = entity_description
|
||||
super().__init__(device_info, coordinator, via_device)
|
||||
|
||||
self._attr_unique_id = f"{coordinator.config_entry.entry_id}_{self.device_name}_{entity_description.key}"
|
||||
|
||||
@property
|
||||
def is_on(self) -> bool | None:
|
||||
"""Return true if the binary sensor is on."""
|
||||
@@ -187,6 +212,19 @@ class PortainerStackSensor(PortainerStackEntity, BinarySensorEntity):
|
||||
|
||||
entity_description: PortainerStackBinarySensorEntityDescription
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: PortainerCoordinator,
|
||||
entity_description: PortainerStackBinarySensorEntityDescription,
|
||||
device_info: PortainerStackData,
|
||||
via_device: PortainerCoordinatorData,
|
||||
) -> None:
|
||||
"""Initialize the Portainer stack sensor."""
|
||||
self.entity_description = entity_description
|
||||
super().__init__(device_info, coordinator, via_device)
|
||||
|
||||
self._attr_unique_id = f"{coordinator.config_entry.entry_id}_{device_info.stack.id}_{entity_description.key}"
|
||||
|
||||
@property
|
||||
def is_on(self) -> bool | None:
|
||||
"""Return true if the binary sensor is on."""
|
||||
|
||||
@@ -167,6 +167,18 @@ class PortainerEndpointButton(PortainerEndpointEntity, PortainerBaseButton):
|
||||
|
||||
entity_description: PortainerButtonDescription
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: PortainerCoordinator,
|
||||
entity_description: PortainerButtonDescription,
|
||||
device_info: PortainerCoordinatorData,
|
||||
) -> None:
|
||||
"""Initialize the Portainer endpoint button entity."""
|
||||
self.entity_description = entity_description
|
||||
super().__init__(device_info, coordinator)
|
||||
|
||||
self._attr_unique_id = f"{coordinator.config_entry.entry_id}_{device_info.id}_{entity_description.key}"
|
||||
|
||||
async def _async_press_call(self) -> None:
|
||||
"""Call the endpoint button press action."""
|
||||
await self.entity_description.press_action(
|
||||
@@ -179,6 +191,19 @@ class PortainerContainerButton(PortainerContainerEntity, PortainerBaseButton):
|
||||
|
||||
entity_description: PortainerButtonDescription
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: PortainerCoordinator,
|
||||
entity_description: PortainerButtonDescription,
|
||||
device_info: PortainerContainerData,
|
||||
via_device: PortainerCoordinatorData,
|
||||
) -> None:
|
||||
"""Initialize the Portainer button entity."""
|
||||
self.entity_description = entity_description
|
||||
super().__init__(device_info, coordinator, via_device)
|
||||
|
||||
self._attr_unique_id = f"{coordinator.config_entry.entry_id}_{self.device_name}_{entity_description.key}"
|
||||
|
||||
async def _async_press_call(self) -> None:
|
||||
"""Call the container button press action."""
|
||||
await self.entity_description.press_action(
|
||||
|
||||
@@ -170,11 +170,11 @@ class PortainerCoordinator(DataUpdateCoordinator[dict[int, PortainerCoordinatorD
|
||||
docker_system_df,
|
||||
stacks,
|
||||
) = await asyncio.gather(
|
||||
self.portainer.get_containers(endpoint.id),
|
||||
self.portainer.docker_version(endpoint.id),
|
||||
self.portainer.docker_info(endpoint.id),
|
||||
self.portainer.get_containers(endpoint_id=endpoint.id),
|
||||
self.portainer.docker_version(endpoint_id=endpoint.id),
|
||||
self.portainer.docker_info(endpoint_id=endpoint.id),
|
||||
self.portainer.docker_system_df(endpoint.id),
|
||||
self.portainer.get_stacks(endpoint.id),
|
||||
self.portainer.get_stacks(endpoint_id=endpoint.id),
|
||||
)
|
||||
|
||||
prev_endpoint = self.data.get(endpoint.id) if self.data else None
|
||||
|
||||
@@ -4,7 +4,6 @@ from yarl import URL
|
||||
|
||||
from homeassistant.const import CONF_URL
|
||||
from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo
|
||||
from homeassistant.helpers.entity import EntityDescription
|
||||
from homeassistant.helpers.update_coordinator import CoordinatorEntity
|
||||
|
||||
from .const import DEFAULT_NAME, DOMAIN
|
||||
@@ -27,13 +26,11 @@ class PortainerEndpointEntity(PortainerCoordinatorEntity):
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: PortainerCoordinator,
|
||||
entity_description: EntityDescription,
|
||||
device_info: PortainerCoordinatorData,
|
||||
coordinator: PortainerCoordinator,
|
||||
) -> None:
|
||||
"""Initialize a Portainer endpoint."""
|
||||
super().__init__(coordinator)
|
||||
self.entity_description = entity_description
|
||||
self._device_info = device_info
|
||||
self.device_id = device_info.endpoint.id
|
||||
self._attr_device_info = DeviceInfo(
|
||||
@@ -48,7 +45,6 @@ class PortainerEndpointEntity(PortainerCoordinatorEntity):
|
||||
name=device_info.endpoint.name,
|
||||
entry_type=DeviceEntryType.SERVICE,
|
||||
)
|
||||
self._attr_unique_id = f"{coordinator.config_entry.entry_id}_{device_info.id}_{entity_description.key}"
|
||||
|
||||
@property
|
||||
def available(self) -> bool:
|
||||
@@ -61,14 +57,12 @@ class PortainerContainerEntity(PortainerCoordinatorEntity):
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: PortainerCoordinator,
|
||||
entity_description: EntityDescription,
|
||||
device_info: PortainerContainerData,
|
||||
coordinator: PortainerCoordinator,
|
||||
via_device: PortainerCoordinatorData,
|
||||
) -> None:
|
||||
"""Initialize a Portainer container."""
|
||||
super().__init__(coordinator)
|
||||
self.entity_description = entity_description
|
||||
self._device_info = device_info
|
||||
self.device_id = self._device_info.container.id
|
||||
self.endpoint_id = via_device.endpoint.id
|
||||
@@ -97,14 +91,13 @@ class PortainerContainerEntity(PortainerCoordinatorEntity):
|
||||
# else it's the endpoint
|
||||
via_device=(
|
||||
DOMAIN,
|
||||
f"{coordinator.config_entry.entry_id}_{self.endpoint_id}_stack_{device_info.stack.id}"
|
||||
f"{coordinator.config_entry.entry_id}_{self.endpoint_id}_{device_info.stack.name}"
|
||||
if device_info.stack
|
||||
else f"{coordinator.config_entry.entry_id}_{self.endpoint_id}",
|
||||
),
|
||||
translation_key=None if self.device_name else "unknown_container",
|
||||
entry_type=DeviceEntryType.SERVICE,
|
||||
)
|
||||
self._attr_unique_id = f"{coordinator.config_entry.entry_id}_{self.device_name}_{entity_description.key}"
|
||||
|
||||
@property
|
||||
def available(self) -> bool:
|
||||
@@ -126,14 +119,12 @@ class PortainerStackEntity(PortainerCoordinatorEntity):
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: PortainerCoordinator,
|
||||
entity_description: EntityDescription,
|
||||
device_info: PortainerStackData,
|
||||
coordinator: PortainerCoordinator,
|
||||
via_device: PortainerCoordinatorData,
|
||||
) -> None:
|
||||
"""Initialize a Portainer stack."""
|
||||
super().__init__(coordinator)
|
||||
self.entity_description = entity_description
|
||||
self._device_info = device_info
|
||||
self.stack_id = device_info.stack.id
|
||||
self.device_name = device_info.stack.name
|
||||
@@ -144,7 +135,7 @@ class PortainerStackEntity(PortainerCoordinatorEntity):
|
||||
identifiers={
|
||||
(
|
||||
DOMAIN,
|
||||
f"{coordinator.config_entry.entry_id}_{self.endpoint_id}_stack_{self.stack_id}",
|
||||
f"{coordinator.config_entry.entry_id}_{self.endpoint_id}_{self.device_name}",
|
||||
)
|
||||
},
|
||||
manufacturer=DEFAULT_NAME,
|
||||
@@ -158,7 +149,6 @@ class PortainerStackEntity(PortainerCoordinatorEntity):
|
||||
f"{coordinator.config_entry.entry_id}_{self.endpoint_id}",
|
||||
),
|
||||
)
|
||||
self._attr_unique_id = f"{coordinator.config_entry.entry_id}_{self.stack_id}_{entity_description.key}"
|
||||
|
||||
@property
|
||||
def available(self) -> bool:
|
||||
|
||||
@@ -21,6 +21,7 @@ from .const import STACK_TYPE_COMPOSE, STACK_TYPE_KUBERNETES, STACK_TYPE_SWARM
|
||||
from .coordinator import (
|
||||
PortainerConfigEntry,
|
||||
PortainerContainerData,
|
||||
PortainerCoordinator,
|
||||
PortainerStackData,
|
||||
)
|
||||
from .entity import (
|
||||
@@ -397,6 +398,19 @@ class PortainerContainerSensor(PortainerContainerEntity, SensorEntity):
|
||||
|
||||
entity_description: PortainerContainerSensorEntityDescription
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: PortainerCoordinator,
|
||||
entity_description: PortainerContainerSensorEntityDescription,
|
||||
device_info: PortainerContainerData,
|
||||
via_device: PortainerCoordinatorData,
|
||||
) -> None:
|
||||
"""Initialize the Portainer container sensor."""
|
||||
self.entity_description = entity_description
|
||||
super().__init__(device_info, coordinator, via_device)
|
||||
|
||||
self._attr_unique_id = f"{coordinator.config_entry.entry_id}_{self.device_name}_{entity_description.key}"
|
||||
|
||||
@property
|
||||
def native_value(self) -> StateType:
|
||||
"""Return the state of the sensor."""
|
||||
@@ -408,6 +422,18 @@ class PortainerEndpointSensor(PortainerEndpointEntity, SensorEntity):
|
||||
|
||||
entity_description: PortainerEndpointSensorEntityDescription
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: PortainerCoordinator,
|
||||
entity_description: PortainerEndpointSensorEntityDescription,
|
||||
device_info: PortainerCoordinatorData,
|
||||
) -> None:
|
||||
"""Initialize the Portainer endpoint sensor."""
|
||||
self.entity_description = entity_description
|
||||
super().__init__(device_info, coordinator)
|
||||
|
||||
self._attr_unique_id = f"{coordinator.config_entry.entry_id}_{device_info.id}_{entity_description.key}"
|
||||
|
||||
@property
|
||||
def native_value(self) -> StateType:
|
||||
"""Return the state of the sensor."""
|
||||
@@ -420,6 +446,19 @@ class PortainerStackSensor(PortainerStackEntity, SensorEntity):
|
||||
|
||||
entity_description: PortainerStackSensorEntityDescription
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: PortainerCoordinator,
|
||||
entity_description: PortainerStackSensorEntityDescription,
|
||||
device_info: PortainerStackData,
|
||||
via_device: PortainerCoordinatorData,
|
||||
) -> None:
|
||||
"""Initialize the Portainer stack sensor."""
|
||||
self.entity_description = entity_description
|
||||
super().__init__(device_info, coordinator, via_device)
|
||||
|
||||
self._attr_unique_id = f"{coordinator.config_entry.entry_id}_{device_info.stack.id}_{entity_description.key}"
|
||||
|
||||
@property
|
||||
def native_value(self) -> StateType:
|
||||
"""Return the state of the sensor."""
|
||||
|
||||
@@ -167,6 +167,19 @@ class PortainerContainerSwitch(PortainerContainerEntity, SwitchEntity):
|
||||
|
||||
entity_description: PortainerSwitchEntityDescription
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: PortainerCoordinator,
|
||||
entity_description: PortainerSwitchEntityDescription,
|
||||
device_info: PortainerContainerData,
|
||||
via_device: PortainerCoordinatorData,
|
||||
) -> None:
|
||||
"""Initialize the Portainer container switch."""
|
||||
self.entity_description = entity_description
|
||||
super().__init__(device_info, coordinator, via_device)
|
||||
|
||||
self._attr_unique_id = f"{coordinator.config_entry.entry_id}_{self.device_name}_{entity_description.key}"
|
||||
|
||||
@property
|
||||
def is_on(self) -> bool | None:
|
||||
"""Return the state of the device."""
|
||||
@@ -196,6 +209,19 @@ class PortainerStackSwitch(PortainerStackEntity, SwitchEntity):
|
||||
|
||||
entity_description: PortainerStackSwitchEntityDescription
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: PortainerCoordinator,
|
||||
entity_description: PortainerStackSwitchEntityDescription,
|
||||
device_info: PortainerStackData,
|
||||
via_device: PortainerCoordinatorData,
|
||||
) -> None:
|
||||
"""Initialize the Portainer stack switch."""
|
||||
self.entity_description = entity_description
|
||||
super().__init__(device_info, coordinator, via_device)
|
||||
|
||||
self._attr_unique_id = f"{coordinator.config_entry.entry_id}_{device_info.stack.id}_{entity_description.key}"
|
||||
|
||||
@property
|
||||
def is_on(self) -> bool | None:
|
||||
"""Return the state of the device."""
|
||||
|
||||
@@ -17,7 +17,7 @@ from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from .const import NODE_ONLINE, VM_CONTAINER_RUNNING
|
||||
from .coordinator import ProxmoxConfigEntry, ProxmoxNodeData
|
||||
from .coordinator import ProxmoxConfigEntry, ProxmoxCoordinator, ProxmoxNodeData
|
||||
from .entity import ProxmoxContainerEntity, ProxmoxNodeEntity, ProxmoxVMEntity
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
@@ -147,6 +147,18 @@ class ProxmoxNodeBinarySensor(ProxmoxNodeEntity, BinarySensorEntity):
|
||||
|
||||
entity_description: ProxmoxNodeBinarySensorEntityDescription
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: ProxmoxCoordinator,
|
||||
entity_description: ProxmoxNodeBinarySensorEntityDescription,
|
||||
node_data: ProxmoxNodeData,
|
||||
) -> None:
|
||||
"""Initialize Proxmox node binary sensor entity."""
|
||||
self.entity_description = entity_description
|
||||
super().__init__(coordinator, node_data)
|
||||
|
||||
self._attr_unique_id = f"{coordinator.config_entry.entry_id}_{node_data.node['id']}_{entity_description.key}"
|
||||
|
||||
@property
|
||||
def is_on(self) -> bool | None:
|
||||
"""Return true if the binary sensor is on."""
|
||||
@@ -158,6 +170,19 @@ class ProxmoxVMBinarySensor(ProxmoxVMEntity, BinarySensorEntity):
|
||||
|
||||
entity_description: ProxmoxVMBinarySensorEntityDescription
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: ProxmoxCoordinator,
|
||||
entity_description: ProxmoxVMBinarySensorEntityDescription,
|
||||
vm_data: dict[str, Any],
|
||||
node_data: ProxmoxNodeData,
|
||||
) -> None:
|
||||
"""Initialize the Proxmox VM binary sensor."""
|
||||
self.entity_description = entity_description
|
||||
super().__init__(coordinator, vm_data, node_data)
|
||||
|
||||
self._attr_unique_id = f"{coordinator.config_entry.entry_id}_{self.device_id}_{entity_description.key}"
|
||||
|
||||
@property
|
||||
def is_on(self) -> bool | None:
|
||||
"""Return true if the binary sensor is on."""
|
||||
@@ -169,6 +194,19 @@ class ProxmoxContainerBinarySensor(ProxmoxContainerEntity, BinarySensorEntity):
|
||||
|
||||
entity_description: ProxmoxContainerBinarySensorEntityDescription
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: ProxmoxCoordinator,
|
||||
entity_description: ProxmoxContainerBinarySensorEntityDescription,
|
||||
container_data: dict[str, Any],
|
||||
node_data: ProxmoxNodeData,
|
||||
) -> None:
|
||||
"""Initialize the Proxmox Container binary sensor."""
|
||||
self.entity_description = entity_description
|
||||
super().__init__(coordinator, container_data, node_data)
|
||||
|
||||
self._attr_unique_id = f"{coordinator.config_entry.entry_id}_{self.device_id}_{entity_description.key}"
|
||||
|
||||
@property
|
||||
def is_on(self) -> bool | None:
|
||||
"""Return true if the binary sensor is on."""
|
||||
|
||||
@@ -262,6 +262,18 @@ class ProxmoxNodeButtonEntity(ProxmoxNodeEntity, ProxmoxBaseButton):
|
||||
|
||||
entity_description: ProxmoxNodeButtonNodeEntityDescription
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: ProxmoxCoordinator,
|
||||
entity_description: ProxmoxNodeButtonNodeEntityDescription,
|
||||
node_data: ProxmoxNodeData,
|
||||
) -> None:
|
||||
"""Initialize the Proxmox Node button entity."""
|
||||
self.entity_description = entity_description
|
||||
super().__init__(coordinator, node_data)
|
||||
|
||||
self._attr_unique_id = f"{coordinator.config_entry.entry_id}_{node_data.node['id']}_{entity_description.key}"
|
||||
|
||||
async def _async_press_call(self) -> None:
|
||||
"""Execute the node button action via executor."""
|
||||
await self.hass.async_add_executor_job(
|
||||
@@ -276,6 +288,19 @@ class ProxmoxVMButtonEntity(ProxmoxVMEntity, ProxmoxBaseButton):
|
||||
|
||||
entity_description: ProxmoxVMButtonEntityDescription
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: ProxmoxCoordinator,
|
||||
entity_description: ProxmoxVMButtonEntityDescription,
|
||||
vm_data: dict[str, Any],
|
||||
node_data: ProxmoxNodeData,
|
||||
) -> None:
|
||||
"""Initialize the Proxmox VM button entity."""
|
||||
self.entity_description = entity_description
|
||||
super().__init__(coordinator, vm_data, node_data)
|
||||
|
||||
self._attr_unique_id = f"{coordinator.config_entry.entry_id}_{self.device_id}_{entity_description.key}"
|
||||
|
||||
async def _async_press_call(self) -> None:
|
||||
"""Execute the VM button action via executor."""
|
||||
await self.hass.async_add_executor_job(
|
||||
@@ -291,6 +316,19 @@ class ProxmoxContainerButtonEntity(ProxmoxContainerEntity, ProxmoxBaseButton):
|
||||
|
||||
entity_description: ProxmoxContainerButtonEntityDescription
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: ProxmoxCoordinator,
|
||||
entity_description: ProxmoxContainerButtonEntityDescription,
|
||||
container_data: dict[str, Any],
|
||||
node_data: ProxmoxNodeData,
|
||||
) -> None:
|
||||
"""Initialize the Proxmox Container button entity."""
|
||||
self.entity_description = entity_description
|
||||
super().__init__(coordinator, container_data, node_data)
|
||||
|
||||
self._attr_unique_id = f"{coordinator.config_entry.entry_id}_{self.device_id}_{entity_description.key}"
|
||||
|
||||
async def _async_press_call(self) -> None:
|
||||
"""Execute the container button action via executor."""
|
||||
await self.hass.async_add_executor_job(
|
||||
|
||||
@@ -8,7 +8,6 @@ from yarl import URL
|
||||
|
||||
from homeassistant.const import CONF_HOST, CONF_PORT
|
||||
from homeassistant.helpers.device_registry import DeviceInfo
|
||||
from homeassistant.helpers.entity import EntityDescription
|
||||
from homeassistant.helpers.update_coordinator import CoordinatorEntity
|
||||
|
||||
from .const import DOMAIN
|
||||
@@ -37,7 +36,6 @@ class ProxmoxNodeEntity(ProxmoxCoordinatorEntity):
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: ProxmoxCoordinator,
|
||||
entity_description: EntityDescription,
|
||||
node_data: ProxmoxNodeData,
|
||||
) -> None:
|
||||
"""Initialize the Proxmox node entity."""
|
||||
@@ -45,7 +43,6 @@ class ProxmoxNodeEntity(ProxmoxCoordinatorEntity):
|
||||
self._node_data = node_data
|
||||
self.device_id = node_data.node["id"]
|
||||
self.device_name = node_data.node["node"]
|
||||
self.entity_description = entity_description
|
||||
self._attr_device_info = DeviceInfo(
|
||||
identifiers={
|
||||
(DOMAIN, f"{coordinator.config_entry.entry_id}_node_{self.device_id}")
|
||||
@@ -57,8 +54,6 @@ class ProxmoxNodeEntity(ProxmoxCoordinatorEntity):
|
||||
),
|
||||
)
|
||||
|
||||
self._attr_unique_id = f"{coordinator.config_entry.entry_id}_{node_data.node['id']}_{entity_description.key}"
|
||||
|
||||
@property
|
||||
def available(self) -> bool:
|
||||
"""Return if the device is available."""
|
||||
@@ -71,13 +66,11 @@ class ProxmoxVMEntity(ProxmoxCoordinatorEntity):
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: ProxmoxCoordinator,
|
||||
entity_description: EntityDescription,
|
||||
vm_data: dict[str, Any],
|
||||
node_data: ProxmoxNodeData,
|
||||
) -> None:
|
||||
"""Initialize the Proxmox VM entity."""
|
||||
super().__init__(coordinator)
|
||||
self.entity_description = entity_description
|
||||
self._vm_data = vm_data
|
||||
self._node_name = node_data.node["node"]
|
||||
self.device_id = vm_data["vmid"]
|
||||
@@ -98,8 +91,6 @@ class ProxmoxVMEntity(ProxmoxCoordinatorEntity):
|
||||
),
|
||||
)
|
||||
|
||||
self._attr_unique_id = f"{coordinator.config_entry.entry_id}_{self.device_id}_{entity_description.key}"
|
||||
|
||||
@property
|
||||
def available(self) -> bool:
|
||||
"""Return if the device is available."""
|
||||
@@ -121,13 +112,11 @@ class ProxmoxContainerEntity(ProxmoxCoordinatorEntity):
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: ProxmoxCoordinator,
|
||||
entity_description: EntityDescription,
|
||||
container_data: dict[str, Any],
|
||||
node_data: ProxmoxNodeData,
|
||||
) -> None:
|
||||
"""Initialize the Proxmox Container entity."""
|
||||
super().__init__(coordinator)
|
||||
self.entity_description = entity_description
|
||||
self._container_data = container_data
|
||||
self._node_name = node_data.node["node"]
|
||||
self.device_id = container_data["vmid"]
|
||||
@@ -151,8 +140,6 @@ class ProxmoxContainerEntity(ProxmoxCoordinatorEntity):
|
||||
),
|
||||
)
|
||||
|
||||
self._attr_unique_id = f"{coordinator.config_entry.entry_id}_{self.device_id}_{entity_description.key}"
|
||||
|
||||
@property
|
||||
def available(self) -> bool:
|
||||
"""Return if the device is available."""
|
||||
|
||||
@@ -18,7 +18,7 @@ from homeassistant.const import PERCENTAGE, UnitOfInformation
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from .coordinator import ProxmoxConfigEntry, ProxmoxNodeData
|
||||
from .coordinator import ProxmoxConfigEntry, ProxmoxCoordinator, ProxmoxNodeData
|
||||
from .entity import ProxmoxContainerEntity, ProxmoxNodeEntity, ProxmoxVMEntity
|
||||
|
||||
|
||||
@@ -320,6 +320,18 @@ class ProxmoxNodeSensor(ProxmoxNodeEntity, SensorEntity):
|
||||
|
||||
entity_description: ProxmoxNodeSensorEntityDescription
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: ProxmoxCoordinator,
|
||||
entity_description: ProxmoxNodeSensorEntityDescription,
|
||||
node_data: ProxmoxNodeData,
|
||||
) -> None:
|
||||
"""Initialize the sensor."""
|
||||
super().__init__(coordinator, node_data)
|
||||
self.entity_description = entity_description
|
||||
|
||||
self._attr_unique_id = f"{coordinator.config_entry.entry_id}_{node_data.node['id']}_{entity_description.key}"
|
||||
|
||||
@property
|
||||
def native_value(self) -> StateType:
|
||||
"""Return the native value of the sensor."""
|
||||
@@ -331,6 +343,19 @@ class ProxmoxVMSensor(ProxmoxVMEntity, SensorEntity):
|
||||
|
||||
entity_description: ProxmoxVMSensorEntityDescription
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: ProxmoxCoordinator,
|
||||
entity_description: ProxmoxVMSensorEntityDescription,
|
||||
vm_data: dict[str, Any],
|
||||
node_data: ProxmoxNodeData,
|
||||
) -> None:
|
||||
"""Initialize the Proxmox VM sensor."""
|
||||
self.entity_description = entity_description
|
||||
super().__init__(coordinator, vm_data, node_data)
|
||||
|
||||
self._attr_unique_id = f"{coordinator.config_entry.entry_id}_{self.device_id}_{entity_description.key}"
|
||||
|
||||
@property
|
||||
def native_value(self) -> StateType:
|
||||
"""Return the native value of the sensor."""
|
||||
@@ -342,6 +367,19 @@ class ProxmoxContainerSensor(ProxmoxContainerEntity, SensorEntity):
|
||||
|
||||
entity_description: ProxmoxContainerSensorEntityDescription
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: ProxmoxCoordinator,
|
||||
entity_description: ProxmoxContainerSensorEntityDescription,
|
||||
container_data: dict[str, Any],
|
||||
node_data: ProxmoxNodeData,
|
||||
) -> None:
|
||||
"""Initialize the Proxmox container sensor."""
|
||||
self.entity_description = entity_description
|
||||
super().__init__(coordinator, container_data, node_data)
|
||||
|
||||
self._attr_unique_id = f"{coordinator.config_entry.entry_id}_{self.device_id}_{entity_description.key}"
|
||||
|
||||
@property
|
||||
def native_value(self) -> StateType:
|
||||
"""Return the native value of the sensor."""
|
||||
|
||||
@@ -32,14 +32,6 @@
|
||||
"username": "[%key:common::config_flow::data::username%]",
|
||||
"verify_ssl": "[%key:common::config_flow::data::verify_ssl%]"
|
||||
},
|
||||
"data_description": {
|
||||
"host": "[%key:component::proxmoxve::config::step::user::data_description::host%]",
|
||||
"password": "[%key:component::proxmoxve::config::step::user::data_description::password%]",
|
||||
"port": "[%key:component::proxmoxve::config::step::user::data_description::port%]",
|
||||
"realm": "[%key:component::proxmoxve::config::step::user::data_description::realm%]",
|
||||
"username": "[%key:component::proxmoxve::config::step::user::data_description::username%]",
|
||||
"verify_ssl": "[%key:component::proxmoxve::config::step::user::data_description::verify_ssl%]"
|
||||
},
|
||||
"description": "Use the following form to reconfigure your Proxmox VE server connection.",
|
||||
"title": "Reconfigure Proxmox VE integration"
|
||||
},
|
||||
@@ -52,14 +44,6 @@
|
||||
"username": "[%key:common::config_flow::data::username%]",
|
||||
"verify_ssl": "[%key:common::config_flow::data::verify_ssl%]"
|
||||
},
|
||||
"data_description": {
|
||||
"host": "The hostname or IP address of your Proxmox VE server",
|
||||
"password": "The password for the Proxmox VE server",
|
||||
"port": "The port of your Proxmox VE server (default: 8006)",
|
||||
"realm": "The authentication realm for the Proxmox VE server (default: 'pam')",
|
||||
"username": "The username for the Proxmox VE server",
|
||||
"verify_ssl": "Whether to verify SSL certificates. Disable only if you have a self-signed certificate"
|
||||
},
|
||||
"description": "Enter your Proxmox VE server details to set up the integration.",
|
||||
"title": "Connect to Proxmox VE"
|
||||
}
|
||||
|
||||
@@ -4,16 +4,11 @@ from __future__ import annotations
|
||||
|
||||
from pycognito.exceptions import WarrantException
|
||||
import pyschlage
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components.lock import DOMAIN as LOCK_DOMAIN
|
||||
from homeassistant.const import CONF_PASSWORD, CONF_USERNAME, Platform
|
||||
from homeassistant.core import HomeAssistant, SupportsResponse
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import ConfigEntryAuthFailed
|
||||
from homeassistant.helpers import config_validation as cv, service
|
||||
from homeassistant.helpers.typing import ConfigType
|
||||
|
||||
from .const import DOMAIN, SERVICE_ADD_CODE, SERVICE_DELETE_CODE, SERVICE_GET_CODES
|
||||
from .coordinator import SchlageConfigEntry, SchlageDataUpdateCoordinator
|
||||
|
||||
PLATFORMS: list[Platform] = [
|
||||
@@ -24,46 +19,6 @@ PLATFORMS: list[Platform] = [
|
||||
Platform.SWITCH,
|
||||
]
|
||||
|
||||
CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)
|
||||
|
||||
|
||||
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
"""Set up the Schlage component."""
|
||||
service.async_register_platform_entity_service(
|
||||
hass,
|
||||
DOMAIN,
|
||||
SERVICE_ADD_CODE,
|
||||
entity_domain=LOCK_DOMAIN,
|
||||
schema={
|
||||
vol.Required("name"): cv.string,
|
||||
vol.Required("code"): cv.matches_regex(r"^\d{4,8}$"),
|
||||
},
|
||||
func=SERVICE_ADD_CODE,
|
||||
)
|
||||
|
||||
service.async_register_platform_entity_service(
|
||||
hass,
|
||||
DOMAIN,
|
||||
SERVICE_DELETE_CODE,
|
||||
entity_domain=LOCK_DOMAIN,
|
||||
schema={
|
||||
vol.Required("name"): cv.string,
|
||||
},
|
||||
func=SERVICE_DELETE_CODE,
|
||||
)
|
||||
|
||||
service.async_register_platform_entity_service(
|
||||
hass,
|
||||
DOMAIN,
|
||||
SERVICE_GET_CODES,
|
||||
entity_domain=LOCK_DOMAIN,
|
||||
schema=None,
|
||||
func=SERVICE_GET_CODES,
|
||||
supports_response=SupportsResponse.ONLY,
|
||||
)
|
||||
|
||||
return True
|
||||
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: SchlageConfigEntry) -> bool:
|
||||
"""Set up Schlage from a config entry."""
|
||||
|
||||
@@ -7,7 +7,3 @@ DOMAIN = "schlage"
|
||||
LOGGER = logging.getLogger(__package__)
|
||||
MANUFACTURER = "Schlage"
|
||||
UPDATE_INTERVAL = timedelta(seconds=30)
|
||||
|
||||
SERVICE_ADD_CODE = "add_code"
|
||||
SERVICE_DELETE_CODE = "delete_code"
|
||||
SERVICE_GET_CODES = "get_codes"
|
||||
|
||||
@@ -1,13 +0,0 @@
|
||||
{
|
||||
"services": {
|
||||
"add_code": {
|
||||
"service": "mdi:key-plus"
|
||||
},
|
||||
"delete_code": {
|
||||
"service": "mdi:key-minus"
|
||||
},
|
||||
"get_codes": {
|
||||
"service": "mdi:table-key"
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -4,15 +4,10 @@ from __future__ import annotations
|
||||
|
||||
from typing import Any
|
||||
|
||||
from pyschlage.code import AccessCode
|
||||
from pyschlage.exceptions import Error as SchlageError
|
||||
|
||||
from homeassistant.components.lock import LockEntity
|
||||
from homeassistant.core import HomeAssistant, ServiceResponse, callback
|
||||
from homeassistant.exceptions import HomeAssistantError, ServiceValidationError
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from .const import DOMAIN
|
||||
from .coordinator import LockData, SchlageConfigEntry, SchlageDataUpdateCoordinator
|
||||
from .entity import SchlageEntity
|
||||
|
||||
@@ -69,108 +64,3 @@ class SchlageLockEntity(SchlageEntity, LockEntity):
|
||||
"""Unlock the device."""
|
||||
await self.hass.async_add_executor_job(self._lock.unlock)
|
||||
await self.coordinator.async_request_refresh()
|
||||
|
||||
@staticmethod
|
||||
def _normalize_code_name(name: str) -> str:
|
||||
"""Normalize a code name for comparison."""
|
||||
return name.lower().strip()
|
||||
|
||||
def _validate_code_name(
|
||||
self, codes: dict[str, AccessCode] | None, name: str
|
||||
) -> None:
|
||||
"""Validate that the code name doesn't already exist."""
|
||||
normalized = self._normalize_code_name(name)
|
||||
if codes and any(
|
||||
self._normalize_code_name(code.name) == normalized
|
||||
for code in codes.values()
|
||||
):
|
||||
raise ServiceValidationError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="schlage_name_exists",
|
||||
translation_placeholders={"name": name},
|
||||
)
|
||||
|
||||
def _validate_code_value(
|
||||
self, codes: dict[str, AccessCode] | None, code: str
|
||||
) -> None:
|
||||
"""Validate that the code value doesn't already exist."""
|
||||
if codes and any(
|
||||
existing_code.code == code for existing_code in codes.values()
|
||||
):
|
||||
raise ServiceValidationError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="schlage_code_exists",
|
||||
)
|
||||
|
||||
    async def _async_fetch_access_codes(self) -> dict[str, AccessCode] | None:
        """Fetch access codes from the lock on demand.

        Refreshes the lock's access-code cache in an executor (the pyschlage
        client is blocking) and returns the refreshed mapping.

        Raises:
            HomeAssistantError: if the refresh call fails with a SchlageError.
        """
        try:
            # Blocking library call; run off the event loop.
            await self.hass.async_add_executor_job(self._lock.refresh_access_codes)
        except SchlageError as ex:
            raise HomeAssistantError(
                translation_domain=DOMAIN,
                translation_key="schlage_refresh_failed",
            ) from ex
        return self._lock.access_codes
|
||||
|
||||
    async def add_code(self, name: str, code: str) -> None:
        """Add a lock code.

        Fetches current codes first so duplicate names/values can be
        rejected before contacting the lock.

        Raises:
            ServiceValidationError: if the name or code already exists.
            HomeAssistantError: if the lock rejects the new code.
        """

        codes = await self._async_fetch_access_codes()
        self._validate_code_name(codes, name)
        self._validate_code_value(codes, code)

        access_code = AccessCode(name=name, code=code)
        try:
            # Blocking library call; run off the event loop.
            await self.hass.async_add_executor_job(
                self._lock.add_access_code, access_code
            )
        except SchlageError as ex:
            raise HomeAssistantError(
                translation_domain=DOMAIN,
                translation_key="schlage_add_code_failed",
            ) from ex
        # Refresh entity state so the new code is reflected promptly.
        await self.coordinator.async_request_refresh()
|
||||
|
||||
    async def delete_code(self, name: str) -> None:
        """Delete a lock code.

        The lookup is case-insensitive on the code name.  Deleting a name
        that does not exist is treated as success (idempotent delete).

        Raises:
            HomeAssistantError: if the lock rejects the deletion.
        """
        codes = await self._async_fetch_access_codes()
        if not codes:
            return

        normalized = self._normalize_code_name(name)
        code_id_to_delete = next(
            (
                code_id
                for code_id, code_data in codes.items()
                if self._normalize_code_name(code_data.name) == normalized
            ),
            None,
        )

        if not code_id_to_delete:
            # Code not found in defined codes, operation successful
            return

        try:
            # Blocking library call; run off the event loop.
            await self.hass.async_add_executor_job(codes[code_id_to_delete].delete)
        except SchlageError as ex:
            raise HomeAssistantError(
                translation_domain=DOMAIN,
                translation_key="schlage_delete_code_failed",
            ) from ex
        # Refresh entity state so the removed code disappears promptly.
        await self.coordinator.async_request_refresh()
|
||||
|
||||
async def get_codes(self) -> ServiceResponse:
|
||||
"""Get lock codes."""
|
||||
await self._async_fetch_access_codes()
|
||||
|
||||
if self._lock.access_codes:
|
||||
return {
|
||||
code: {
|
||||
"name": self._lock.access_codes[code].name,
|
||||
"code": self._lock.access_codes[code].code,
|
||||
}
|
||||
for code in self._lock.access_codes
|
||||
}
|
||||
return {}
|
||||
|
||||
@@ -1,38 +0,0 @@
|
||||
get_codes:
|
||||
target:
|
||||
entity:
|
||||
domain: lock
|
||||
integration: schlage
|
||||
|
||||
add_code:
|
||||
target:
|
||||
entity:
|
||||
domain: lock
|
||||
integration: schlage
|
||||
fields:
|
||||
name:
|
||||
required: true
|
||||
example: "Example Person"
|
||||
selector:
|
||||
text:
|
||||
multiline: false
|
||||
code:
|
||||
required: true
|
||||
example: "1111"
|
||||
selector:
|
||||
text:
|
||||
multiline: false
|
||||
type: password
|
||||
|
||||
delete_code:
|
||||
target:
|
||||
entity:
|
||||
domain: lock
|
||||
integration: schlage
|
||||
fields:
|
||||
name:
|
||||
required: true
|
||||
example: "Example Person"
|
||||
selector:
|
||||
text:
|
||||
multiline: false
|
||||
@@ -56,50 +56,8 @@
|
||||
}
|
||||
},
|
||||
"exceptions": {
|
||||
"schlage_add_code_failed": {
|
||||
"message": "Failed to add PIN code to the lock."
|
||||
},
|
||||
"schlage_code_exists": {
|
||||
"message": "A PIN code with this value already exists on the lock."
|
||||
},
|
||||
"schlage_delete_code_failed": {
|
||||
"message": "Failed to delete PIN code from the lock."
|
||||
},
|
||||
"schlage_name_exists": {
|
||||
"message": "A PIN code with the name \"{name}\" already exists on the lock."
|
||||
},
|
||||
"schlage_refresh_failed": {
|
||||
"message": "Failed to refresh Schlage data."
|
||||
}
|
||||
},
|
||||
"services": {
|
||||
"add_code": {
|
||||
"description": "Adds a PIN code to a lock.",
|
||||
"fields": {
|
||||
"code": {
|
||||
"description": "The PIN code to add. Must be unique to the lock and be between 4 and 8 digits long.",
|
||||
"name": "PIN code"
|
||||
},
|
||||
"name": {
|
||||
"description": "Name for PIN code. Must be case insensitively unique to the lock.",
|
||||
"name": "PIN name"
|
||||
}
|
||||
},
|
||||
"name": "Add PIN code"
|
||||
},
|
||||
"delete_code": {
|
||||
"description": "Deletes a PIN code from a lock.",
|
||||
"fields": {
|
||||
"name": {
|
||||
"description": "Name of PIN code to delete.",
|
||||
"name": "[%key:component::schlage::services::add_code::fields::name::name%]"
|
||||
}
|
||||
},
|
||||
"name": "Delete PIN code"
|
||||
},
|
||||
"get_codes": {
|
||||
"description": "Retrieves all PIN codes from a lock.",
|
||||
"name": "Get PIN codes"
|
||||
"message": "Failed to refresh Schlage data"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1080,6 +1080,7 @@ async def load_data(
|
||||
req = await client.get(url)
|
||||
except (httpx.HTTPError, httpx.InvalidURL) as err:
|
||||
raise HomeAssistantError(
|
||||
f"Failed to load URL: {err!s}",
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="failed_to_load_url",
|
||||
translation_placeholders={"error": str(err)},
|
||||
@@ -1106,6 +1107,7 @@ async def load_data(
|
||||
1
|
||||
) # Add a sleep to allow other async operations to proceed
|
||||
raise HomeAssistantError(
|
||||
f"Failed to load URL: {req.status_code}",
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="failed_to_load_url",
|
||||
translation_placeholders={"error": str(req.status_code)},
|
||||
@@ -1115,11 +1117,13 @@ async def load_data(
|
||||
return await hass.async_add_executor_job(_read_file_as_bytesio, filepath)
|
||||
|
||||
raise ServiceValidationError(
|
||||
"File path has not been configured in allowlist_external_dirs.",
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="allowlist_external_dirs_error",
|
||||
)
|
||||
else:
|
||||
raise ServiceValidationError(
|
||||
"URL or File is required.",
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="missing_input",
|
||||
translation_placeholders={"field": "URL or File"},
|
||||
@@ -1134,6 +1138,7 @@ def _validate_credentials_input(
|
||||
and not username
|
||||
):
|
||||
raise ServiceValidationError(
|
||||
"Username is required.",
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="missing_input",
|
||||
translation_placeholders={"field": "Username"},
|
||||
@@ -1149,6 +1154,7 @@ def _validate_credentials_input(
|
||||
and not password
|
||||
):
|
||||
raise ServiceValidationError(
|
||||
"Password is required.",
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="missing_input",
|
||||
translation_placeholders={"field": "Password"},
|
||||
@@ -1164,6 +1170,7 @@ def _read_file_as_bytesio(file_path: str) -> io.BytesIO:
|
||||
return data
|
||||
except OSError as err:
|
||||
raise HomeAssistantError(
|
||||
f"Failed to load file: {err!s}",
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="failed_to_load_file",
|
||||
translation_placeholders={"error": str(err)},
|
||||
|
||||
@@ -48,7 +48,6 @@ from .services import async_setup_services
|
||||
PLATFORMS: Final = [
|
||||
Platform.BINARY_SENSOR,
|
||||
Platform.BUTTON,
|
||||
Platform.CALENDAR,
|
||||
Platform.CLIMATE,
|
||||
Platform.COVER,
|
||||
Platform.DEVICE_TRACKER,
|
||||
|
||||
@@ -1,282 +0,0 @@
|
||||
"""Calendar platform for Teslemetry integration."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import datetime, timedelta
|
||||
from typing import Any
|
||||
|
||||
from homeassistant.components.calendar import CalendarEntity, CalendarEvent
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
from homeassistant.util import dt as dt_util
|
||||
|
||||
from . import TeslemetryConfigEntry
|
||||
from .entity import TeslemetryEnergyInfoEntity
|
||||
|
||||
PARALLEL_UPDATES = 0
|
||||
|
||||
|
||||
async def async_setup_entry(
    hass: HomeAssistant,
    entry: TeslemetryConfigEntry,
    async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
    """Set up the Teslemetry Calendar platform from a config entry."""

    entities: list[CalendarEntity] = []

    # One calendar per energy site and tariff direction (buy / sell), but
    # only when the site actually reports seasons for that tariff.
    for key_base in ("tariff_content_v2", "tariff_content_v2_sell_tariff"):
        entities.extend(
            TeslemetryTariffSchedule(energy, key_base)
            for energy in entry.runtime_data.energysites
            if energy.info_coordinator.data.get(f"{key_base}_seasons")
        )

    async_add_entities(entities)
|
||||
|
||||
|
||||
def _is_day_in_range(day_of_week: int, from_day: int, to_day: int) -> bool:
|
||||
"""Check if a day of week falls within a range, handling week crossing."""
|
||||
if from_day <= to_day:
|
||||
return from_day <= day_of_week <= to_day
|
||||
# Week crossing (e.g., Fri=4 to Mon=0)
|
||||
return day_of_week >= from_day or day_of_week <= to_day
|
||||
|
||||
|
||||
def _parse_period_times(
    period_def: dict[str, Any],
    base_day: datetime,
) -> tuple[datetime, datetime] | None:
    """Parse a TOU period definition into start and end times.

    Returns None if the base_day's weekday doesn't match the period's day range.
    For periods crossing midnight, end_time will be on the following day.
    """
    # DaysOfWeek are from 0-6 (Monday-Sunday)
    if not _is_day_in_range(
        base_day.weekday(),
        period_def.get("fromDayOfWeek", 0),
        period_def.get("toDayOfWeek", 6),
    ):
        return None

    # Hours are from 0-23 (so a full day is encoded 0-0);
    # minutes are from 0-59 (so a full hour is encoded 0-0).
    start_time = base_day.replace(
        hour=period_def.get("fromHour", 0),
        minute=period_def.get("fromMinute", 0),
        second=0,
        microsecond=0,
    )
    end_time = base_day.replace(
        hour=period_def.get("toHour", 0),
        minute=period_def.get("toMinute", 0),
        second=0,
        microsecond=0,
    )

    # An end at or before the start means the period wraps past midnight.
    if end_time <= start_time:
        end_time += timedelta(days=1)

    return start_time, end_time
|
||||
|
||||
|
||||
def _build_event(
    key_base: str,
    season_name: str,
    period_name: str,
    price: float | None,
    start_time: datetime,
    end_time: datetime,
) -> CalendarEvent:
    """Build a CalendarEvent for a tariff period."""
    price_str = f"{price:.2f}/kWh" if price is not None else "Unknown Price"
    period_label = period_name.capitalize().replace("_", " ")
    return CalendarEvent(
        start=start_time,
        end=end_time,
        summary=f"{period_label}: {price_str}",
        description=(
            f"Season: {season_name.capitalize()}\n"
            f"Period: {period_label}\n"
            f"Price: {price_str}"
        ),
        uid=f"{key_base}_{season_name}_{period_name}_{start_time.isoformat()}",
    )
|
||||
|
||||
|
||||
class TeslemetryTariffSchedule(TeslemetryEnergyInfoEntity, CalendarEntity):
    """Energy Site Tariff Schedule Calendar.

    Exposes the site's time-of-use (TOU) tariff periods as calendar events,
    one calendar per tariff direction (buy / sell), keyed by ``key_base``.
    """

    def __init__(
        self,
        data: Any,
        key_base: str,
    ) -> None:
        """Initialize the tariff schedule calendar."""
        self.key_base: str = key_base
        # Season definitions (date ranges + TOU periods), set in
        # _async_update_attrs from coordinator data.
        self.seasons: dict[str, dict[str, Any]] = {}
        # Energy charges (prices per season/period), set in _async_update_attrs.
        self.charges: dict[str, dict[str, Any]] = {}
        super().__init__(data, key_base)

    @property
    def event(self) -> CalendarEvent | None:
        """Return the current active tariff event, if any."""
        now = dt_util.now()
        current_season_name = self._get_current_season(now)

        if not current_season_name or not self.seasons.get(current_season_name):
            return None

        # Time of use (TOU) periods define the tariff schedule within a season
        tou_periods = self.seasons[current_season_name].get("tou_periods", {})

        for period_name, period_group in tou_periods.items():
            for period_def in period_group.get("periods", []):
                result = _parse_period_times(period_def, now)
                if result is None:
                    continue

                start_time, end_time = result

                # Check if now falls within this period
                if not (start_time <= now < end_time):
                    # For cross-midnight periods, check yesterday's instance
                    start_time -= timedelta(days=1)
                    end_time -= timedelta(days=1)
                    if not (start_time <= now < end_time):
                        continue

                price = self._get_price_for_period(current_season_name, period_name)
                return _build_event(
                    self.key_base,
                    current_season_name,
                    period_name,
                    price,
                    start_time,
                    end_time,
                )

        return None

    async def async_get_events(
        self,
        hass: HomeAssistant,
        start_date: datetime,
        end_date: datetime,
    ) -> list[CalendarEvent]:
        """Return calendar events (tariff periods) within a datetime range."""
        events: list[CalendarEvent] = []

        start_date = dt_util.as_local(start_date)
        end_date = dt_util.as_local(end_date)

        # Start one day earlier to catch TOU periods that cross midnight
        # from the previous evening into the query range.
        current_day = dt_util.start_of_local_day(start_date) - timedelta(days=1)
        while current_day < end_date:
            season_name = self._get_current_season(current_day)
            if not season_name or not self.seasons.get(season_name):
                current_day += timedelta(days=1)
                continue

            tou_periods = self.seasons[season_name].get("tou_periods", {})

            for period_name, period_group in tou_periods.items():
                for period_def in period_group.get("periods", []):
                    result = _parse_period_times(period_def, current_day)
                    if result is None:
                        continue

                    start_time, end_time = result

                    # Keep only periods that overlap the requested window.
                    if start_time < end_date and end_time > start_date:
                        price = self._get_price_for_period(season_name, period_name)
                        events.append(
                            _build_event(
                                self.key_base,
                                season_name,
                                period_name,
                                price,
                                start_time,
                                end_time,
                            )
                        )

            current_day += timedelta(days=1)

        events.sort(key=lambda x: x.start)
        return events

    def _get_current_season(self, date_to_check: datetime) -> str | None:
        """Determine the active season for a given date.

        Returns the season name, or None when no season matches or the
        season data is malformed.
        """
        local_date = dt_util.as_local(date_to_check)
        year = local_date.year

        for season_name, season_data in self.seasons.items():
            if not season_data:
                continue

            try:
                from_month = season_data["fromMonth"]
                from_day = season_data["fromDay"]
                to_month = season_data["toMonth"]
                to_day = season_data["toDay"]

                # Handle seasons that cross year boundaries
                start_year = year
                end_year = year

                # Season crosses year boundary (e.g., Oct-Mar)
                if from_month > to_month or (
                    from_month == to_month and from_day > to_day
                ):
                    if local_date.month > from_month or (
                        local_date.month == from_month and local_date.day >= from_day
                    ):
                        end_year = year + 1
                    else:
                        start_year = year - 1

                season_start = local_date.replace(
                    year=start_year,
                    month=from_month,
                    day=from_day,
                    hour=0,
                    minute=0,
                    second=0,
                    microsecond=0,
                )
                # End is exclusive: the day after "toDay" at midnight.
                season_end = local_date.replace(
                    year=end_year,
                    month=to_month,
                    day=to_day,
                    hour=0,
                    minute=0,
                    second=0,
                    microsecond=0,
                ) + timedelta(days=1)

                if season_start <= local_date < season_end:
                    return season_name
            # BUGFIX: was `except KeyError, ValueError:` (Python 2 syntax,
            # a SyntaxError in Python 3) — must be a parenthesized tuple.
            except (KeyError, ValueError):
                continue

        return None

    def _get_price_for_period(self, season_name: str, period_name: str) -> float | None:
        """Get the price for a specific season and period name.

        Falls back to the "ALL" season / "ALL" rate when no specific entry
        exists; returns None when no price can be determined.
        """
        try:
            season_charges = self.charges.get(season_name, self.charges.get("ALL", {}))
            rates = season_charges.get("rates", {})
            price = rates.get(period_name, rates.get("ALL"))
            return float(price) if price is not None else None
        # BUGFIX: was `except KeyError, ValueError, TypeError:` (Python 2
        # syntax, a SyntaxError in Python 3) — must be a parenthesized tuple.
        except (KeyError, ValueError, TypeError):
            return None

    def _async_update_attrs(self) -> None:
        """Update the Calendar attributes from coordinator data."""
        self.seasons = self.coordinator.data.get(f"{self.key_base}_seasons", {})
        self.charges = self.coordinator.data.get(f"{self.key_base}_energy_charges", {})
        # Unavailable when the site stops reporting this tariff's data.
        self._attr_available = bool(self.seasons and self.charges)
|
||||
@@ -104,7 +104,6 @@ class TeslemetryVehicleDataCoordinator(DataUpdateCoordinator[dict[str, Any]]):
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="update_failed",
|
||||
) from e
|
||||
|
||||
return flatten(data)
|
||||
|
||||
|
||||
@@ -201,11 +200,7 @@ class TeslemetryEnergySiteInfoCoordinator(DataUpdateCoordinator[dict[str, Any]])
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="update_failed",
|
||||
) from e
|
||||
|
||||
return flatten(
|
||||
data,
|
||||
skip_keys=["daily_charges", "demand_charges", "energy_charges", "seasons"],
|
||||
)
|
||||
return flatten(data)
|
||||
|
||||
|
||||
class TeslemetryEnergyHistoryCoordinator(DataUpdateCoordinator[dict[str, Any]]):
|
||||
|
||||
@@ -11,20 +11,14 @@ from homeassistant.helpers import device_registry as dr
|
||||
from .const import DOMAIN, LOGGER
|
||||
|
||||
|
||||
def flatten(
|
||||
data: dict[str, Any],
|
||||
parent: str | None = None,
|
||||
*,
|
||||
skip_keys: list[str] | None = None,
|
||||
) -> dict[str, Any]:
|
||||
def flatten(data: dict[str, Any], parent: str | None = None) -> dict[str, Any]:
|
||||
"""Flatten the data structure."""
|
||||
result = {}
|
||||
for key, value in data.items():
|
||||
skip = skip_keys and key in skip_keys
|
||||
if parent:
|
||||
key = f"{parent}_{key}"
|
||||
if isinstance(value, dict) and not skip:
|
||||
result.update(flatten(value, key, skip_keys=skip_keys))
|
||||
if isinstance(value, dict):
|
||||
result.update(flatten(value, key))
|
||||
else:
|
||||
result[key] = value
|
||||
return result
|
||||
|
||||
@@ -272,14 +272,6 @@
|
||||
"name": "Wake"
|
||||
}
|
||||
},
|
||||
"calendar": {
|
||||
"tariff_content_v2": {
|
||||
"name": "Buy tariff"
|
||||
},
|
||||
"tariff_content_v2_sell_tariff": {
|
||||
"name": "Sell tariff"
|
||||
}
|
||||
},
|
||||
"climate": {
|
||||
"climate_state_cabin_overheat_protection": {
|
||||
"name": "Cabin overheat protection"
|
||||
|
||||
@@ -41,7 +41,7 @@
|
||||
"iot_class": "local_push",
|
||||
"loggers": ["uiprotect", "unifi_discovery"],
|
||||
"quality_scale": "platinum",
|
||||
"requirements": ["uiprotect==10.2.2", "unifi-discovery==1.2.0"],
|
||||
"requirements": ["uiprotect==10.2.1", "unifi-discovery==1.2.0"],
|
||||
"ssdp": [
|
||||
{
|
||||
"manufacturer": "Ubiquiti Networks",
|
||||
|
||||
@@ -236,6 +236,12 @@ class StateVacuumEntity(
|
||||
if self.__vacuum_legacy_battery_icon:
|
||||
self._report_deprecated_battery_properties("battery_icon")
|
||||
|
||||
@callback
|
||||
def async_write_ha_state(self) -> None:
|
||||
"""Write the state to the state machine."""
|
||||
super().async_write_ha_state()
|
||||
self._async_check_segments_issues()
|
||||
|
||||
@callback
|
||||
def async_registry_entry_updated(self) -> None:
|
||||
"""Run when the entity registry entry has been updated."""
|
||||
@@ -508,6 +514,43 @@ class StateVacuumEntity(
|
||||
return
|
||||
|
||||
options: Mapping[str, Any] = self.registry_entry.options.get(DOMAIN, {})
|
||||
should_have_not_configured_issue = (
|
||||
VacuumEntityFeature.CLEAN_AREA in self.supported_features
|
||||
and options.get("area_mapping") is None
|
||||
)
|
||||
|
||||
if (
|
||||
should_have_not_configured_issue
|
||||
and not self._segments_not_configured_issue_created
|
||||
):
|
||||
issue_id = (
|
||||
f"{ISSUE_SEGMENTS_MAPPING_NOT_CONFIGURED}_{self.registry_entry.id}"
|
||||
)
|
||||
ir.async_create_issue(
|
||||
self.hass,
|
||||
DOMAIN,
|
||||
issue_id,
|
||||
data={
|
||||
"entry_id": self.registry_entry.id,
|
||||
"entity_id": self.entity_id,
|
||||
},
|
||||
is_fixable=False,
|
||||
severity=ir.IssueSeverity.WARNING,
|
||||
translation_key=ISSUE_SEGMENTS_MAPPING_NOT_CONFIGURED,
|
||||
translation_placeholders={
|
||||
"entity_id": self.entity_id,
|
||||
},
|
||||
)
|
||||
self._segments_not_configured_issue_created = True
|
||||
elif (
|
||||
not should_have_not_configured_issue
|
||||
and self._segments_not_configured_issue_created
|
||||
):
|
||||
issue_id = (
|
||||
f"{ISSUE_SEGMENTS_MAPPING_NOT_CONFIGURED}_{self.registry_entry.id}"
|
||||
)
|
||||
ir.async_delete_issue(self.hass, DOMAIN, issue_id)
|
||||
self._segments_not_configured_issue_created = False
|
||||
|
||||
if self._segments_changed_last_seen is not None and (
|
||||
VacuumEntityFeature.CLEAN_AREA not in self.supported_features
|
||||
|
||||
@@ -93,6 +93,10 @@
|
||||
"segments_changed": {
|
||||
"description": "",
|
||||
"title": "Vacuum segments have changed for {entity_id}"
|
||||
},
|
||||
"segments_mapping_not_configured": {
|
||||
"description": "",
|
||||
"title": "Vacuum segment mapping not configured for {entity_id}"
|
||||
}
|
||||
},
|
||||
"selector": {
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
"""The waze_travel_time component."""
|
||||
|
||||
import asyncio
|
||||
from datetime import timedelta
|
||||
import logging
|
||||
|
||||
from pywaze.route_calculator import WazeRouteCalculator
|
||||
@@ -19,8 +18,6 @@ from homeassistant.helpers.httpx_client import get_async_client
|
||||
from homeassistant.helpers.location import find_coordinates
|
||||
from homeassistant.helpers.selector import (
|
||||
BooleanSelector,
|
||||
DurationSelector,
|
||||
DurationSelectorConfig,
|
||||
SelectSelector,
|
||||
SelectSelectorConfig,
|
||||
SelectSelectorMode,
|
||||
@@ -38,11 +35,9 @@ from .const import (
|
||||
CONF_INCL_FILTER,
|
||||
CONF_ORIGIN,
|
||||
CONF_REALTIME,
|
||||
CONF_TIME_DELTA,
|
||||
CONF_UNITS,
|
||||
CONF_VEHICLE_TYPE,
|
||||
DEFAULT_FILTER,
|
||||
DEFAULT_TIME_DELTA,
|
||||
DEFAULT_VEHICLE_TYPE,
|
||||
DOMAIN,
|
||||
METRIC_UNITS,
|
||||
@@ -100,9 +95,6 @@ SERVICE_GET_TRAVEL_TIMES_SCHEMA = vol.Schema(
|
||||
multiple=True,
|
||||
),
|
||||
),
|
||||
vol.Optional(CONF_TIME_DELTA): DurationSelector(
|
||||
DurationSelectorConfig(allow_negative=True, enable_second=False)
|
||||
),
|
||||
}
|
||||
)
|
||||
|
||||
@@ -138,13 +130,6 @@ async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> b
|
||||
origin = origin_coordinates or service.data[CONF_ORIGIN]
|
||||
destination = destination_coordinates or service.data[CONF_DESTINATION]
|
||||
|
||||
time_delta = int(
|
||||
timedelta(
|
||||
**service.data.get(CONF_TIME_DELTA, DEFAULT_TIME_DELTA)
|
||||
).total_seconds()
|
||||
/ 60
|
||||
)
|
||||
|
||||
response = await async_get_travel_times(
|
||||
client=client,
|
||||
origin=origin,
|
||||
@@ -157,7 +142,6 @@ async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> b
|
||||
units=service.data[CONF_UNITS],
|
||||
incl_filters=service.data.get(CONF_INCL_FILTER, DEFAULT_FILTER),
|
||||
excl_filters=service.data.get(CONF_EXCL_FILTER, DEFAULT_FILTER),
|
||||
time_delta=time_delta,
|
||||
)
|
||||
return {"routes": [vars(route) for route in response]}
|
||||
|
||||
@@ -200,22 +184,4 @@ async def async_migrate_entry(hass: HomeAssistant, config_entry: ConfigEntry) ->
|
||||
config_entry.version,
|
||||
config_entry.minor_version,
|
||||
)
|
||||
|
||||
if config_entry.version == 2 and config_entry.minor_version == 1:
|
||||
_LOGGER.debug(
|
||||
"Migrating from version %s.%s",
|
||||
config_entry.version,
|
||||
config_entry.minor_version,
|
||||
)
|
||||
options = dict(config_entry.options)
|
||||
options[CONF_TIME_DELTA] = DEFAULT_TIME_DELTA
|
||||
hass.config_entries.async_update_entry(
|
||||
config_entry, options=options, minor_version=2
|
||||
)
|
||||
_LOGGER.debug(
|
||||
"Migration to version %s.%s successful",
|
||||
config_entry.version,
|
||||
config_entry.minor_version,
|
||||
)
|
||||
|
||||
return True
|
||||
|
||||
@@ -17,8 +17,6 @@ from homeassistant.const import CONF_NAME, CONF_REGION
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers.selector import (
|
||||
BooleanSelector,
|
||||
DurationSelector,
|
||||
DurationSelectorConfig,
|
||||
SelectSelector,
|
||||
SelectSelectorConfig,
|
||||
SelectSelectorMode,
|
||||
@@ -37,13 +35,11 @@ from .const import (
|
||||
CONF_INCL_FILTER,
|
||||
CONF_ORIGIN,
|
||||
CONF_REALTIME,
|
||||
CONF_TIME_DELTA,
|
||||
CONF_UNITS,
|
||||
CONF_VEHICLE_TYPE,
|
||||
DEFAULT_FILTER,
|
||||
DEFAULT_NAME,
|
||||
DEFAULT_OPTIONS,
|
||||
DEFAULT_TIME_DELTA,
|
||||
DOMAIN,
|
||||
IMPERIAL_UNITS,
|
||||
REGIONS,
|
||||
@@ -86,12 +82,6 @@ OPTIONS_SCHEMA = vol.Schema(
|
||||
vol.Optional(CONF_AVOID_TOLL_ROADS): BooleanSelector(),
|
||||
vol.Optional(CONF_AVOID_SUBSCRIPTION_ROADS): BooleanSelector(),
|
||||
vol.Optional(CONF_AVOID_FERRIES): BooleanSelector(),
|
||||
vol.Optional(CONF_TIME_DELTA): DurationSelector(
|
||||
DurationSelectorConfig(
|
||||
allow_negative=True,
|
||||
enable_second=False,
|
||||
)
|
||||
),
|
||||
}
|
||||
)
|
||||
|
||||
@@ -112,9 +102,7 @@ CONFIG_SCHEMA = vol.Schema(
|
||||
)
|
||||
|
||||
|
||||
def default_options(
|
||||
hass: HomeAssistant,
|
||||
) -> dict[str, str | bool | list[str] | dict[str, int]]:
|
||||
def default_options(hass: HomeAssistant) -> dict[str, str | bool | list[str]]:
|
||||
"""Get the default options."""
|
||||
defaults = DEFAULT_OPTIONS.copy()
|
||||
if hass.config.units is US_CUSTOMARY_SYSTEM:
|
||||
@@ -132,8 +120,6 @@ class WazeOptionsFlow(OptionsFlow):
|
||||
user_input[CONF_INCL_FILTER] = DEFAULT_FILTER
|
||||
if user_input.get(CONF_EXCL_FILTER) is None:
|
||||
user_input[CONF_EXCL_FILTER] = DEFAULT_FILTER
|
||||
if user_input.get(CONF_TIME_DELTA) is None:
|
||||
user_input[CONF_TIME_DELTA] = DEFAULT_TIME_DELTA
|
||||
return self.async_create_entry(
|
||||
title="",
|
||||
data=user_input,
|
||||
@@ -151,7 +137,6 @@ class WazeConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
"""Handle a config flow for Waze Travel Time."""
|
||||
|
||||
VERSION = 2
|
||||
MINOR_VERSION = 2
|
||||
|
||||
@staticmethod
|
||||
@callback
|
||||
|
||||
@@ -15,10 +15,8 @@ CONF_VEHICLE_TYPE = "vehicle_type"
|
||||
CONF_AVOID_TOLL_ROADS = "avoid_toll_roads"
|
||||
CONF_AVOID_SUBSCRIPTION_ROADS = "avoid_subscription_roads"
|
||||
CONF_AVOID_FERRIES = "avoid_ferries"
|
||||
CONF_TIME_DELTA = "time_delta"
|
||||
|
||||
DEFAULT_NAME = "Waze Travel Time"
|
||||
DEFAULT_TIME_DELTA = {"minutes": 0}
|
||||
DEFAULT_REALTIME = True
|
||||
DEFAULT_VEHICLE_TYPE = "car"
|
||||
DEFAULT_AVOID_TOLL_ROADS = False
|
||||
@@ -33,7 +31,7 @@ UNITS = [METRIC_UNITS, IMPERIAL_UNITS]
|
||||
REGIONS = ["us", "na", "eu", "il", "au"]
|
||||
VEHICLE_TYPES = ["car", "taxi", "motorcycle"]
|
||||
|
||||
DEFAULT_OPTIONS: dict[str, str | bool | list[str] | dict[str, int]] = {
|
||||
DEFAULT_OPTIONS: dict[str, str | bool | list[str]] = {
|
||||
CONF_REALTIME: DEFAULT_REALTIME,
|
||||
CONF_VEHICLE_TYPE: DEFAULT_VEHICLE_TYPE,
|
||||
CONF_UNITS: METRIC_UNITS,
|
||||
@@ -42,5 +40,4 @@ DEFAULT_OPTIONS: dict[str, str | bool | list[str] | dict[str, int]] = {
|
||||
CONF_AVOID_TOLL_ROADS: DEFAULT_AVOID_TOLL_ROADS,
|
||||
CONF_INCL_FILTER: DEFAULT_FILTER,
|
||||
CONF_EXCL_FILTER: DEFAULT_FILTER,
|
||||
CONF_TIME_DELTA: DEFAULT_TIME_DELTA,
|
||||
}
|
||||
|
||||
@@ -25,7 +25,6 @@ from .const import (
|
||||
CONF_INCL_FILTER,
|
||||
CONF_ORIGIN,
|
||||
CONF_REALTIME,
|
||||
CONF_TIME_DELTA,
|
||||
CONF_UNITS,
|
||||
CONF_VEHICLE_TYPE,
|
||||
DOMAIN,
|
||||
@@ -52,7 +51,6 @@ async def async_get_travel_times(
|
||||
units: Literal["metric", "imperial"] = "metric",
|
||||
incl_filters: Collection[str] | None = None,
|
||||
excl_filters: Collection[str] | None = None,
|
||||
time_delta: int = 0,
|
||||
) -> list[CalcRoutesResponse]:
|
||||
"""Get all available routes."""
|
||||
|
||||
@@ -76,7 +74,6 @@ async def async_get_travel_times(
|
||||
avoid_ferries=avoid_ferries,
|
||||
real_time=realtime,
|
||||
alternatives=3,
|
||||
time_delta=time_delta,
|
||||
)
|
||||
|
||||
if len(routes) < 1:
|
||||
@@ -207,11 +204,6 @@ class WazeTravelTimeCoordinator(DataUpdateCoordinator[WazeTravelTimeData]):
|
||||
CONF_AVOID_SUBSCRIPTION_ROADS
|
||||
]
|
||||
avoid_ferries = self.config_entry.options[CONF_AVOID_FERRIES]
|
||||
time_delta = int(
|
||||
timedelta(**self.config_entry.options[CONF_TIME_DELTA]).total_seconds()
|
||||
/ 60
|
||||
)
|
||||
|
||||
routes = await async_get_travel_times(
|
||||
self.client,
|
||||
origin_coordinates,
|
||||
@@ -224,7 +216,6 @@ class WazeTravelTimeCoordinator(DataUpdateCoordinator[WazeTravelTimeData]):
|
||||
self.config_entry.options[CONF_UNITS],
|
||||
incl_filter,
|
||||
excl_filter,
|
||||
time_delta,
|
||||
)
|
||||
if len(routes) < 1:
|
||||
travel_data = WazeTravelTimeData(
|
||||
|
||||
@@ -65,7 +65,3 @@ get_travel_times:
|
||||
selector:
|
||||
text:
|
||||
multiple: true
|
||||
time_delta:
|
||||
required: false
|
||||
selector:
|
||||
duration:
|
||||
|
||||
@@ -29,7 +29,6 @@
|
||||
"excl_filter": "Exact street name which must NOT be part of the selected route",
|
||||
"incl_filter": "Exact street name which must be part of the selected route",
|
||||
"realtime": "Realtime travel time?",
|
||||
"time_delta": "Time delta",
|
||||
"units": "Units",
|
||||
"vehicle_type": "Vehicle type"
|
||||
},
|
||||
@@ -101,10 +100,6 @@
|
||||
"description": "The region. Controls which Waze server is used.",
|
||||
"name": "[%key:component::waze_travel_time::config::step::user::data::region%]"
|
||||
},
|
||||
"time_delta": {
|
||||
"description": "Time offset from now to calculate the route for. Positive values are in the future, negative values are in the past.",
|
||||
"name": "Time delta"
|
||||
},
|
||||
"units": {
|
||||
"description": "Which unit system to use.",
|
||||
"name": "[%key:component::waze_travel_time::options::step::init::data::units%]"
|
||||
|
||||
@@ -1,13 +1,13 @@
|
||||
{
|
||||
"config": {
|
||||
"abort": {
|
||||
"addon_get_discovery_info_failed": "Failed to get Z-Wave JS app discovery info.",
|
||||
"addon_info_failed": "Failed to get Z-Wave JS app info.",
|
||||
"addon_install_failed": "Failed to install the Z-Wave JS app.",
|
||||
"addon_required": "The Z-Wave migration flow requires the integration to be configured using the Z-Wave JS app. If you are using Z-Wave JS UI, please follow our [migration instructions]({zwave_js_ui_migration}).",
|
||||
"addon_set_config_failed": "Failed to set Z-Wave JS app configuration.",
|
||||
"addon_start_failed": "Failed to start the Z-Wave JS app.",
|
||||
"addon_stop_failed": "Failed to stop the Z-Wave JS app.",
|
||||
"addon_get_discovery_info_failed": "Failed to get Z-Wave app discovery info.",
|
||||
"addon_info_failed": "Failed to get Z-Wave app info.",
|
||||
"addon_install_failed": "Failed to install the Z-Wave app.",
|
||||
"addon_required": "The Z-Wave migration flow requires the integration to be configured using the Z-Wave Supervisor app. If you are using Z-Wave JS UI, please follow our [migration instructions]({zwave_js_ui_migration}).",
|
||||
"addon_set_config_failed": "Failed to set Z-Wave configuration.",
|
||||
"addon_start_failed": "Failed to start the Z-Wave app.",
|
||||
"addon_stop_failed": "Failed to stop the Z-Wave app.",
|
||||
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
|
||||
"already_in_progress": "[%key:common::config_flow::abort::already_in_progress%]",
|
||||
"backup_failed": "Failed to back up network.",
|
||||
@@ -17,15 +17,15 @@
|
||||
"discovery_requires_supervisor": "Discovery requires the Home Assistant Supervisor.",
|
||||
"migration_low_sdk_version": "The SDK version of the old adapter is lower than {ok_sdk_version}. This means it's not possible to migrate the non-volatile memory (NVM) of the old adapter to another adapter.\n\nCheck the documentation on the manufacturer support pages of the old adapter, if it's possible to upgrade the firmware of the old adapter to a version that is built with SDK version {ok_sdk_version} or higher.",
|
||||
"migration_successful": "Migration successful.",
|
||||
"not_hassio": "ESPHome discovery requires Home Assistant to configure the Z-Wave JS app.",
|
||||
"not_hassio": "ESPHome discovery requires Home Assistant to configure the Z-Wave app.",
|
||||
"not_zwave_device": "Discovered device is not a Z-Wave device.",
|
||||
"not_zwave_js_addon": "Discovered app is not the official Z-Wave JS app.",
|
||||
"not_zwave_js_addon": "Discovered app is not the official Z-Wave app.",
|
||||
"reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]",
|
||||
"reset_failed": "Failed to reset adapter.",
|
||||
"usb_ports_failed": "Failed to get USB devices."
|
||||
},
|
||||
"error": {
|
||||
"addon_start_failed": "Failed to start the Z-Wave JS app. Check the configuration.",
|
||||
"addon_start_failed": "Failed to start the Z-Wave app. Check the configuration.",
|
||||
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
|
||||
"invalid_ws_url": "Invalid websocket URL",
|
||||
"unknown": "[%key:common::config_flow::error::unknown%]"
|
||||
@@ -65,7 +65,7 @@
|
||||
"usb_path": "[%key:common::config_flow::data::usb_path%]"
|
||||
},
|
||||
"description": "Select your Z-Wave adapter",
|
||||
"title": "Enter the Z-Wave JS app configuration"
|
||||
"title": "Enter the Z-Wave app configuration"
|
||||
},
|
||||
"configure_security_keys": {
|
||||
"data": {
|
||||
@@ -84,7 +84,7 @@
|
||||
"title": "Migrate to a new adapter"
|
||||
},
|
||||
"hassio_confirm": {
|
||||
"description": "Do you want to set up the Z-Wave integration with the Z-Wave JS app?"
|
||||
"description": "Do you want to set up the Z-Wave integration with the Z-Wave app?"
|
||||
},
|
||||
"install_addon": {
|
||||
"title": "Installing app"
|
||||
@@ -127,9 +127,9 @@
|
||||
},
|
||||
"on_supervisor": {
|
||||
"data": {
|
||||
"use_addon": "Use the Z-Wave JS app"
|
||||
"use_addon": "Use the Z-Wave Supervisor app"
|
||||
},
|
||||
"description": "Do you want to use the Z-Wave JS app?",
|
||||
"description": "Do you want to use the Z-Wave Supervisor app?",
|
||||
"title": "Select connection method"
|
||||
},
|
||||
"on_supervisor_reconfigure": {
|
||||
|
||||
1
homeassistant/generated/config_flows.py
generated
1
homeassistant/generated/config_flows.py
generated
@@ -515,7 +515,6 @@ FLOWS = {
|
||||
"openweathermap",
|
||||
"opower",
|
||||
"oralb",
|
||||
"orvibo",
|
||||
"osoenergy",
|
||||
"otbr",
|
||||
"otp",
|
||||
|
||||
1
homeassistant/generated/entity_platforms.py
generated
1
homeassistant/generated/entity_platforms.py
generated
@@ -29,7 +29,6 @@ class EntityPlatforms(StrEnum):
|
||||
HUMIDIFIER = "humidifier"
|
||||
IMAGE = "image"
|
||||
IMAGE_PROCESSING = "image_processing"
|
||||
INFRARED = "infrared"
|
||||
LAWN_MOWER = "lawn_mower"
|
||||
LIGHT = "light"
|
||||
LOCK = "lock"
|
||||
|
||||
@@ -5002,7 +5002,7 @@
|
||||
"orvibo": {
|
||||
"name": "Orvibo",
|
||||
"integration_type": "hub",
|
||||
"config_flow": true,
|
||||
"config_flow": false,
|
||||
"iot_class": "local_push"
|
||||
},
|
||||
"osoenergy": {
|
||||
@@ -7344,12 +7344,6 @@
|
||||
}
|
||||
}
|
||||
},
|
||||
"ubisys": {
|
||||
"name": "Ubisys",
|
||||
"iot_standards": [
|
||||
"zigbee"
|
||||
]
|
||||
},
|
||||
"ubiwizz": {
|
||||
"name": "Ubiwizz",
|
||||
"integration_type": "virtual",
|
||||
|
||||
@@ -40,7 +40,7 @@ habluetooth==5.8.0
|
||||
hass-nabucasa==1.15.0
|
||||
hassil==3.5.0
|
||||
home-assistant-bluetooth==1.13.1
|
||||
home-assistant-frontend==20260226.0
|
||||
home-assistant-frontend==20260225.0
|
||||
home-assistant-intents==2026.2.13
|
||||
httpx==0.28.1
|
||||
ifaddr==0.2.0
|
||||
|
||||
@@ -32,11 +32,8 @@ def write_utf8_file_atomic(
|
||||
Using this function frequently will significantly
|
||||
negatively impact performance.
|
||||
"""
|
||||
encoding = "utf-8" if "b" not in mode else None
|
||||
try:
|
||||
with AtomicWriter( # type: ignore[call-arg] # atomicwrites-stubs is outdated, encoding is a valid kwarg
|
||||
filename, mode=mode, overwrite=True, encoding=encoding
|
||||
).open() as fdesc:
|
||||
with AtomicWriter(filename, mode=mode, overwrite=True).open() as fdesc:
|
||||
if not private:
|
||||
os.fchmod(fdesc.fileno(), 0o644)
|
||||
fdesc.write(utf8_data)
|
||||
|
||||
10
mypy.ini
generated
10
mypy.ini
generated
@@ -2646,16 +2646,6 @@ disallow_untyped_defs = true
|
||||
warn_return_any = true
|
||||
warn_unreachable = true
|
||||
|
||||
[mypy-homeassistant.components.infrared.*]
|
||||
check_untyped_defs = true
|
||||
disallow_incomplete_defs = true
|
||||
disallow_subclassing_any = true
|
||||
disallow_untyped_calls = true
|
||||
disallow_untyped_decorators = true
|
||||
disallow_untyped_defs = true
|
||||
warn_return_any = true
|
||||
warn_unreachable = true
|
||||
|
||||
[mypy-homeassistant.components.input_button.*]
|
||||
check_untyped_defs = true
|
||||
disallow_incomplete_defs = true
|
||||
|
||||
1
requirements.txt
generated
1
requirements.txt
generated
@@ -31,7 +31,6 @@ home-assistant-bluetooth==1.13.1
|
||||
home-assistant-intents==2026.2.13
|
||||
httpx==0.28.1
|
||||
ifaddr==0.2.0
|
||||
infrared-protocols==1.0.0
|
||||
Jinja2==3.1.6
|
||||
lru-dict==1.3.0
|
||||
mutagen==1.47.0
|
||||
|
||||
7
requirements_all.txt
generated
7
requirements_all.txt
generated
@@ -1226,7 +1226,7 @@ hole==0.9.0
|
||||
holidays==0.84
|
||||
|
||||
# homeassistant.components.frontend
|
||||
home-assistant-frontend==20260226.0
|
||||
home-assistant-frontend==20260225.0
|
||||
|
||||
# homeassistant.components.conversation
|
||||
home-assistant-intents==2026.2.13
|
||||
@@ -1315,9 +1315,6 @@ influxdb-client==1.50.0
|
||||
# homeassistant.components.influxdb
|
||||
influxdb==5.3.1
|
||||
|
||||
# homeassistant.components.infrared
|
||||
infrared-protocols==1.0.0
|
||||
|
||||
# homeassistant.components.inkbird
|
||||
inkbird-ble==1.1.1
|
||||
|
||||
@@ -3148,7 +3145,7 @@ uasiren==0.0.1
|
||||
uhooapi==1.2.6
|
||||
|
||||
# homeassistant.components.unifiprotect
|
||||
uiprotect==10.2.2
|
||||
uiprotect==10.2.1
|
||||
|
||||
# homeassistant.components.landisgyr_heat_meter
|
||||
ultraheat-api==0.5.7
|
||||
|
||||
10
requirements_test_all.txt
generated
10
requirements_test_all.txt
generated
@@ -1087,7 +1087,7 @@ hole==0.9.0
|
||||
holidays==0.84
|
||||
|
||||
# homeassistant.components.frontend
|
||||
home-assistant-frontend==20260226.0
|
||||
home-assistant-frontend==20260225.0
|
||||
|
||||
# homeassistant.components.conversation
|
||||
home-assistant-intents==2026.2.13
|
||||
@@ -1164,9 +1164,6 @@ influxdb-client==1.50.0
|
||||
# homeassistant.components.influxdb
|
||||
influxdb==5.3.1
|
||||
|
||||
# homeassistant.components.infrared
|
||||
infrared-protocols==1.0.0
|
||||
|
||||
# homeassistant.components.inkbird
|
||||
inkbird-ble==1.1.1
|
||||
|
||||
@@ -1499,9 +1496,6 @@ opower==0.17.0
|
||||
# homeassistant.components.oralb
|
||||
oralb-ble==1.0.2
|
||||
|
||||
# homeassistant.components.orvibo
|
||||
orvibo==1.1.2
|
||||
|
||||
# homeassistant.components.ourgroceries
|
||||
ourgroceries==1.5.4
|
||||
|
||||
@@ -2651,7 +2645,7 @@ uasiren==0.0.1
|
||||
uhooapi==1.2.6
|
||||
|
||||
# homeassistant.components.unifiprotect
|
||||
uiprotect==10.2.2
|
||||
uiprotect==10.2.1
|
||||
|
||||
# homeassistant.components.landisgyr_heat_meter
|
||||
ultraheat-api==0.5.7
|
||||
|
||||
@@ -9,6 +9,7 @@ from jaraco.abode.exceptions import (
|
||||
)
|
||||
|
||||
from homeassistant.components.abode.const import DOMAIN
|
||||
from homeassistant.components.abode.services import SERVICE_SETTINGS
|
||||
from homeassistant.components.alarm_control_panel import DOMAIN as ALARM_DOMAIN
|
||||
from homeassistant.config_entries import ConfigEntryState
|
||||
from homeassistant.const import CONF_USERNAME
|
||||
@@ -24,7 +25,7 @@ async def test_change_settings(hass: HomeAssistant) -> None:
|
||||
with patch("jaraco.abode.client.Client.set_setting") as mock_set_setting:
|
||||
await hass.services.async_call(
|
||||
DOMAIN,
|
||||
"change_setting",
|
||||
SERVICE_SETTINGS,
|
||||
{"setting": "confirm_snd", "value": "loud"},
|
||||
blocking=True,
|
||||
)
|
||||
|
||||
@@ -3,6 +3,7 @@
|
||||
from unittest.mock import patch
|
||||
|
||||
from homeassistant.components.abode.const import DOMAIN
|
||||
from homeassistant.components.abode.services import SERVICE_TRIGGER_AUTOMATION
|
||||
from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN
|
||||
from homeassistant.const import (
|
||||
ATTR_ENTITY_ID,
|
||||
@@ -117,7 +118,7 @@ async def test_trigger_automation(hass: HomeAssistant) -> None:
|
||||
with patch("jaraco.abode.automation.Automation.trigger") as mock:
|
||||
await hass.services.async_call(
|
||||
DOMAIN,
|
||||
"trigger_automation",
|
||||
SERVICE_TRIGGER_AUTOMATION,
|
||||
{ATTR_ENTITY_ID: AUTOMATION_ID},
|
||||
blocking=True,
|
||||
)
|
||||
|
||||
@@ -7,6 +7,9 @@ from homeassistant.components.advantage_air.const import DOMAIN
|
||||
from homeassistant.components.advantage_air.sensor import (
|
||||
ADVANTAGE_AIR_SET_COUNTDOWN_VALUE,
|
||||
)
|
||||
from homeassistant.components.advantage_air.services import (
|
||||
ADVANTAGE_AIR_SERVICE_SET_TIME_TO,
|
||||
)
|
||||
from homeassistant.const import ATTR_ENTITY_ID
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers import entity_registry as er
|
||||
@@ -41,7 +44,7 @@ async def test_sensor_platform(
|
||||
|
||||
await hass.services.async_call(
|
||||
DOMAIN,
|
||||
"set_time_to",
|
||||
ADVANTAGE_AIR_SERVICE_SET_TIME_TO,
|
||||
{ATTR_ENTITY_ID: [entity_id], ADVANTAGE_AIR_SET_COUNTDOWN_VALUE: value},
|
||||
blocking=True,
|
||||
)
|
||||
@@ -61,7 +64,7 @@ async def test_sensor_platform(
|
||||
value = 0
|
||||
await hass.services.async_call(
|
||||
DOMAIN,
|
||||
"set_time_to",
|
||||
ADVANTAGE_AIR_SERVICE_SET_TIME_TO,
|
||||
{ATTR_ENTITY_ID: [entity_id], ADVANTAGE_AIR_SET_COUNTDOWN_VALUE: value},
|
||||
blocking=True,
|
||||
)
|
||||
|
||||
@@ -10,6 +10,9 @@ from homeassistant.components.alexa_devices.services import (
|
||||
ATTR_INFO_SKILL,
|
||||
ATTR_SOUND,
|
||||
ATTR_TEXT_COMMAND,
|
||||
SERVICE_INFO_SKILL,
|
||||
SERVICE_SOUND_NOTIFICATION,
|
||||
SERVICE_TEXT_COMMAND,
|
||||
)
|
||||
from homeassistant.config_entries import ConfigEntryState
|
||||
from homeassistant.const import ATTR_DEVICE_ID
|
||||
@@ -32,9 +35,9 @@ async def test_setup_services(
|
||||
await setup_integration(hass, mock_config_entry)
|
||||
|
||||
assert (services := hass.services.async_services_for_domain(DOMAIN))
|
||||
assert "send_text_command" in services
|
||||
assert "send_sound" in services
|
||||
assert "send_info_skill" in services
|
||||
assert SERVICE_TEXT_COMMAND in services
|
||||
assert SERVICE_SOUND_NOTIFICATION in services
|
||||
assert SERVICE_INFO_SKILL in services
|
||||
|
||||
|
||||
async def test_info_skill_service(
|
||||
@@ -55,7 +58,7 @@ async def test_info_skill_service(
|
||||
|
||||
await hass.services.async_call(
|
||||
DOMAIN,
|
||||
"send_info_skill",
|
||||
SERVICE_INFO_SKILL,
|
||||
{
|
||||
ATTR_INFO_SKILL: "tell_joke",
|
||||
ATTR_DEVICE_ID: device_entry.id,
|
||||
@@ -85,7 +88,7 @@ async def test_send_sound_service(
|
||||
|
||||
await hass.services.async_call(
|
||||
DOMAIN,
|
||||
"send_sound",
|
||||
SERVICE_SOUND_NOTIFICATION,
|
||||
{
|
||||
ATTR_SOUND: "bell_02",
|
||||
ATTR_DEVICE_ID: device_entry.id,
|
||||
@@ -115,7 +118,7 @@ async def test_send_text_service(
|
||||
|
||||
await hass.services.async_call(
|
||||
DOMAIN,
|
||||
"send_text_command",
|
||||
SERVICE_TEXT_COMMAND,
|
||||
{
|
||||
ATTR_TEXT_COMMAND: "Play B.B.C. radio on TuneIn",
|
||||
ATTR_DEVICE_ID: device_entry.id,
|
||||
@@ -170,7 +173,7 @@ async def test_invalid_parameters(
|
||||
with pytest.raises(ServiceValidationError) as exc_info:
|
||||
await hass.services.async_call(
|
||||
DOMAIN,
|
||||
"send_sound",
|
||||
SERVICE_SOUND_NOTIFICATION,
|
||||
{
|
||||
ATTR_SOUND: sound,
|
||||
ATTR_DEVICE_ID: device_id,
|
||||
@@ -226,7 +229,7 @@ async def test_invalid_info_skillparameters(
|
||||
with pytest.raises(ServiceValidationError) as exc_info:
|
||||
await hass.services.async_call(
|
||||
DOMAIN,
|
||||
"send_info_skill",
|
||||
SERVICE_INFO_SKILL,
|
||||
{
|
||||
ATTR_INFO_SKILL: info_skill,
|
||||
ATTR_DEVICE_ID: device_id,
|
||||
@@ -263,7 +266,7 @@ async def test_config_entry_not_loaded(
|
||||
with pytest.raises(ServiceValidationError) as exc_info:
|
||||
await hass.services.async_call(
|
||||
DOMAIN,
|
||||
"send_sound",
|
||||
SERVICE_SOUND_NOTIFICATION,
|
||||
{
|
||||
ATTR_SOUND: "bell_02",
|
||||
ATTR_DEVICE_ID: device_entry.id,
|
||||
@@ -297,7 +300,7 @@ async def test_invalid_config_entry(
|
||||
# Call Service
|
||||
await hass.services.async_call(
|
||||
DOMAIN,
|
||||
"send_sound",
|
||||
SERVICE_SOUND_NOTIFICATION,
|
||||
{
|
||||
ATTR_SOUND: "bell_02",
|
||||
ATTR_DEVICE_ID: device_entry.id,
|
||||
@@ -329,7 +332,7 @@ async def test_missing_config_entry(
|
||||
with pytest.raises(ServiceValidationError) as exc_info:
|
||||
await hass.services.async_call(
|
||||
DOMAIN,
|
||||
"send_sound",
|
||||
SERVICE_SOUND_NOTIFICATION,
|
||||
{
|
||||
ATTR_SOUND: "bell_02",
|
||||
ATTR_DEVICE_ID: device_entry.id,
|
||||
|
||||
@@ -5,7 +5,7 @@ import re
|
||||
import pytest
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components.amberelectric.const import DOMAIN
|
||||
from homeassistant.components.amberelectric.const import DOMAIN, SERVICE_GET_FORECASTS
|
||||
from homeassistant.components.amberelectric.services import ATTR_CHANNEL_TYPE
|
||||
from homeassistant.const import ATTR_CONFIG_ENTRY_ID
|
||||
from homeassistant.core import HomeAssistant
|
||||
@@ -30,7 +30,7 @@ async def test_get_general_forecasts(
|
||||
await setup_integration(hass, general_channel_config_entry)
|
||||
result = await hass.services.async_call(
|
||||
DOMAIN,
|
||||
"get_forecasts",
|
||||
SERVICE_GET_FORECASTS,
|
||||
{ATTR_CONFIG_ENTRY_ID: GENERAL_ONLY_SITE_ID, ATTR_CHANNEL_TYPE: "general"},
|
||||
blocking=True,
|
||||
return_response=True,
|
||||
@@ -59,7 +59,7 @@ async def test_get_controlled_load_forecasts(
|
||||
await setup_integration(hass, general_channel_and_controlled_load_config_entry)
|
||||
result = await hass.services.async_call(
|
||||
DOMAIN,
|
||||
"get_forecasts",
|
||||
SERVICE_GET_FORECASTS,
|
||||
{
|
||||
ATTR_CONFIG_ENTRY_ID: GENERAL_AND_CONTROLLED_SITE_ID,
|
||||
ATTR_CHANNEL_TYPE: "controlled_load",
|
||||
@@ -91,7 +91,7 @@ async def test_get_feed_in_forecasts(
|
||||
await setup_integration(hass, general_channel_and_feed_in_config_entry)
|
||||
result = await hass.services.async_call(
|
||||
DOMAIN,
|
||||
"get_forecasts",
|
||||
SERVICE_GET_FORECASTS,
|
||||
{
|
||||
ATTR_CONFIG_ENTRY_ID: GENERAL_AND_FEED_IN_SITE_ID,
|
||||
ATTR_CHANNEL_TYPE: "feed_in",
|
||||
@@ -130,7 +130,7 @@ async def test_incorrect_channel_type(
|
||||
):
|
||||
await hass.services.async_call(
|
||||
DOMAIN,
|
||||
"get_forecasts",
|
||||
SERVICE_GET_FORECASTS,
|
||||
{
|
||||
ATTR_CONFIG_ENTRY_ID: GENERAL_ONLY_SITE_ID,
|
||||
ATTR_CHANNEL_TYPE: "incorrect",
|
||||
@@ -153,7 +153,7 @@ async def test_unavailable_channel_type(
|
||||
):
|
||||
await hass.services.async_call(
|
||||
DOMAIN,
|
||||
"get_forecasts",
|
||||
SERVICE_GET_FORECASTS,
|
||||
{
|
||||
ATTR_CONFIG_ENTRY_ID: GENERAL_ONLY_SITE_ID,
|
||||
ATTR_CHANNEL_TYPE: "controlled_load",
|
||||
@@ -178,7 +178,7 @@ async def test_service_entry_availability(
|
||||
with pytest.raises(ServiceValidationError) as err:
|
||||
await hass.services.async_call(
|
||||
DOMAIN,
|
||||
"get_forecasts",
|
||||
SERVICE_GET_FORECASTS,
|
||||
{
|
||||
ATTR_CONFIG_ENTRY_ID: mock_config_entry2.entry_id,
|
||||
ATTR_CHANNEL_TYPE: "general",
|
||||
@@ -192,7 +192,7 @@ async def test_service_entry_availability(
|
||||
with pytest.raises(ServiceValidationError) as err:
|
||||
await hass.services.async_call(
|
||||
DOMAIN,
|
||||
"get_forecasts",
|
||||
SERVICE_GET_FORECASTS,
|
||||
{ATTR_CONFIG_ENTRY_ID: "bad-config_id", ATTR_CHANNEL_TYPE: "general"},
|
||||
blocking=True,
|
||||
return_response=True,
|
||||
|
||||
@@ -24,6 +24,10 @@ from homeassistant.components.androidtv.const import (
|
||||
from homeassistant.components.androidtv.services import (
|
||||
ATTR_DEVICE_PATH,
|
||||
ATTR_LOCAL_PATH,
|
||||
SERVICE_ADB_COMMAND,
|
||||
SERVICE_DOWNLOAD,
|
||||
SERVICE_LEARN_SENDEVENT,
|
||||
SERVICE_UPLOAD,
|
||||
)
|
||||
from homeassistant.components.media_player import (
|
||||
ATTR_INPUT_SOURCE,
|
||||
@@ -499,7 +503,7 @@ async def test_adb_command(hass: HomeAssistant) -> None:
|
||||
) as patch_shell:
|
||||
await hass.services.async_call(
|
||||
DOMAIN,
|
||||
"adb_command",
|
||||
SERVICE_ADB_COMMAND,
|
||||
{ATTR_ENTITY_ID: entity_id, ATTR_COMMAND: command},
|
||||
blocking=True,
|
||||
)
|
||||
@@ -530,7 +534,7 @@ async def test_adb_command_unicode_decode_error(hass: HomeAssistant) -> None:
|
||||
):
|
||||
await hass.services.async_call(
|
||||
DOMAIN,
|
||||
"adb_command",
|
||||
SERVICE_ADB_COMMAND,
|
||||
{ATTR_ENTITY_ID: entity_id, ATTR_COMMAND: command},
|
||||
blocking=True,
|
||||
)
|
||||
@@ -559,7 +563,7 @@ async def test_adb_command_key(hass: HomeAssistant) -> None:
|
||||
) as patch_shell:
|
||||
await hass.services.async_call(
|
||||
DOMAIN,
|
||||
"adb_command",
|
||||
SERVICE_ADB_COMMAND,
|
||||
{ATTR_ENTITY_ID: entity_id, ATTR_COMMAND: command},
|
||||
blocking=True,
|
||||
)
|
||||
@@ -590,7 +594,7 @@ async def test_adb_command_get_properties(hass: HomeAssistant) -> None:
|
||||
) as patch_get_props:
|
||||
await hass.services.async_call(
|
||||
DOMAIN,
|
||||
"adb_command",
|
||||
SERVICE_ADB_COMMAND,
|
||||
{ATTR_ENTITY_ID: entity_id, ATTR_COMMAND: command},
|
||||
blocking=True,
|
||||
)
|
||||
@@ -620,7 +624,7 @@ async def test_learn_sendevent(hass: HomeAssistant) -> None:
|
||||
) as patch_learn_sendevent:
|
||||
await hass.services.async_call(
|
||||
DOMAIN,
|
||||
"learn_sendevent",
|
||||
SERVICE_LEARN_SENDEVENT,
|
||||
{ATTR_ENTITY_ID: entity_id},
|
||||
blocking=True,
|
||||
)
|
||||
@@ -689,7 +693,7 @@ async def test_download(hass: HomeAssistant) -> None:
|
||||
with patch("androidtv.basetv.basetv_async.BaseTVAsync.adb_pull") as patch_pull:
|
||||
await hass.services.async_call(
|
||||
DOMAIN,
|
||||
"download",
|
||||
SERVICE_DOWNLOAD,
|
||||
{
|
||||
ATTR_ENTITY_ID: entity_id,
|
||||
ATTR_DEVICE_PATH: device_path,
|
||||
@@ -706,7 +710,7 @@ async def test_download(hass: HomeAssistant) -> None:
|
||||
):
|
||||
await hass.services.async_call(
|
||||
DOMAIN,
|
||||
"download",
|
||||
SERVICE_DOWNLOAD,
|
||||
{
|
||||
ATTR_ENTITY_ID: entity_id,
|
||||
ATTR_DEVICE_PATH: device_path,
|
||||
@@ -735,7 +739,7 @@ async def test_upload(hass: HomeAssistant) -> None:
|
||||
with patch("androidtv.basetv.basetv_async.BaseTVAsync.adb_push") as patch_push:
|
||||
await hass.services.async_call(
|
||||
DOMAIN,
|
||||
"upload",
|
||||
SERVICE_UPLOAD,
|
||||
{
|
||||
ATTR_ENTITY_ID: entity_id,
|
||||
ATTR_DEVICE_PATH: device_path,
|
||||
@@ -752,7 +756,7 @@ async def test_upload(hass: HomeAssistant) -> None:
|
||||
):
|
||||
await hass.services.async_call(
|
||||
DOMAIN,
|
||||
"upload",
|
||||
SERVICE_UPLOAD,
|
||||
{
|
||||
ATTR_ENTITY_ID: entity_id,
|
||||
ATTR_DEVICE_PATH: device_path,
|
||||
|
||||
@@ -4,7 +4,7 @@ import datetime
|
||||
from typing import Any
|
||||
from unittest.mock import AsyncMock, Mock, patch
|
||||
|
||||
from anthropic import AuthenticationError, RateLimitError
|
||||
from anthropic import RateLimitError
|
||||
from anthropic.types import (
|
||||
CitationsWebSearchResultLocation,
|
||||
CitationWebSearchResultLocationParam,
|
||||
@@ -36,10 +36,8 @@ from homeassistant.components.anthropic.const import (
|
||||
CONF_WEB_SEARCH_REGION,
|
||||
CONF_WEB_SEARCH_TIMEZONE,
|
||||
CONF_WEB_SEARCH_USER_LOCATION,
|
||||
DOMAIN,
|
||||
)
|
||||
from homeassistant.components.anthropic.entity import CitationDetails, ContentDetails
|
||||
from homeassistant.config_entries import SOURCE_REAUTH
|
||||
from homeassistant.const import CONF_LLM_HASS_API
|
||||
from homeassistant.core import Context, HomeAssistant
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
@@ -109,7 +107,7 @@ async def test_error_handling(
|
||||
mock_init_component,
|
||||
mock_create_stream: AsyncMock,
|
||||
) -> None:
|
||||
"""Test error handling."""
|
||||
"""Test that the default prompt works."""
|
||||
mock_create_stream.side_effect = RateLimitError(
|
||||
message=None,
|
||||
response=Response(status_code=429, request=Request(method="POST", url=URL())),
|
||||
@@ -124,38 +122,6 @@ async def test_error_handling(
|
||||
assert result.response.error_code == "unknown", result
|
||||
|
||||
|
||||
async def test_auth_error_handling(
|
||||
hass: HomeAssistant,
|
||||
mock_config_entry: MockConfigEntry,
|
||||
mock_init_component,
|
||||
mock_create_stream: AsyncMock,
|
||||
) -> None:
|
||||
"""Test reauth after authentication error during conversation."""
|
||||
mock_create_stream.side_effect = AuthenticationError(
|
||||
message="Invalid API key",
|
||||
response=Response(status_code=403, request=Request(method="POST", url=URL())),
|
||||
body=None,
|
||||
)
|
||||
|
||||
result = await conversation.async_converse(
|
||||
hass, "hello", None, Context(), agent_id="conversation.claude_conversation"
|
||||
)
|
||||
|
||||
assert result.response.response_type == intent.IntentResponseType.ERROR
|
||||
assert result.response.error_code == "unknown", result
|
||||
|
||||
await hass.async_block_till_done()
|
||||
flows = hass.config_entries.flow.async_progress()
|
||||
assert len(flows) == 1
|
||||
|
||||
flow = flows[0]
|
||||
assert flow["step_id"] == "reauth_confirm"
|
||||
assert flow["handler"] == DOMAIN
|
||||
assert "context" in flow
|
||||
assert flow["context"]["source"] == SOURCE_REAUTH
|
||||
assert flow["context"]["entry_id"] == mock_config_entry.entry_id
|
||||
|
||||
|
||||
async def test_template_error(
|
||||
hass: HomeAssistant,
|
||||
mock_config_entry: MockConfigEntry,
|
||||
|
||||
@@ -1,73 +0,0 @@
|
||||
# serializer version: 1
|
||||
# name: test_entry_diagnostics[large]
|
||||
dict({
|
||||
'backup': list([
|
||||
dict({
|
||||
'addons': list([
|
||||
]),
|
||||
'backup_id': '23e64aec',
|
||||
'database_included': True,
|
||||
'date': '2024-11-22T11:48:48.727189+01:00',
|
||||
'extra_metadata': dict({
|
||||
}),
|
||||
'folders': list([
|
||||
]),
|
||||
'homeassistant_included': True,
|
||||
'homeassistant_version': '2024.12.0.dev0',
|
||||
'name': 'Core 2024.12.0.dev0',
|
||||
'protected': False,
|
||||
'size': 20971520,
|
||||
}),
|
||||
]),
|
||||
'backup_agents': list([
|
||||
dict({
|
||||
'name': 'test',
|
||||
}),
|
||||
]),
|
||||
'config': dict({
|
||||
'access_key_id': '**REDACTED**',
|
||||
'bucket': 'test',
|
||||
'endpoint_url': 'https://s3.eu-south-1.amazonaws.com',
|
||||
'secret_access_key': '**REDACTED**',
|
||||
}),
|
||||
'coordinator_data': dict({
|
||||
'all_backups_size': 20971520,
|
||||
}),
|
||||
})
|
||||
# ---
|
||||
# name: test_entry_diagnostics[small]
|
||||
dict({
|
||||
'backup': list([
|
||||
dict({
|
||||
'addons': list([
|
||||
]),
|
||||
'backup_id': '23e64aec',
|
||||
'database_included': True,
|
||||
'date': '2024-11-22T11:48:48.727189+01:00',
|
||||
'extra_metadata': dict({
|
||||
}),
|
||||
'folders': list([
|
||||
]),
|
||||
'homeassistant_included': True,
|
||||
'homeassistant_version': '2024.12.0.dev0',
|
||||
'name': 'Core 2024.12.0.dev0',
|
||||
'protected': False,
|
||||
'size': 1048576,
|
||||
}),
|
||||
]),
|
||||
'backup_agents': list([
|
||||
dict({
|
||||
'name': 'test',
|
||||
}),
|
||||
]),
|
||||
'config': dict({
|
||||
'access_key_id': '**REDACTED**',
|
||||
'bucket': 'test',
|
||||
'endpoint_url': 'https://s3.eu-south-1.amazonaws.com',
|
||||
'secret_access_key': '**REDACTED**',
|
||||
}),
|
||||
'coordinator_data': dict({
|
||||
'all_backups_size': 1048576,
|
||||
}),
|
||||
})
|
||||
# ---
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user