forked from home-assistant/core
Compare commits
123 Commits
2025.4.0b7 ... 2025.4.1
| SHA1 |
|---|
| 7af6a4f493 |
| c25f26a290 |
| 8d62cb60a6 |
| 4f799069ea |
| af708b78e0 |
| f46e659740 |
| 7bd517e6ff |
| e9abdab1f5 |
| 86eee4f041 |
| 9db60c830c |
| c43a4682b9 |
| 2a4996055a |
| 4643fc2c14 |
| 6410b90d82 |
| e5c00eceae |
| fe65579df8 |
| 281beecb05 |
| 7546b5d269 |
| 490e3201b9 |
| 04be575139 |
| 854cae7f12 |
| 109d20978f |
| f8d284ec4b |
| 06ebe0810f |
| 802ad2ff51 |
| 9070a8d579 |
| e8b2a3de8b |
| 39549d5dd4 |
| 0c19e47bd4 |
| 05507d77e3 |
| 94558e2d40 |
| 4f22fe8f7f |
| 9e7dfbb857 |
| 02d182239a |
| 4e0f581747 |
| 42d97d348c |
| 69380c85ca |
| b38c647830 |
| 2396fd1090 |
| aa4eb89eee |
| 1b1bc6af95 |
| f17003a79c |
| ec70e8b0cd |
| d888c70ff0 |
| f29444002e |
| fc66997a36 |
| 35513ae072 |
| cd363d48c3 |
| d47ef835d7 |
| 00177c699e |
| 11b0086a01 |
| ceb177f80e |
| fa3832fbd7 |
| 2b9c903429 |
| a7c43f9b49 |
| b428196149 |
| e23da1a90f |
| 3951c2ea66 |
| fee152654d |
| 51073c948c |
| 91438088a0 |
| 427e1abdae |
| 6e7ac45ac0 |
| 4b3b9ebc29 |
| 649d8638ed |
| 12c4152dbe |
| 8f9572bb05 |
| 6d022ff4e0 |
| c0c2edb90a |
| b014219fdd |
| 216b8ef400 |
| f2ccd46267 |
| e16ba27ce8 |
| 506526a6a2 |
| a88678cf42 |
| d0b61af7ec |
| 04f5315ab2 |
| 7f9e4ba39e |
| 06aaf188ea |
| 627f994872 |
| 9e81ec5aae |
| 69753fca1d |
| 7773cc121e |
| 3aa56936ad |
| e66416c23d |
| a592feae3d |
| fc0d71e891 |
| d4640f1d24 |
| 6fe158836e |
| 629c0087f4 |
| 363bd75129 |
| 7592d350a8 |
| 8ac8401b4e |
| eed075dbfa |
| 23dbdedfb6 |
| 85ad29e28e |
| 35fc81b038 |
| 5d45b84cd2 |
| 7766649304 |
| 07e9020dfa |
| f504a759e0 |
| 9927de4801 |
| 1244fc4682 |
| e77a1b12f7 |
| 5459daaa10 |
| 400131df78 |
| 28e1843ff9 |
| df777318d1 |
| 6ad5e9e89c |
| a0bd8deee9 |
| 405cbd6a00 |
| 3e0eb5ab2c |
| fad75a70b6 |
| d9720283df |
| 14eed1778b |
| 049aaa7e8b |
| 35717e8216 |
| 2a081abc18 |
| b7f29c7358 |
| 3bb6373df5 |
| e1b4edec50 |
| 147bee57e1 |
| fcdaea64da |
2 CODEOWNERS (generated)

@@ -1480,8 +1480,6 @@ build.json @home-assistant/supervisor
/tests/components/suez_water/ @ooii @jb101010-2
/homeassistant/components/sun/ @Swamp-Ig
/tests/components/sun/ @Swamp-Ig
/homeassistant/components/sunweg/ @rokam
/tests/components/sunweg/ @rokam
/homeassistant/components/supla/ @mwegrzynek
/homeassistant/components/surepetcare/ @benleb @danielhiversen
/tests/components/surepetcare/ @benleb @danielhiversen

5 homeassistant/brands/eve.json (new file)

@@ -0,0 +1,5 @@
{
"domain": "eve",
"name": "Eve",
"iot_standards": ["matter"]
}
@@ -8,7 +8,7 @@ from aiohttp import ClientSession
from aiohttp.client_exceptions import ClientConnectorError
from pyairnow import WebServiceAPI
from pyairnow.conv import aqi_to_concentration
from pyairnow.errors import AirNowError
from pyairnow.errors import AirNowError, InvalidJsonError

from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant

@@ -79,7 +79,7 @@ class AirNowDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]):
distance=self.distance,
)

except (AirNowError, ClientConnectorError) as error:
except (AirNowError, ClientConnectorError, InvalidJsonError) as error:
raise UpdateFailed(error) from error

if not obs:
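A minimal sketch of the pattern this hunk applies: widening the caught exception tuple so a malformed JSON response is surfaced as a normal `UpdateFailed` instead of an unhandled error. The imports mirror the hunk; the surrounding coordinator is replaced by a generic helper, so the fetch callable is an assumption for illustration only.

```python
# Sketch only (not the integration's actual coordinator): shows the widened
# exception tuple from the hunk above applied inside an update helper.
from aiohttp.client_exceptions import ClientConnectorError
from pyairnow.errors import AirNowError, InvalidJsonError
from homeassistant.helpers.update_coordinator import UpdateFailed


async def fetch_observations(fetch):
    """Await `fetch` (any coroutine factory) and normalize known failures."""
    try:
        return await fetch()
    except (AirNowError, ClientConnectorError, InvalidJsonError) as error:
        # Malformed JSON is now handled like any other transient API error.
        raise UpdateFailed(error) from error
```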
@@ -20,6 +20,7 @@ import voluptuous as vol
from homeassistant.components import zeroconf
from homeassistant.config_entries import (
SOURCE_IGNORE,
SOURCE_REAUTH,
SOURCE_ZEROCONF,
ConfigEntry,
ConfigFlow,

@@ -381,7 +382,9 @@ class AppleTVConfigFlow(ConfigFlow, domain=DOMAIN):
CONF_IDENTIFIERS: list(combined_identifiers),
},
)
if entry.source != SOURCE_IGNORE:
# Don't reload ignored entries or in the middle of reauth,
# e.g. if the user is entering a new PIN
if entry.source != SOURCE_IGNORE and self.source != SOURCE_REAUTH:
self.hass.config_entries.async_schedule_reload(entry.entry_id)
if not allow_exist:
raise DeviceAlreadyConfigured

@@ -60,7 +60,8 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
{
vol.Optional("message"): str,
vol.Optional("media_id"): str,
vol.Optional("preannounce_media_id"): vol.Any(str, None),
vol.Optional("preannounce"): bool,
vol.Optional("preannounce_media_id"): str,
}
),
cv.has_at_least_one_key("message", "media_id"),

@@ -75,7 +76,8 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
{
vol.Optional("start_message"): str,
vol.Optional("start_media_id"): str,
vol.Optional("preannounce_media_id"): vol.Any(str, None),
vol.Optional("preannounce"): bool,
vol.Optional("preannounce_media_id"): str,
vol.Optional("extra_system_prompt"): str,
}
),
@@ -180,7 +180,8 @@ class AssistSatelliteEntity(entity.Entity):
self,
message: str | None = None,
media_id: str | None = None,
preannounce_media_id: str | None = PREANNOUNCE_URL,
preannounce: bool = True,
preannounce_media_id: str = PREANNOUNCE_URL,
) -> None:
"""Play and show an announcement on the satellite.

@@ -190,8 +191,8 @@ class AssistSatelliteEntity(entity.Entity):
If media_id is provided, it is played directly. It is possible
to omit the message and the satellite will not show any text.

If preannounce is True, a sound is played before the announcement.
If preannounce_media_id is provided, it overrides the default sound.
If preannounce_media_id is None, no sound is played.

Calls async_announce with message and media id.
"""

@@ -201,7 +202,9 @@ class AssistSatelliteEntity(entity.Entity):
message = ""

announcement = await self._resolve_announcement_media_id(
message, media_id, preannounce_media_id
message,
media_id,
preannounce_media_id=preannounce_media_id if preannounce else None,
)

if self._is_announcing:

@@ -229,7 +232,8 @@ class AssistSatelliteEntity(entity.Entity):
start_message: str | None = None,
start_media_id: str | None = None,
extra_system_prompt: str | None = None,
preannounce_media_id: str | None = PREANNOUNCE_URL,
preannounce: bool = True,
preannounce_media_id: str = PREANNOUNCE_URL,
) -> None:
"""Start a conversation from the satellite.

@@ -239,8 +243,8 @@ class AssistSatelliteEntity(entity.Entity):
If start_media_id is provided, it is played directly. It is possible
to omit the message and the satellite will not show any text.

If preannounce_media_id is provided, it is played before the announcement.
If preannounce_media_id is None, no sound is played.
If preannounce is True, a sound is played before the start message or media.
If preannounce_media_id is provided, it overrides the default sound.

Calls async_start_conversation.
"""

@@ -257,7 +261,9 @@ class AssistSatelliteEntity(entity.Entity):
start_message = ""

announcement = await self._resolve_announcement_media_id(
start_message, start_media_id, preannounce_media_id
start_message,
start_media_id,
preannounce_media_id=preannounce_media_id if preannounce else None,
)

if self._is_announcing:
@@ -15,6 +15,11 @@ announce:
required: false
selector:
text:
preannounce:
required: false
default: true
selector:
boolean:
preannounce_media_id:
required: false
selector:

@@ -40,6 +45,11 @@ start_conversation:
required: false
selector:
text:
preannounce:
required: false
default: true
selector:
boolean:
preannounce_media_id:
required: false
selector:

@@ -24,9 +24,13 @@
"name": "Media ID",
"description": "The media ID to announce instead of using text-to-speech."
},
"preannounce": {
"name": "Preannounce",
"description": "Play a sound before the announcement."
},
"preannounce_media_id": {
"name": "Preannounce Media ID",
"description": "The media ID to play before the announcement."
"name": "Preannounce media ID",
"description": "Custom media ID to play before the announcement."
}
}
},

@@ -46,9 +50,13 @@
"name": "Extra system prompt",
"description": "Provide background information to the AI about the request."
},
"preannounce": {
"name": "Preannounce",
"description": "Play a sound before the start message or media."
},
"preannounce_media_id": {
"name": "Preannounce Media ID",
"description": "The media ID to play before the start message or media."
"name": "Preannounce media ID",
"description": "Custom media ID to play before the start message or media."
}
}
}
@@ -199,7 +199,7 @@ async def websocket_test_connection(
hass.async_create_background_task(
satellite.async_internal_announce(
media_id=f"{CONNECTION_TEST_URL_BASE}/{connection_id}",
preannounce_media_id=None,
preannounce=False,
),
f"assist_satellite_connection_test_{msg['entity_id']}",
)

@@ -175,7 +175,8 @@ class AzureStorageBackupAgent(BackupAgent):
"""Find a blob by backup id."""
async for blob in self._client.list_blobs(include="metadata"):
if (
backup_id == blob.metadata.get("backup_id", "")
blob.metadata is not None
and backup_id == blob.metadata.get("backup_id", "")
and blob.metadata.get("metadata_version") == METADATA_VERSION
):
return blob
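A minimal sketch of the None-guard applied in the Azure Storage hunk above, reduced to a standalone helper. The constant name is taken from the hunk; the async iterator of blobs is a hypothetical stand-in for the storage client.

```python
# Sketch only: filter blobs by metadata while tolerating blobs that have none.
METADATA_VERSION = "1"  # assumed value; the hunk only shows the constant name


async def find_blob(blobs, backup_id):
    """Return the first blob whose metadata matches the backup id, if any."""
    async for blob in blobs:
        if (
            blob.metadata is not None  # blobs uploaded by other tools may lack metadata
            and backup_id == blob.metadata.get("backup_id", "")
            and blob.metadata.get("metadata_version") == METADATA_VERSION
        ):
            return blob
    return None
```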
@@ -501,18 +501,16 @@ class BluesoundPlayer(CoordinatorEntity[BluesoundCoordinator], MediaPlayerEntity
return

# presets and inputs might have the same name; presets have priority
url: str | None = None
for input_ in self._inputs:
if input_.text == source:
url = input_.url
await self._player.play_url(input_.url)
return
for preset in self._presets:
if preset.name == source:
url = preset.url
await self._player.load_preset(preset.id)
return

if url is None:
raise ServiceValidationError(f"Source {source} not found")

await self._player.play_url(url)
raise ServiceValidationError(f"Source {source} not found")

async def async_clear_playlist(self) -> None:
"""Clear players playlist."""

@@ -19,7 +19,7 @@
"bleak-retry-connector==3.9.0",
"bluetooth-adapters==0.21.4",
"bluetooth-auto-recovery==1.4.5",
"bluetooth-data-tools==1.26.1",
"bluetooth-data-tools==1.26.5",
"dbus-fast==2.43.0",
"habluetooth==3.37.0"
]
@@ -127,7 +127,11 @@ class CloudOAuth2Implementation(config_entry_oauth2_flow.AbstractOAuth2Implement
flow_id=flow_id, user_input=tokens
)

self.hass.async_create_task(await_tokens())
# It's a background task because it should be cancelled on shutdown and there's nothing else
# we can do in such case. There's also no need to wait for this during setup.
self.hass.async_create_background_task(
await_tokens(), name="Awaiting OAuth tokens"
)

return authorize_url

@@ -8,7 +8,7 @@ from aiocomelit import ComelitSerialBridgeObject
from aiocomelit.const import COVER, STATE_COVER, STATE_OFF, STATE_ON

from homeassistant.components.cover import CoverDeviceClass, CoverEntity, CoverState
from homeassistant.core import HomeAssistant, callback
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.restore_state import RestoreEntity
from homeassistant.helpers.update_coordinator import CoordinatorEntity

@@ -98,13 +98,20 @@ class ComelitCoverEntity(
"""Return if the cover is opening."""
return self._current_action("opening")

async def _cover_set_state(self, action: int, state: int) -> None:
"""Set desired cover state."""
self._last_state = self.state
await self._api.set_device_status(COVER, self._device.index, action)
self.coordinator.data[COVER][self._device.index].status = state
self.async_write_ha_state()

async def async_close_cover(self, **kwargs: Any) -> None:
"""Close cover."""
await self._api.set_device_status(COVER, self._device.index, STATE_OFF)
await self._cover_set_state(STATE_OFF, 2)

async def async_open_cover(self, **kwargs: Any) -> None:
"""Open cover."""
await self._api.set_device_status(COVER, self._device.index, STATE_ON)
await self._cover_set_state(STATE_ON, 1)

async def async_stop_cover(self, **_kwargs: Any) -> None:
"""Stop the cover."""

@@ -112,13 +119,7 @@ class ComelitCoverEntity(
return

action = STATE_ON if self.is_closing else STATE_OFF
await self._api.set_device_status(COVER, self._device.index, action)

@callback
def _handle_coordinator_update(self) -> None:
"""Handle device update."""
self._last_state = self.state
self.async_write_ha_state()
await self._cover_set_state(action, 0)

async def async_added_to_hass(self) -> None:
"""Handle entity which will be added."""
@@ -162,7 +162,7 @@ class ComelitHumidifierEntity(CoordinatorEntity[ComelitSerialBridge], Humidifier

async def async_set_humidity(self, humidity: int) -> None:
"""Set new target humidity."""
if self.mode == HumidifierComelitMode.OFF:
if not self._attr_is_on:
raise ServiceValidationError(
translation_domain=DOMAIN,
translation_key="humidity_while_off",

@@ -190,9 +190,13 @@ class ComelitHumidifierEntity(CoordinatorEntity[ComelitSerialBridge], Humidifier
await self.coordinator.api.set_humidity_status(
self._device.index, self._set_command
)
self._attr_is_on = True
self.async_write_ha_state()

async def async_turn_off(self, **kwargs: Any) -> None:
"""Turn off."""
await self.coordinator.api.set_humidity_status(
self._device.index, HumidifierComelitCommand.OFF
)
self._attr_is_on = False
self.async_write_ha_state()

@@ -59,7 +59,8 @@ class ComelitLightEntity(CoordinatorEntity[ComelitSerialBridge], LightEntity):
async def _light_set_state(self, state: int) -> None:
"""Set desired light state."""
await self.coordinator.api.set_device_status(LIGHT, self._device.index, state)
await self.coordinator.async_request_refresh()
self.coordinator.data[LIGHT][self._device.index].status = state
self.async_write_ha_state()

async def async_turn_on(self, **kwargs: Any) -> None:
"""Turn the light on."""

@@ -52,7 +52,9 @@
"rest": "Rest",
"sabotated": "Sabotated"
}
},
}
},
"humidifier": {
"humidifier": {
"name": "Humidifier"
},

@@ -67,7 +67,8 @@ class ComelitSwitchEntity(CoordinatorEntity[ComelitSerialBridge], SwitchEntity):
await self.coordinator.api.set_device_status(
self._device.type, self._device.index, state
)
await self.coordinator.async_request_refresh()
self.coordinator.data[self._device.type][self._device.index].status = state
self.async_write_ha_state()

async def async_turn_on(self, **kwargs: Any) -> None:
"""Turn the switch on."""
@@ -650,7 +650,14 @@ class DefaultAgent(ConversationEntity):

if (
(maybe_result is None)  # first result
or (num_matched_entities > best_num_matched_entities)
or (
# More literal text matched
result.text_chunks_matched > maybe_result.text_chunks_matched
)
or (
# More entities matched
num_matched_entities > best_num_matched_entities
)
or (
# Fewer unmatched entities
(num_matched_entities == best_num_matched_entities)

@@ -662,16 +669,6 @@ class DefaultAgent(ConversationEntity):
and (num_unmatched_entities == best_num_unmatched_entities)
and (num_unmatched_ranges > best_num_unmatched_ranges)
)
or (
# More literal text matched
(num_matched_entities == best_num_matched_entities)
and (num_unmatched_entities == best_num_unmatched_entities)
and (num_unmatched_ranges == best_num_unmatched_ranges)
and (
result.text_chunks_matched
> maybe_result.text_chunks_matched
)
)
or (
# Prefer match failures with entities
(result.text_chunks_matched == maybe_result.text_chunks_matched)

@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/conversation",
"integration_type": "system",
"quality_scale": "internal",
"requirements": ["hassil==2.2.3", "home-assistant-intents==2025.3.24"]
"requirements": ["hassil==2.2.3", "home-assistant-intents==2025.3.28"]
}
@@ -21,6 +21,7 @@ from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.helpers import device_registry as dr, entity_registry as er
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC
from homeassistant.util.ssl import client_context_no_verify

from .const import KEY_MAC, TIMEOUT
from .coordinator import DaikinConfigEntry, DaikinCoordinator

@@ -48,6 +49,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: DaikinConfigEntry) -> bo
key=entry.data.get(CONF_API_KEY),
uuid=entry.data.get(CONF_UUID),
password=entry.data.get(CONF_PASSWORD),
ssl_context=client_context_no_verify(),
)
_LOGGER.debug("Connection to %s successful", host)
except TimeoutError as err:

@@ -6,6 +6,6 @@
"documentation": "https://www.home-assistant.io/integrations/daikin",
"iot_class": "local_polling",
"loggers": ["pydaikin"],
"requirements": ["pydaikin==2.14.1"],
"requirements": ["pydaikin==2.15.0"],
"zeroconf": ["_dkapi._tcp.local."]
}

@@ -8,7 +8,7 @@
"documentation": "https://www.home-assistant.io/integrations/dlna_dmr",
"iot_class": "local_push",
"loggers": ["async_upnp_client"],
"requirements": ["async-upnp-client==0.43.0", "getmac==0.9.5"],
"requirements": ["async-upnp-client==0.44.0", "getmac==0.9.5"],
"ssdp": [
{
"deviceType": "urn:schemas-upnp-org:device:MediaRenderer:1",

@@ -7,7 +7,7 @@
"dependencies": ["ssdp"],
"documentation": "https://www.home-assistant.io/integrations/dlna_dms",
"iot_class": "local_polling",
"requirements": ["async-upnp-client==0.43.0"],
"requirements": ["async-upnp-client==0.44.0"],
"ssdp": [
{
"deviceType": "urn:schemas-upnp-org:device:MediaServer:1",
@@ -50,10 +50,10 @@ class DukeEnergyConfigFlow(ConfigFlow, domain=DOMAIN):
_LOGGER.exception("Unexpected exception")
errors["base"] = "unknown"
else:
username = auth["cdp_internal_user_id"].lower()
username = auth["internalUserID"].lower()
await self.async_set_unique_id(username)
self._abort_if_unique_id_configured()
email = auth["email"].lower()
email = auth["loginEmailAddress"].lower()
data = {
CONF_EMAIL: email,
CONF_USERNAME: username,

@@ -6,5 +6,5 @@
"dependencies": ["recorder"],
"documentation": "https://www.home-assistant.io/integrations/duke_energy",
"iot_class": "cloud_polling",
"requirements": ["aiodukeenergy==0.2.2"]
"requirements": ["aiodukeenergy==0.3.0"]
}

@@ -91,15 +91,15 @@ class EcoNetWaterHeater(EcoNetEntity[WaterHeater], WaterHeaterEntity):
def operation_list(self) -> list[str]:
"""List of available operation modes."""
econet_modes = self.water_heater.modes
op_list = []
operation_modes = set()
for mode in econet_modes:
if (
mode is not WaterHeaterOperationMode.UNKNOWN
and mode is not WaterHeaterOperationMode.VACATION
):
ha_mode = ECONET_STATE_TO_HA[mode]
op_list.append(ha_mode)
return op_list
operation_modes.add(ha_mode)
return list(operation_modes)

@property
def supported_features(self) -> WaterHeaterEntityFeature:
@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/ecovacs",
"iot_class": "cloud_push",
"loggers": ["sleekxmppfs", "sucks", "deebot_client"],
"requirements": ["py-sucks==0.9.10", "deebot-client==12.4.0"]
"requirements": ["py-sucks==0.9.10", "deebot-client==12.5.0"]
}

@@ -100,7 +100,11 @@ class ElkEntity(Entity):
return {"index": self._element.index + 1}

def _element_changed(self, element: Element, changeset: dict[str, Any]) -> None:
pass
"""Handle changes to the element.

This method is called when the element changes. It should be
overridden by subclasses to handle the changes.
"""

@callback
def _element_callback(self, element: Element, changeset: dict[str, Any]) -> None:

@@ -111,7 +115,7 @@ class ElkEntity(Entity):
async def async_added_to_hass(self) -> None:
"""Register callback for ElkM1 changes and update entity state."""
self._element.add_callback(self._element_callback)
self._element_callback(self._element, {})
self._element_changed(self._element, {})

@property
def device_info(self) -> DeviceInfo:

@@ -6,7 +6,7 @@
"documentation": "https://www.home-assistant.io/integrations/enphase_envoy",
"iot_class": "local_polling",
"loggers": ["pyenphase"],
"requirements": ["pyenphase==1.25.1"],
"requirements": ["pyenphase==1.25.5"],
"zeroconf": [
{
"type": "_enphase-envoy._tcp.local."
@@ -128,8 +128,23 @@ class EsphomeFlowHandler(ConfigFlow, domain=DOMAIN):
self._password = ""
return await self._async_authenticate_or_add()

if error is None and entry_data.get(CONF_NOISE_PSK):
return await self.async_step_reauth_encryption_removed_confirm()
return await self.async_step_reauth_confirm()

async def async_step_reauth_encryption_removed_confirm(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Handle reauthorization flow when encryption was removed."""
if user_input is not None:
self._noise_psk = None
return self._async_get_entry()

return self.async_show_form(
step_id="reauth_encryption_removed_confirm",
description_placeholders={"name": self._name},
)

async def async_step_reauth_confirm(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
@@ -282,15 +282,18 @@ class RuntimeEntryData:
) -> None:
"""Distribute an update of static infos to all platforms."""
# First, load all platforms
needed_platforms = set()
if async_get_dashboard(hass):
needed_platforms.add(Platform.UPDATE)
needed_platforms: set[Platform] = set()

if self.device_info and self.device_info.voice_assistant_feature_flags_compat(
self.api_version
):
needed_platforms.add(Platform.BINARY_SENSOR)
needed_platforms.add(Platform.SELECT)
if self.device_info:
if async_get_dashboard(hass):
# Only load the update platform if the device_info is set
# When we restore the entry, the device_info may not be set yet
# and we don't want to load the update platform since it needs
# a complete device_info.
needed_platforms.add(Platform.UPDATE)
if self.device_info.voice_assistant_feature_flags_compat(self.api_version):
needed_platforms.add(Platform.BINARY_SENSOR)
needed_platforms.add(Platform.SELECT)

ent_reg = er.async_get(hass)
registry_get_entity = ent_reg.async_get_entity_id

@@ -312,18 +315,19 @@ class RuntimeEntryData:

# Make a dict of the EntityInfo by type and send
# them to the listeners for each specific EntityInfo type
infos_by_type: dict[type[EntityInfo], list[EntityInfo]] = {}
infos_by_type: defaultdict[type[EntityInfo], list[EntityInfo]] = defaultdict(
list
)
for info in infos:
info_type = type(info)
if info_type not in infos_by_type:
infos_by_type[info_type] = []
infos_by_type[info_type].append(info)
infos_by_type[type(info)].append(info)

callbacks_by_type = self.entity_info_callbacks
for type_, entity_infos in infos_by_type.items():
if callbacks_ := callbacks_by_type.get(type_):
for callback_ in callbacks_:
callback_(entity_infos)
for type_, callbacks in self.entity_info_callbacks.items():
# If all entities for a type are removed, we
# still need to call the callbacks with an empty list
# to make sure the entities are removed.
entity_infos = infos_by_type.get(type_, [])
for callback_ in callbacks:
callback_(entity_infos)

# Finally update static info subscriptions
for callback_ in self.static_info_update_subscriptions:
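A minimal sketch of the grouping pattern the entry_data hunk above switches to: group items by concrete type with `defaultdict(list)`, then iterate the callback registry (not the groups) so types with no remaining items still get called with an empty list. Plain callables stand in for the ESPHome callback registry; the names here are illustrative, not the integration's actual API.

```python
# Sketch only: defaultdict grouping plus "always call every registered type".
from collections import defaultdict


def dispatch(infos, callbacks_by_type):
    """Group infos by type and invoke every registered callback.

    Callbacks whose type has no infos still receive an empty list, so
    consumers can remove entities that no longer exist.
    """
    infos_by_type = defaultdict(list)
    for info in infos:
        infos_by_type[type(info)].append(info)

    for type_, callbacks in callbacks_by_type.items():
        entity_infos = infos_by_type.get(type_, [])
        for callback_ in callbacks:
            callback_(entity_infos)
```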
@@ -13,6 +13,7 @@ from aioesphomeapi import (
APIConnectionError,
APIVersion,
DeviceInfo as EsphomeDeviceInfo,
EncryptionHelloAPIError,
EntityInfo,
HomeassistantServiceCall,
InvalidAuthAPIError,

@@ -570,6 +571,7 @@ class ESPHomeManager:
if isinstance(
err,
(
EncryptionHelloAPIError,
RequiresEncryptionAPIError,
InvalidEncryptionKeyAPIError,
InvalidAuthAPIError,

@@ -16,7 +16,7 @@
"loggers": ["aioesphomeapi", "noiseprotocol", "bleak_esphome"],
"mqtt": ["esphome/discover/#"],
"requirements": [
"aioesphomeapi==29.7.0",
"aioesphomeapi==29.8.0",
"esphome-dashboard-api==1.2.3",
"bleak-esphome==2.12.0"
],

@@ -43,6 +43,9 @@
},
"description": "The ESPHome device {name} enabled transport encryption or changed the encryption key. Please enter the updated key. You can find it in the ESPHome Dashboard or in your device configuration."
},
"reauth_encryption_removed_confirm": {
"description": "The ESPHome device {name} disabled transport encryption. Please confirm that you want to remove the encryption key and allow unencrypted connections."
},
"discovery_confirm": {
"description": "Do you want to add the ESPHome node `{name}` to Home Assistant?",
"title": "Discovered ESPHome node"
@@ -6,5 +6,5 @@
"iot_class": "cloud_polling",
"loggers": ["evohome", "evohomeasync", "evohomeasync2"],
"quality_scale": "legacy",
"requirements": ["evohome-async==1.0.4"]
"requirements": ["evohome-async==1.0.5"]
}

@@ -301,6 +301,7 @@ class FibaroController:
device.ha_id = (
f"{slugify(room_name)}_{slugify(device.name)}_{device.fibaro_id}"
)
platform = None
if device.enabled and (not device.is_plugin or self._import_plugins):
platform = self._map_device_to_platform(device)
if platform is None:

@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/forecast_solar",
"integration_type": "service",
"iot_class": "cloud_polling",
"requirements": ["forecast-solar==4.0.0"]
"requirements": ["forecast-solar==4.1.0"]
}
@@ -193,7 +193,6 @@ SENSOR_TYPES: tuple[FritzSensorEntityDescription, ...] = (
translation_key="max_kb_s_sent",
native_unit_of_measurement=UnitOfDataRate.KILOBITS_PER_SECOND,
device_class=SensorDeviceClass.DATA_RATE,
entity_category=EntityCategory.DIAGNOSTIC,
value_fn=_retrieve_max_kb_s_sent_state,
),
FritzSensorEntityDescription(

@@ -201,7 +200,6 @@ SENSOR_TYPES: tuple[FritzSensorEntityDescription, ...] = (
translation_key="max_kb_s_received",
native_unit_of_measurement=UnitOfDataRate.KILOBITS_PER_SECOND,
device_class=SensorDeviceClass.DATA_RATE,
entity_category=EntityCategory.DIAGNOSTIC,
value_fn=_retrieve_max_kb_s_received_state,
),
FritzSensorEntityDescription(

@@ -225,6 +223,7 @@ SENSOR_TYPES: tuple[FritzSensorEntityDescription, ...] = (
translation_key="link_kb_s_sent",
native_unit_of_measurement=UnitOfDataRate.KILOBITS_PER_SECOND,
device_class=SensorDeviceClass.DATA_RATE,
entity_category=EntityCategory.DIAGNOSTIC,
value_fn=_retrieve_link_kb_s_sent_state,
),
FritzSensorEntityDescription(

@@ -232,12 +231,15 @@ SENSOR_TYPES: tuple[FritzSensorEntityDescription, ...] = (
translation_key="link_kb_s_received",
native_unit_of_measurement=UnitOfDataRate.KILOBITS_PER_SECOND,
device_class=SensorDeviceClass.DATA_RATE,
entity_category=EntityCategory.DIAGNOSTIC,
value_fn=_retrieve_link_kb_s_received_state,
),
FritzSensorEntityDescription(
key="link_noise_margin_sent",
translation_key="link_noise_margin_sent",
native_unit_of_measurement=SIGNAL_STRENGTH_DECIBELS,
entity_category=EntityCategory.DIAGNOSTIC,
entity_registry_enabled_default=False,
value_fn=_retrieve_link_noise_margin_sent_state,
is_suitable=lambda info: info.wan_enabled and info.connection == DSL_CONNECTION,
),

@@ -245,6 +247,8 @@ SENSOR_TYPES: tuple[FritzSensorEntityDescription, ...] = (
key="link_noise_margin_received",
translation_key="link_noise_margin_received",
native_unit_of_measurement=SIGNAL_STRENGTH_DECIBELS,
entity_category=EntityCategory.DIAGNOSTIC,
entity_registry_enabled_default=False,
value_fn=_retrieve_link_noise_margin_received_state,
is_suitable=lambda info: info.wan_enabled and info.connection == DSL_CONNECTION,
),

@@ -252,6 +256,8 @@ SENSOR_TYPES: tuple[FritzSensorEntityDescription, ...] = (
key="link_attenuation_sent",
translation_key="link_attenuation_sent",
native_unit_of_measurement=SIGNAL_STRENGTH_DECIBELS,
entity_category=EntityCategory.DIAGNOSTIC,
entity_registry_enabled_default=False,
value_fn=_retrieve_link_attenuation_sent_state,
is_suitable=lambda info: info.wan_enabled and info.connection == DSL_CONNECTION,
),

@@ -259,6 +265,8 @@ SENSOR_TYPES: tuple[FritzSensorEntityDescription, ...] = (
key="link_attenuation_received",
translation_key="link_attenuation_received",
native_unit_of_measurement=SIGNAL_STRENGTH_DECIBELS,
entity_category=EntityCategory.DIAGNOSTIC,
entity_registry_enabled_default=False,
value_fn=_retrieve_link_attenuation_received_state,
is_suitable=lambda info: info.wan_enabled and info.connection == DSL_CONNECTION,
),
@@ -6,6 +6,7 @@ from typing import Any

from homeassistant.components.climate import (
ATTR_HVAC_MODE,
PRESET_BOOST,
PRESET_COMFORT,
PRESET_ECO,
ClimateEntity,

@@ -38,7 +39,7 @@ from .sensor import value_scheduled_preset
HVAC_MODES = [HVACMode.HEAT, HVACMode.OFF]
PRESET_HOLIDAY = "holiday"
PRESET_SUMMER = "summer"
PRESET_MODES = [PRESET_ECO, PRESET_COMFORT]
PRESET_MODES = [PRESET_ECO, PRESET_COMFORT, PRESET_BOOST]
SUPPORTED_FEATURES = (
ClimateEntityFeature.TARGET_TEMPERATURE
| ClimateEntityFeature.PRESET_MODE

@@ -194,6 +195,8 @@ class FritzboxThermostat(FritzBoxDeviceEntity, ClimateEntity):
return PRESET_HOLIDAY
if self.data.summer_active:
return PRESET_SUMMER
if self.data.target_temperature == ON_API_TEMPERATURE:
return PRESET_BOOST
if self.data.target_temperature == self.data.comfort_temperature:
return PRESET_COMFORT
if self.data.target_temperature == self.data.eco_temperature:

@@ -211,6 +214,8 @@ class FritzboxThermostat(FritzBoxDeviceEntity, ClimateEntity):
await self.async_set_temperature(temperature=self.data.comfort_temperature)
elif preset_mode == PRESET_ECO:
await self.async_set_temperature(temperature=self.data.eco_temperature)
elif preset_mode == PRESET_BOOST:
await self.async_set_temperature(temperature=ON_REPORT_SET_TEMPERATURE)

@property
def extra_state_attributes(self) -> ClimateExtraAttributes:

@@ -137,6 +137,7 @@ SENSOR_TYPES: Final[tuple[FritzSensorEntityDescription, ...]] = (
key="battery",
native_unit_of_measurement=PERCENTAGE,
device_class=SensorDeviceClass.BATTERY,
state_class=SensorStateClass.MEASUREMENT,
entity_category=EntityCategory.DIAGNOSTIC,
suitable=lambda device: device.battery_level is not None,
native_value=lambda device: device.battery_level,
@@ -20,5 +20,5 @@
"documentation": "https://www.home-assistant.io/integrations/frontend",
"integration_type": "system",
"quality_scale": "internal",
"requirements": ["home-assistant-frontend==20250328.0"]
"requirements": ["home-assistant-frontend==20250404.0"]
}

@@ -7,5 +7,5 @@
"documentation": "https://www.home-assistant.io/integrations/google",
"iot_class": "cloud_polling",
"loggers": ["googleapiclient"],
"requirements": ["gcal-sync==7.0.0", "oauth2client==4.1.3", "ical==9.0.1"]
"requirements": ["gcal-sync==7.0.0", "oauth2client==4.1.3", "ical==9.0.3"]
}
@@ -7,7 +7,7 @@ import logging
from types import MappingProxyType
from typing import Any

from google import genai  # type: ignore[attr-defined]
from google import genai
from google.genai.errors import APIError, ClientError
from requests.exceptions import Timeout
import voluptuous as vol

@@ -179,28 +179,30 @@ class GoogleGenerativeAIOptionsFlow(OptionsFlow):
) -> ConfigFlowResult:
"""Manage the options."""
options: dict[str, Any] | MappingProxyType[str, Any] = self.config_entry.options
errors: dict[str, str] = {}

if user_input is not None:
if user_input[CONF_RECOMMENDED] == self.last_rendered_recommended:
if user_input[CONF_LLM_HASS_API] == "none":
user_input.pop(CONF_LLM_HASS_API)
return self.async_create_entry(title="", data=user_input)
if not (
user_input.get(CONF_LLM_HASS_API, "none") != "none"
and user_input.get(CONF_USE_GOOGLE_SEARCH_TOOL, False) is True
):
# Don't allow to save options that enable the Google Search tool with an Assist API
return self.async_create_entry(title="", data=user_input)
errors[CONF_USE_GOOGLE_SEARCH_TOOL] = "invalid_google_search_option"

# Re-render the options again, now with the recommended options shown/hidden
self.last_rendered_recommended = user_input[CONF_RECOMMENDED]

options = {
CONF_RECOMMENDED: user_input[CONF_RECOMMENDED],
CONF_PROMPT: user_input[CONF_PROMPT],
CONF_LLM_HASS_API: user_input[CONF_LLM_HASS_API],
}
options = user_input

schema = await google_generative_ai_config_option_schema(
self.hass, options, self._genai_client
)
return self.async_show_form(
step_id="init",
data_schema=vol.Schema(schema),
step_id="init", data_schema=vol.Schema(schema), errors=errors
)
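A minimal sketch of the mutual-exclusion check the options-flow hunk above introduces, reduced to a plain validation helper. The constant names come from the hunk; their string values and the helper itself are assumptions for illustration, not the integration's actual code.

```python
# Sketch only: reject enabling Google Search together with an Assist API.
CONF_LLM_HASS_API = "llm_hass_api"                      # assumed key value
CONF_USE_GOOGLE_SEARCH_TOOL = "use_google_search_tool"  # assumed key value


def validate_options(user_input: dict) -> dict[str, str]:
    """Return an errors dict; empty means the combination is allowed."""
    errors: dict[str, str] = {}
    if (
        user_input.get(CONF_LLM_HASS_API, "none") != "none"
        and user_input.get(CONF_USE_GOOGLE_SEARCH_TOOL, False) is True
    ):
        # Google Search can only be used when Assist is set to "No control".
        errors[CONF_USE_GOOGLE_SEARCH_TOOL] = "invalid_google_search_option"
    return errors
```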
@@ -356,6 +356,15 @@ class GoogleGenerativeAIConversationEntity(

messages.append(_convert_content(chat_content))

# The SDK requires the first message to be a user message
# This is not the case if user used `start_conversation`
# Workaround from https://github.com/googleapis/python-genai/issues/529#issuecomment-2740964537
if messages and messages[0].role != "user":
messages.insert(
0,
Content(role="user", parts=[Part.from_text(text=" ")]),
)

if tool_results:
messages.append(_create_google_tool_response_content(tool_results))
generateContentConfig = GenerateContentConfig(

@@ -43,6 +43,9 @@
"prompt": "Instruct how the LLM should respond. This can be a template."
}
},
"error": {
"invalid_google_search_option": "Google Search cannot be enabled alongside any Assist capability, this can only be used when Assist is set to \"No control\"."
}
},
"services": {

@@ -265,6 +265,11 @@
"version_latest": {
"name": "Newest version"
}
},
"update": {
"update": {
"name": "[%key:component::update::title%]"
}
}
},
"services": {
@@ -39,7 +39,7 @@ from .entity import (
from .update_helper import update_addon, update_core

ENTITY_DESCRIPTION = UpdateEntityDescription(
name="Update",
translation_key="update",
key=ATTR_VERSION_LATEST,
)

@@ -8,7 +8,7 @@
"step": {
"user": {
"data": {
"country": "Country"
"country": "[%key:common::config_flow::data::country%]"
}
},
"options": {
@@ -73,6 +73,19 @@ class HomeConnectApplianceData:
self.settings.update(other.settings)
self.status.update(other.status)

@classmethod
def empty(cls, appliance: HomeAppliance) -> HomeConnectApplianceData:
"""Return empty data."""
return cls(
commands=set(),
events={},
info=appliance,
options={},
programs=[],
settings={},
status={},
)

class HomeConnectCoordinator(
DataUpdateCoordinator[dict[str, HomeConnectApplianceData]]

@@ -358,15 +371,7 @@ class HomeConnectCoordinator(
model=appliance.vib,
)
if appliance.ha_id not in self.data:
self.data[appliance.ha_id] = HomeConnectApplianceData(
commands=set(),
events={},
info=appliance,
options={},
programs=[],
settings={},
status={},
)
self.data[appliance.ha_id] = HomeConnectApplianceData.empty(appliance)
else:
self.data[appliance.ha_id].info.connected = appliance.connected
old_appliances.remove(appliance.ha_id)

@@ -402,6 +407,15 @@ class HomeConnectCoordinator(
name=appliance.name,
model=appliance.vib,
)
if not appliance.connected:
_LOGGER.debug(
"Appliance %s is not connected, skipping data fetch",
appliance.ha_id,
)
if appliance_data_to_update:
appliance_data_to_update.info.connected = False
return appliance_data_to_update
return HomeConnectApplianceData.empty(appliance)
try:
settings = {
setting.key: setting
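A minimal sketch of the classmethod-factory pattern the Home Connect hunk above adds: one `empty()` constructor replaces the repeated literal construction at both call sites. The dataclass here is a stripped-down stand-in with simplified types, not the integration's actual class.

```python
# Sketch only: a classmethod factory for an "empty" record.
from dataclasses import dataclass, field
from typing import Any


@dataclass
class ApplianceData:
    info: Any
    commands: set = field(default_factory=set)
    events: dict = field(default_factory=dict)
    options: dict = field(default_factory=dict)
    programs: list = field(default_factory=list)
    settings: dict = field(default_factory=dict)
    status: dict = field(default_factory=dict)

    @classmethod
    def empty(cls, appliance: Any) -> "ApplianceData":
        """Return a fresh, empty record for an appliance."""
        return cls(info=appliance)
```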
@@ -64,7 +64,6 @@ set_program_and_options:
- selected_program
program:
example: dishcare_dishwasher_program_auto2
required: true
selector:
select:
mode: dropdown

@@ -31,7 +31,6 @@ class FirmwareUpdateCoordinator(DataUpdateCoordinator[FirmwareManifest]):
_LOGGER,
name="firmware update coordinator",
update_interval=FIRMWARE_REFRESH_INTERVAL,
always_update=False,
)
self.hass = hass
self.session = session
@@ -33,6 +33,7 @@ from .util import (
OwningIntegration,
get_otbr_addon_manager,
get_zigbee_flasher_addon_manager,
guess_firmware_info,
guess_hardware_owners,
probe_silabs_firmware_info,
)

@@ -511,6 +512,16 @@ class BaseFirmwareConfigFlow(BaseFirmwareInstallFlow, ConfigFlow):
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Confirm a discovery."""
assert self._device is not None
fw_info = await guess_firmware_info(self.hass, self._device)

# If our guess for the firmware type is actually running, we can save the user
# an unnecessary confirmation and silently confirm the flow
for owner in fw_info.owners:
if await owner.is_running(self.hass):
self._probed_firmware_info = fw_info
return self._async_flow_finished()

return await self.async_step_pick_firmware()
@@ -95,8 +95,7 @@ class BaseFirmwareUpdateEntity(
_attr_supported_features = (
UpdateEntityFeature.INSTALL | UpdateEntityFeature.PROGRESS
)
# Until this entity can be associated with a device, we must manually name it
_attr_has_entity_name = False
_attr_has_entity_name = True

def __init__(
self,

@@ -195,11 +194,7 @@ class BaseFirmwareUpdateEntity(

def _update_attributes(self) -> None:
"""Recompute the attributes of the entity."""

# This entity is not currently associated with a device so we must manually
# give it a name
self._attr_name = f"{self._config_entry.title} Update"
self._attr_title = self.entity_description.firmware_name or "unknown"
self._attr_title = self.entity_description.firmware_name or "Unknown"

if (
self._current_firmware_info is None
@@ -3,19 +3,79 @@
from __future__ import annotations

import logging
import os.path

from homeassistant.components.homeassistant_hardware.util import guess_firmware_info
from homeassistant.components.usb import (
USBDevice,
async_register_port_event_callback,
scan_serial_ports,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.core import HomeAssistant, callback
from homeassistant.exceptions import ConfigEntryNotReady, HomeAssistantError
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.typing import ConfigType

from .const import DESCRIPTION, DEVICE, FIRMWARE, FIRMWARE_VERSION, PRODUCT
from .const import (
DESCRIPTION,
DEVICE,
DOMAIN,
FIRMWARE,
FIRMWARE_VERSION,
MANUFACTURER,
PID,
PRODUCT,
SERIAL_NUMBER,
VID,
)

_LOGGER = logging.getLogger(__name__)

CONFIG_SCHEMA = cv.empty_config_schema(DOMAIN)

async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
"""Set up the ZBT-1 integration."""

@callback
def async_port_event_callback(
added: set[USBDevice], removed: set[USBDevice]
) -> None:
"""Handle USB port events."""
current_entries_by_path = {
entry.data[DEVICE]: entry
for entry in hass.config_entries.async_entries(DOMAIN)
}

for device in added | removed:
path = device.device
entry = current_entries_by_path.get(path)

if entry is not None:
_LOGGER.debug(
"Device %r has changed state, reloading config entry %s",
path,
entry,
)
hass.config_entries.async_schedule_reload(entry.entry_id)

async_register_port_event_callback(hass, async_port_event_callback)

return True

async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Set up a Home Assistant SkyConnect config entry."""

# Postpone loading the config entry if the device is missing
device_path = entry.data[DEVICE]
if not await hass.async_add_executor_job(os.path.exists, device_path):
raise ConfigEntryNotReady(
translation_domain=DOMAIN,
translation_key="device_disconnected",
)

await hass.config_entries.async_forward_entry_setups(entry, ["update"])

return True

@@ -23,6 +83,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:

async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Unload a config entry."""
await hass.config_entries.async_unload_platforms(entry, ["update"])
return True

@@ -30,7 +91,7 @@ async def async_migrate_entry(hass: HomeAssistant, config_entry: ConfigEntry) ->
"""Migrate old entry."""

_LOGGER.debug(
"Migrating from version %s:%s", config_entry.version, config_entry.minor_version
"Migrating from version %s.%s", config_entry.version, config_entry.minor_version
)

if config_entry.version == 1:

@@ -65,6 +126,43 @@ async def async_migrate_entry(hass: HomeAssistant, config_entry: ConfigEntry) ->
minor_version=3,
)

if config_entry.minor_version == 3:
# Old SkyConnect config entries were missing keys
if any(
key not in config_entry.data
for key in (VID, PID, MANUFACTURER, PRODUCT, SERIAL_NUMBER)
):
serial_ports = await hass.async_add_executor_job(scan_serial_ports)
serial_ports_info = {port.device: port for port in serial_ports}
device = config_entry.data[DEVICE]

if not (usb_info := serial_ports_info.get(device)):
raise HomeAssistantError(
f"USB device {device} is missing, cannot migrate"
)

hass.config_entries.async_update_entry(
config_entry,
data={
**config_entry.data,
VID: usb_info.vid,
PID: usb_info.pid,
MANUFACTURER: usb_info.manufacturer,
PRODUCT: usb_info.description,
DESCRIPTION: usb_info.description,
SERIAL_NUMBER: usb_info.serial_number,
},
version=1,
minor_version=4,
)
else:
# Existing entries are migrated by just incrementing the version
hass.config_entries.async_update_entry(
config_entry,
version=1,
minor_version=4,
)

_LOGGER.debug(
"Migration to version %s.%s successful",
config_entry.version,
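A minimal sketch of the reload-on-USB-event pattern from the SkyConnect `async_setup` hunk above, with the Home Assistant objects reduced to plain callables. The helper names here are illustrative assumptions; only the overall shape (map device paths to entries, reload the entry whose path appeared or disappeared) mirrors the hunk.

```python
# Sketch only: build a USB port-event callback that reloads matching entries.
def make_port_event_callback(get_entries_by_path, schedule_reload):
    """Return a callback that reloads any entry whose device path (dis)appeared."""

    def on_port_event(added, removed):
        entries_by_path = get_entries_by_path()
        for device in added | removed:
            entry = entries_by_path.get(device.device)
            if entry is not None:
                # Plug and unplug both trigger a reload so the entry re-probes the port.
                schedule_reload(entry.entry_id)

    return on_port_event
```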
@@ -81,7 +81,7 @@ class HomeAssistantSkyConnectConfigFlow(
"""Handle a config flow for Home Assistant SkyConnect."""

VERSION = 1
MINOR_VERSION = 3
MINOR_VERSION = 4

def __init__(self, *args: Any, **kwargs: Any) -> None:
"""Initialize the config flow."""
@@ -5,17 +5,21 @@ from __future__ import annotations
from homeassistant.components.hardware.models import HardwareInfo, USBInfo
from homeassistant.core import HomeAssistant, callback

from .config_flow import HomeAssistantSkyConnectConfigFlow
from .const import DOMAIN
from .util import get_hardware_variant

DOCUMENTATION_URL = "https://skyconnect.home-assistant.io/documentation/"
EXPECTED_ENTRY_VERSION = (
HomeAssistantSkyConnectConfigFlow.VERSION,
HomeAssistantSkyConnectConfigFlow.MINOR_VERSION,
)

@callback
def async_info(hass: HomeAssistant) -> list[HardwareInfo]:
"""Return board info."""
entries = hass.config_entries.async_entries(DOMAIN)

return [
HardwareInfo(
board=None,

@@ -31,4 +35,6 @@ def async_info(hass: HomeAssistant) -> list[HardwareInfo]:
url=DOCUMENTATION_URL,
)
for entry in entries
# Ignore unmigrated config entries in the hardware page
if (entry.version, entry.minor_version) == EXPECTED_ENTRY_VERSION
]
@@ -195,5 +195,10 @@
"run_zigbee_flasher_addon": "[%key:component::homeassistant_hardware::firmware_picker::options::progress::run_zigbee_flasher_addon%]",
"uninstall_zigbee_flasher_addon": "[%key:component::homeassistant_hardware::firmware_picker::options::progress::uninstall_zigbee_flasher_addon%]"
}
},
"exceptions": {
"device_disconnected": {
"message": "The device is not plugged in"
}
}
}
@@ -21,11 +21,20 @@ from homeassistant.components.update import UpdateDeviceClass
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import EntityCategory
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers import entity_registry as er
from homeassistant.helpers import device_registry as dr, entity_registry as er
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

from .const import FIRMWARE, FIRMWARE_VERSION, NABU_CASA_FIRMWARE_RELEASES_URL
from .const import (
DOMAIN,
FIRMWARE,
FIRMWARE_VERSION,
NABU_CASA_FIRMWARE_RELEASES_URL,
PRODUCT,
SERIAL_NUMBER,
HardwareVariant,
)

_LOGGER = logging.getLogger(__name__)

@@ -42,7 +51,7 @@ FIRMWARE_ENTITY_DESCRIPTIONS: dict[
fw_type="skyconnect_zigbee_ncp",
version_key="ezsp_version",
expected_firmware_type=ApplicationType.EZSP,
firmware_name="EmberZNet",
firmware_name="EmberZNet Zigbee",
),
ApplicationType.SPINEL: FirmwareUpdateEntityDescription(
key="firmware",

@@ -55,6 +64,28 @@ FIRMWARE_ENTITY_DESCRIPTIONS: dict[
expected_firmware_type=ApplicationType.SPINEL,
firmware_name="OpenThread RCP",
),
ApplicationType.CPC: FirmwareUpdateEntityDescription(
key="firmware",
display_precision=0,
device_class=UpdateDeviceClass.FIRMWARE,
entity_category=EntityCategory.CONFIG,
version_parser=lambda fw: fw,
fw_type="skyconnect_multipan",
version_key="cpc_version",
expected_firmware_type=ApplicationType.CPC,
firmware_name="Multiprotocol",
),
ApplicationType.GECKO_BOOTLOADER: FirmwareUpdateEntityDescription(
key="firmware",
display_precision=0,
device_class=UpdateDeviceClass.FIRMWARE,
entity_category=EntityCategory.CONFIG,
version_parser=lambda fw: fw,
fw_type=None,  # We don't want to update the bootloader
version_key="gecko_bootloader_version",
expected_firmware_type=ApplicationType.GECKO_BOOTLOADER,
firmware_name="Gecko Bootloader",
),
None: FirmwareUpdateEntityDescription(
key="firmware",
display_precision=0,

@@ -77,9 +108,16 @@ def _async_create_update_entity(
) -> FirmwareUpdateEntity:
"""Create an update entity that handles firmware type changes."""
firmware_type = config_entry.data[FIRMWARE]
entity_description = FIRMWARE_ENTITY_DESCRIPTIONS[
ApplicationType(firmware_type) if firmware_type is not None else None
]

try:
entity_description = FIRMWARE_ENTITY_DESCRIPTIONS[
ApplicationType(firmware_type)
]
except (KeyError, ValueError):
_LOGGER.debug(
"Unknown firmware type %r, using default entity description", firmware_type
)
entity_description = FIRMWARE_ENTITY_DESCRIPTIONS[None]

entity = FirmwareUpdateEntity(
device=config_entry.data["device"],

@@ -141,8 +179,18 @@ class FirmwareUpdateEntity(BaseFirmwareUpdateEntity):
"""Initialize the SkyConnect firmware update entity."""
super().__init__(device, config_entry, update_coordinator, entity_description)

self._attr_unique_id = (
f"{self._config_entry.data['serial_number']}_{self.entity_description.key}"
variant = HardwareVariant.from_usb_product_name(
self._config_entry.data[PRODUCT]
)
serial_number = self._config_entry.data[SERIAL_NUMBER]

self._attr_unique_id = f"{serial_number}_{self.entity_description.key}"
self._attr_device_info = DeviceInfo(
identifiers={(DOMAIN, serial_number)},
name=f"{variant.full_name} ({serial_number[:8]})",
model=variant.full_name,
manufacturer="Nabu Casa",
serial_number=serial_number,
)

# Use the cached firmware info if it exists

@@ -155,6 +203,17 @@ class FirmwareUpdateEntity(BaseFirmwareUpdateEntity):
source="homeassistant_sky_connect",
)

def _update_attributes(self) -> None:
"""Recompute the attributes of the entity."""
super()._update_attributes()

assert self.device_entry is not None
device_registry = dr.async_get(self.hass)
device_registry.async_update_device(
device_id=self.device_entry.id,
sw_version=f"{self.entity_description.firmware_name} {self._attr_installed_version}",
)

@callback
def _firmware_info_callback(self, firmware_info: FirmwareInfo) -> None:
"""Handle updated firmware info being pushed by an integration."""

@@ -62,6 +62,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:

async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Unload a config entry."""
await hass.config_entries.async_unload_platforms(entry, ["update"])
return True
@@ -2,8 +2,9 @@

DOMAIN = "homeassistant_yellow"

RADIO_MODEL = "Home Assistant Yellow"
RADIO_MANUFACTURER = "Nabu Casa"
MODEL = "Home Assistant Yellow"
MANUFACTURER = "Nabu Casa"

RADIO_DEVICE = "/dev/ttyAMA1"

ZHA_HW_DISCOVERY_DATA = {

@@ -149,5 +149,12 @@
"run_zigbee_flasher_addon": "[%key:component::homeassistant_hardware::firmware_picker::options::progress::run_zigbee_flasher_addon%]",
"uninstall_zigbee_flasher_addon": "[%key:component::homeassistant_hardware::firmware_picker::options::progress::uninstall_zigbee_flasher_addon%]"
}
},
"entity": {
"update": {
"radio_firmware": {
"name": "Radio firmware"
}
}
}
}
@@ -21,13 +21,17 @@ from homeassistant.components.update import UpdateDeviceClass
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import EntityCategory
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers import entity_registry as er
from homeassistant.helpers import device_registry as dr, entity_registry as er
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

from .const import (
DOMAIN,
FIRMWARE,
FIRMWARE_VERSION,
MANUFACTURER,
MODEL,
NABU_CASA_FIRMWARE_RELEASES_URL,
RADIO_DEVICE,
)

@@ -39,7 +43,8 @@ FIRMWARE_ENTITY_DESCRIPTIONS: dict[
ApplicationType | None, FirmwareUpdateEntityDescription
] = {
ApplicationType.EZSP: FirmwareUpdateEntityDescription(
key="firmware",
key="radio_firmware",
translation_key="radio_firmware",
display_precision=0,
device_class=UpdateDeviceClass.FIRMWARE,
entity_category=EntityCategory.CONFIG,

@@ -47,10 +52,11 @@ FIRMWARE_ENTITY_DESCRIPTIONS: dict[
fw_type="yellow_zigbee_ncp",
version_key="ezsp_version",
expected_firmware_type=ApplicationType.EZSP,
firmware_name="EmberZNet",
firmware_name="EmberZNet Zigbee",
),
ApplicationType.SPINEL: FirmwareUpdateEntityDescription(
key="firmware",
key="radio_firmware",
translation_key="radio_firmware",
display_precision=0,
device_class=UpdateDeviceClass.FIRMWARE,
entity_category=EntityCategory.CONFIG,

@@ -60,8 +66,33 @@ FIRMWARE_ENTITY_DESCRIPTIONS: dict[
expected_firmware_type=ApplicationType.SPINEL,
firmware_name="OpenThread RCP",
),
ApplicationType.CPC: FirmwareUpdateEntityDescription(
key="radio_firmware",
translation_key="radio_firmware",
display_precision=0,
device_class=UpdateDeviceClass.FIRMWARE,
entity_category=EntityCategory.CONFIG,
version_parser=lambda fw: fw,
fw_type="yellow_multipan",
version_key="cpc_version",
expected_firmware_type=ApplicationType.CPC,
firmware_name="Multiprotocol",
),
ApplicationType.GECKO_BOOTLOADER: FirmwareUpdateEntityDescription(
key="radio_firmware",
translation_key="radio_firmware",
display_precision=0,
device_class=UpdateDeviceClass.FIRMWARE,
entity_category=EntityCategory.CONFIG,
version_parser=lambda fw: fw,
fw_type=None,  # We don't want to update the bootloader
version_key="gecko_bootloader_version",
expected_firmware_type=ApplicationType.GECKO_BOOTLOADER,
firmware_name="Gecko Bootloader",
),
None: FirmwareUpdateEntityDescription(
key="firmware",
key="radio_firmware",
translation_key="radio_firmware",
display_precision=0,
device_class=UpdateDeviceClass.FIRMWARE,
entity_category=EntityCategory.CONFIG,

@@ -82,9 +113,16 @@ def _async_create_update_entity(
) -> FirmwareUpdateEntity:
"""Create an update entity that handles firmware type changes."""
firmware_type = config_entry.data[FIRMWARE]
entity_description = FIRMWARE_ENTITY_DESCRIPTIONS[
ApplicationType(firmware_type) if firmware_type is not None else None
]

try:
entity_description = FIRMWARE_ENTITY_DESCRIPTIONS[
ApplicationType(firmware_type)
]
except (KeyError, ValueError):
_LOGGER.debug(
"Unknown firmware type %r, using default entity description", firmware_type
)
entity_description = FIRMWARE_ENTITY_DESCRIPTIONS[None]

entity = FirmwareUpdateEntity(
||||
device=RADIO_DEVICE,
|
||||
@@ -145,8 +183,13 @@ class FirmwareUpdateEntity(BaseFirmwareUpdateEntity):
|
||||
) -> None:
|
||||
"""Initialize the Yellow firmware update entity."""
|
||||
super().__init__(device, config_entry, update_coordinator, entity_description)
|
||||
|
||||
self._attr_unique_id = self.entity_description.key
|
||||
self._attr_device_info = DeviceInfo(
|
||||
identifiers={(DOMAIN, "yellow")},
|
||||
name=MODEL,
|
||||
model=MODEL,
|
||||
manufacturer=MANUFACTURER,
|
||||
)
|
||||
|
||||
# Use the cached firmware info if it exists
|
||||
if self._config_entry.data[FIRMWARE] is not None:
|
||||
@@ -158,6 +201,17 @@ class FirmwareUpdateEntity(BaseFirmwareUpdateEntity):
|
||||
source="homeassistant_yellow",
|
||||
)
|
||||
|
||||
def _update_attributes(self) -> None:
|
||||
"""Recompute the attributes of the entity."""
|
||||
super()._update_attributes()
|
||||
|
||||
assert self.device_entry is not None
|
||||
device_registry = dr.async_get(self.hass)
|
||||
device_registry.async_update_device(
|
||||
device_id=self.device_entry.id,
|
||||
sw_version=f"{self.entity_description.firmware_name} {self._attr_installed_version}",
|
||||
)
|
||||
|
||||
@callback
|
||||
def _firmware_info_callback(self, firmware_info: FirmwareInfo) -> None:
|
||||
"""Handle updated firmware info being pushed by an integration."""
|
||||
|
||||
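Both the SkyConnect and Yellow update entities above now push the installed firmware version into the device registry whenever their attributes are recomputed, so the device page shows something like "EmberZNet Zigbee 7.4.4.0" as the software version. A rough illustrative helper capturing just that step; it uses the real device-registry API but must run inside a Home Assistant instance, and the function name is an assumption, not the integrations' own API:

```python
from homeassistant.core import HomeAssistant
from homeassistant.helpers import device_registry as dr


def sync_firmware_sw_version(
    hass: HomeAssistant,
    device_id: str,
    firmware_name: str,
    installed_version: str | None,
) -> None:
    """Write '<firmware name> <version>' as the device's software version.

    Illustrative only; the real entities do this inline in _update_attributes().
    """
    device_registry = dr.async_get(hass)
    device_registry.async_update_device(
        device_id=device_id,
        # e.g. "EmberZNet Zigbee 7.4.4.0" on the SkyConnect or Yellow device page
        sw_version=f"{firmware_name} {installed_version}",
    )
```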
@@ -14,6 +14,6 @@
"documentation": "https://www.home-assistant.io/integrations/homekit_controller",
"iot_class": "local_push",
"loggers": ["aiohomekit", "commentjson"],
"requirements": ["aiohomekit==3.2.8"],
"requirements": ["aiohomekit==3.2.13"],
"zeroconf": ["_hap._tcp.local.", "_hap._udp.local."]
}

@@ -197,5 +197,11 @@
}
}
}
},
"issues": {
"deprecated_effect_none": {
"title": "Light turned on with deprecated effect",
"description": "A light was turned on with the deprecated effect `None`. This has been replaced with `off`. Please update any automations, scenes, or scripts that use this effect."
}
}
}

@@ -29,6 +29,7 @@ from homeassistant.components.light import (
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue
from homeassistant.util import color as color_util

from ..bridge import HueBridge
@@ -44,6 +45,9 @@ FALLBACK_MIN_KELVIN = 6500
FALLBACK_MAX_KELVIN = 2000
FALLBACK_KELVIN = 5800 # halfway

# HA 2025.4 replaced the deprecated effect "None" with HA default "off"
DEPRECATED_EFFECT_NONE = "None"

async def async_setup_entry(
hass: HomeAssistant,
@@ -233,6 +237,23 @@ class HueLight(HueBaseEntity, LightEntity):
self._color_temp_active = color_temp is not None
flash = kwargs.get(ATTR_FLASH)
effect = effect_str = kwargs.get(ATTR_EFFECT)
if effect_str == DEPRECATED_EFFECT_NONE:
# deprecated effect "None" is now "off"
effect_str = EFFECT_OFF
async_create_issue(
self.hass,
DOMAIN,
"deprecated_effect_none",
breaks_in_ha_version="2025.10.0",
is_fixable=False,
severity=IssueSeverity.WARNING,
translation_key="deprecated_effect_none",
)
self.logger.warning(
"Detected deprecated effect 'None' in %s, use 'off' instead. "
"This will stop working in HA 2025.10",
self.entity_id,
)
if effect_str == EFFECT_OFF:
# ignore effect if set to "off" and we have no effect active
# the special effect "off" is only used to stop an active effect
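The Hue hunk maps the legacy effect name "None" onto the standard "off" value, warns the user (and files a repair issue), and then lets the existing "off" handling stop any active effect. A small stand-alone sketch of just the mapping and the deprecation warning; the issue-registry call is omitted and EFFECT_OFF is hard-coded here instead of being imported from homeassistant.components.light:

```python
import logging

_LOGGER = logging.getLogger(__name__)

EFFECT_OFF = "off"  # standard "no effect" value used by the light platform
DEPRECATED_EFFECT_NONE = "None"  # legacy Hue-specific value replaced in HA 2025.4


def normalize_effect(effect: str | None, entity_id: str) -> str | None:
    """Translate the deprecated effect 'None' to 'off', warning the user."""
    if effect == DEPRECATED_EFFECT_NONE:
        # The real integration also creates an issue-registry entry here.
        _LOGGER.warning(
            "Detected deprecated effect 'None' in %s, use 'off' instead. "
            "This will stop working in HA 2025.10",
            entity_id,
        )
        return EFFECT_OFF
    return effect


assert normalize_effect("None", "light.kitchen") == "off"
assert normalize_effect("candle", "light.kitchen") == "candle"
assert normalize_effect(None, "light.kitchen") is None
```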
@@ -6,6 +6,6 @@
"documentation": "https://www.home-assistant.io/integrations/iaqualink",
"iot_class": "cloud_polling",
"loggers": ["iaqualink"],
"requirements": ["iaqualink==0.5.0", "h2==4.1.0"],
"requirements": ["iaqualink==0.5.3", "h2==4.1.0"],
"single_config_entry": true
}

@@ -10,8 +10,8 @@
},
"data_description": {
"host": "Hostname or IP-address of the Intergas gateway.",
"username": "The username to log into the gateway. This is `admin` in most cases.",
"password": "The password to log into the gateway, is printed at the bottom of the gateway or is `intergas` for some older devices."
"username": "The username to log in to the gateway. This is `admin` in most cases.",
"password": "The password to log in to the gateway, is printed at the bottom of the gateway or is `intergas` for some older devices."
}
},
"dhcp_auth": {
@@ -22,8 +22,8 @@
"password": "[%key:common::config_flow::data::password%]"
},
"data_description": {
"username": "The username to log into the gateway. This is `admin` in most cases.",
"password": "The password to log into the gateway, is printed at the bottom of the Gateway or is `intergas` for some older devices."
"username": "[%key:component::incomfort::config::step::user::data_description::username%]",
"password": "[%key:component::incomfort::config::step::user::data_description::password%]"
}
},
"dhcp_confirm": {

@@ -138,7 +138,7 @@ async def async_setup_entry(
for vtype, _, vid in isy.variables.children:
numbers.append(isy.variables[vtype][vid])
if (
isy.conf[CONFIG_NETWORKING] or isy.conf[CONFIG_PORTAL]
isy.conf[CONFIG_NETWORKING] or isy.conf.get(CONFIG_PORTAL)
) and isy.networking.nobjs:
isy_data.devices[CONF_NETWORK] = _create_service_device_info(
isy, name=CONFIG_NETWORKING, unique_id=CONF_NETWORK

@@ -24,7 +24,7 @@
"integration_type": "hub",
"iot_class": "local_push",
"loggers": ["pyisy"],
"requirements": ["pyisy==3.1.14"],
"requirements": ["pyisy==3.1.15"],
"ssdp": [
{
"manufacturer": "Universal Devices Inc.",

@@ -20,5 +20,5 @@
"documentation": "https://www.home-assistant.io/integrations/ld2410_ble",
"integration_type": "device",
"iot_class": "local_push",
"requirements": ["bluetooth-data-tools==1.26.1", "ld2410-ble==0.1.1"]
"requirements": ["bluetooth-data-tools==1.26.5", "ld2410-ble==0.1.1"]
}

@@ -35,5 +35,5 @@
"dependencies": ["bluetooth_adapters"],
"documentation": "https://www.home-assistant.io/integrations/led_ble",
"iot_class": "local_polling",
"requirements": ["bluetooth-data-tools==1.26.1", "led-ble==1.1.6"]
"requirements": ["bluetooth-data-tools==1.26.5", "led-ble==1.1.6"]
}

@@ -63,10 +63,12 @@ class DeviceDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]):

# Add a callback to handle core config update.
self.unit_system: str | None = None
self.hass.bus.async_listen(
event_type=EVENT_CORE_CONFIG_UPDATE,
listener=self._handle_update_config,
event_filter=self.async_config_update_filter,
self.config_entry.async_on_unload(
self.hass.bus.async_listen(
event_type=EVENT_CORE_CONFIG_UPDATE,
listener=self._handle_update_config,
event_filter=self.async_config_update_filter,
)
)

async def _handle_update_config(self, _: Event) -> None:
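The coordinator change above wraps the bus subscription in `config_entry.async_on_unload(...)`, so the EVENT_CORE_CONFIG_UPDATE listener is removed when the entry unloads instead of leaking. The contract is general: subscribing returns an unsubscribe callable, and unload simply calls back everything that was registered. A toy, framework-free sketch of that contract (FakeBus and FakeConfigEntry are stand-ins, not Home Assistant classes):

```python
from collections.abc import Callable


class FakeBus:
    """Minimal stand-in for hass.bus: subscribing returns an unsubscribe callable."""

    def __init__(self) -> None:
        self._listeners: list[Callable[[str], None]] = []

    def async_listen(self, listener: Callable[[str], None]) -> Callable[[], None]:
        self._listeners.append(listener)
        return lambda: self._listeners.remove(listener)

    def fire(self, event: str) -> None:
        for listener in list(self._listeners):
            listener(event)


class FakeConfigEntry:
    """Minimal stand-in for ConfigEntry.async_on_unload / unload."""

    def __init__(self) -> None:
        self._on_unload: list[Callable[[], None]] = []

    def async_on_unload(self, unsub: Callable[[], None]) -> None:
        self._on_unload.append(unsub)

    def unload(self) -> None:
        while self._on_unload:
            self._on_unload.pop()()


bus, entry, seen = FakeBus(), FakeConfigEntry(), []
# Register the listener and tie its removal to the entry's unload, as in the diff.
entry.async_on_unload(bus.async_listen(seen.append))
bus.fire("core_config_update")
entry.unload()
bus.fire("core_config_update")  # no longer delivered: the listener was unsubscribed
assert seen == ["core_config_update"]
```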
@@ -7,5 +7,5 @@
"documentation": "https://www.home-assistant.io/integrations/local_calendar",
"iot_class": "local_polling",
"loggers": ["ical"],
"requirements": ["ical==9.0.1"]
"requirements": ["ical==9.0.3"]
}

@@ -5,5 +5,5 @@
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/local_todo",
"iot_class": "local_polling",
"requirements": ["ical==9.0.1"]
"requirements": ["ical==9.0.3"]
}

@@ -6,7 +6,7 @@ from typing import Any

import voluptuous as vol

from homeassistant.components import frontend, websocket_api
from homeassistant.components import frontend, onboarding, websocket_api
from homeassistant.config import (
async_hass_config_yaml,
async_process_component_and_handle_errors,
@@ -17,6 +17,7 @@ from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import collection, config_validation as cv
from homeassistant.helpers.frame import report_usage
from homeassistant.helpers.service import async_register_admin_service
from homeassistant.helpers.translation import async_get_translations
from homeassistant.helpers.typing import ConfigType
from homeassistant.loader import async_get_integration
from homeassistant.util import slugify
@@ -282,6 +283,13 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
STORAGE_DASHBOARD_UPDATE_FIELDS,
).async_setup(hass)

def create_map_dashboard() -> None:
"""Create a map dashboard."""
hass.async_create_task(_create_map_dashboard(hass, dashboards_collection))

if not onboarding.async_is_onboarded(hass):
onboarding.async_add_listener(hass, create_map_dashboard)

return True

@@ -323,3 +331,25 @@ def _register_panel(
kwargs["sidebar_icon"] = config.get(CONF_ICON, DEFAULT_ICON)

frontend.async_register_built_in_panel(hass, DOMAIN, **kwargs)

async def _create_map_dashboard(
hass: HomeAssistant, dashboards_collection: dashboard.DashboardsCollection
) -> None:
"""Create a map dashboard."""
translations = await async_get_translations(
hass, hass.config.language, "dashboard", {onboarding.DOMAIN}
)
title = translations["component.onboarding.dashboard.map.title"]

await dashboards_collection.async_create_item(
{
CONF_ALLOW_SINGLE_WORD: True,
CONF_ICON: "mdi:map",
CONF_TITLE: title,
CONF_URL_PATH: "map",
}
)

map_store = hass.data[LOVELACE_DATA].dashboards["map"]
await map_store.async_save({"strategy": {"type": "map"}})
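The Lovelace change only creates the default map dashboard for fresh installs: if onboarding has not finished yet, it registers a listener and builds the dashboard once onboarding completes; already-onboarded systems are left untouched. The control flow, reduced to a framework-free sketch (FakeOnboarding stands in for homeassistant.components.onboarding):

```python
from collections.abc import Callable


class FakeOnboarding:
    """Stand-in for the onboarding component's is-onboarded check and listener hook."""

    def __init__(self, onboarded: bool) -> None:
        self.onboarded = onboarded
        self._listeners: list[Callable[[], None]] = []

    def async_is_onboarded(self) -> bool:
        return self.onboarded

    def async_add_listener(self, listener: Callable[[], None]) -> None:
        self._listeners.append(listener)

    def finish(self) -> None:
        self.onboarded = True
        for listener in self._listeners:
            listener()


def setup_lovelace(onboarding: FakeOnboarding, created: list[str]) -> None:
    """Only schedule the map dashboard for installs that are still onboarding."""

    def create_map_dashboard() -> None:
        created.append("map")

    if not onboarding.async_is_onboarded():
        onboarding.async_add_listener(create_map_dashboard)


fresh, fresh_dashboards = FakeOnboarding(onboarded=False), []
setup_lovelace(fresh, fresh_dashboards)
fresh.finish()
assert fresh_dashboards == ["map"]  # created exactly once, after onboarding completes

existing_dashboards: list[str] = []
setup_lovelace(FakeOnboarding(onboarded=True), existing_dashboards)
assert existing_dashboards == []  # upgraded installs keep their dashboards as-is
```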
@@ -6,7 +6,7 @@
"description": "[%key:component::bluetooth::config::step::user::description%]",
"data": {
"address": "[%key:common::config_flow::data::device%]",
"medium_type": "Medium Type"
"medium_type": "Medium type"
}
},
"bluetooth_confirm": {

@@ -62,6 +62,7 @@ from ..entity import MqttEntity
from ..models import (
MqttCommandTemplate,
MqttValueTemplate,
PayloadSentinel,
PublishPayloadType,
ReceiveMessage,
)
@@ -126,7 +127,9 @@ class MqttLightTemplate(MqttEntity, LightEntity, RestoreEntity):
_command_templates: dict[
str, Callable[[PublishPayloadType, TemplateVarsType], PublishPayloadType]
]
_value_templates: dict[str, Callable[[ReceivePayloadType], ReceivePayloadType]]
_value_templates: dict[
str, Callable[[ReceivePayloadType, ReceivePayloadType], ReceivePayloadType]
]
_fixed_color_mode: ColorMode | str | None
_topics: dict[str, str | None]

@@ -203,73 +206,133 @@ class MqttLightTemplate(MqttEntity, LightEntity, RestoreEntity):
@callback
def _state_received(self, msg: ReceiveMessage) -> None:
"""Handle new MQTT messages."""
state = self._value_templates[CONF_STATE_TEMPLATE](msg.payload)
if state == STATE_ON:
state_value = self._value_templates[CONF_STATE_TEMPLATE](
msg.payload,
PayloadSentinel.NONE,
)
if not state_value:
_LOGGER.debug(
"Ignoring message from '%s' with empty state value", msg.topic
)
elif state_value == STATE_ON:
self._attr_is_on = True
elif state == STATE_OFF:
elif state_value == STATE_OFF:
self._attr_is_on = False
elif state == PAYLOAD_NONE:
elif state_value == PAYLOAD_NONE:
self._attr_is_on = None
else:
_LOGGER.warning("Invalid state value received")
_LOGGER.warning(
"Invalid state value '%s' received from %s",
state_value,
msg.topic,
)

if CONF_BRIGHTNESS_TEMPLATE in self._config:
try:
if brightness := int(
self._value_templates[CONF_BRIGHTNESS_TEMPLATE](msg.payload)
):
self._attr_brightness = brightness
else:
_LOGGER.debug(
"Ignoring zero brightness value for entity %s",
self.entity_id,
brightness_value = self._value_templates[CONF_BRIGHTNESS_TEMPLATE](
msg.payload,
PayloadSentinel.NONE,
)
if not brightness_value:
_LOGGER.debug(
"Ignoring message from '%s' with empty brightness value",
msg.topic,
)
else:
try:
if brightness := int(brightness_value):
self._attr_brightness = brightness
else:
_LOGGER.debug(
"Ignoring zero brightness value for entity %s",
self.entity_id,
)
except ValueError:
_LOGGER.warning(
"Invalid brightness value '%s' received from %s",
brightness_value,
msg.topic,
)

except ValueError:
_LOGGER.warning("Invalid brightness value received from %s", msg.topic)

if CONF_COLOR_TEMP_TEMPLATE in self._config:
try:
color_temp = self._value_templates[CONF_COLOR_TEMP_TEMPLATE](
msg.payload
color_temp_value = self._value_templates[CONF_COLOR_TEMP_TEMPLATE](
msg.payload,
PayloadSentinel.NONE,
)
if not color_temp_value:
_LOGGER.debug(
"Ignoring message from '%s' with empty color temperature value",
msg.topic,
)
self._attr_color_temp_kelvin = (
int(color_temp)
if self._color_temp_kelvin
else color_util.color_temperature_mired_to_kelvin(int(color_temp))
if color_temp != "None"
else None
)
except ValueError:
_LOGGER.warning("Invalid color temperature value received")
else:
try:
self._attr_color_temp_kelvin = (
int(color_temp_value)
if self._color_temp_kelvin
else color_util.color_temperature_mired_to_kelvin(
int(color_temp_value)
)
if color_temp_value != "None"
else None
)
except ValueError:
_LOGGER.warning(
"Invalid color temperature value '%s' received from %s",
color_temp_value,
msg.topic,
)

if (
CONF_RED_TEMPLATE in self._config
and CONF_GREEN_TEMPLATE in self._config
and CONF_BLUE_TEMPLATE in self._config
):
try:
red = self._value_templates[CONF_RED_TEMPLATE](msg.payload)
green = self._value_templates[CONF_GREEN_TEMPLATE](msg.payload)
blue = self._value_templates[CONF_BLUE_TEMPLATE](msg.payload)
if red == "None" and green == "None" and blue == "None":
self._attr_hs_color = None
else:
self._attr_hs_color = color_util.color_RGB_to_hs(
int(red), int(green), int(blue)
)
red_value = self._value_templates[CONF_RED_TEMPLATE](
msg.payload,
PayloadSentinel.NONE,
)
green_value = self._value_templates[CONF_GREEN_TEMPLATE](
msg.payload,
PayloadSentinel.NONE,
)
blue_value = self._value_templates[CONF_BLUE_TEMPLATE](
msg.payload,
PayloadSentinel.NONE,
)
if not red_value or not green_value or not blue_value:
_LOGGER.debug(
"Ignoring message from '%s' with empty color value", msg.topic
)
elif red_value == "None" and green_value == "None" and blue_value == "None":
self._attr_hs_color = None
self._update_color_mode()
except ValueError:
_LOGGER.warning("Invalid color value received")
else:
try:
self._attr_hs_color = color_util.color_RGB_to_hs(
int(red_value), int(green_value), int(blue_value)
)
self._update_color_mode()
except ValueError:
_LOGGER.warning("Invalid color value received from %s", msg.topic)

if CONF_EFFECT_TEMPLATE in self._config:
effect = str(self._value_templates[CONF_EFFECT_TEMPLATE](msg.payload))
if (
effect_list := self._config[CONF_EFFECT_LIST]
) and effect in effect_list:
self._attr_effect = effect
effect_value = self._value_templates[CONF_EFFECT_TEMPLATE](
msg.payload,
PayloadSentinel.NONE,
)
if not effect_value:
_LOGGER.debug(
"Ignoring message from '%s' with empty effect value", msg.topic
)
elif (effect_list := self._config[CONF_EFFECT_LIST]) and str(
effect_value
) in effect_list:
self._attr_effect = str(effect_value)
else:
_LOGGER.warning("Unsupported effect value received")
_LOGGER.warning(
"Unsupported effect value '%s' received from %s",
effect_value,
msg.topic,
)

@callback
def _prepare_subscribe_topics(self) -> None:
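The template-schema light now calls each value template with a default sentinel and skips the update when the rendered result is empty, logging the source topic instead of applying a bogus state. A compact sketch of that render-then-validate flow, using a plain callable in place of MqttValueTemplate; PayloadSentinel here is a local stand-in for the marker enum in homeassistant.components.mqtt.models:

```python
import logging
from collections.abc import Callable
from enum import Enum

_LOGGER = logging.getLogger(__name__)


class PayloadSentinel(Enum):
    """Local stand-in for the MQTT integration's render-default markers."""

    NONE = "none"
    DEFAULT = "default"


def apply_state(
    render: Callable[[str, PayloadSentinel], str | PayloadSentinel],
    payload: str,
    topic: str,
) -> bool | None:
    """Render the state template and map it to on/off/unknown, ignoring empties."""
    state_value = render(payload, PayloadSentinel.NONE)
    if not state_value or state_value is PayloadSentinel.NONE:
        _LOGGER.debug("Ignoring message from '%s' with empty state value", topic)
        return None
    if state_value == "on":
        return True
    if state_value == "off":
        return False
    _LOGGER.warning("Invalid state value '%s' received from %s", state_value, topic)
    return None


def fake_template(payload: str, default: PayloadSentinel) -> str:
    """Pretend value template: take the first comma-separated field."""
    return payload.split(",")[0]


assert apply_state(fake_template, "on,255", "light/state") is True
assert apply_state(fake_template, "off,0", "light/state") is False
assert apply_state(fake_template, ",128", "light/state") is None  # empty -> ignored
```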
@@ -126,7 +126,7 @@
"payload_not_available": "Payload not available"
},
"data_description": {
"availability_topic": "Topic to receive the availabillity payload on",
"availability_topic": "Topic to receive the availability payload on",
"availability_template": "A [template](https://www.home-assistant.io/docs/configuration/templating/#using-templates-with-the-mqtt-integration) to render the availability payload received on the availability topic",
"payload_available": "The payload that indicates the device is available (defaults to 'online')",
"payload_not_available": "The payload that indicates the device is not available (defaults to 'offline')"
@@ -219,10 +219,10 @@
"options": "Add option"
},
"data_description": {
"device_class": "The device class of the {platform} entity. [Learn more.]({url}#device_class)",
"state_class": "The [state_class](https://developers.home-assistant.io/docs/core/entity/sensor/#available-state-classes) of the sensor. [Learn more.]({url}#state_class)",
"device_class": "The Device class of the {platform} entity. [Learn more.]({url}#device_class)",
"state_class": "The [State class](https://developers.home-assistant.io/docs/core/entity/sensor/#available-state-classes) of the sensor. [Learn more.]({url}#state_class)",
"unit_of_measurement": "Defines the unit of measurement of the sensor, if any.",
"options": "Options for allowed sensor state values. The sensor’s device_class must be set to Enumeration. The options option cannot be used together with State Class or Unit of measurement."
"options": "Options for allowed sensor state values. The sensor’s Device class must be set to Enumeration. The 'Options' setting cannot be used together with State class or Unit of measurement."
},
"sections": {
"advanced_settings": {

@@ -26,7 +26,7 @@ from . import subscription
from .config import DEFAULT_RETAIN, MQTT_RO_SCHEMA
from .const import CONF_COMMAND_TOPIC, CONF_RETAIN, CONF_STATE_TOPIC, PAYLOAD_EMPTY_JSON
from .entity import MqttEntity, async_setup_entity_entry_helper
from .models import MqttValueTemplate, ReceiveMessage
from .models import MqttValueTemplate, PayloadSentinel, ReceiveMessage
from .schemas import MQTT_ENTITY_COMMON_SCHEMA
from .util import valid_publish_topic, valid_subscribe_topic

@@ -136,7 +136,18 @@ class MqttUpdate(MqttEntity, UpdateEntity, RestoreEntity):
@callback
def _handle_state_message_received(self, msg: ReceiveMessage) -> None:
"""Handle receiving state message via MQTT."""
payload = self._templates[CONF_VALUE_TEMPLATE](msg.payload)
payload = self._templates[CONF_VALUE_TEMPLATE](
msg.payload, PayloadSentinel.DEFAULT
)

if payload is PayloadSentinel.DEFAULT:
_LOGGER.warning(
"Unable to process payload '%s' for topic %s, with value template '%s'",
msg.payload,
msg.topic,
self._config.get(CONF_VALUE_TEMPLATE),
)
return

if not payload or payload == PAYLOAD_EMPTY_JSON:
_LOGGER.debug(

@@ -7,6 +7,6 @@
"documentation": "https://www.home-assistant.io/integrations/music_assistant",
"iot_class": "local_push",
"loggers": ["music_assistant"],
"requirements": ["music-assistant-client==1.1.1"],
"requirements": ["music-assistant-client==1.2.0"],
"zeroconf": ["_mass._tcp.local."]
}

@@ -94,6 +94,12 @@ SUPPORTED_FEATURES_BASE = (
| MediaPlayerEntityFeature.MEDIA_ENQUEUE
| MediaPlayerEntityFeature.MEDIA_ANNOUNCE
| MediaPlayerEntityFeature.SEEK
# we always add pause support,
# regardless if the underlying player actually natively supports pause
# because the MA behavior is to internally handle pause with stop
# (and a resume position) and we'd like to keep the UX consistent
# background info: https://github.com/home-assistant/core/issues/140118
| MediaPlayerEntityFeature.PAUSE
)

QUEUE_OPTION_MAP = {
@@ -145,6 +151,8 @@ async def async_setup_entry(
assert event.object_id is not None
if event.object_id in added_ids:
return
if not player.expose_to_ha:
return
added_ids.add(event.object_id)
async_add_entities([MusicAssistantPlayer(mass, event.object_id)])

@@ -153,6 +161,8 @@ async def async_setup_entry(
mass_players = []
# add all current players
for player in mass.players:
if not player.expose_to_ha:
continue
added_ids.add(player.player_id)
mass_players.append(MusicAssistantPlayer(mass, player.player_id))

@@ -697,8 +707,6 @@ class MusicAssistantPlayer(MusicAssistantEntity, MediaPlayerEntity):
supported_features = SUPPORTED_FEATURES_BASE
if PlayerFeature.SET_MEMBERS in self.player.supported_features:
supported_features |= MediaPlayerEntityFeature.GROUPING
if PlayerFeature.PAUSE in self.player.supported_features:
supported_features |= MediaPlayerEntityFeature.PAUSE
if self.player.mute_control != PLAYER_CONTROL_NONE:
supported_features |= MediaPlayerEntityFeature.VOLUME_MUTE
if self.player.volume_control != PLAYER_CONTROL_NONE:

@@ -34,7 +34,7 @@ def validate_prices(
index: int,
) -> float | None:
"""Validate and return."""
if result := func(entity)[area][index]:
if (result := func(entity)[area][index]) is not None:
return result / 1000
return None
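The Nord Pool fix matters because electricity prices can legitimately be zero or negative: `if result := ...` treats 0.0 as falsy, so the sensor reported None instead of a real zero price, while the `is not None` check only filters out missing data. A short illustration of the difference:

```python
def price_truthy(raw: float | None) -> float | None:
    """Old behaviour: a 0.0 price is dropped because it is falsy."""
    if result := raw:
        return result / 1000
    return None


def price_is_not_none(raw: float | None) -> float | None:
    """New behaviour: only missing prices are dropped."""
    if (result := raw) is not None:
        return result / 1000
    return None


assert price_truthy(0.0) is None          # zero price lost
assert price_is_not_none(0.0) == 0.0      # zero price kept
assert price_is_not_none(None) is None    # missing data still filtered out
assert price_is_not_none(-500.0) == -0.5  # negative prices also survive
```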
@@ -2,8 +2,9 @@

from __future__ import annotations

from collections.abc import Awaitable, Callable
from collections.abc import Callable, Coroutine
from dataclasses import dataclass
from typing import Any

from ohme import ApiException, ChargerStatus, OhmeApiClient

@@ -23,7 +24,7 @@ PARALLEL_UPDATES = 1
class OhmeButtonDescription(OhmeEntityDescription, ButtonEntityDescription):
"""Class describing Ohme button entities."""

press_fn: Callable[[OhmeApiClient], Awaitable[None]]
press_fn: Callable[[OhmeApiClient], Coroutine[Any, Any, bool]]

BUTTON_DESCRIPTIONS = [
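The Ohme platforms switch their callback annotations from `Awaitable[None]` to `Coroutine[Any, Any, bool]` because the client's methods are `async def`s returning a bool; `Coroutine` is the precise type of what calling an async function gives you, which keeps type checkers happy when the result is awaited. A minimal sketch of that annotation on a dataclass description, with a dummy client standing in for OhmeApiClient:

```python
import asyncio
from collections.abc import Callable, Coroutine
from dataclasses import dataclass
from typing import Any


class DummyClient:
    """Stand-in for the ohme API client: async methods returning a bool."""

    async def async_set_target(self, target_percent: int) -> bool:
        await asyncio.sleep(0)  # pretend network round-trip
        return True


@dataclass(frozen=True, kw_only=True)
class NumberDescription:
    """Entity description whose setter is an async call into the client."""

    key: str
    # Calling an `async def` returns a Coroutine, not a bare Awaitable,
    # so this is the tighter annotation used in the diff.
    set_fn: Callable[[DummyClient, float], Coroutine[Any, Any, bool]]


TARGET = NumberDescription(
    key="target_percentage",
    # int(value): the API expects whole percent, HA number entities hand over floats.
    set_fn=lambda client, value: client.async_set_target(target_percent=int(value)),
)


async def main() -> None:
    assert await TARGET.set_fn(DummyClient(), 80.0) is True


asyncio.run(main())
```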
@@ -7,5 +7,5 @@
"integration_type": "device",
"iot_class": "cloud_polling",
"quality_scale": "silver",
"requirements": ["ohme==1.4.1"]
"requirements": ["ohme==1.5.1"]
}

@@ -1,7 +1,8 @@
"""Platform for number."""

from collections.abc import Awaitable, Callable
from collections.abc import Callable, Coroutine
from dataclasses import dataclass
from typing import Any

from ohme import ApiException, OhmeApiClient

@@ -22,7 +23,7 @@ PARALLEL_UPDATES = 1
class OhmeNumberDescription(OhmeEntityDescription, NumberEntityDescription):
"""Class describing Ohme number entities."""

set_fn: Callable[[OhmeApiClient, float], Awaitable[None]]
set_fn: Callable[[OhmeApiClient, float], Coroutine[Any, Any, bool]]
value_fn: Callable[[OhmeApiClient], float]

@@ -31,7 +32,7 @@ NUMBER_DESCRIPTION = [
key="target_percentage",
translation_key="target_percentage",
value_fn=lambda client: client.target_soc,
set_fn=lambda client, value: client.async_set_target(target_percent=value),
set_fn=lambda client, value: client.async_set_target(target_percent=int(value)),
native_min_value=0,
native_max_value=100,
native_step=1,
@@ -42,7 +43,7 @@ NUMBER_DESCRIPTION = [
translation_key="preconditioning_duration",
value_fn=lambda client: client.preconditioning,
set_fn=lambda client, value: client.async_set_target(
pre_condition_length=value
pre_condition_length=int(value)
),
native_min_value=0,
native_max_value=60,

@@ -2,7 +2,7 @@

from __future__ import annotations

from collections.abc import Awaitable, Callable
from collections.abc import Callable, Coroutine
from dataclasses import dataclass
from typing import Any, Final

@@ -24,7 +24,7 @@ PARALLEL_UPDATES = 1
class OhmeSelectDescription(OhmeEntityDescription, SelectEntityDescription):
"""Class to describe an Ohme select entity."""

select_fn: Callable[[OhmeApiClient, Any], Awaitable[None]]
select_fn: Callable[[OhmeApiClient, Any], Coroutine[Any, Any, bool | None]]
options: list[str] | None = None
options_fn: Callable[[OhmeApiClient], list[str]] | None = None
current_option_fn: Callable[[OhmeApiClient], str | None]

@@ -34,7 +34,7 @@ PARALLEL_UPDATES = 0
class OhmeSensorDescription(OhmeEntityDescription, SensorEntityDescription):
"""Class describing Ohme sensor entities."""

value_fn: Callable[[OhmeApiClient], str | int | float]
value_fn: Callable[[OhmeApiClient], str | int | float | None]

SENSOR_CHARGE_SESSION = [
@@ -129,6 +129,6 @@ class OhmeSensor(OhmeEntity, SensorEntity):
entity_description: OhmeSensorDescription

@property
def native_value(self) -> str | int | float:
def native_value(self) -> str | int | float | None:
"""Return the state of the sensor."""
return self.entity_description.value_fn(self.coordinator.client)

@@ -78,7 +78,7 @@ def async_setup_services(hass: HomeAssistant) -> None:
"""List of charge slots."""
client = __get_client(service_call)

return {"slots": client.slots}
return {"slots": [slot.to_dict() for slot in client.slots]}

async def set_price_cap(
service_call: ServiceCall,

@@ -1,8 +1,9 @@
"""Platform for time."""

from collections.abc import Awaitable, Callable
from collections.abc import Callable, Coroutine
from dataclasses import dataclass
from datetime import time
from typing import Any

from ohme import ApiException, OhmeApiClient

@@ -22,7 +23,7 @@ PARALLEL_UPDATES = 1
class OhmeTimeDescription(OhmeEntityDescription, TimeEntityDescription):
"""Class describing Ohme time entities."""

set_fn: Callable[[OhmeApiClient, time], Awaitable[None]]
set_fn: Callable[[OhmeApiClient, time], Coroutine[Any, Any, bool]]
value_fn: Callable[[OhmeApiClient], time]

@@ -31,7 +31,7 @@ from homeassistant.helpers import area_registry as ar
from homeassistant.helpers.backup import async_get_manager as async_get_backup_manager
from homeassistant.helpers.system_info import async_get_system_info
from homeassistant.helpers.translation import async_get_translations
from homeassistant.setup import async_setup_component
from homeassistant.setup import SetupPhases, async_pause_setup, async_setup_component

if TYPE_CHECKING:
from . import OnboardingData, OnboardingStorage, OnboardingStoreData
@@ -60,7 +60,7 @@ async def async_setup(
hass.http.register_view(BackupInfoView(data))
hass.http.register_view(RestoreBackupView(data))
hass.http.register_view(UploadBackupView(data))
setup_cloud_views(hass, data)
await setup_cloud_views(hass, data)

class OnboardingView(HomeAssistantView):
@@ -430,9 +430,19 @@ class UploadBackupView(BackupOnboardingView, backup_http.UploadBackupView):
return await self._post(request)

def setup_cloud_views(hass: HomeAssistant, data: OnboardingStoreData) -> None:
async def setup_cloud_views(hass: HomeAssistant, data: OnboardingStoreData) -> None:
"""Set up the cloud views."""

with async_pause_setup(hass, SetupPhases.WAIT_IMPORT_PACKAGES):
# Import the cloud integration in an executor to avoid blocking the
# event loop.
def import_cloud() -> None:
"""Import the cloud integration."""
# pylint: disable-next=import-outside-toplevel
from homeassistant.components.cloud import http_api # noqa: F401

await hass.async_add_import_executor_job(import_cloud)

# The cloud integration is imported locally to avoid cloud being imported by
# bootstrap.py and to avoid circular imports.
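The onboarding change makes `setup_cloud_views` async so the heavy cloud import can be moved off the event loop: the import runs in an executor thread while setup is flagged as paused. A generic, framework-free sketch of importing a module in an executor from async code; importing `json` stands in for homeassistant.components.cloud.http_api, and the async_pause_setup bookkeeping is omitted:

```python
import asyncio
import importlib
from types import ModuleType


async def import_in_executor(module_name: str) -> ModuleType:
    """Import a potentially slow module without blocking the event loop."""

    def _import() -> ModuleType:
        # Runs in a worker thread; the event loop keeps serving other tasks.
        return importlib.import_module(module_name)

    loop = asyncio.get_running_loop()
    return await loop.run_in_executor(None, _import)


async def main() -> None:
    module = await import_in_executor("json")  # stand-in for the cloud http_api import
    assert module.dumps({"ok": True}) == '{"ok": true}'


asyncio.run(main())
```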
@@ -88,8 +88,8 @@ class OneDriveUpdateCoordinator(DataUpdateCoordinator[Drive]):
),
translation_key=key,
translation_placeholders={
"total": str(drive.quota.total),
"used": str(drive.quota.used),
"total": f"{drive.quota.total / (1024**3):.2f}",
"used": f"{drive.quota.used / (1024**3):.2f}",
},
)
return drive
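The OneDrive repair text previously printed the raw quota in bytes; the new placeholders convert to gibibytes with two decimals, which is what users expect to read. The arithmetic, spelled out in a tiny helper (the function name is illustrative):

```python
def bytes_to_gib(value: int) -> str:
    """Format a byte count as GiB with two decimals, e.g. 5368709120 -> '5.00'."""
    return f"{value / (1024**3):.2f}"


assert bytes_to_gib(5 * 1024**3) == "5.00"
assert bytes_to_gib(1_099_511_627_776) == "1024.00"  # a 1 TiB quota
```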
@@ -6,5 +6,5 @@
"dependencies": ["bluetooth_adapters"],
"documentation": "https://www.home-assistant.io/integrations/private_ble_device",
"iot_class": "local_push",
"requirements": ["bluetooth-data-tools==1.26.1"]
"requirements": ["bluetooth-data-tools==1.26.5"]
}

@@ -104,6 +104,15 @@ def _resize_image(image, opts):
new_width = opts.max_width
(old_width, old_height) = img.size
old_size = len(image)

# If no max_width specified, only apply quality changes if requested
if new_width is None:
if opts.quality is None:
return image
imgbuf = io.BytesIO()
img.save(imgbuf, "JPEG", optimize=True, quality=quality)
return imgbuf.getvalue()

if old_width <= new_width:
if opts.quality is None:
_LOGGER.debug("Image is smaller-than/equal-to requested width")
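The camera proxy hunk adds an early path for "quality only" requests: when no max_width is set, the image is either returned untouched (no quality option) or re-encoded as JPEG at the requested quality without any resizing. A self-contained sketch of that branch using Pillow directly; the function name and the standalone test image are illustrative, not the integration's API:

```python
import io

from PIL import Image


def reencode_if_requested(image: bytes, quality: int | None) -> bytes:
    """Return the original bytes, or a JPEG re-encode at the requested quality."""
    if quality is None:
        return image  # nothing to do, keep the source untouched

    img = Image.open(io.BytesIO(image)).convert("RGB")
    buf = io.BytesIO()
    # optimize=True trades a little CPU for a smaller file, as the integration does.
    img.save(buf, "JPEG", optimize=True, quality=quality)
    return buf.getvalue()


if __name__ == "__main__":
    # Build a small in-memory test image instead of reading a camera snapshot.
    src = io.BytesIO()
    Image.new("RGB", (320, 240), "navy").save(src, "JPEG", quality=95)
    original = src.getvalue()

    assert reencode_if_requested(original, None) == original
    smaller = reencode_if_requested(original, 10)
    print(len(original), "->", len(smaller), "bytes")
```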
@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/pvoutput",
"integration_type": "device",
"iot_class": "cloud_polling",
"requirements": ["pvo==2.2.0"]
"requirements": ["pvo==2.2.1"]
}

@@ -139,14 +139,13 @@ def query_circular_mean(table: type[StatisticsBase]) -> tuple[Label, Label]:
# in Python.
# https://en.wikipedia.org/wiki/Circular_mean
radians = func.radians(table.mean)
weighted_sum_sin = func.sum(func.sin(radians) * table.mean_weight)
weighted_sum_cos = func.sum(func.cos(radians) * table.mean_weight)
weight = func.sqrt(
func.power(func.sum(func.sin(radians) * table.mean_weight), 2)
+ func.power(func.sum(func.cos(radians) * table.mean_weight), 2)
func.power(weighted_sum_sin, 2) + func.power(weighted_sum_cos, 2)
)
return (
func.degrees(
func.atan2(func.sum(func.sin(radians)), func.sum(func.cos(radians)))
).label("mean"),
func.degrees(func.atan2(weighted_sum_sin, weighted_sum_cos)).label("mean"),
weight.label("mean_weight"),
)

@@ -240,18 +239,20 @@ DEG_TO_RAD = math.pi / 180
RAD_TO_DEG = 180 / math.pi

def weighted_circular_mean(values: Iterable[tuple[float, float]]) -> float:
"""Return the weighted circular mean of the values."""
sin_sum = sum(math.sin(x * DEG_TO_RAD) * weight for x, weight in values)
cos_sum = sum(math.cos(x * DEG_TO_RAD) * weight for x, weight in values)
return (RAD_TO_DEG * math.atan2(sin_sum, cos_sum)) % 360
def weighted_circular_mean(
values: Iterable[tuple[float, float]],
) -> tuple[float, float]:
"""Return the weighted circular mean and the weight of the values."""
weighted_sin_sum, weighted_cos_sum = 0.0, 0.0
for x, weight in values:
rad_x = x * DEG_TO_RAD
weighted_sin_sum += math.sin(rad_x) * weight
weighted_cos_sum += math.cos(rad_x) * weight

def circular_mean(values: list[float]) -> float:
"""Return the circular mean of the values."""
sin_sum = sum(math.sin(x * DEG_TO_RAD) for x in values)
cos_sum = sum(math.cos(x * DEG_TO_RAD) for x in values)
return (RAD_TO_DEG * math.atan2(sin_sum, cos_sum)) % 360
return (
(RAD_TO_DEG * math.atan2(weighted_sin_sum, weighted_cos_sum)) % 360,
math.sqrt(weighted_sin_sum**2 + weighted_cos_sum**2),
)

_LOGGER = logging.getLogger(__name__)
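The recorder change is the mathematical core of this release's statistics fix: circular (wind-direction) statistics are now reduced with a weighted circular mean, and each reduced bucket also stores the length of the resultant vector as its weight so that further reductions stay correct. The formula is mean = atan2(Σ wᵢ·sin θᵢ, Σ wᵢ·cos θᵢ) and weight = sqrt((Σ wᵢ·sin θᵢ)² + (Σ wᵢ·cos θᵢ)²). A worked, stand-alone version matching the new helper's return shape:

```python
import math
from collections.abc import Iterable

DEG_TO_RAD = math.pi / 180
RAD_TO_DEG = 180 / math.pi


def weighted_circular_mean(values: Iterable[tuple[float, float]]) -> tuple[float, float]:
    """Return (mean in degrees, resultant weight) for (angle, weight) pairs."""
    sin_sum = 0.0
    cos_sum = 0.0
    for angle, weight in values:
        rad = angle * DEG_TO_RAD
        sin_sum += math.sin(rad) * weight
        cos_sum += math.cos(rad) * weight
    return (
        (RAD_TO_DEG * math.atan2(sin_sum, cos_sum)) % 360,
        math.sqrt(sin_sum**2 + cos_sum**2),
    )


# 350° and 10° average to 0°/360°, not 180° -- the point of circular statistics.
mean, weight = weighted_circular_mean([(350.0, 1.0), (10.0, 1.0)])
assert round(mean) % 360 == 0
# The weight is the length of the summed unit vectors (< 2.0 because they disagree a bit).
assert 1.9 < weight < 2.0

# Chaining: reducing two 5-minute buckets into an hour uses each bucket's weight,
# so a bucket built from more (or better-agreeing) samples counts for more.
hour_mean, hour_weight = weighted_circular_mean([(mean, weight), (90.0, 1.0)])
assert 0 < hour_mean < 90
```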
@@ -300,6 +301,7 @@ class StatisticsRow(BaseStatisticsRow, total=False):
min: float | None
max: float | None
mean: float | None
mean_weight: float | None
change: float | None

@@ -1023,7 +1025,7 @@ def _reduce_statistics(
_want_sum = "sum" in types
for statistic_id, stat_list in stats.items():
max_values: list[float] = []
mean_values: list[float] = []
mean_values: list[tuple[float, float]] = []
min_values: list[float] = []
prev_stat: StatisticsRow = stat_list[0]
fake_entry: StatisticsRow = {"start": stat_list[-1]["start"] + period_seconds}
@@ -1039,12 +1041,15 @@
}
if _want_mean:
row["mean"] = None
row["mean_weight"] = None
if mean_values:
match metadata[statistic_id][1]["mean_type"]:
case StatisticMeanType.ARITHMETIC:
row["mean"] = mean(mean_values)
row["mean"] = mean([x[0] for x in mean_values])
case StatisticMeanType.CIRCULAR:
row["mean"] = circular_mean(mean_values)
row["mean"], row["mean_weight"] = (
weighted_circular_mean(mean_values)
)
mean_values.clear()
if _want_min:
row["min"] = min(min_values) if min_values else None
@@ -1063,7 +1068,8 @@
max_values.append(_max)
if _want_mean:
if (_mean := statistic.get("mean")) is not None:
mean_values.append(_mean)
_mean_weight = statistic.get("mean_weight") or 0.0
mean_values.append((_mean, _mean_weight))
if _want_min and (_min := statistic.get("min")) is not None:
min_values.append(_min)
prev_stat = statistic
@@ -1385,7 +1391,7 @@ def _get_max_mean_min_statistic(
match metadata[1]["mean_type"]:
case StatisticMeanType.CIRCULAR:
if circular_means := max_mean_min["circular_means"]:
mean_value = weighted_circular_mean(circular_means)
mean_value = weighted_circular_mean(circular_means)[0]
case StatisticMeanType.ARITHMETIC:
if (mean_value := max_mean_min.get("mean_acc")) is not None and (
duration := max_mean_min.get("duration")
@@ -1739,12 +1745,12 @@ def statistic_during_period(

_type_column_mapping = {
"last_reset": "last_reset_ts",
"max": "max",
"mean": "mean",
"min": "min",
"state": "state",
"sum": "sum",
"last_reset": ("last_reset_ts",),
"max": ("max",),
"mean": ("mean", "mean_weight"),
"min": ("min",),
"state": ("state",),
"sum": ("sum",),
}

@@ -1756,12 +1762,13 @@ def _generate_select_columns_for_types_stmt(
track_on: list[str | None] = [
table.__tablename__, # type: ignore[attr-defined]
]
for key, column in _type_column_mapping.items():
if key in types:
columns = columns.add_columns(getattr(table, column))
track_on.append(column)
else:
track_on.append(None)
for key, type_columns in _type_column_mapping.items():
for column in type_columns:
if key in types:
columns = columns.add_columns(getattr(table, column))
track_on.append(column)
else:
track_on.append(None)
return lambda_stmt(lambda: columns, track_on=track_on)

@@ -1944,6 +1951,12 @@ def _statistics_during_period_with_session(
hass, session, start_time, units, _types, table, metadata, result
)

# filter out mean_weight as it is only needed to reduce statistics
# and not needed in the result
for stats_rows in result.values():
for row in stats_rows:
row.pop("mean_weight", None)

# Return statistics combined with metadata
return result

@@ -2391,7 +2404,12 @@ def _sorted_statistics_to_dict(
field_map["last_reset"] = field_map.pop("last_reset_ts")
sum_idx = field_map["sum"] if "sum" in types else None
sum_only = len(types) == 1 and sum_idx is not None
row_mapping = tuple((key, field_map[key]) for key in types if key in field_map)
row_mapping = tuple(
(column, field_map[column])
for key in types
for column in ({key, *_type_column_mapping.get(key, ())})
if column in field_map
)
# Append all statistic entries, and optionally do unit conversion
table_duration_seconds = table.duration.total_seconds()
for meta_id, db_rows in stats_by_meta_id.items():

@@ -1,5 +1,6 @@
"""Config flow for Remote Calendar integration."""

from http import HTTPStatus
import logging
from typing import Any

@@ -50,6 +51,13 @@ class RemoteCalendarConfigFlow(ConfigFlow, domain=DOMAIN):
client = get_async_client(self.hass)
try:
res = await client.get(user_input[CONF_URL], follow_redirects=True)
if res.status_code == HTTPStatus.FORBIDDEN:
errors["base"] = "forbidden"
return self.async_show_form(
step_id="user",
data_schema=STEP_USER_DATA_SCHEMA,
errors=errors,
)
res.raise_for_status()
except (HTTPError, InvalidURL) as err:
errors["base"] = "cannot_connect"

@@ -8,5 +8,5 @@
"iot_class": "cloud_polling",
"loggers": ["ical"],
"quality_scale": "silver",
"requirements": ["ical==9.0.1"]
"requirements": ["ical==9.0.3"]
}

@@ -19,6 +19,7 @@
},
"error": {
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
"forbidden": "The server understood the request but refuses to authorize it.",
"invalid_ics_file": "[%key:component::local_calendar::config::error::invalid_ics_file%]"
}
},

@@ -39,7 +39,7 @@
"samsungctl[websocket]==0.7.1",
"samsungtvws[async,encrypted]==2.7.2",
"wakeonlan==2.1.0",
"async-upnp-client==0.43.0"
"async-upnp-client==0.44.0"
],
"ssdp": [
{
Some files were not shown because too many files have changed in this diff.