mirror of
https://github.com/home-assistant/core.git
synced 2026-03-20 17:54:51 +01:00
Compare commits
81 Commits
remove_use
...
todo_trigg
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
a861827d9a | ||
|
|
da0d0186b5 | ||
|
|
0de2e689f1 | ||
|
|
21d06fdace | ||
|
|
c8cf13ba19 | ||
|
|
d9a29bd486 | ||
|
|
bd0145cb8d | ||
|
|
d002b48335 | ||
|
|
c66daf13d3 | ||
|
|
1cae0e3cd3 | ||
|
|
de93d1d52a | ||
|
|
c67438c515 | ||
|
|
fa57f72f37 | ||
|
|
29309d1315 | ||
|
|
130e0db742 | ||
|
|
450d46f652 | ||
|
|
625603839c | ||
|
|
fb66d766a8 | ||
|
|
e5f13b4126 | ||
|
|
4a22f2c93e | ||
|
|
a5c48b190a | ||
|
|
5e1a0e2152 | ||
|
|
9a5516bb1d | ||
|
|
b9172cf4a8 | ||
|
|
8e4dc29226 | ||
|
|
b152f2f9a6 | ||
|
|
abca80dc13 | ||
|
|
6869369ab2 | ||
|
|
c2dde06713 | ||
|
|
e455c05721 | ||
|
|
085df1de19 | ||
|
|
91a1237965 | ||
|
|
680a6bc856 | ||
|
|
152912c258 | ||
|
|
40e8a1b11a | ||
|
|
69dc354669 | ||
|
|
bbe1bf14ae | ||
|
|
5470d8f8a7 | ||
|
|
99fe4b10d0 | ||
|
|
886b6b08ac | ||
|
|
6a1e7c1cca | ||
|
|
d17df13055 | ||
|
|
fdc8fd3ca8 | ||
|
|
5217dde1b9 | ||
|
|
f73502c77a | ||
|
|
2ebfcfa070 | ||
|
|
be66e3c7e9 | ||
|
|
2c37a86bc9 | ||
|
|
fa8e976de7 | ||
|
|
877bca28ad | ||
|
|
a57c65f512 | ||
|
|
7140826dbb | ||
|
|
5fea8d69d7 | ||
|
|
98e3b9962e | ||
|
|
afe19147f8 | ||
|
|
0e7c25488c | ||
|
|
412e85203d | ||
|
|
55ec4a95fd | ||
|
|
6ea9e9a161 | ||
|
|
b56e6d1ff7 | ||
|
|
b502cdd15b | ||
|
|
b7ba85192d | ||
|
|
04d45c8ada | ||
|
|
ba0804fefa | ||
|
|
538b817bf1 | ||
|
|
ed482f4a15 | ||
|
|
7efa2d3cac | ||
|
|
3f872fd196 | ||
|
|
8a8b2a9b82 | ||
|
|
12930a6670 | ||
|
|
0709e053c0 | ||
|
|
df9cb4d35d | ||
|
|
44dcdc53b5 | ||
|
|
8f62e2334e | ||
|
|
692265cec3 | ||
|
|
dd437bf822 | ||
|
|
32a5d8965c | ||
|
|
cbe1ec6f3e | ||
|
|
d5a6283f4f | ||
|
|
ebdbe25751 | ||
|
|
33355a1bf1 |
4
.github/workflows/builder.yml
vendored
4
.github/workflows/builder.yml
vendored
@@ -182,7 +182,7 @@ jobs:
|
||||
fi
|
||||
|
||||
- name: Download translations
|
||||
uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0
|
||||
uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8.0.1
|
||||
with:
|
||||
name: translations
|
||||
|
||||
@@ -490,7 +490,7 @@ jobs:
|
||||
python-version-file: ".python-version"
|
||||
|
||||
- name: Download translations
|
||||
uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0
|
||||
uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8.0.1
|
||||
with:
|
||||
name: translations
|
||||
|
||||
|
||||
8
.github/workflows/ci.yaml
vendored
8
.github/workflows/ci.yaml
vendored
@@ -978,7 +978,7 @@ jobs:
|
||||
run: |
|
||||
echo "::add-matcher::.github/workflows/matchers/pytest-slow.json"
|
||||
- name: Download pytest_buckets
|
||||
uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0
|
||||
uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8.0.1
|
||||
with:
|
||||
name: pytest_buckets
|
||||
- name: Compile English translations
|
||||
@@ -1387,7 +1387,7 @@ jobs:
|
||||
with:
|
||||
persist-credentials: false
|
||||
- name: Download all coverage artifacts
|
||||
uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0
|
||||
uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8.0.1
|
||||
with:
|
||||
pattern: coverage-*
|
||||
- name: Upload coverage to Codecov
|
||||
@@ -1558,7 +1558,7 @@ jobs:
|
||||
with:
|
||||
persist-credentials: false
|
||||
- name: Download all coverage artifacts
|
||||
uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0
|
||||
uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8.0.1
|
||||
with:
|
||||
pattern: coverage-*
|
||||
- name: Upload coverage to Codecov
|
||||
@@ -1587,7 +1587,7 @@ jobs:
|
||||
&& needs.info.outputs.skip_coverage != 'true' && !cancelled()
|
||||
steps:
|
||||
- name: Download all coverage artifacts
|
||||
uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0
|
||||
uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8.0.1
|
||||
with:
|
||||
pattern: test-results-*
|
||||
- name: Upload test results to Codecov
|
||||
|
||||
10
.github/workflows/wheels.yml
vendored
10
.github/workflows/wheels.yml
vendored
@@ -121,12 +121,12 @@ jobs:
|
||||
persist-credentials: false
|
||||
|
||||
- name: Download env_file
|
||||
uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0
|
||||
uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8.0.1
|
||||
with:
|
||||
name: env_file
|
||||
|
||||
- name: Download requirements_diff
|
||||
uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0
|
||||
uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8.0.1
|
||||
with:
|
||||
name: requirements_diff
|
||||
|
||||
@@ -172,17 +172,17 @@ jobs:
|
||||
persist-credentials: false
|
||||
|
||||
- name: Download env_file
|
||||
uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0
|
||||
uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8.0.1
|
||||
with:
|
||||
name: env_file
|
||||
|
||||
- name: Download requirements_diff
|
||||
uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0
|
||||
uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8.0.1
|
||||
with:
|
||||
name: requirements_diff
|
||||
|
||||
- name: Download requirements_all_wheels
|
||||
uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0
|
||||
uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8.0.1
|
||||
with:
|
||||
name: requirements_all_wheels
|
||||
|
||||
|
||||
@@ -173,6 +173,7 @@ homeassistant.components.dnsip.*
|
||||
homeassistant.components.doorbird.*
|
||||
homeassistant.components.dormakaba_dkey.*
|
||||
homeassistant.components.downloader.*
|
||||
homeassistant.components.dropbox.*
|
||||
homeassistant.components.droplet.*
|
||||
homeassistant.components.dsmr.*
|
||||
homeassistant.components.duckdns.*
|
||||
|
||||
12
CODEOWNERS
generated
12
CODEOWNERS
generated
@@ -397,6 +397,8 @@ build.json @home-assistant/supervisor
|
||||
/tests/components/dremel_3d_printer/ @tkdrob
|
||||
/homeassistant/components/drop_connect/ @ChandlerSystems @pfrazer
|
||||
/tests/components/drop_connect/ @ChandlerSystems @pfrazer
|
||||
/homeassistant/components/dropbox/ @bdr99
|
||||
/tests/components/dropbox/ @bdr99
|
||||
/homeassistant/components/droplet/ @sarahseidman
|
||||
/tests/components/droplet/ @sarahseidman
|
||||
/homeassistant/components/dsmr/ @Robbie1221
|
||||
@@ -1561,8 +1563,8 @@ build.json @home-assistant/supervisor
|
||||
/tests/components/sma/ @kellerza @rklomp @erwindouna
|
||||
/homeassistant/components/smappee/ @bsmappee
|
||||
/tests/components/smappee/ @bsmappee
|
||||
/homeassistant/components/smarla/ @explicatis @rlint-explicatis
|
||||
/tests/components/smarla/ @explicatis @rlint-explicatis
|
||||
/homeassistant/components/smarla/ @explicatis @johannes-exp
|
||||
/tests/components/smarla/ @explicatis @johannes-exp
|
||||
/homeassistant/components/smart_meter_texas/ @grahamwetzler
|
||||
/tests/components/smart_meter_texas/ @grahamwetzler
|
||||
/homeassistant/components/smartthings/ @joostlek
|
||||
@@ -1829,8 +1831,8 @@ build.json @home-assistant/supervisor
|
||||
/tests/components/vegehub/ @thulrus
|
||||
/homeassistant/components/velbus/ @Cereal2nd @brefra
|
||||
/tests/components/velbus/ @Cereal2nd @brefra
|
||||
/homeassistant/components/velux/ @Julius2342 @DeerMaximum @pawlizio @wollew
|
||||
/tests/components/velux/ @Julius2342 @DeerMaximum @pawlizio @wollew
|
||||
/homeassistant/components/velux/ @Julius2342 @pawlizio @wollew
|
||||
/tests/components/velux/ @Julius2342 @pawlizio @wollew
|
||||
/homeassistant/components/venstar/ @garbled1 @jhollowe
|
||||
/tests/components/venstar/ @garbled1 @jhollowe
|
||||
/homeassistant/components/versasense/ @imstevenxyz
|
||||
@@ -1913,6 +1915,8 @@ build.json @home-assistant/supervisor
|
||||
/tests/components/whois/ @frenck
|
||||
/homeassistant/components/wiffi/ @mampfes
|
||||
/tests/components/wiffi/ @mampfes
|
||||
/homeassistant/components/wiim/ @Linkplay2020
|
||||
/tests/components/wiim/ @Linkplay2020
|
||||
/homeassistant/components/wilight/ @leofig-rj
|
||||
/tests/components/wilight/ @leofig-rj
|
||||
/homeassistant/components/window/ @home-assistant/core
|
||||
|
||||
@@ -120,7 +120,7 @@ class ActronAirConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
return self.async_show_form(
|
||||
step_id="timeout",
|
||||
)
|
||||
del self.login_task
|
||||
self.login_task = None
|
||||
return await self.async_step_user()
|
||||
|
||||
async def async_step_reauth(
|
||||
|
||||
@@ -12,6 +12,6 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/actron_air",
|
||||
"integration_type": "hub",
|
||||
"iot_class": "cloud_polling",
|
||||
"quality_scale": "bronze",
|
||||
"quality_scale": "silver",
|
||||
"requirements": ["actron-neo-api==0.4.1"]
|
||||
}
|
||||
|
||||
@@ -37,7 +37,7 @@ rules:
|
||||
log-when-unavailable: done
|
||||
parallel-updates: done
|
||||
reauthentication-flow: done
|
||||
test-coverage: todo
|
||||
test-coverage: done
|
||||
|
||||
# Gold
|
||||
devices: done
|
||||
|
||||
@@ -135,6 +135,7 @@ _EXPERIMENTAL_CONDITION_PLATFORMS = {
|
||||
"motion",
|
||||
"occupancy",
|
||||
"person",
|
||||
"schedule",
|
||||
"siren",
|
||||
"switch",
|
||||
"vacuum",
|
||||
|
||||
@@ -15,7 +15,7 @@ from homeassistant.const import (
|
||||
CONF_USERNAME,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.httpx_client import get_async_client
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
|
||||
from ..const import LOGGER
|
||||
from ..errors import AuthenticationRequired, CannotConnect
|
||||
@@ -26,7 +26,7 @@ async def get_axis_api(
|
||||
config: Mapping[str, Any],
|
||||
) -> axis.AxisDevice:
|
||||
"""Create a Axis device API."""
|
||||
session = get_async_client(hass, verify_ssl=False)
|
||||
session = async_get_clientsession(hass, verify_ssl=False)
|
||||
|
||||
api = axis.AxisDevice(
|
||||
Configuration(
|
||||
|
||||
@@ -32,7 +32,7 @@ from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
from homeassistant.helpers.typing import ConfigType
|
||||
|
||||
from .const import CONF_PASSKEY, DOMAIN
|
||||
from .const import CONF_PASSKEY, DOMAIN, LOGGER
|
||||
from .coordinator import BSBLanFastCoordinator, BSBLanSlowCoordinator
|
||||
from .services import async_setup_services
|
||||
|
||||
@@ -52,7 +52,7 @@ class BSBLanData:
|
||||
client: BSBLAN
|
||||
device: Device
|
||||
info: Info
|
||||
static: StaticState
|
||||
static: StaticState | None
|
||||
|
||||
|
||||
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
@@ -82,11 +82,10 @@ async def async_setup_entry(hass: HomeAssistant, entry: BSBLanConfigEntry) -> bo
|
||||
# the connection by fetching firmware version
|
||||
await bsblan.initialize()
|
||||
|
||||
# Fetch device metadata in parallel for faster startup
|
||||
device, info, static = await asyncio.gather(
|
||||
# Fetch required device metadata in parallel for faster startup
|
||||
device, info = await asyncio.gather(
|
||||
bsblan.device(),
|
||||
bsblan.info(),
|
||||
bsblan.static_values(),
|
||||
)
|
||||
except BSBLANConnectionError as err:
|
||||
raise ConfigEntryNotReady(
|
||||
@@ -111,6 +110,16 @@ async def async_setup_entry(hass: HomeAssistant, entry: BSBLanConfigEntry) -> bo
|
||||
translation_key="setup_general_error",
|
||||
) from err
|
||||
|
||||
try:
|
||||
static = await bsblan.static_values()
|
||||
except (BSBLANError, TimeoutError) as err:
|
||||
LOGGER.debug(
|
||||
"Static values not available for %s: %s",
|
||||
entry.data[CONF_HOST],
|
||||
err,
|
||||
)
|
||||
static = None
|
||||
|
||||
# Create coordinators with the already-initialized client
|
||||
fast_coordinator = BSBLanFastCoordinator(hass, entry, bsblan)
|
||||
slow_coordinator = BSBLanSlowCoordinator(hass, entry, bsblan)
|
||||
|
||||
@@ -90,10 +90,11 @@ class BSBLANClimate(BSBLanEntity, ClimateEntity):
|
||||
self._attr_unique_id = f"{format_mac(data.device.MAC)}-climate"
|
||||
|
||||
# Set temperature range if available, otherwise use Home Assistant defaults
|
||||
if data.static.min_temp is not None and data.static.min_temp.value is not None:
|
||||
self._attr_min_temp = data.static.min_temp.value
|
||||
if data.static.max_temp is not None and data.static.max_temp.value is not None:
|
||||
self._attr_max_temp = data.static.max_temp.value
|
||||
if (static := data.static) is not None:
|
||||
if (min_temp := static.min_temp) is not None and min_temp.value is not None:
|
||||
self._attr_min_temp = min_temp.value
|
||||
if (max_temp := static.max_temp) is not None and max_temp.value is not None:
|
||||
self._attr_max_temp = max_temp.value
|
||||
self._attr_temperature_unit = data.fast_coordinator.client.get_temperature_unit
|
||||
|
||||
@property
|
||||
|
||||
@@ -183,90 +183,122 @@ class BSBLANFlowHandler(ConfigFlow, domain=DOMAIN):
|
||||
existing_entry = self._get_reauth_entry()
|
||||
|
||||
if user_input is None:
|
||||
# Preserve existing values as defaults
|
||||
return self.async_show_form(
|
||||
step_id="reauth_confirm",
|
||||
data_schema=vol.Schema(
|
||||
{
|
||||
vol.Optional(
|
||||
CONF_PASSKEY,
|
||||
default=existing_entry.data.get(
|
||||
CONF_PASSKEY, vol.UNDEFINED
|
||||
),
|
||||
): str,
|
||||
vol.Optional(
|
||||
CONF_USERNAME,
|
||||
default=existing_entry.data.get(
|
||||
CONF_USERNAME, vol.UNDEFINED
|
||||
),
|
||||
): str,
|
||||
vol.Optional(
|
||||
CONF_PASSWORD,
|
||||
default=vol.UNDEFINED,
|
||||
): str,
|
||||
}
|
||||
),
|
||||
data_schema=self._build_credentials_schema(existing_entry.data),
|
||||
)
|
||||
|
||||
# Combine existing data with the user's new input for validation.
|
||||
# This correctly handles adding, changing, and clearing credentials.
|
||||
config_data = existing_entry.data.copy()
|
||||
config_data.update(user_input)
|
||||
# Merge existing data with user input for validation
|
||||
validate_data = {**existing_entry.data, **user_input}
|
||||
errors = await self._async_validate_credentials(validate_data)
|
||||
|
||||
self.host = config_data[CONF_HOST]
|
||||
self.port = config_data[CONF_PORT]
|
||||
self.passkey = config_data.get(CONF_PASSKEY)
|
||||
self.username = config_data.get(CONF_USERNAME)
|
||||
self.password = config_data.get(CONF_PASSWORD)
|
||||
if errors:
|
||||
return self.async_show_form(
|
||||
step_id="reauth_confirm",
|
||||
data_schema=self._build_credentials_schema(user_input),
|
||||
errors=errors,
|
||||
)
|
||||
|
||||
return self.async_update_reload_and_abort(
|
||||
existing_entry, data_updates=user_input, reason="reauth_successful"
|
||||
)
|
||||
|
||||
async def async_step_reconfigure(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle reconfiguration flow."""
|
||||
existing_entry = self._get_reconfigure_entry()
|
||||
|
||||
if user_input is None:
|
||||
return self.async_show_form(
|
||||
step_id="reconfigure",
|
||||
data_schema=self._build_connection_schema(existing_entry.data),
|
||||
)
|
||||
|
||||
# Merge existing data with user input for validation
|
||||
validate_data = {**existing_entry.data, **user_input}
|
||||
errors = await self._async_validate_credentials(validate_data)
|
||||
|
||||
if errors:
|
||||
return self.async_show_form(
|
||||
step_id="reconfigure",
|
||||
data_schema=self._build_connection_schema(user_input),
|
||||
errors=errors,
|
||||
)
|
||||
|
||||
# Prevent reconfiguring to a different physical device
|
||||
# it gets the unique ID from the device info when it validates credentials
|
||||
self._abort_if_unique_id_mismatch()
|
||||
|
||||
return self.async_update_reload_and_abort(
|
||||
existing_entry,
|
||||
data_updates=user_input,
|
||||
reason="reconfigure_successful",
|
||||
)
|
||||
|
||||
async def _async_validate_credentials(self, data: dict[str, Any]) -> dict[str, str]:
|
||||
"""Validate connection credentials and return errors dict."""
|
||||
self.host = data[CONF_HOST]
|
||||
self.port = data.get(CONF_PORT, DEFAULT_PORT)
|
||||
self.passkey = data.get(CONF_PASSKEY)
|
||||
self.username = data.get(CONF_USERNAME)
|
||||
self.password = data.get(CONF_PASSWORD)
|
||||
|
||||
errors: dict[str, str] = {}
|
||||
try:
|
||||
await self._get_bsblan_info(raise_on_progress=False, is_reauth=True)
|
||||
except BSBLANAuthError:
|
||||
return self.async_show_form(
|
||||
step_id="reauth_confirm",
|
||||
data_schema=vol.Schema(
|
||||
{
|
||||
vol.Optional(
|
||||
CONF_PASSKEY,
|
||||
default=user_input.get(CONF_PASSKEY, vol.UNDEFINED),
|
||||
): str,
|
||||
vol.Optional(
|
||||
CONF_USERNAME,
|
||||
default=user_input.get(CONF_USERNAME, vol.UNDEFINED),
|
||||
): str,
|
||||
vol.Optional(
|
||||
CONF_PASSWORD,
|
||||
default=vol.UNDEFINED,
|
||||
): str,
|
||||
}
|
||||
),
|
||||
errors={"base": "invalid_auth"},
|
||||
)
|
||||
errors["base"] = "invalid_auth"
|
||||
except BSBLANError:
|
||||
return self.async_show_form(
|
||||
step_id="reauth_confirm",
|
||||
data_schema=vol.Schema(
|
||||
{
|
||||
vol.Optional(
|
||||
CONF_PASSKEY,
|
||||
default=user_input.get(CONF_PASSKEY, vol.UNDEFINED),
|
||||
): str,
|
||||
vol.Optional(
|
||||
CONF_USERNAME,
|
||||
default=user_input.get(CONF_USERNAME, vol.UNDEFINED),
|
||||
): str,
|
||||
vol.Optional(
|
||||
CONF_PASSWORD,
|
||||
default=vol.UNDEFINED,
|
||||
): str,
|
||||
}
|
||||
),
|
||||
errors={"base": "cannot_connect"},
|
||||
)
|
||||
errors["base"] = "cannot_connect"
|
||||
return errors
|
||||
|
||||
# Update only the fields that were provided by the user
|
||||
return self.async_update_reload_and_abort(
|
||||
existing_entry, data_updates=user_input, reason="reauth_successful"
|
||||
@callback
|
||||
def _build_credentials_schema(self, defaults: Mapping[str, Any]) -> vol.Schema:
|
||||
"""Build schema for credentials-only forms (reauth)."""
|
||||
return vol.Schema(
|
||||
{
|
||||
vol.Optional(
|
||||
CONF_PASSKEY,
|
||||
default=defaults.get(CONF_PASSKEY) or vol.UNDEFINED,
|
||||
): str,
|
||||
vol.Optional(
|
||||
CONF_USERNAME,
|
||||
default=defaults.get(CONF_USERNAME) or vol.UNDEFINED,
|
||||
): str,
|
||||
vol.Optional(
|
||||
CONF_PASSWORD,
|
||||
default=vol.UNDEFINED,
|
||||
): str,
|
||||
}
|
||||
)
|
||||
|
||||
@callback
|
||||
def _build_connection_schema(self, defaults: Mapping[str, Any]) -> vol.Schema:
|
||||
"""Build schema for full connection forms (user and reconfigure)."""
|
||||
return vol.Schema(
|
||||
{
|
||||
vol.Required(
|
||||
CONF_HOST,
|
||||
default=defaults.get(CONF_HOST, vol.UNDEFINED),
|
||||
): str,
|
||||
vol.Optional(
|
||||
CONF_PORT,
|
||||
default=defaults.get(CONF_PORT, DEFAULT_PORT),
|
||||
): int,
|
||||
vol.Optional(
|
||||
CONF_PASSKEY,
|
||||
default=defaults.get(CONF_PASSKEY) or vol.UNDEFINED,
|
||||
): str,
|
||||
vol.Optional(
|
||||
CONF_USERNAME,
|
||||
default=defaults.get(CONF_USERNAME) or vol.UNDEFINED,
|
||||
): str,
|
||||
vol.Optional(
|
||||
CONF_PASSWORD,
|
||||
default=vol.UNDEFINED,
|
||||
): str,
|
||||
}
|
||||
)
|
||||
|
||||
@callback
|
||||
@@ -274,32 +306,9 @@ class BSBLANFlowHandler(ConfigFlow, domain=DOMAIN):
|
||||
self, errors: dict | None = None, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Show the setup form to the user."""
|
||||
# Preserve user input if provided, otherwise use defaults
|
||||
defaults = user_input or {}
|
||||
|
||||
return self.async_show_form(
|
||||
step_id="user",
|
||||
data_schema=vol.Schema(
|
||||
{
|
||||
vol.Required(
|
||||
CONF_HOST, default=defaults.get(CONF_HOST, vol.UNDEFINED)
|
||||
): str,
|
||||
vol.Optional(
|
||||
CONF_PORT, default=defaults.get(CONF_PORT, DEFAULT_PORT)
|
||||
): int,
|
||||
vol.Optional(
|
||||
CONF_PASSKEY, default=defaults.get(CONF_PASSKEY, vol.UNDEFINED)
|
||||
): str,
|
||||
vol.Optional(
|
||||
CONF_USERNAME,
|
||||
default=defaults.get(CONF_USERNAME, vol.UNDEFINED),
|
||||
): str,
|
||||
vol.Optional(
|
||||
CONF_PASSWORD,
|
||||
default=defaults.get(CONF_PASSWORD, vol.UNDEFINED),
|
||||
): str,
|
||||
}
|
||||
),
|
||||
data_schema=self._build_connection_schema(user_input or {}),
|
||||
errors=errors or {},
|
||||
)
|
||||
|
||||
|
||||
@@ -24,7 +24,7 @@ async def async_get_config_entry_diagnostics(
|
||||
"sensor": data.fast_coordinator.data.sensor.model_dump(),
|
||||
"dhw": data.fast_coordinator.data.dhw.model_dump(),
|
||||
},
|
||||
"static": data.static.model_dump(),
|
||||
"static": data.static.model_dump() if data.static is not None else None,
|
||||
}
|
||||
|
||||
# Add DHW config and schedule from slow coordinator if available
|
||||
|
||||
@@ -58,7 +58,7 @@ rules:
|
||||
entity-translations: done
|
||||
exception-translations: done
|
||||
icon-translations: todo
|
||||
reconfiguration-flow: todo
|
||||
reconfiguration-flow: done
|
||||
repair-issues:
|
||||
status: exempt
|
||||
comment: |
|
||||
|
||||
@@ -3,7 +3,9 @@
|
||||
"abort": {
|
||||
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
|
||||
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
|
||||
"reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]"
|
||||
"reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]",
|
||||
"reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]",
|
||||
"unique_id_mismatch": "The device you are trying to reconfigure is not the same as the one previously configured."
|
||||
},
|
||||
"error": {
|
||||
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
|
||||
@@ -39,6 +41,24 @@
|
||||
"description": "The BSB-LAN integration needs to re-authenticate with {name}",
|
||||
"title": "[%key:common::config_flow::title::reauth%]"
|
||||
},
|
||||
"reconfigure": {
|
||||
"data": {
|
||||
"host": "[%key:common::config_flow::data::host%]",
|
||||
"passkey": "[%key:component::bsblan::config::step::user::data::passkey%]",
|
||||
"password": "[%key:common::config_flow::data::password%]",
|
||||
"port": "[%key:common::config_flow::data::port%]",
|
||||
"username": "[%key:common::config_flow::data::username%]"
|
||||
},
|
||||
"data_description": {
|
||||
"host": "[%key:component::bsblan::config::step::user::data_description::host%]",
|
||||
"passkey": "[%key:component::bsblan::config::step::user::data_description::passkey%]",
|
||||
"password": "[%key:component::bsblan::config::step::user::data_description::password%]",
|
||||
"port": "[%key:component::bsblan::config::step::user::data_description::port%]",
|
||||
"username": "[%key:component::bsblan::config::step::user::data_description::username%]"
|
||||
},
|
||||
"description": "Update connection settings for your BSB-LAN device.",
|
||||
"title": "Reconfigure BSB-LAN"
|
||||
},
|
||||
"user": {
|
||||
"data": {
|
||||
"host": "[%key:common::config_flow::data::host%]",
|
||||
|
||||
64
homeassistant/components/dropbox/__init__.py
Normal file
64
homeassistant/components/dropbox/__init__.py
Normal file
@@ -0,0 +1,64 @@
|
||||
"""The Dropbox integration."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from python_dropbox_api import (
|
||||
DropboxAPIClient,
|
||||
DropboxAuthException,
|
||||
DropboxUnknownException,
|
||||
)
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
|
||||
from homeassistant.helpers import aiohttp_client
|
||||
from homeassistant.helpers.config_entry_oauth2_flow import (
|
||||
ImplementationUnavailableError,
|
||||
OAuth2Session,
|
||||
async_get_config_entry_implementation,
|
||||
)
|
||||
|
||||
from .auth import DropboxConfigEntryAuth
|
||||
from .const import DATA_BACKUP_AGENT_LISTENERS, DOMAIN
|
||||
|
||||
type DropboxConfigEntry = ConfigEntry[DropboxAPIClient]
|
||||
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: DropboxConfigEntry) -> bool:
|
||||
"""Set up Dropbox from a config entry."""
|
||||
try:
|
||||
oauth2_implementation = await async_get_config_entry_implementation(hass, entry)
|
||||
except ImplementationUnavailableError as err:
|
||||
raise ConfigEntryNotReady(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="oauth2_implementation_unavailable",
|
||||
) from err
|
||||
oauth2_session = OAuth2Session(hass, entry, oauth2_implementation)
|
||||
|
||||
auth = DropboxConfigEntryAuth(
|
||||
aiohttp_client.async_get_clientsession(hass), oauth2_session
|
||||
)
|
||||
|
||||
client = DropboxAPIClient(auth)
|
||||
|
||||
try:
|
||||
await client.get_account_info()
|
||||
except DropboxAuthException as err:
|
||||
raise ConfigEntryAuthFailed from err
|
||||
except (DropboxUnknownException, TimeoutError) as err:
|
||||
raise ConfigEntryNotReady from err
|
||||
|
||||
entry.runtime_data = client
|
||||
|
||||
def async_notify_backup_listeners() -> None:
|
||||
for listener in hass.data.get(DATA_BACKUP_AGENT_LISTENERS, []):
|
||||
listener()
|
||||
|
||||
entry.async_on_unload(entry.async_on_state_change(async_notify_backup_listeners))
|
||||
|
||||
return True
|
||||
|
||||
|
||||
async def async_unload_entry(hass: HomeAssistant, entry: DropboxConfigEntry) -> bool:
|
||||
"""Unload a config entry."""
|
||||
return True
|
||||
38
homeassistant/components/dropbox/application_credentials.py
Normal file
38
homeassistant/components/dropbox/application_credentials.py
Normal file
@@ -0,0 +1,38 @@
|
||||
"""Application credentials platform for the Dropbox integration."""
|
||||
|
||||
from homeassistant.components.application_credentials import ClientCredential
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.config_entry_oauth2_flow import (
|
||||
AbstractOAuth2Implementation,
|
||||
LocalOAuth2ImplementationWithPkce,
|
||||
)
|
||||
|
||||
from .const import OAUTH2_AUTHORIZE, OAUTH2_SCOPES, OAUTH2_TOKEN
|
||||
|
||||
|
||||
async def async_get_auth_implementation(
|
||||
hass: HomeAssistant, auth_domain: str, credential: ClientCredential
|
||||
) -> AbstractOAuth2Implementation:
|
||||
"""Return custom auth implementation."""
|
||||
return DropboxOAuth2Implementation(
|
||||
hass,
|
||||
auth_domain,
|
||||
credential.client_id,
|
||||
OAUTH2_AUTHORIZE,
|
||||
OAUTH2_TOKEN,
|
||||
credential.client_secret,
|
||||
)
|
||||
|
||||
|
||||
class DropboxOAuth2Implementation(LocalOAuth2ImplementationWithPkce):
|
||||
"""Custom Dropbox OAuth2 implementation to add the necessary authorize url parameters."""
|
||||
|
||||
@property
|
||||
def extra_authorize_data(self) -> dict:
|
||||
"""Extra data that needs to be appended to the authorize url."""
|
||||
data: dict = {
|
||||
"token_access_type": "offline",
|
||||
"scope": " ".join(OAUTH2_SCOPES),
|
||||
}
|
||||
data.update(super().extra_authorize_data)
|
||||
return data
|
||||
44
homeassistant/components/dropbox/auth.py
Normal file
44
homeassistant/components/dropbox/auth.py
Normal file
@@ -0,0 +1,44 @@
|
||||
"""Authentication for Dropbox."""
|
||||
|
||||
from typing import cast
|
||||
|
||||
from aiohttp import ClientSession
|
||||
from python_dropbox_api import Auth
|
||||
|
||||
from homeassistant.helpers.config_entry_oauth2_flow import OAuth2Session
|
||||
|
||||
|
||||
class DropboxConfigEntryAuth(Auth):
|
||||
"""Provide Dropbox authentication tied to an OAuth2 based config entry."""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
websession: ClientSession,
|
||||
oauth_session: OAuth2Session,
|
||||
) -> None:
|
||||
"""Initialize DropboxConfigEntryAuth."""
|
||||
super().__init__(websession)
|
||||
self._oauth_session = oauth_session
|
||||
|
||||
async def async_get_access_token(self) -> str:
|
||||
"""Return a valid access token."""
|
||||
await self._oauth_session.async_ensure_token_valid()
|
||||
|
||||
return cast(str, self._oauth_session.token["access_token"])
|
||||
|
||||
|
||||
class DropboxConfigFlowAuth(Auth):
|
||||
"""Provide authentication tied to a fixed token for the config flow."""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
websession: ClientSession,
|
||||
token: str,
|
||||
) -> None:
|
||||
"""Initialize DropboxConfigFlowAuth."""
|
||||
super().__init__(websession)
|
||||
self._token = token
|
||||
|
||||
async def async_get_access_token(self) -> str:
|
||||
"""Return the fixed access token."""
|
||||
return self._token
|
||||
230
homeassistant/components/dropbox/backup.py
Normal file
230
homeassistant/components/dropbox/backup.py
Normal file
@@ -0,0 +1,230 @@
|
||||
"""Backup platform for the Dropbox integration."""
|
||||
|
||||
from collections.abc import AsyncIterator, Callable, Coroutine
|
||||
from functools import wraps
|
||||
import json
|
||||
import logging
|
||||
from typing import Any, Concatenate
|
||||
|
||||
from python_dropbox_api import (
|
||||
DropboxAPIClient,
|
||||
DropboxAuthException,
|
||||
DropboxFileOrFolderNotFoundException,
|
||||
DropboxUnknownException,
|
||||
)
|
||||
|
||||
from homeassistant.components.backup import (
|
||||
AgentBackup,
|
||||
BackupAgent,
|
||||
BackupAgentError,
|
||||
BackupNotFound,
|
||||
suggested_filename,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
|
||||
from . import DropboxConfigEntry
|
||||
from .const import DATA_BACKUP_AGENT_LISTENERS, DOMAIN
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def _suggested_filenames(backup: AgentBackup) -> tuple[str, str]:
|
||||
"""Return the suggested filenames for the backup and metadata."""
|
||||
base_name = suggested_filename(backup).rsplit(".", 1)[0]
|
||||
return f"{base_name}.tar", f"{base_name}.metadata.json"
|
||||
|
||||
|
||||
async def _async_string_iterator(content: str) -> AsyncIterator[bytes]:
|
||||
"""Yield a string as a single bytes chunk."""
|
||||
yield content.encode()
|
||||
|
||||
|
||||
def handle_backup_errors[_R, **P](
|
||||
func: Callable[Concatenate[DropboxBackupAgent, P], Coroutine[Any, Any, _R]],
|
||||
) -> Callable[Concatenate[DropboxBackupAgent, P], Coroutine[Any, Any, _R]]:
|
||||
"""Handle backup errors."""
|
||||
|
||||
@wraps(func)
|
||||
async def wrapper(
|
||||
self: DropboxBackupAgent, *args: P.args, **kwargs: P.kwargs
|
||||
) -> _R:
|
||||
try:
|
||||
return await func(self, *args, **kwargs)
|
||||
except DropboxFileOrFolderNotFoundException as err:
|
||||
raise BackupNotFound(
|
||||
f"Failed to {func.__name__.removeprefix('async_').replace('_', ' ')}"
|
||||
) from err
|
||||
except DropboxAuthException as err:
|
||||
self._entry.async_start_reauth(self._hass)
|
||||
raise BackupAgentError("Authentication error") from err
|
||||
except DropboxUnknownException as err:
|
||||
_LOGGER.error(
|
||||
"Error during %s: %s",
|
||||
func.__name__,
|
||||
err,
|
||||
)
|
||||
_LOGGER.debug("Full error: %s", err, exc_info=True)
|
||||
raise BackupAgentError(
|
||||
f"Failed to {func.__name__.removeprefix('async_').replace('_', ' ')}"
|
||||
) from err
|
||||
|
||||
return wrapper
|
||||
|
||||
|
||||
async def async_get_backup_agents(
|
||||
hass: HomeAssistant,
|
||||
**kwargs: Any,
|
||||
) -> list[BackupAgent]:
|
||||
"""Return a list of backup agents."""
|
||||
entries = hass.config_entries.async_loaded_entries(DOMAIN)
|
||||
return [DropboxBackupAgent(hass, entry) for entry in entries]
|
||||
|
||||
|
||||
@callback
def async_register_backup_agents_listener(
    hass: HomeAssistant,
    *,
    listener: Callable[[], None],
    **kwargs: Any,
) -> Callable[[], None]:
    """Register a listener to be called when agents are added or removed.

    :return: A function to unregister the listener.
    """
    listeners = hass.data.setdefault(DATA_BACKUP_AGENT_LISTENERS, [])
    listeners.append(listener)

    @callback
    def remove_listener() -> None:
        """Remove the listener."""
        registered = hass.data[DATA_BACKUP_AGENT_LISTENERS]
        registered.remove(listener)
        # Drop the hass.data key entirely once the last listener is gone.
        if not registered:
            del hass.data[DATA_BACKUP_AGENT_LISTENERS]

    return remove_listener
|
||||
|
||||
|
||||
class DropboxBackupAgent(BackupAgent):
    """Backup agent for the Dropbox integration.

    Each backup is stored as a ``<name>.tar`` file with a matching
    ``<name>.metadata.json`` file describing the backup.
    """

    domain = DOMAIN

    def __init__(self, hass: HomeAssistant, entry: DropboxConfigEntry) -> None:
        """Initialize the backup agent."""
        super().__init__()
        self._hass = hass
        self._entry = entry
        self.name = entry.title
        assert entry.unique_id
        self.unique_id = entry.unique_id
        self._api: DropboxAPIClient = entry.runtime_data

    async def _async_get_backups(self) -> list[tuple[AgentBackup, str]]:
        """Get backups and their corresponding tar file names.

        Metadata files without a matching tar file, and metadata files that
        cannot be parsed, are skipped with a warning.
        """
        files = await self._api.list_folder("")

        tar_files = {f.name for f in files if f.name.endswith(".tar")}
        metadata_files = [f for f in files if f.name.endswith(".metadata.json")]

        backups: list[tuple[AgentBackup, str]] = []
        for metadata_file in metadata_files:
            tar_name = metadata_file.name.removesuffix(".metadata.json") + ".tar"
            if tar_name not in tar_files:
                _LOGGER.warning(
                    "Found metadata file '%s' without matching backup file",
                    metadata_file.name,
                )
                continue

            metadata_stream = self._api.download_file(f"/{metadata_file.name}")
            raw = b"".join([chunk async for chunk in metadata_stream])
            try:
                data = json.loads(raw)
                backup = AgentBackup.from_dict(data)
            except (json.JSONDecodeError, ValueError, TypeError, KeyError) as err:
                _LOGGER.warning(
                    "Skipping invalid metadata file '%s': %s",
                    metadata_file.name,
                    err,
                )
                continue
            backups.append((backup, tar_name))

        return backups

    @handle_backup_errors
    async def async_upload_backup(
        self,
        *,
        open_stream: Callable[[], Coroutine[Any, Any, AsyncIterator[bytes]]],
        backup: AgentBackup,
        **kwargs: Any,
    ) -> None:
        """Upload a backup.

        :param open_stream: A function returning an async iterator yielding
            the backup archive.
        :param backup: Metadata about the backup that should be uploaded.
        """
        backup_filename, metadata_filename = _suggested_filenames(backup)
        backup_path = f"/{backup_filename}"
        metadata_path = f"/{metadata_filename}"

        file_stream = await open_stream()
        await self._api.upload_file(backup_path, file_stream)

        metadata_stream = _async_string_iterator(json.dumps(backup.as_dict()))

        try:
            await self._api.upload_file(metadata_path, metadata_stream)
        except (
            DropboxAuthException,
            DropboxUnknownException,
        ):
            # Avoid leaving an orphaned tar file without its metadata.
            await self._api.delete_file(backup_path)
            raise

    @handle_backup_errors
    async def async_list_backups(self, **kwargs: Any) -> list[AgentBackup]:
        """List backups."""
        return [backup for backup, _ in await self._async_get_backups()]

    @handle_backup_errors
    async def async_download_backup(
        self,
        backup_id: str,
        **kwargs: Any,
    ) -> AsyncIterator[bytes]:
        """Download a backup file.

        :param backup_id: The ID of the backup that was returned in async_list_backups.
        :return: An async iterator that yields bytes.
        :raises BackupNotFound: If no backup with ``backup_id`` exists.
        """
        backups = await self._async_get_backups()
        for backup, filename in backups:
            if backup.backup_id == backup_id:
                # Bug fix: download the matched tar file; previously a bogus
                # literal path ("/(unknown)") was requested here.
                return self._api.download_file(f"/{filename}")

        raise BackupNotFound(f"Backup {backup_id} not found")

    @handle_backup_errors
    async def async_get_backup(
        self,
        backup_id: str,
        **kwargs: Any,
    ) -> AgentBackup:
        """Return a backup.

        :raises BackupNotFound: If no backup with ``backup_id`` exists.
        """
        backups = await self._async_get_backups()

        for backup, _ in backups:
            if backup.backup_id == backup_id:
                return backup

        raise BackupNotFound(f"Backup {backup_id} not found")

    @handle_backup_errors
    async def async_delete_backup(
        self,
        backup_id: str,
        **kwargs: Any,
    ) -> None:
        """Delete a backup file and its metadata file.

        :raises BackupNotFound: If no backup with ``backup_id`` exists.
        """
        backups = await self._async_get_backups()
        for backup, tar_filename in backups:
            if backup.backup_id == backup_id:
                metadata_filename = tar_filename.removesuffix(".tar") + ".metadata.json"
                await self._api.delete_file(f"/{tar_filename}")
                await self._api.delete_file(f"/{metadata_filename}")
                return

        raise BackupNotFound(f"Backup {backup_id} not found")
|
||||
60
homeassistant/components/dropbox/config_flow.py
Normal file
60
homeassistant/components/dropbox/config_flow.py
Normal file
@@ -0,0 +1,60 @@
|
||||
"""Config flow for Dropbox."""
|
||||
|
||||
from collections.abc import Mapping
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
from python_dropbox_api import DropboxAPIClient
|
||||
|
||||
from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlowResult
|
||||
from homeassistant.const import CONF_ACCESS_TOKEN, CONF_TOKEN
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
from homeassistant.helpers.config_entry_oauth2_flow import AbstractOAuth2FlowHandler
|
||||
|
||||
from .auth import DropboxConfigFlowAuth
|
||||
from .const import DOMAIN
|
||||
|
||||
|
||||
class DropboxConfigFlow(AbstractOAuth2FlowHandler, domain=DOMAIN):
    """Config flow to handle Dropbox OAuth2 authentication."""

    DOMAIN = DOMAIN

    @property
    def logger(self) -> logging.Logger:
        """Return logger."""
        return logging.getLogger(__name__)

    async def async_oauth_create_entry(self, data: dict[str, Any]) -> ConfigFlowResult:
        """Create an entry for the flow, or update existing entry."""
        token = data[CONF_TOKEN][CONF_ACCESS_TOKEN]
        client = DropboxAPIClient(
            DropboxConfigFlowAuth(async_get_clientsession(self.hass), token)
        )
        account = await client.get_account_info()

        # The Dropbox account ID uniquely identifies the account.
        await self.async_set_unique_id(account.account_id)

        if self.source != SOURCE_REAUTH:
            self._abort_if_unique_id_configured()
            return self.async_create_entry(title=account.email, data=data)

        # Reauth must be completed with the same account as before.
        self._abort_if_unique_id_mismatch(reason="wrong_account")
        return self.async_update_reload_and_abort(self._get_reauth_entry(), data=data)

    async def async_step_reauth(
        self, entry_data: Mapping[str, Any]
    ) -> ConfigFlowResult:
        """Perform reauth upon an API authentication error."""
        return await self.async_step_reauth_confirm()

    async def async_step_reauth_confirm(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Dialog that informs the user that reauth is required."""
        if user_input is not None:
            return await self.async_step_user()
        return self.async_show_form(step_id="reauth_confirm")
|
||||
19
homeassistant/components/dropbox/const.py
Normal file
19
homeassistant/components/dropbox/const.py
Normal file
@@ -0,0 +1,19 @@
|
||||
"""Constants for the Dropbox integration."""

from collections.abc import Callable

from homeassistant.util.hass_dict import HassKey

# Integration domain; also used as prefix for hass.data keys.
DOMAIN = "dropbox"

# Dropbox OAuth2 endpoints.
OAUTH2_AUTHORIZE = "https://www.dropbox.com/oauth2/authorize"
OAUTH2_TOKEN = "https://api.dropboxapi.com/oauth2/token"
# Scopes requested during authorization: read the account profile and
# read/write file content (needed to upload, download and delete backups).
OAUTH2_SCOPES = [
    "account_info.read",
    "files.content.read",
    "files.content.write",
]

# hass.data key holding the callbacks invoked when backup agents are
# added or removed.
DATA_BACKUP_AGENT_LISTENERS: HassKey[list[Callable[[], None]]] = HassKey(
    f"{DOMAIN}.backup_agent_listeners"
)
|
||||
13
homeassistant/components/dropbox/manifest.json
Normal file
13
homeassistant/components/dropbox/manifest.json
Normal file
@@ -0,0 +1,13 @@
|
||||
{
|
||||
"domain": "dropbox",
|
||||
"name": "Dropbox",
|
||||
"after_dependencies": ["backup"],
|
||||
"codeowners": ["@bdr99"],
|
||||
"config_flow": true,
|
||||
"dependencies": ["application_credentials"],
|
||||
"documentation": "https://www.home-assistant.io/integrations/dropbox",
|
||||
"integration_type": "service",
|
||||
"iot_class": "cloud_polling",
|
||||
"quality_scale": "bronze",
|
||||
"requirements": ["python-dropbox-api==0.1.3"]
|
||||
}
|
||||
112
homeassistant/components/dropbox/quality_scale.yaml
Normal file
112
homeassistant/components/dropbox/quality_scale.yaml
Normal file
@@ -0,0 +1,112 @@
|
||||
rules:
|
||||
# Bronze
|
||||
action-setup:
|
||||
status: exempt
|
||||
comment: Integration does not register any actions.
|
||||
appropriate-polling:
|
||||
status: exempt
|
||||
comment: Integration does not poll.
|
||||
brands: done
|
||||
common-modules:
|
||||
status: exempt
|
||||
comment: Integration does not have any entities or coordinators.
|
||||
config-flow-test-coverage: done
|
||||
config-flow: done
|
||||
dependency-transparency: done
|
||||
docs-actions:
|
||||
status: exempt
|
||||
comment: Integration does not register any actions.
|
||||
docs-high-level-description: done
|
||||
docs-installation-instructions: done
|
||||
docs-removal-instructions: done
|
||||
entity-event-setup:
|
||||
status: exempt
|
||||
comment: Integration does not have any entities.
|
||||
entity-unique-id:
|
||||
status: exempt
|
||||
comment: Integration does not have any entities.
|
||||
has-entity-name:
|
||||
status: exempt
|
||||
comment: Integration does not have any entities.
|
||||
runtime-data: done
|
||||
test-before-configure: done
|
||||
test-before-setup: done
|
||||
unique-config-entry: done
|
||||
|
||||
# Silver
|
||||
action-exceptions:
|
||||
status: exempt
|
||||
comment: Integration does not register any actions.
|
||||
config-entry-unloading: done
|
||||
docs-configuration-parameters:
|
||||
status: exempt
|
||||
comment: Integration does not have any configuration parameters.
|
||||
docs-installation-parameters: done
|
||||
entity-unavailable:
|
||||
status: exempt
|
||||
comment: Integration does not have any entities.
|
||||
integration-owner: done
|
||||
log-when-unavailable: todo
|
||||
parallel-updates:
|
||||
status: exempt
|
||||
comment: Integration does not make any entity updates.
|
||||
reauthentication-flow: done
|
||||
test-coverage: done
|
||||
|
||||
# Gold
|
||||
devices:
|
||||
status: exempt
|
||||
comment: Integration does not have any entities.
|
||||
diagnostics:
|
||||
status: exempt
|
||||
comment: Integration does not have any data to diagnose.
|
||||
discovery-update-info:
|
||||
status: exempt
|
||||
comment: Integration is a service.
|
||||
discovery:
|
||||
status: exempt
|
||||
comment: Integration is a service.
|
||||
docs-data-update:
|
||||
status: exempt
|
||||
comment: Integration does not update any data.
|
||||
docs-examples:
|
||||
status: exempt
|
||||
comment: Integration only provides backup functionality.
|
||||
docs-known-limitations: todo
|
||||
docs-supported-devices:
|
||||
status: exempt
|
||||
comment: Integration does not support any devices.
|
||||
docs-supported-functions: done
|
||||
docs-troubleshooting: todo
|
||||
docs-use-cases: done
|
||||
dynamic-devices:
|
||||
status: exempt
|
||||
comment: Integration does not use any devices.
|
||||
entity-category:
|
||||
status: exempt
|
||||
comment: Integration does not have any entities.
|
||||
entity-device-class:
|
||||
status: exempt
|
||||
comment: Integration does not have any entities.
|
||||
entity-disabled-by-default:
|
||||
status: exempt
|
||||
comment: Integration does not have any entities.
|
||||
entity-translations:
|
||||
status: exempt
|
||||
comment: Integration does not have any entities.
|
||||
exception-translations: todo
|
||||
icon-translations:
|
||||
status: exempt
|
||||
comment: Integration does not have any entities.
|
||||
reconfiguration-flow: todo
|
||||
repair-issues:
|
||||
status: exempt
|
||||
comment: Integration does not have any repairs.
|
||||
stale-devices:
|
||||
status: exempt
|
||||
comment: Integration does not have any devices.
|
||||
|
||||
# Platinum
|
||||
async-dependency: done
|
||||
inject-websession: done
|
||||
strict-typing: done
|
||||
35
homeassistant/components/dropbox/strings.json
Normal file
35
homeassistant/components/dropbox/strings.json
Normal file
@@ -0,0 +1,35 @@
|
||||
{
|
||||
"config": {
|
||||
"abort": {
|
||||
"already_configured": "[%key:common::config_flow::abort::already_configured_account%]",
|
||||
"already_in_progress": "[%key:common::config_flow::abort::already_in_progress%]",
|
||||
"authorize_url_timeout": "[%key:common::config_flow::abort::oauth2_authorize_url_timeout%]",
|
||||
"missing_configuration": "[%key:common::config_flow::abort::oauth2_missing_configuration%]",
|
||||
"no_url_available": "[%key:common::config_flow::abort::oauth2_no_url_available%]",
|
||||
"oauth_error": "[%key:common::config_flow::abort::oauth2_error%]",
|
||||
"oauth_failed": "[%key:common::config_flow::abort::oauth2_failed%]",
|
||||
"oauth_timeout": "[%key:common::config_flow::abort::oauth2_timeout%]",
|
||||
"oauth_unauthorized": "[%key:common::config_flow::abort::oauth2_unauthorized%]",
|
||||
"reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]",
|
||||
"user_rejected_authorize": "[%key:common::config_flow::abort::oauth2_user_rejected_authorize%]",
|
||||
"wrong_account": "Wrong account: Please authenticate with the correct account."
|
||||
},
|
||||
"create_entry": {
|
||||
"default": "[%key:common::config_flow::create_entry::authenticated%]"
|
||||
},
|
||||
"step": {
|
||||
"pick_implementation": {
|
||||
"title": "[%key:common::config_flow::title::oauth2_pick_implementation%]"
|
||||
},
|
||||
"reauth_confirm": {
|
||||
"description": "The Dropbox integration needs to re-authenticate your account.",
|
||||
"title": "[%key:common::config_flow::title::reauth%]"
|
||||
}
|
||||
}
|
||||
},
|
||||
"exceptions": {
|
||||
"oauth2_implementation_unavailable": {
|
||||
"message": "[%key:common::exceptions::oauth2_implementation_unavailable::message%]"
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -23,6 +23,23 @@
|
||||
"alarm_sound_mode": {
|
||||
"default": "mdi:alarm"
|
||||
}
|
||||
},
|
||||
"sensor": {
|
||||
"alarm_sound_mode": {
|
||||
"default": "mdi:alarm"
|
||||
},
|
||||
"last_alarm_type_code": {
|
||||
"default": "mdi:alarm"
|
||||
},
|
||||
"last_alarm_type_name": {
|
||||
"default": "mdi:alarm"
|
||||
},
|
||||
"local_ip": {
|
||||
"default": "mdi:ip"
|
||||
},
|
||||
"wan_ip": {
|
||||
"default": "mdi:ip"
|
||||
}
|
||||
}
|
||||
},
|
||||
"services": {
|
||||
|
||||
@@ -27,6 +27,7 @@ class FullyButtonEntityDescription(ButtonEntityDescription):
|
||||
"""Fully Kiosk Browser button description."""
|
||||
|
||||
press_action: Callable[[FullyKiosk], Any]
|
||||
refresh_after_press: bool = True
|
||||
|
||||
|
||||
BUTTONS: tuple[FullyButtonEntityDescription, ...] = (
|
||||
@@ -68,6 +69,13 @@ BUTTONS: tuple[FullyButtonEntityDescription, ...] = (
|
||||
entity_category=EntityCategory.CONFIG,
|
||||
press_action=lambda fully: fully.clearCache(),
|
||||
),
|
||||
FullyButtonEntityDescription(
|
||||
key="triggerMotion",
|
||||
translation_key="trigger_motion",
|
||||
entity_category=EntityCategory.CONFIG,
|
||||
press_action=lambda fully: fully.triggerMotion(),
|
||||
refresh_after_press=False,
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
@@ -102,4 +110,5 @@ class FullyButtonEntity(FullyKioskEntity, ButtonEntity):
|
||||
async def async_press(self) -> None:
|
||||
"""Set the value of the entity."""
|
||||
await self.entity_description.press_action(self.coordinator.fully)
|
||||
await self.coordinator.async_refresh()
|
||||
if self.entity_description.refresh_after_press:
|
||||
await self.coordinator.async_refresh()
|
||||
|
||||
@@ -88,6 +88,9 @@
|
||||
},
|
||||
"to_foreground": {
|
||||
"name": "Bring to foreground"
|
||||
},
|
||||
"trigger_motion": {
|
||||
"name": "Trigger motion activity"
|
||||
}
|
||||
},
|
||||
"image": {
|
||||
|
||||
@@ -12,6 +12,7 @@ from homeassistant.exceptions import ConfigEntryNotReady
|
||||
from homeassistant.helpers import instance_id
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
from homeassistant.helpers.config_entry_oauth2_flow import (
|
||||
ImplementationUnavailableError,
|
||||
OAuth2Session,
|
||||
async_get_config_entry_implementation,
|
||||
)
|
||||
@@ -30,11 +31,17 @@ _PLATFORMS = (Platform.SENSOR,)
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: GoogleDriveConfigEntry) -> bool:
|
||||
"""Set up Google Drive from a config entry."""
|
||||
try:
|
||||
implementation = await async_get_config_entry_implementation(hass, entry)
|
||||
except ImplementationUnavailableError as err:
|
||||
raise ConfigEntryNotReady(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="oauth2_implementation_unavailable",
|
||||
) from err
|
||||
|
||||
auth = AsyncConfigEntryAuth(
|
||||
async_get_clientsession(hass),
|
||||
OAuth2Session(
|
||||
hass, entry, await async_get_config_entry_implementation(hass, entry)
|
||||
),
|
||||
OAuth2Session(hass, entry, implementation),
|
||||
)
|
||||
|
||||
# Test we can refresh the token and raise ConfigEntryAuthFailed or ConfigEntryNotReady if not
|
||||
@@ -46,7 +53,11 @@ async def async_setup_entry(hass: HomeAssistant, entry: GoogleDriveConfigEntry)
|
||||
try:
|
||||
folder_id, _ = await client.async_create_ha_root_folder_if_not_exists()
|
||||
except GoogleDriveApiError as err:
|
||||
raise ConfigEntryNotReady from err
|
||||
raise ConfigEntryNotReady(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="failed_to_get_folder",
|
||||
translation_placeholders={"folder": "Home Assistant"},
|
||||
) from err
|
||||
|
||||
def async_notify_backup_listeners() -> None:
|
||||
for listener in hass.data.get(DATA_BACKUP_AGENT_LISTENERS, []):
|
||||
|
||||
@@ -22,6 +22,8 @@ from homeassistant.exceptions import (
|
||||
)
|
||||
from homeassistant.helpers import config_entry_oauth2_flow
|
||||
|
||||
from .const import DOMAIN
|
||||
|
||||
_UPLOAD_AND_DOWNLOAD_TIMEOUT = 12 * 3600
|
||||
_UPLOAD_MAX_RETRIES = 20
|
||||
|
||||
@@ -61,14 +63,21 @@ class AsyncConfigEntryAuth(AbstractAuth):
|
||||
):
|
||||
if isinstance(ex, ClientResponseError) and 400 <= ex.status < 500:
|
||||
raise ConfigEntryAuthFailed(
|
||||
"OAuth session is not valid, reauth required"
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="authentication_not_valid",
|
||||
) from ex
|
||||
raise ConfigEntryNotReady from ex
|
||||
raise ConfigEntryNotReady(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="authentication_failed",
|
||||
) from ex
|
||||
if hasattr(ex, "status") and ex.status == 400:
|
||||
self._oauth_session.config_entry.async_start_reauth(
|
||||
self._oauth_session.hass
|
||||
)
|
||||
raise HomeAssistantError(ex) from ex
|
||||
raise HomeAssistantError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="authentication_failed",
|
||||
) from ex
|
||||
return str(self._oauth_session.token[CONF_ACCESS_TOKEN])
|
||||
|
||||
|
||||
|
||||
@@ -8,7 +8,11 @@ from typing import Any, cast
|
||||
|
||||
from google_drive_api.exceptions import GoogleDriveApiError
|
||||
|
||||
from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlowResult
|
||||
from homeassistant.config_entries import (
|
||||
SOURCE_REAUTH,
|
||||
SOURCE_RECONFIGURE,
|
||||
ConfigFlowResult,
|
||||
)
|
||||
from homeassistant.const import CONF_ACCESS_TOKEN, CONF_TOKEN
|
||||
from homeassistant.helpers import config_entry_oauth2_flow, instance_id
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
@@ -44,6 +48,12 @@ class OAuth2FlowHandler(
|
||||
"prompt": "consent",
|
||||
}
|
||||
|
||||
async def async_step_reconfigure(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle a reconfiguration flow."""
|
||||
return await self.async_step_user(user_input)
|
||||
|
||||
async def async_step_reauth(
|
||||
self, entry_data: Mapping[str, Any]
|
||||
) -> ConfigFlowResult:
|
||||
@@ -81,13 +91,16 @@ class OAuth2FlowHandler(
|
||||
|
||||
await self.async_set_unique_id(email_address)
|
||||
|
||||
if self.source == SOURCE_REAUTH:
|
||||
reauth_entry = self._get_reauth_entry()
|
||||
if self.source in (SOURCE_REAUTH, SOURCE_RECONFIGURE):
|
||||
if self.source == SOURCE_REAUTH:
|
||||
entry = self._get_reauth_entry()
|
||||
else:
|
||||
entry = self._get_reconfigure_entry()
|
||||
self._abort_if_unique_id_mismatch(
|
||||
reason="wrong_account",
|
||||
description_placeholders={"email": cast(str, reauth_entry.unique_id)},
|
||||
description_placeholders={"email": cast(str, entry.unique_id)},
|
||||
)
|
||||
return self.async_update_reload_and_abort(reauth_entry, data=data)
|
||||
return self.async_update_reload_and_abort(entry, data=data)
|
||||
|
||||
self._abort_if_unique_id_configured()
|
||||
|
||||
|
||||
@@ -17,9 +17,7 @@ rules:
|
||||
docs-removal-instructions: done
|
||||
entity-event-setup: done
|
||||
entity-unique-id: done
|
||||
has-entity-name:
|
||||
status: exempt
|
||||
comment: No entities.
|
||||
has-entity-name: done
|
||||
runtime-data: done
|
||||
test-before-configure: done
|
||||
test-before-setup: done
|
||||
@@ -66,12 +64,8 @@ rules:
|
||||
entity-disabled-by-default: done
|
||||
entity-translations: done
|
||||
exception-translations: done
|
||||
icon-translations:
|
||||
status: exempt
|
||||
comment: No entities.
|
||||
reconfiguration-flow:
|
||||
status: exempt
|
||||
comment: No configuration options.
|
||||
icon-translations: done
|
||||
reconfiguration-flow: done
|
||||
repair-issues:
|
||||
status: exempt
|
||||
comment: No repairs.
|
||||
|
||||
@@ -18,6 +18,7 @@
|
||||
"oauth_timeout": "[%key:common::config_flow::abort::oauth2_timeout%]",
|
||||
"oauth_unauthorized": "[%key:common::config_flow::abort::oauth2_unauthorized%]",
|
||||
"reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]",
|
||||
"reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]",
|
||||
"unknown": "[%key:common::config_flow::error::unknown%]",
|
||||
"user_rejected_authorize": "[%key:common::config_flow::abort::oauth2_user_rejected_authorize%]",
|
||||
"wrong_account": "Wrong account: Please authenticate with {email}."
|
||||
@@ -62,5 +63,22 @@
|
||||
"name": "Used storage in Drive Trash"
|
||||
}
|
||||
}
|
||||
},
|
||||
"exceptions": {
|
||||
"authentication_failed": {
|
||||
"message": "Authentication failed"
|
||||
},
|
||||
"authentication_not_valid": {
|
||||
"message": "OAuth session is not valid, reauthentication required"
|
||||
},
|
||||
"failed_to_get_folder": {
|
||||
"message": "Failed to get {folder} folder"
|
||||
},
|
||||
"invalid_response_google_drive_error": {
|
||||
"message": "Invalid response from Google Drive: {error}"
|
||||
},
|
||||
"oauth2_implementation_unavailable": {
|
||||
"message": "[%key:common::exceptions::oauth2_implementation_unavailable::message%]"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -24,6 +24,8 @@ from homeassistant.helpers.update_coordinator import (
|
||||
UpdateFailed,
|
||||
)
|
||||
|
||||
from .const import DOMAIN
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
T = TypeVar(
|
||||
@@ -97,7 +99,13 @@ class GoogleWeatherBaseCoordinator(TimestampDataUpdateCoordinator[T]):
|
||||
self.subentry.title,
|
||||
err,
|
||||
)
|
||||
raise UpdateFailed(f"Error fetching {self._data_type_name}") from err
|
||||
raise UpdateFailed(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="update_error",
|
||||
translation_placeholders={
|
||||
"error": str(err),
|
||||
},
|
||||
) from err
|
||||
|
||||
|
||||
class GoogleWeatherCurrentConditionsCoordinator(
|
||||
|
||||
@@ -66,7 +66,7 @@ rules:
|
||||
entity-device-class: done
|
||||
entity-disabled-by-default: done
|
||||
entity-translations: done
|
||||
exception-translations: todo
|
||||
exception-translations: done
|
||||
icon-translations: done
|
||||
reconfiguration-flow: todo
|
||||
repair-issues:
|
||||
|
||||
@@ -98,5 +98,10 @@
|
||||
"name": "Wind gust speed"
|
||||
}
|
||||
}
|
||||
},
|
||||
"exceptions": {
|
||||
"update_error": {
|
||||
"message": "Error fetching weather data: {error}"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -372,7 +372,8 @@ class GrowattCoordinator(DataUpdateCoordinator[dict[str, Any]]):
|
||||
|
||||
if self.api_version != "v1":
|
||||
raise ServiceValidationError(
|
||||
"Updating time segments requires token authentication"
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="token_auth_required",
|
||||
)
|
||||
|
||||
try:
|
||||
@@ -388,7 +389,11 @@ class GrowattCoordinator(DataUpdateCoordinator[dict[str, Any]]):
|
||||
enabled,
|
||||
)
|
||||
except growattServer.GrowattV1ApiError as err:
|
||||
raise HomeAssistantError(f"API error updating time segment: {err}") from err
|
||||
raise HomeAssistantError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="api_error",
|
||||
translation_placeholders={"error": str(err)},
|
||||
) from err
|
||||
|
||||
# Update coordinator's cached data without making an API call (avoids rate limit)
|
||||
if self.data:
|
||||
@@ -411,7 +416,8 @@ class GrowattCoordinator(DataUpdateCoordinator[dict[str, Any]]):
|
||||
|
||||
if self.api_version != "v1":
|
||||
raise ServiceValidationError(
|
||||
"Reading time segments requires token authentication"
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="token_auth_required",
|
||||
)
|
||||
|
||||
# Ensure we have current data
|
||||
@@ -496,7 +502,8 @@ class GrowattCoordinator(DataUpdateCoordinator[dict[str, Any]]):
|
||||
"""
|
||||
if self.api_version != "v1":
|
||||
raise ServiceValidationError(
|
||||
"Updating AC charge times requires token authentication"
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="token_auth_required",
|
||||
)
|
||||
|
||||
try:
|
||||
@@ -510,7 +517,9 @@ class GrowattCoordinator(DataUpdateCoordinator[dict[str, Any]]):
|
||||
)
|
||||
except growattServer.GrowattV1ApiError as err:
|
||||
raise HomeAssistantError(
|
||||
f"API error updating AC charge times: {err}"
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="api_error",
|
||||
translation_placeholders={"error": str(err)},
|
||||
) from err
|
||||
|
||||
if self.data:
|
||||
@@ -544,7 +553,8 @@ class GrowattCoordinator(DataUpdateCoordinator[dict[str, Any]]):
|
||||
"""
|
||||
if self.api_version != "v1":
|
||||
raise ServiceValidationError(
|
||||
"Updating AC discharge times requires token authentication"
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="token_auth_required",
|
||||
)
|
||||
|
||||
try:
|
||||
@@ -557,7 +567,9 @@ class GrowattCoordinator(DataUpdateCoordinator[dict[str, Any]]):
|
||||
)
|
||||
except growattServer.GrowattV1ApiError as err:
|
||||
raise HomeAssistantError(
|
||||
f"API error updating AC discharge times: {err}"
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="api_error",
|
||||
translation_placeholders={"error": str(err)},
|
||||
) from err
|
||||
|
||||
if self.data:
|
||||
@@ -579,7 +591,8 @@ class GrowattCoordinator(DataUpdateCoordinator[dict[str, Any]]):
|
||||
"""Read AC charge time settings from SPH device cache."""
|
||||
if self.api_version != "v1":
|
||||
raise ServiceValidationError(
|
||||
"Reading AC charge times requires token authentication"
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="token_auth_required",
|
||||
)
|
||||
|
||||
if not self.data:
|
||||
@@ -591,7 +604,8 @@ class GrowattCoordinator(DataUpdateCoordinator[dict[str, Any]]):
|
||||
"""Read AC discharge time settings from SPH device cache."""
|
||||
if self.api_version != "v1":
|
||||
raise ServiceValidationError(
|
||||
"Reading AC discharge times requires token authentication"
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="token_auth_required",
|
||||
)
|
||||
|
||||
if not self.data:
|
||||
|
||||
65
homeassistant/components/growatt_server/diagnostics.py
Normal file
65
homeassistant/components/growatt_server/diagnostics.py
Normal file
@@ -0,0 +1,65 @@
|
||||
"""Diagnostics support for Growatt Server."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Any
|
||||
|
||||
from homeassistant.components.diagnostics import async_redact_data
|
||||
from homeassistant.const import CONF_PASSWORD, CONF_TOKEN, CONF_UNIQUE_ID, CONF_USERNAME
|
||||
from homeassistant.core import HomeAssistant
|
||||
|
||||
from .const import CONF_PLANT_ID
|
||||
from .coordinator import GrowattConfigEntry
|
||||
|
||||
TO_REDACT = {
|
||||
CONF_PASSWORD,
|
||||
CONF_TOKEN,
|
||||
CONF_USERNAME,
|
||||
CONF_UNIQUE_ID,
|
||||
CONF_PLANT_ID,
|
||||
"user_id",
|
||||
"deviceSn",
|
||||
"device_sn",
|
||||
}
|
||||
|
||||
# Allowlist of safe telemetry fields from the total coordinator.
|
||||
# Monetary fields (plantMoneyText, totalMoneyText, currency) are intentionally
|
||||
# excluded to avoid leaking financial data under unpredictable key names.
|
||||
_TOTAL_SAFE_KEYS = frozenset(
|
||||
{
|
||||
# Classic API keys
|
||||
"todayEnergy",
|
||||
"totalEnergy",
|
||||
"invTodayPpv",
|
||||
"nominalPower",
|
||||
# V1 API keys (aliases used after normalisation in coordinator)
|
||||
"today_energy",
|
||||
"total_energy",
|
||||
"current_power",
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
async def async_get_config_entry_diagnostics(
|
||||
hass: HomeAssistant, config_entry: GrowattConfigEntry
|
||||
) -> dict[str, Any]:
|
||||
"""Return diagnostics for a config entry."""
|
||||
runtime_data = config_entry.runtime_data
|
||||
total_data = runtime_data.total_coordinator.data or {}
|
||||
return async_redact_data(
|
||||
{
|
||||
"config_entry": config_entry.as_dict(),
|
||||
"total_coordinator": {
|
||||
k: v for k, v in total_data.items() if k in _TOTAL_SAFE_KEYS
|
||||
},
|
||||
"devices": [
|
||||
{
|
||||
"device_sn": device_sn,
|
||||
"device_type": coordinator.device_type,
|
||||
"data": coordinator.data,
|
||||
}
|
||||
for device_sn, coordinator in runtime_data.devices.items()
|
||||
],
|
||||
},
|
||||
TO_REDACT,
|
||||
)
|
||||
@@ -158,7 +158,11 @@ class GrowattNumber(CoordinatorEntity[GrowattCoordinator], NumberEntity):
|
||||
int_value,
|
||||
)
|
||||
except GrowattV1ApiError as e:
|
||||
raise HomeAssistantError(f"Error while setting parameter: {e}") from e
|
||||
raise HomeAssistantError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="api_error",
|
||||
translation_placeholders={"error": str(e)},
|
||||
) from e
|
||||
|
||||
# If no exception was raised, the write was successful
|
||||
_LOGGER.debug(
|
||||
|
||||
@@ -33,7 +33,7 @@ rules:
|
||||
|
||||
# Gold
|
||||
devices: done
|
||||
diagnostics: todo
|
||||
diagnostics: done
|
||||
discovery-update-info: todo
|
||||
discovery: todo
|
||||
docs-data-update: todo
|
||||
@@ -48,7 +48,7 @@ rules:
|
||||
entity-device-class: done
|
||||
entity-disabled-by-default: done
|
||||
entity-translations: done
|
||||
exception-translations: todo
|
||||
exception-translations: done
|
||||
icon-translations: done
|
||||
reconfiguration-flow: todo
|
||||
repair-issues:
|
||||
|
||||
@@ -46,15 +46,20 @@ def _get_coordinator(
|
||||
|
||||
if not coordinators:
|
||||
raise ServiceValidationError(
|
||||
f"No {device_type.upper()} devices with token authentication are configured. "
|
||||
f"Services require {device_type.upper()} devices with V1 API access."
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="no_devices_configured",
|
||||
translation_placeholders={"device_type": device_type.upper()},
|
||||
)
|
||||
|
||||
device_registry = dr.async_get(hass)
|
||||
device_entry = device_registry.async_get(device_id)
|
||||
|
||||
if not device_entry:
|
||||
raise ServiceValidationError(f"Device '{device_id}' not found")
|
||||
raise ServiceValidationError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="device_not_found",
|
||||
translation_placeholders={"device_id": device_id},
|
||||
)
|
||||
|
||||
serial_number = None
|
||||
for identifier in device_entry.identifiers:
|
||||
@@ -63,11 +68,20 @@ def _get_coordinator(
|
||||
break
|
||||
|
||||
if not serial_number:
|
||||
raise ServiceValidationError(f"Device '{device_id}' is not a Growatt device")
|
||||
raise ServiceValidationError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="device_not_growatt",
|
||||
translation_placeholders={"device_id": device_id},
|
||||
)
|
||||
|
||||
if serial_number not in coordinators:
|
||||
raise ServiceValidationError(
|
||||
f"{device_type.upper()} device '{serial_number}' not found or not configured for services"
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="device_not_configured",
|
||||
translation_placeholders={
|
||||
"device_type": device_type.upper(),
|
||||
"serial_number": serial_number,
|
||||
},
|
||||
)
|
||||
|
||||
return coordinators[serial_number]
|
||||
@@ -78,13 +92,17 @@ def _parse_time_str(time_str: str, field_name: str) -> time:
|
||||
parts = time_str.split(":")
|
||||
if len(parts) not in (2, 3):
|
||||
raise ServiceValidationError(
|
||||
f"{field_name} must be in HH:MM or HH:MM:SS format"
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="invalid_time_format",
|
||||
translation_placeholders={"field_name": field_name},
|
||||
)
|
||||
try:
|
||||
return datetime.strptime(f"{parts[0]}:{parts[1]}", "%H:%M").time()
|
||||
except (ValueError, IndexError) as err:
|
||||
raise ServiceValidationError(
|
||||
f"{field_name} must be in HH:MM or HH:MM:SS format"
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="invalid_time_format",
|
||||
translation_placeholders={"field_name": field_name},
|
||||
) from err
|
||||
|
||||
|
||||
@@ -103,7 +121,9 @@ def async_setup_services(hass: HomeAssistant) -> None:
|
||||
|
||||
if not 1 <= segment_id <= 9:
|
||||
raise ServiceValidationError(
|
||||
f"segment_id must be between 1 and 9, got {segment_id}"
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="invalid_segment_id",
|
||||
translation_placeholders={"segment_id": str(segment_id)},
|
||||
)
|
||||
|
||||
valid_modes = {
|
||||
@@ -113,7 +133,12 @@ def async_setup_services(hass: HomeAssistant) -> None:
|
||||
}
|
||||
if batt_mode_str not in valid_modes:
|
||||
raise ServiceValidationError(
|
||||
f"batt_mode must be one of {list(valid_modes.keys())}, got '{batt_mode_str}'"
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="invalid_batt_mode",
|
||||
translation_placeholders={
|
||||
"batt_mode": batt_mode_str,
|
||||
"allowed_modes": ", ".join(valid_modes),
|
||||
},
|
||||
)
|
||||
batt_mode: int = valid_modes[batt_mode_str]
|
||||
|
||||
@@ -151,11 +176,15 @@ def async_setup_services(hass: HomeAssistant) -> None:
|
||||
|
||||
if not 0 <= charge_power <= 100:
|
||||
raise ServiceValidationError(
|
||||
f"charge_power must be between 0 and 100, got {charge_power}"
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="invalid_charge_power",
|
||||
translation_placeholders={"value": str(charge_power)},
|
||||
)
|
||||
if not 0 <= charge_stop_soc <= 100:
|
||||
raise ServiceValidationError(
|
||||
f"charge_stop_soc must be between 0 and 100, got {charge_stop_soc}"
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="invalid_charge_stop_soc",
|
||||
translation_placeholders={"value": str(charge_stop_soc)},
|
||||
)
|
||||
|
||||
periods = []
|
||||
@@ -193,11 +222,15 @@ def async_setup_services(hass: HomeAssistant) -> None:
|
||||
|
||||
if not 0 <= discharge_power <= 100:
|
||||
raise ServiceValidationError(
|
||||
f"discharge_power must be between 0 and 100, got {discharge_power}"
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="invalid_discharge_power",
|
||||
translation_placeholders={"value": str(discharge_power)},
|
||||
)
|
||||
if not 0 <= discharge_stop_soc <= 100:
|
||||
raise ServiceValidationError(
|
||||
f"discharge_stop_soc must be between 0 and 100, got {discharge_stop_soc}"
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="invalid_discharge_stop_soc",
|
||||
translation_placeholders={"value": str(discharge_stop_soc)},
|
||||
)
|
||||
|
||||
periods = []
|
||||
|
||||
@@ -574,6 +574,47 @@
|
||||
}
|
||||
}
|
||||
},
|
||||
"exceptions": {
|
||||
"api_error": {
|
||||
"message": "Growatt API error: {error}"
|
||||
},
|
||||
"device_not_configured": {
|
||||
"message": "{device_type} device {serial_number} is not configured for services."
|
||||
},
|
||||
"device_not_found": {
|
||||
"message": "Device {device_id} not found in the device registry."
|
||||
},
|
||||
"device_not_growatt": {
|
||||
"message": "Device {device_id} is not a Growatt device."
|
||||
},
|
||||
"invalid_batt_mode": {
|
||||
"message": "{batt_mode} is not a valid battery mode. Allowed values: {allowed_modes}."
|
||||
},
|
||||
"invalid_charge_power": {
|
||||
"message": "charge_power must be between 0 and 100, got {value}."
|
||||
},
|
||||
"invalid_charge_stop_soc": {
|
||||
"message": "charge_stop_soc must be between 0 and 100, got {value}."
|
||||
},
|
||||
"invalid_discharge_power": {
|
||||
"message": "discharge_power must be between 0 and 100, got {value}."
|
||||
},
|
||||
"invalid_discharge_stop_soc": {
|
||||
"message": "discharge_stop_soc must be between 0 and 100, got {value}."
|
||||
},
|
||||
"invalid_segment_id": {
|
||||
"message": "segment_id must be between 1 and 9, got {segment_id}."
|
||||
},
|
||||
"invalid_time_format": {
|
||||
"message": "{field_name} must be in HH:MM or HH:MM:SS format."
|
||||
},
|
||||
"no_devices_configured": {
|
||||
"message": "No {device_type} devices with token authentication are configured. Actions require {device_type} devices with V1 API access."
|
||||
},
|
||||
"token_auth_required": {
|
||||
"message": "This action requires token authentication (V1 API)."
|
||||
}
|
||||
},
|
||||
"selector": {
|
||||
"batt_mode": {
|
||||
"options": {
|
||||
|
||||
@@ -125,7 +125,11 @@ class GrowattSwitch(CoordinatorEntity[GrowattCoordinator], SwitchEntity):
|
||||
api_value,
|
||||
)
|
||||
except GrowattV1ApiError as e:
|
||||
raise HomeAssistantError(f"Error while setting switch state: {e}") from e
|
||||
raise HomeAssistantError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="api_error",
|
||||
translation_placeholders={"error": str(e)},
|
||||
) from e
|
||||
|
||||
# If no exception was raised, the write was successful
|
||||
_LOGGER.debug(
|
||||
|
||||
@@ -119,7 +119,6 @@ from .coordinator import (
|
||||
get_core_stats,
|
||||
get_host_info,
|
||||
get_info,
|
||||
get_issues_info,
|
||||
get_network_info,
|
||||
get_os_info,
|
||||
get_store,
|
||||
@@ -158,7 +157,6 @@ __all__ = [
|
||||
"get_core_stats",
|
||||
"get_host_info",
|
||||
"get_info",
|
||||
"get_issues_info",
|
||||
"get_network_info",
|
||||
"get_os_info",
|
||||
"get_store",
|
||||
|
||||
@@ -92,6 +92,7 @@ ISSUE_KEYS_FOR_REPAIRS = {
|
||||
ISSUE_KEY_SYSTEM_FREE_SPACE,
|
||||
ISSUE_KEY_ADDON_PWNED,
|
||||
ISSUE_KEY_ADDON_DEPRECATED_ARCH,
|
||||
"issue_system_ntp_sync_failed",
|
||||
}
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
@@ -15,7 +15,7 @@ from homeassistant.const import ATTR_NAME
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.data_entry_flow import FlowResult
|
||||
|
||||
from . import get_addons_list, get_issues_info
|
||||
from . import get_addons_list
|
||||
from .const import (
|
||||
ATTR_SLUG,
|
||||
EXTRA_PLACEHOLDERS,
|
||||
@@ -31,6 +31,7 @@ from .const import (
|
||||
PLACEHOLDER_KEY_COMPONENTS,
|
||||
PLACEHOLDER_KEY_REFERENCE,
|
||||
)
|
||||
from .coordinator import get_issues_info
|
||||
from .handler import get_supervisor_client
|
||||
from .issues import Issue, Suggestion
|
||||
|
||||
|
||||
@@ -177,6 +177,19 @@
|
||||
},
|
||||
"title": "Multiple data disks detected"
|
||||
},
|
||||
"issue_system_ntp_sync_failed": {
|
||||
"fix_flow": {
|
||||
"abort": {
|
||||
"apply_suggestion_fail": "Could not re-enable NTP. Check the Supervisor logs for more details."
|
||||
},
|
||||
"step": {
|
||||
"system_enable_ntp": {
|
||||
"description": "The device could not contact its configured time servers (NTP). Using a secondary online time check, we detected that the system clock was more than 1 hour incorrect. The time has been corrected and the NTP service was temporarily disabled so the correction could be applied. To keep the system time accurate, we recommend fixing the issue preventing access to the NTP servers.\n\nCheck the **Host logs** to investigate why NTP servers could not be reached. Once resolved, select **Submit** to re-enable the NTP service."
|
||||
}
|
||||
}
|
||||
},
|
||||
"title": "Time synchronization issue detected"
|
||||
},
|
||||
"issue_system_reboot_required": {
|
||||
"fix_flow": {
|
||||
"abort": {
|
||||
|
||||
@@ -117,13 +117,21 @@ async def async_setup_entry(hass: HomeAssistant, entry: HikvisionConfigEntry) ->
|
||||
# Map raw event type names to friendly names using SENSOR_MAP
|
||||
mapped_events: dict[str, list[int]] = {}
|
||||
for event_type, channels in nvr_events.items():
|
||||
friendly_name = SENSOR_MAP.get(event_type.lower(), event_type)
|
||||
event_key = event_type.lower()
|
||||
# Skip videoloss - used as watchdog by pyhik, not a real sensor
|
||||
if event_key == "videoloss":
|
||||
continue
|
||||
friendly_name = SENSOR_MAP.get(event_key)
|
||||
if friendly_name is None:
|
||||
_LOGGER.debug("Skipping unmapped event type: %s", event_type)
|
||||
continue
|
||||
if friendly_name in mapped_events:
|
||||
mapped_events[friendly_name].extend(channels)
|
||||
else:
|
||||
mapped_events[friendly_name] = list(channels)
|
||||
_LOGGER.debug("Mapped NVR events: %s", mapped_events)
|
||||
camera.inject_events(mapped_events)
|
||||
if mapped_events:
|
||||
camera.inject_events(mapped_events)
|
||||
else:
|
||||
_LOGGER.debug(
|
||||
"No event triggers returned from %s. "
|
||||
|
||||
@@ -23,6 +23,6 @@
|
||||
"iot_class": "cloud_push",
|
||||
"loggers": ["aiohomeconnect"],
|
||||
"quality_scale": "platinum",
|
||||
"requirements": ["aiohomeconnect==0.32.0"],
|
||||
"requirements": ["aiohomeconnect==0.33.0"],
|
||||
"zeroconf": ["_homeconnect._tcp.local."]
|
||||
}
|
||||
|
||||
@@ -119,6 +119,10 @@ set_program_and_options:
|
||||
- cooking_common_program_hood_automatic
|
||||
- cooking_common_program_hood_venting
|
||||
- cooking_common_program_hood_delayed_shut_off
|
||||
- cooking_oven_program_heating_mode_3_d_heating
|
||||
- cooking_oven_program_heating_mode_air_fry
|
||||
- cooking_oven_program_heating_mode_grill_large_area
|
||||
- cooking_oven_program_heating_mode_grill_small_area
|
||||
- cooking_oven_program_heating_mode_pre_heating
|
||||
- cooking_oven_program_heating_mode_hot_air
|
||||
- cooking_oven_program_heating_mode_hot_air_eco
|
||||
|
||||
@@ -260,12 +260,16 @@
|
||||
"cooking_common_program_hood_automatic": "[%key:component::home_connect::selector::programs::options::cooking_common_program_hood_automatic%]",
|
||||
"cooking_common_program_hood_delayed_shut_off": "[%key:component::home_connect::selector::programs::options::cooking_common_program_hood_delayed_shut_off%]",
|
||||
"cooking_common_program_hood_venting": "[%key:component::home_connect::selector::programs::options::cooking_common_program_hood_venting%]",
|
||||
"cooking_oven_program_heating_mode_3_d_heating": "[%key:component::home_connect::selector::programs::options::cooking_oven_program_heating_mode_3_d_heating%]",
|
||||
"cooking_oven_program_heating_mode_air_fry": "[%key:component::home_connect::selector::programs::options::cooking_oven_program_heating_mode_air_fry%]",
|
||||
"cooking_oven_program_heating_mode_bottom_heating": "[%key:component::home_connect::selector::programs::options::cooking_oven_program_heating_mode_bottom_heating%]",
|
||||
"cooking_oven_program_heating_mode_bread_baking": "[%key:component::home_connect::selector::programs::options::cooking_oven_program_heating_mode_bread_baking%]",
|
||||
"cooking_oven_program_heating_mode_defrost": "[%key:component::home_connect::selector::programs::options::cooking_oven_program_heating_mode_defrost%]",
|
||||
"cooking_oven_program_heating_mode_desiccation": "[%key:component::home_connect::selector::programs::options::cooking_oven_program_heating_mode_desiccation%]",
|
||||
"cooking_oven_program_heating_mode_dough_proving": "[%key:component::home_connect::selector::programs::options::cooking_oven_program_heating_mode_dough_proving%]",
|
||||
"cooking_oven_program_heating_mode_frozen_heatup_special": "[%key:component::home_connect::selector::programs::options::cooking_oven_program_heating_mode_frozen_heatup_special%]",
|
||||
"cooking_oven_program_heating_mode_grill_large_area": "[%key:component::home_connect::selector::programs::options::cooking_oven_program_heating_mode_grill_large_area%]",
|
||||
"cooking_oven_program_heating_mode_grill_small_area": "[%key:component::home_connect::selector::programs::options::cooking_oven_program_heating_mode_grill_small_area%]",
|
||||
"cooking_oven_program_heating_mode_hot_air": "[%key:component::home_connect::selector::programs::options::cooking_oven_program_heating_mode_hot_air%]",
|
||||
"cooking_oven_program_heating_mode_hot_air_100_steam": "[%key:component::home_connect::selector::programs::options::cooking_oven_program_heating_mode_hot_air_100_steam%]",
|
||||
"cooking_oven_program_heating_mode_hot_air_30_steam": "[%key:component::home_connect::selector::programs::options::cooking_oven_program_heating_mode_hot_air_30_steam%]",
|
||||
@@ -616,12 +620,16 @@
|
||||
"cooking_common_program_hood_automatic": "[%key:component::home_connect::selector::programs::options::cooking_common_program_hood_automatic%]",
|
||||
"cooking_common_program_hood_delayed_shut_off": "[%key:component::home_connect::selector::programs::options::cooking_common_program_hood_delayed_shut_off%]",
|
||||
"cooking_common_program_hood_venting": "[%key:component::home_connect::selector::programs::options::cooking_common_program_hood_venting%]",
|
||||
"cooking_oven_program_heating_mode_3_d_heating": "[%key:component::home_connect::selector::programs::options::cooking_oven_program_heating_mode_3_d_heating%]",
|
||||
"cooking_oven_program_heating_mode_air_fry": "[%key:component::home_connect::selector::programs::options::cooking_oven_program_heating_mode_air_fry%]",
|
||||
"cooking_oven_program_heating_mode_bottom_heating": "[%key:component::home_connect::selector::programs::options::cooking_oven_program_heating_mode_bottom_heating%]",
|
||||
"cooking_oven_program_heating_mode_bread_baking": "[%key:component::home_connect::selector::programs::options::cooking_oven_program_heating_mode_bread_baking%]",
|
||||
"cooking_oven_program_heating_mode_defrost": "[%key:component::home_connect::selector::programs::options::cooking_oven_program_heating_mode_defrost%]",
|
||||
"cooking_oven_program_heating_mode_desiccation": "[%key:component::home_connect::selector::programs::options::cooking_oven_program_heating_mode_desiccation%]",
|
||||
"cooking_oven_program_heating_mode_dough_proving": "[%key:component::home_connect::selector::programs::options::cooking_oven_program_heating_mode_dough_proving%]",
|
||||
"cooking_oven_program_heating_mode_frozen_heatup_special": "[%key:component::home_connect::selector::programs::options::cooking_oven_program_heating_mode_frozen_heatup_special%]",
|
||||
"cooking_oven_program_heating_mode_grill_large_area": "[%key:component::home_connect::selector::programs::options::cooking_oven_program_heating_mode_grill_large_area%]",
|
||||
"cooking_oven_program_heating_mode_grill_small_area": "[%key:component::home_connect::selector::programs::options::cooking_oven_program_heating_mode_grill_small_area%]",
|
||||
"cooking_oven_program_heating_mode_hot_air": "[%key:component::home_connect::selector::programs::options::cooking_oven_program_heating_mode_hot_air%]",
|
||||
"cooking_oven_program_heating_mode_hot_air_100_steam": "[%key:component::home_connect::selector::programs::options::cooking_oven_program_heating_mode_hot_air_100_steam%]",
|
||||
"cooking_oven_program_heating_mode_hot_air_30_steam": "[%key:component::home_connect::selector::programs::options::cooking_oven_program_heating_mode_hot_air_30_steam%]",
|
||||
@@ -1621,12 +1629,16 @@
|
||||
"cooking_common_program_hood_automatic": "Automatic",
|
||||
"cooking_common_program_hood_delayed_shut_off": "Delayed shut off",
|
||||
"cooking_common_program_hood_venting": "Venting",
|
||||
"cooking_oven_program_heating_mode_3_d_heating": "3D heating",
|
||||
"cooking_oven_program_heating_mode_air_fry": "Air fry",
|
||||
"cooking_oven_program_heating_mode_bottom_heating": "Bottom heating",
|
||||
"cooking_oven_program_heating_mode_bread_baking": "Bread baking",
|
||||
"cooking_oven_program_heating_mode_defrost": "Defrost",
|
||||
"cooking_oven_program_heating_mode_desiccation": "Desiccation",
|
||||
"cooking_oven_program_heating_mode_dough_proving": "Dough proving",
|
||||
"cooking_oven_program_heating_mode_frozen_heatup_special": "Special heat-up for frozen products",
|
||||
"cooking_oven_program_heating_mode_grill_large_area": "Grill (large area)",
|
||||
"cooking_oven_program_heating_mode_grill_small_area": "Grill (small area)",
|
||||
"cooking_oven_program_heating_mode_hot_air": "Hot air",
|
||||
"cooking_oven_program_heating_mode_hot_air_100_steam": "Hot air + 100 RH",
|
||||
"cooking_oven_program_heating_mode_hot_air_30_steam": "Hot air + 30 RH",
|
||||
|
||||
@@ -7,5 +7,5 @@
|
||||
"integration_type": "hub",
|
||||
"iot_class": "cloud_push",
|
||||
"loggers": ["homematicip"],
|
||||
"requirements": ["homematicip==2.6.0"]
|
||||
"requirements": ["homematicip==2.7.0"]
|
||||
}
|
||||
|
||||
@@ -7,5 +7,5 @@
|
||||
"integration_type": "hub",
|
||||
"iot_class": "cloud_polling",
|
||||
"quality_scale": "bronze",
|
||||
"requirements": ["hyponcloud==0.3.0"]
|
||||
"requirements": ["hyponcloud==0.9.0"]
|
||||
}
|
||||
|
||||
@@ -21,11 +21,17 @@ from .coordinator import HypontechConfigEntry, HypontechDataCoordinator
|
||||
from .entity import HypontechEntity, HypontechPlantEntity
|
||||
|
||||
|
||||
def _power_unit(data: OverviewData | PlantData) -> str:
|
||||
"""Return the unit of measurement for power based on the API unit."""
|
||||
return UnitOfPower.KILO_WATT if data.company.upper() == "KW" else UnitOfPower.WATT
|
||||
|
||||
|
||||
@dataclass(frozen=True, kw_only=True)
|
||||
class HypontechSensorDescription(SensorEntityDescription):
|
||||
"""Describes Hypontech overview sensor entity."""
|
||||
|
||||
value_fn: Callable[[OverviewData], float | None]
|
||||
unit_fn: Callable[[OverviewData], str] | None = None
|
||||
|
||||
|
||||
@dataclass(frozen=True, kw_only=True)
|
||||
@@ -33,15 +39,16 @@ class HypontechPlantSensorDescription(SensorEntityDescription):
|
||||
"""Describes Hypontech plant sensor entity."""
|
||||
|
||||
value_fn: Callable[[PlantData], float | None]
|
||||
unit_fn: Callable[[PlantData], str] | None = None
|
||||
|
||||
|
||||
OVERVIEW_SENSORS: tuple[HypontechSensorDescription, ...] = (
|
||||
HypontechSensorDescription(
|
||||
key="pv_power",
|
||||
native_unit_of_measurement=UnitOfPower.WATT,
|
||||
device_class=SensorDeviceClass.POWER,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
value_fn=lambda c: c.power,
|
||||
unit_fn=_power_unit,
|
||||
),
|
||||
HypontechSensorDescription(
|
||||
key="lifetime_energy",
|
||||
@@ -64,10 +71,10 @@ OVERVIEW_SENSORS: tuple[HypontechSensorDescription, ...] = (
|
||||
PLANT_SENSORS: tuple[HypontechPlantSensorDescription, ...] = (
|
||||
HypontechPlantSensorDescription(
|
||||
key="pv_power",
|
||||
native_unit_of_measurement=UnitOfPower.WATT,
|
||||
device_class=SensorDeviceClass.POWER,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
value_fn=lambda c: c.power,
|
||||
unit_fn=_power_unit,
|
||||
),
|
||||
HypontechPlantSensorDescription(
|
||||
key="lifetime_energy",
|
||||
@@ -124,6 +131,13 @@ class HypontechOverviewSensor(HypontechEntity, SensorEntity):
|
||||
self.entity_description = description
|
||||
self._attr_unique_id = f"{coordinator.account_id}_{description.key}"
|
||||
|
||||
@property
|
||||
def native_unit_of_measurement(self) -> str | None:
|
||||
"""Return the unit of measurement."""
|
||||
if self.entity_description.unit_fn is not None:
|
||||
return self.entity_description.unit_fn(self.coordinator.data.overview)
|
||||
return super().native_unit_of_measurement
|
||||
|
||||
@property
|
||||
def native_value(self) -> float | None:
|
||||
"""Return the state of the sensor."""
|
||||
@@ -146,6 +160,13 @@ class HypontechPlantSensor(HypontechPlantEntity, SensorEntity):
|
||||
self.entity_description = description
|
||||
self._attr_unique_id = f"{plant_id}_{description.key}"
|
||||
|
||||
@property
|
||||
def native_unit_of_measurement(self) -> str | None:
|
||||
"""Return the unit of measurement."""
|
||||
if self.entity_description.unit_fn is not None:
|
||||
return self.entity_description.unit_fn(self.plant)
|
||||
return super().native_unit_of_measurement
|
||||
|
||||
@property
|
||||
def native_value(self) -> float | None:
|
||||
"""Return the state of the sensor."""
|
||||
|
||||
@@ -214,12 +214,7 @@ class MoldIndicator(SensorEntity):
|
||||
# Replay current state of source entities
|
||||
for entity_id in self._entities.values():
|
||||
state = self.hass.states.get(entity_id)
|
||||
state_event: Event[EventStateChangedData] = Event(
|
||||
"", {"entity_id": entity_id, "new_state": state, "old_state": None}
|
||||
)
|
||||
self._async_mold_indicator_sensor_state_listener(
|
||||
state_event, update_state=False
|
||||
)
|
||||
self._update_cached_values(entity_id, state)
|
||||
|
||||
self._recalculate()
|
||||
|
||||
@@ -227,9 +222,19 @@ class MoldIndicator(SensorEntity):
|
||||
calculated_state = self._async_calculate_state()
|
||||
self._preview_callback(calculated_state.state, calculated_state.attributes)
|
||||
|
||||
@callback
|
||||
def _update_cached_values(self, entity_id: str, new_state: State | None) -> None:
|
||||
"""Update cached sensor values from a state."""
|
||||
if entity_id == self._entities[CONF_INDOOR_TEMP]:
|
||||
self._indoor_temp = self._get_temperature_from_state(new_state)
|
||||
elif entity_id == self._entities[CONF_OUTDOOR_TEMP]:
|
||||
self._outdoor_temp = self._get_temperature_from_state(new_state)
|
||||
elif entity_id == self._entities[CONF_INDOOR_HUMIDITY]:
|
||||
self._indoor_hum = self._get_humidity_from_state(new_state)
|
||||
|
||||
@callback
|
||||
def _async_mold_indicator_sensor_state_listener(
|
||||
self, event: Event[EventStateChangedData], update_state: bool = True
|
||||
self, event: Event[EventStateChangedData]
|
||||
) -> None:
|
||||
"""Handle state changes for dependent sensors."""
|
||||
entity_id = event.data["entity_id"]
|
||||
@@ -242,16 +247,7 @@ class MoldIndicator(SensorEntity):
|
||||
new_state,
|
||||
)
|
||||
|
||||
# update state depending on which sensor changed
|
||||
if entity_id == self._entities[CONF_INDOOR_TEMP]:
|
||||
self._indoor_temp = self._get_temperature_from_state(new_state)
|
||||
elif entity_id == self._entities[CONF_OUTDOOR_TEMP]:
|
||||
self._outdoor_temp = self._get_temperature_from_state(new_state)
|
||||
elif entity_id == self._entities[CONF_INDOOR_HUMIDITY]:
|
||||
self._indoor_hum = self._get_humidity_from_state(new_state)
|
||||
|
||||
if not update_state:
|
||||
return
|
||||
self._update_cached_values(entity_id, new_state)
|
||||
|
||||
self._recalculate()
|
||||
|
||||
|
||||
@@ -7,5 +7,5 @@
|
||||
"integration_type": "device",
|
||||
"iot_class": "cloud_polling",
|
||||
"quality_scale": "platinum",
|
||||
"requirements": ["ohme==1.7.0"]
|
||||
"requirements": ["ohme==1.7.1"]
|
||||
}
|
||||
|
||||
@@ -65,6 +65,7 @@ class PlugwiseDataUpdateCoordinator(DataUpdateCoordinator[dict[str, GwEntityData
|
||||
)
|
||||
self._connected: bool = False
|
||||
self._current_devices: set[str] = set()
|
||||
self._firmware_list: dict[str, str | None] = {}
|
||||
self._stored_devices: set[str] = set()
|
||||
self.new_devices: set[str] = set()
|
||||
|
||||
@@ -129,6 +130,7 @@ class PlugwiseDataUpdateCoordinator(DataUpdateCoordinator[dict[str, GwEntityData
|
||||
) from err
|
||||
|
||||
self._add_remove_devices(data)
|
||||
self._update_device_firmware(data)
|
||||
return data
|
||||
|
||||
def _add_remove_devices(self, data: dict[str, GwEntityData]) -> None:
|
||||
@@ -138,6 +140,9 @@ class PlugwiseDataUpdateCoordinator(DataUpdateCoordinator[dict[str, GwEntityData
|
||||
# 'new_devices' contains all devices present in 'data' at init ('self._current_devices' is empty)
|
||||
# this is required for the proper initialization of all the present platform entities.
|
||||
self.new_devices = set_of_data - self._current_devices
|
||||
for device_id in self.new_devices:
|
||||
self._firmware_list.setdefault(device_id, data[device_id].get("firmware"))
|
||||
|
||||
current_devices = (
|
||||
self._stored_devices if not self._current_devices else self._current_devices
|
||||
)
|
||||
@@ -149,21 +154,52 @@ class PlugwiseDataUpdateCoordinator(DataUpdateCoordinator[dict[str, GwEntityData
|
||||
"""Clean registries when removed devices found."""
|
||||
device_reg = dr.async_get(self.hass)
|
||||
for device_id in removed_devices:
|
||||
device_entry = device_reg.async_get_device({(DOMAIN, device_id)})
|
||||
if device_entry is None:
|
||||
LOGGER.warning(
|
||||
"Failed to remove %s device/zone %s, not present in device_registry",
|
||||
if (
|
||||
device_entry := device_reg.async_get_device({(DOMAIN, device_id)})
|
||||
) is not None:
|
||||
device_reg.async_update_device(
|
||||
device_entry.id, remove_config_entry_id=self.config_entry.entry_id
|
||||
)
|
||||
LOGGER.debug(
|
||||
"%s %s %s removed from device_registry",
|
||||
DOMAIN,
|
||||
device_entry.model,
|
||||
device_id,
|
||||
)
|
||||
continue # pragma: no cover
|
||||
|
||||
device_reg.async_update_device(
|
||||
device_entry.id, remove_config_entry_id=self.config_entry.entry_id
|
||||
)
|
||||
self._firmware_list.pop(device_id, None)
|
||||
|
||||
def _update_device_firmware(self, data: dict[str, GwEntityData]) -> None:
|
||||
"""Detect firmware changes and update the device registry."""
|
||||
for device_id, device in data.items():
|
||||
# Only update firmware when the key is present and not None, to avoid
|
||||
# wiping stored firmware on partial or transient updates.
|
||||
if "firmware" not in device:
|
||||
continue
|
||||
new_firmware = device.get("firmware")
|
||||
if new_firmware is None:
|
||||
continue
|
||||
if (
|
||||
device_id in self._firmware_list
|
||||
and new_firmware != self._firmware_list[device_id]
|
||||
):
|
||||
updated = self._update_firmware_in_dr(device_id, new_firmware)
|
||||
if updated:
|
||||
self._firmware_list[device_id] = new_firmware
|
||||
|
||||
def _update_firmware_in_dr(self, device_id: str, firmware: str | None) -> bool:
|
||||
"""Update device sw_version in device_registry."""
|
||||
device_reg = dr.async_get(self.hass)
|
||||
if (
|
||||
device_entry := device_reg.async_get_device({(DOMAIN, device_id)})
|
||||
) is not None:
|
||||
device_reg.async_update_device(device_entry.id, sw_version=firmware)
|
||||
LOGGER.debug(
|
||||
"%s %s %s removed from device_registry",
|
||||
"Firmware in device_registry updated for %s %s %s",
|
||||
DOMAIN,
|
||||
device_entry.model,
|
||||
device_id,
|
||||
)
|
||||
return True
|
||||
|
||||
return False # pragma: no cover
|
||||
|
||||
@@ -74,6 +74,26 @@ CONTAINER_BUTTONS: tuple[PortainerButtonDescription, ...] = (
|
||||
)
|
||||
),
|
||||
),
|
||||
PortainerButtonDescription(
|
||||
key="pause",
|
||||
translation_key="pause_container",
|
||||
entity_category=EntityCategory.CONFIG,
|
||||
press_action=(
|
||||
lambda portainer, endpoint_id, container_id: portainer.pause_container(
|
||||
endpoint_id, container_id
|
||||
)
|
||||
),
|
||||
),
|
||||
PortainerButtonDescription(
|
||||
key="resume",
|
||||
translation_key="resume_container",
|
||||
entity_category=EntityCategory.CONFIG,
|
||||
press_action=(
|
||||
lambda portainer, endpoint_id, container_id: portainer.unpause_container(
|
||||
endpoint_id, container_id
|
||||
)
|
||||
),
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
|
||||
@@ -1,5 +1,13 @@
|
||||
{
|
||||
"entity": {
|
||||
"button": {
|
||||
"pause_container": {
|
||||
"default": "mdi:pause-circle"
|
||||
},
|
||||
"resume_container": {
|
||||
"default": "mdi:play"
|
||||
}
|
||||
},
|
||||
"sensor": {
|
||||
"api_version": {
|
||||
"default": "mdi:api"
|
||||
|
||||
@@ -66,8 +66,14 @@
|
||||
"images_prune": {
|
||||
"name": "Prune unused images"
|
||||
},
|
||||
"pause_container": {
|
||||
"name": "Pause container"
|
||||
},
|
||||
"restart_container": {
|
||||
"name": "Restart container"
|
||||
},
|
||||
"resume_container": {
|
||||
"name": "Resume container"
|
||||
}
|
||||
},
|
||||
"sensor": {
|
||||
|
||||
@@ -44,11 +44,9 @@ CONNECTION_SCHEMA = vol.Schema(
|
||||
{
|
||||
vol.Required(CONF_HOST): str,
|
||||
vol.Required(CONF_PORT, default=DEFAULT_PORT): cv.port,
|
||||
vol.Optional(CONF_CODE): cv.string,
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
CODE_SCHEMA = vol.Schema(
|
||||
{
|
||||
vol.Optional(CONF_CODE): cv.string,
|
||||
@@ -86,6 +84,11 @@ SWITCHABLE_OUTPUT_SCHEMA = vol.Schema({vol.Required(CONF_NAME): cv.string})
|
||||
class SatelConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
"""Handle a Satel Integra config flow."""
|
||||
|
||||
def __init__(self) -> None:
|
||||
"""Initialize the config flow."""
|
||||
super().__init__()
|
||||
self.connection_data: dict[str, Any] = {}
|
||||
|
||||
VERSION = 2
|
||||
MINOR_VERSION = 1
|
||||
|
||||
@@ -119,24 +122,71 @@ class SatelConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
if user_input is not None:
|
||||
self._async_abort_entries_match({CONF_HOST: user_input[CONF_HOST]})
|
||||
|
||||
valid = await self.test_connection(
|
||||
user_input[CONF_HOST], user_input[CONF_PORT]
|
||||
)
|
||||
|
||||
if valid:
|
||||
return self.async_create_entry(
|
||||
title=user_input[CONF_HOST],
|
||||
data={
|
||||
CONF_HOST: user_input[CONF_HOST],
|
||||
CONF_PORT: user_input[CONF_PORT],
|
||||
},
|
||||
options={CONF_CODE: user_input.get(CONF_CODE)},
|
||||
)
|
||||
if await self.test_connection(user_input[CONF_HOST], user_input[CONF_PORT]):
|
||||
self.connection_data = {
|
||||
CONF_HOST: user_input[CONF_HOST],
|
||||
CONF_PORT: user_input[CONF_PORT],
|
||||
}
|
||||
return await self.async_step_code()
|
||||
|
||||
errors["base"] = "cannot_connect"
|
||||
|
||||
return self.async_show_form(
|
||||
step_id="user", data_schema=CONNECTION_SCHEMA, errors=errors
|
||||
step_id="user",
|
||||
data_schema=CONNECTION_SCHEMA,
|
||||
errors=errors,
|
||||
)
|
||||
|
||||
async def async_step_code(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle code configuration."""
|
||||
if user_input is not None:
|
||||
return self.async_create_entry(
|
||||
title=self.connection_data[CONF_HOST],
|
||||
data=self.connection_data,
|
||||
options={CONF_CODE: user_input.get(CONF_CODE)},
|
||||
)
|
||||
|
||||
return self.async_show_form(
|
||||
step_id="code",
|
||||
data_schema=CODE_SCHEMA,
|
||||
)
|
||||
|
||||
async def async_step_reconfigure(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle reconfiguration."""
|
||||
errors: dict[str, str] = {}
|
||||
reconfigure_entry = self._get_reconfigure_entry()
|
||||
|
||||
if user_input is not None:
|
||||
self._async_abort_entries_match({CONF_HOST: user_input[CONF_HOST]})
|
||||
|
||||
if await self.test_connection(user_input[CONF_HOST], user_input[CONF_PORT]):
|
||||
return self.async_update_reload_and_abort(
|
||||
reconfigure_entry,
|
||||
data_updates={
|
||||
CONF_HOST: user_input[CONF_HOST],
|
||||
CONF_PORT: user_input[CONF_PORT],
|
||||
},
|
||||
title=user_input[CONF_HOST],
|
||||
reload_even_if_entry_is_unchanged=False,
|
||||
)
|
||||
|
||||
errors["base"] = "cannot_connect"
|
||||
|
||||
suggested_values: dict[str, Any] = {
|
||||
**reconfigure_entry.data,
|
||||
**(user_input or {}),
|
||||
}
|
||||
|
||||
return self.async_show_form(
|
||||
step_id="reconfigure",
|
||||
data_schema=self.add_suggested_values_to_schema(
|
||||
CONNECTION_SCHEMA, suggested_values
|
||||
),
|
||||
errors=errors,
|
||||
)
|
||||
|
||||
async def test_connection(self, host: str, port: int) -> bool:
|
||||
|
||||
@@ -5,20 +5,37 @@
|
||||
},
|
||||
"config": {
|
||||
"abort": {
|
||||
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]"
|
||||
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
|
||||
"reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]"
|
||||
},
|
||||
"error": {
|
||||
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]"
|
||||
},
|
||||
"step": {
|
||||
"user": {
|
||||
"code": {
|
||||
"data": {
|
||||
"code": "[%key:component::satel_integra::common::code%]"
|
||||
},
|
||||
"data_description": {
|
||||
"code": "[%key:component::satel_integra::common::code_input_description%]"
|
||||
}
|
||||
},
|
||||
"reconfigure": {
|
||||
"data": {
|
||||
"host": "[%key:common::config_flow::data::host%]",
|
||||
"port": "[%key:common::config_flow::data::port%]"
|
||||
},
|
||||
"data_description": {
|
||||
"host": "[%key:component::satel_integra::config::step::user::data_description::host%]",
|
||||
"port": "[%key:component::satel_integra::config::step::user::data_description::port%]"
|
||||
}
|
||||
},
|
||||
"user": {
|
||||
"data": {
|
||||
"code": "[%key:component::satel_integra::common::code%]",
|
||||
"host": "[%key:common::config_flow::data::host%]",
|
||||
"port": "[%key:common::config_flow::data::port%]"
|
||||
},
|
||||
"data_description": {
|
||||
"code": "[%key:component::satel_integra::common::code_input_description%]",
|
||||
"host": "The IP address of the alarm panel",
|
||||
"port": "The port of the alarm panel"
|
||||
}
|
||||
|
||||
17
homeassistant/components/schedule/condition.py
Normal file
17
homeassistant/components/schedule/condition.py
Normal file
@@ -0,0 +1,17 @@
|
||||
"""Provides conditions for schedules."""
|
||||
|
||||
from homeassistant.const import STATE_OFF, STATE_ON
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.condition import Condition, make_entity_state_condition
|
||||
|
||||
from .const import DOMAIN
|
||||
|
||||
CONDITIONS: dict[str, type[Condition]] = {
|
||||
"is_off": make_entity_state_condition(DOMAIN, STATE_OFF),
|
||||
"is_on": make_entity_state_condition(DOMAIN, STATE_ON),
|
||||
}
|
||||
|
||||
|
||||
async def async_get_conditions(hass: HomeAssistant) -> dict[str, type[Condition]]:
|
||||
"""Return the schedule conditions."""
|
||||
return CONDITIONS
|
||||
17
homeassistant/components/schedule/conditions.yaml
Normal file
17
homeassistant/components/schedule/conditions.yaml
Normal file
@@ -0,0 +1,17 @@
|
||||
.condition_common: &condition_common
|
||||
target:
|
||||
entity:
|
||||
domain: schedule
|
||||
fields:
|
||||
behavior:
|
||||
required: true
|
||||
default: any
|
||||
selector:
|
||||
select:
|
||||
translation_key: condition_behavior
|
||||
options:
|
||||
- all
|
||||
- any
|
||||
|
||||
is_off: *condition_common
|
||||
is_on: *condition_common
|
||||
@@ -1,4 +1,12 @@
|
||||
{
|
||||
"conditions": {
|
||||
"is_off": {
|
||||
"condition": "mdi:calendar-blank"
|
||||
},
|
||||
"is_on": {
|
||||
"condition": "mdi:calendar-clock"
|
||||
}
|
||||
},
|
||||
"services": {
|
||||
"get_schedule": {
|
||||
"service": "mdi:calendar-export"
|
||||
|
||||
@@ -1,8 +1,32 @@
|
||||
{
|
||||
"common": {
|
||||
"condition_behavior_description": "How the state should match on the targeted schedules.",
|
||||
"condition_behavior_name": "Behavior",
|
||||
"trigger_behavior_description": "The behavior of the targeted schedules to trigger on.",
|
||||
"trigger_behavior_name": "Behavior"
|
||||
},
|
||||
"conditions": {
|
||||
"is_off": {
|
||||
"description": "Tests if one or more schedule blocks are currently not active.",
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"description": "[%key:component::schedule::common::condition_behavior_description%]",
|
||||
"name": "[%key:component::schedule::common::condition_behavior_name%]"
|
||||
}
|
||||
},
|
||||
"name": "Schedule is off"
|
||||
},
|
||||
"is_on": {
|
||||
"description": "Tests if one or more schedule blocks are currently active.",
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"description": "[%key:component::schedule::common::condition_behavior_description%]",
|
||||
"name": "[%key:component::schedule::common::condition_behavior_name%]"
|
||||
}
|
||||
},
|
||||
"name": "Schedule is on"
|
||||
}
|
||||
},
|
||||
"entity_component": {
|
||||
"_": {
|
||||
"name": "[%key:component::schedule::title%]",
|
||||
@@ -25,6 +49,12 @@
|
||||
}
|
||||
},
|
||||
"selector": {
|
||||
"condition_behavior": {
|
||||
"options": {
|
||||
"all": "All",
|
||||
"any": "Any"
|
||||
}
|
||||
},
|
||||
"trigger_behavior": {
|
||||
"options": {
|
||||
"any": "Any",
|
||||
|
||||
@@ -17,7 +17,7 @@
|
||||
"username": "[%key:common::config_flow::data::username%]"
|
||||
},
|
||||
"data_description": {
|
||||
"password": "The password for accessing your SFR box's web interface, the default is the WiFi security key found on the device label",
|
||||
"password": "The password for accessing your SFR box's web interface, the default is the Wi-Fi security key found on the device label",
|
||||
"username": "The username for accessing your SFR box's web interface, the default is 'admin'"
|
||||
}
|
||||
},
|
||||
|
||||
@@ -2,21 +2,21 @@
|
||||
"config": {
|
||||
"abort": {
|
||||
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
|
||||
"already_on_wifi": "Device is already connected to WiFi and was discovered via the network.",
|
||||
"already_on_wifi": "Device is already connected to Wi-Fi and was discovered via the network.",
|
||||
"another_device": "Reconfiguration was unsuccessful, the IP address/hostname of another Shelly device was used.",
|
||||
"ble_not_permitted": "Device is bound to a Shelly cloud account and cannot be provisioned via Bluetooth. Please use the Shelly app to provision WiFi credentials, then add the device when it appears on your network.",
|
||||
"ble_not_permitted": "Device is bound to a Shelly cloud account and cannot be provisioned via Bluetooth. Please use the Shelly app to provision Wi-Fi credentials, then add the device when it appears on your network.",
|
||||
"cannot_connect": "Failed to connect to the device. Ensure the device is powered on and within range.",
|
||||
"custom_port_not_supported": "[%key:component::shelly::config::error::custom_port_not_supported%]",
|
||||
"firmware_not_fully_provisioned": "Device not fully provisioned. Please contact Shelly support",
|
||||
"invalid_discovery_info": "Invalid Bluetooth discovery information.",
|
||||
"ipv6_not_supported": "IPv6 is not supported.",
|
||||
"mac_address_mismatch": "[%key:component::shelly::config::error::mac_address_mismatch%]",
|
||||
"no_wifi_networks": "No WiFi networks found during scan.",
|
||||
"no_wifi_networks": "No Wi-Fi networks found during scan.",
|
||||
"reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]",
|
||||
"reauth_unsuccessful": "Re-authentication was unsuccessful, please remove the integration and set it up again.",
|
||||
"reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]",
|
||||
"unknown": "[%key:common::config_flow::error::unknown%]",
|
||||
"wifi_provisioned": "WiFi credentials for {ssid} have been provisioned to {name}. The device is connecting to WiFi and will complete setup automatically."
|
||||
"wifi_provisioned": "Wi-Fi credentials for {ssid} have been provisioned to {name}. The device is connecting to Wi-Fi and will complete setup automatically."
|
||||
},
|
||||
"error": {
|
||||
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
|
||||
@@ -28,20 +28,20 @@
|
||||
},
|
||||
"flow_title": "{name}",
|
||||
"progress": {
|
||||
"provisioning": "Provisioning WiFi credentials and waiting for device to connect"
|
||||
"provisioning": "Provisioning Wi-Fi credentials and waiting for device to connect"
|
||||
},
|
||||
"step": {
|
||||
"bluetooth_confirm": {
|
||||
"data": {
|
||||
"disable_ap": "Disable WiFi access point after provisioning",
|
||||
"disable_ap": "Disable Wi-Fi access point after provisioning",
|
||||
"disable_ble_rpc": "Disable Bluetooth RPC after provisioning"
|
||||
},
|
||||
"data_description": {
|
||||
"disable_ap": "For improved security, disable the WiFi access point after successfully connecting to your network.",
|
||||
"disable_ble_rpc": "For improved security, disable Bluetooth RPC access after WiFi is configured. Bluetooth will remain enabled for BLE sensors and buttons."
|
||||
"disable_ap": "For improved security, disable the Wi-Fi access point after successfully connecting to your network.",
|
||||
"disable_ble_rpc": "For improved security, disable Bluetooth RPC access after Wi-Fi is configured. Bluetooth will remain enabled for BLE sensors and buttons."
|
||||
},
|
||||
"description": "The Shelly device {name} has been discovered via Bluetooth but is not connected to WiFi.\n\nDo you want to provision WiFi credentials to this device?",
|
||||
"title": "Provision WiFi via Bluetooth"
|
||||
"description": "The Shelly device {name} has been discovered via Bluetooth but is not connected to Wi-Fi.\n\nDo you want to provision Wi-Fi credentials to this device?",
|
||||
"title": "Provision Wi-Fi via Bluetooth"
|
||||
},
|
||||
"confirm_discovery": {
|
||||
"description": "Do you want to set up the {model} at {host}?\n\nBattery-powered devices that are password-protected must be woken up before continuing with setting up.\nBattery-powered devices that are not password-protected will be added when the device wakes up, you can now manually wake the device up using a button on it or wait for the next data update from the device."
|
||||
@@ -103,16 +103,16 @@
|
||||
"wifi_scan": {
|
||||
"data": {
|
||||
"password": "[%key:common::config_flow::data::password%]",
|
||||
"ssid": "WiFi network"
|
||||
"ssid": "Wi-Fi network"
|
||||
},
|
||||
"data_description": {
|
||||
"password": "Password for the WiFi network.",
|
||||
"ssid": "Select a WiFi network from the list or enter a custom SSID for hidden networks."
|
||||
"password": "Password for the Wi-Fi network.",
|
||||
"ssid": "Select a Wi-Fi network from the list or enter a custom SSID for hidden networks."
|
||||
},
|
||||
"description": "Select a WiFi network and enter the password to provision the device."
|
||||
"description": "Select a Wi-Fi network and enter the password to provision the device."
|
||||
},
|
||||
"wifi_scan_failed": {
|
||||
"description": "Failed to scan for WiFi networks via Bluetooth. The device may be out of range or Bluetooth connection failed. Would you like to try again?"
|
||||
"description": "Failed to scan for Wi-Fi networks via Bluetooth. The device may be out of range or Bluetooth connection failed. Would you like to try again?"
|
||||
}
|
||||
}
|
||||
},
|
||||
@@ -727,16 +727,16 @@
|
||||
},
|
||||
"step": {
|
||||
"init": {
|
||||
"description": "Your Shelly device {device_name} with IP address {ip_address} has an open WiFi access point enabled without a password. This is a security risk as anyone nearby can connect to the device.\n\nNote: If you disable the access point, the device may need to restart.",
|
||||
"description": "Your Shelly device {device_name} with IP address {ip_address} has an open Wi-Fi access point enabled without a password. This is a security risk as anyone nearby can connect to the device.\n\nNote: If you disable the access point, the device may need to restart.",
|
||||
"menu_options": {
|
||||
"confirm": "Disable WiFi access point",
|
||||
"confirm": "Disable Wi-Fi access point",
|
||||
"ignore": "Ignore"
|
||||
},
|
||||
"title": "[%key:component::shelly::issues::open_wifi_ap::title%]"
|
||||
}
|
||||
}
|
||||
},
|
||||
"title": "Open WiFi access point on {device_name}"
|
||||
"title": "Open Wi-Fi access point on {device_name}"
|
||||
},
|
||||
"outbound_websocket_incorrectly_enabled": {
|
||||
"fix_flow": {
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
{
|
||||
"domain": "smarla",
|
||||
"name": "Swing2Sleep Smarla",
|
||||
"codeowners": ["@explicatis", "@rlint-explicatis"],
|
||||
"codeowners": ["@explicatis", "@johannes-exp"],
|
||||
"config_flow": true,
|
||||
"documentation": "https://www.home-assistant.io/integrations/smarla",
|
||||
"integration_type": "device",
|
||||
|
||||
@@ -3,16 +3,18 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from dataclasses import dataclass
|
||||
from typing import Any
|
||||
|
||||
from pysmartthings import Capability, Command, SmartThings
|
||||
from pysmartthings import Attribute, Capability, Category, Command, SmartThings
|
||||
|
||||
from homeassistant.components.button import ButtonEntity, ButtonEntityDescription
|
||||
from homeassistant.const import EntityCategory
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import ServiceValidationError
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from . import FullDevice, SmartThingsConfigEntry
|
||||
from .const import MAIN
|
||||
from .const import DOMAIN, MAIN
|
||||
from .entity import SmartThingsEntity
|
||||
|
||||
|
||||
@@ -22,7 +24,11 @@ class SmartThingsButtonDescription(ButtonEntityDescription):
|
||||
|
||||
key: Capability
|
||||
command: Command
|
||||
command_identifier: str | None = None
|
||||
components: list[str] | None = None
|
||||
argument: int | str | list[Any] | dict[str, Any] | None = None
|
||||
requires_remote_control_status: bool = False
|
||||
requires_dishwasher_machine_state: set[str] | None = None
|
||||
|
||||
|
||||
CAPABILITIES_TO_BUTTONS: dict[Capability | str, SmartThingsButtonDescription] = {
|
||||
@@ -53,6 +59,50 @@ CAPABILITIES_TO_BUTTONS: dict[Capability | str, SmartThingsButtonDescription] =
|
||||
}
|
||||
|
||||
|
||||
DISHWASHER_OPERATION_COMMANDS_TO_BUTTONS: dict[
|
||||
Command | str, SmartThingsButtonDescription
|
||||
] = {
|
||||
Command.CANCEL: SmartThingsButtonDescription(
|
||||
key=Capability.SAMSUNG_CE_DISHWASHER_OPERATION,
|
||||
translation_key="cancel",
|
||||
command_identifier="drain",
|
||||
command=Command.CANCEL,
|
||||
argument=[True],
|
||||
requires_remote_control_status=True,
|
||||
),
|
||||
Command.PAUSE: SmartThingsButtonDescription(
|
||||
key=Capability.SAMSUNG_CE_DISHWASHER_OPERATION,
|
||||
translation_key="pause",
|
||||
command=Command.PAUSE,
|
||||
requires_remote_control_status=True,
|
||||
requires_dishwasher_machine_state={"run"},
|
||||
),
|
||||
Command.RESUME: SmartThingsButtonDescription(
|
||||
key=Capability.SAMSUNG_CE_DISHWASHER_OPERATION,
|
||||
translation_key="resume",
|
||||
command=Command.RESUME,
|
||||
requires_remote_control_status=True,
|
||||
requires_dishwasher_machine_state={"pause"},
|
||||
),
|
||||
Command.START: SmartThingsButtonDescription(
|
||||
key=Capability.SAMSUNG_CE_DISHWASHER_OPERATION,
|
||||
translation_key="start",
|
||||
command=Command.START,
|
||||
requires_remote_control_status=True,
|
||||
requires_dishwasher_machine_state={"stop"},
|
||||
),
|
||||
}
|
||||
|
||||
DISHWASHER_CANCEL_AND_DRAIN_BUTTON = SmartThingsButtonDescription(
|
||||
key=Capability.CUSTOM_SUPPORTED_OPTIONS,
|
||||
translation_key="cancel_and_drain",
|
||||
command_identifier="89",
|
||||
command=Command.SET_COURSE,
|
||||
argument="89",
|
||||
requires_remote_control_status=True,
|
||||
)
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
entry: SmartThingsConfigEntry,
|
||||
@@ -60,13 +110,41 @@ async def async_setup_entry(
|
||||
) -> None:
|
||||
"""Add button entities for a config entry."""
|
||||
entry_data = entry.runtime_data
|
||||
async_add_entities(
|
||||
SmartThingsButtonEntity(entry_data.client, device, description, component)
|
||||
entities: list[SmartThingsEntity] = []
|
||||
entities.extend(
|
||||
SmartThingsButtonEntity(
|
||||
entry_data.client, device, description, Capability(capability), component
|
||||
)
|
||||
for capability, description in CAPABILITIES_TO_BUTTONS.items()
|
||||
for device in entry_data.devices.values()
|
||||
for component in description.components or [MAIN]
|
||||
if component in device.status and capability in device.status[component]
|
||||
)
|
||||
entities.extend(
|
||||
SmartThingsButtonEntity(
|
||||
entry_data.client,
|
||||
device,
|
||||
description,
|
||||
Capability.SAMSUNG_CE_DISHWASHER_OPERATION,
|
||||
)
|
||||
for device in entry_data.devices.values()
|
||||
if Capability.SAMSUNG_CE_DISHWASHER_OPERATION in device.status[MAIN]
|
||||
for description in DISHWASHER_OPERATION_COMMANDS_TO_BUTTONS.values()
|
||||
)
|
||||
entities.extend(
|
||||
SmartThingsButtonEntity(
|
||||
entry_data.client,
|
||||
device,
|
||||
DISHWASHER_CANCEL_AND_DRAIN_BUTTON,
|
||||
Capability.CUSTOM_SUPPORTED_OPTIONS,
|
||||
)
|
||||
for device in entry_data.devices.values()
|
||||
if (
|
||||
device.device.components[MAIN].manufacturer_category == Category.DISHWASHER
|
||||
and Capability.CUSTOM_SUPPORTED_OPTIONS in device.status[MAIN]
|
||||
)
|
||||
)
|
||||
async_add_entities(entities)
|
||||
|
||||
|
||||
class SmartThingsButtonEntity(SmartThingsEntity, ButtonEntity):
|
||||
@@ -79,16 +157,53 @@ class SmartThingsButtonEntity(SmartThingsEntity, ButtonEntity):
|
||||
client: SmartThings,
|
||||
device: FullDevice,
|
||||
entity_description: SmartThingsButtonDescription,
|
||||
component: str,
|
||||
capability: Capability,
|
||||
component: str = MAIN,
|
||||
) -> None:
|
||||
"""Initialize the instance."""
|
||||
super().__init__(client, device, set(), component=component)
|
||||
capabilities = set()
|
||||
if entity_description.requires_remote_control_status:
|
||||
capabilities.add(Capability.REMOTE_CONTROL_STATUS)
|
||||
if entity_description.requires_dishwasher_machine_state:
|
||||
capabilities.add(Capability.DISHWASHER_OPERATING_STATE)
|
||||
super().__init__(client, device, capabilities)
|
||||
self.entity_description = entity_description
|
||||
self.button_capability = capability
|
||||
self._attr_unique_id = f"{device.device.device_id}_{component}_{entity_description.key}_{entity_description.command}"
|
||||
if entity_description.command_identifier is not None:
|
||||
self._attr_unique_id += f"_{entity_description.command_identifier}"
|
||||
|
||||
async def async_press(self) -> None:
|
||||
"""Press the button."""
|
||||
self._validate_before_execute()
|
||||
await self.execute_device_command(
|
||||
self.entity_description.key,
|
||||
self.button_capability,
|
||||
self.entity_description.command,
|
||||
self.entity_description.argument,
|
||||
)
|
||||
|
||||
def _validate_before_execute(self) -> None:
|
||||
"""Validate that the command can be executed."""
|
||||
if (
|
||||
self.entity_description.requires_remote_control_status
|
||||
and self.get_attribute_value(
|
||||
Capability.REMOTE_CONTROL_STATUS, Attribute.REMOTE_CONTROL_ENABLED
|
||||
)
|
||||
== "false"
|
||||
):
|
||||
raise ServiceValidationError(
|
||||
translation_domain=DOMAIN, translation_key="remote_control_status"
|
||||
)
|
||||
if (
|
||||
self.entity_description.requires_dishwasher_machine_state
|
||||
and self.get_attribute_value(
|
||||
Capability.DISHWASHER_OPERATING_STATE, Attribute.MACHINE_STATE
|
||||
)
|
||||
not in self.entity_description.requires_dishwasher_machine_state
|
||||
):
|
||||
state_list = " or ".join(
|
||||
self.entity_description.requires_dishwasher_machine_state
|
||||
)
|
||||
raise ServiceValidationError(
|
||||
f"Can only be updated when dishwasher machine state is {state_list}"
|
||||
)
|
||||
|
||||
@@ -27,12 +27,27 @@
|
||||
}
|
||||
},
|
||||
"button": {
|
||||
"cancel": {
|
||||
"default": "mdi:stop"
|
||||
},
|
||||
"cancel_and_drain": {
|
||||
"default": "mdi:stop"
|
||||
},
|
||||
"pause": {
|
||||
"default": "mdi:pause"
|
||||
},
|
||||
"reset_hepa_filter": {
|
||||
"default": "mdi:air-filter"
|
||||
},
|
||||
"reset_water_filter": {
|
||||
"default": "mdi:reload"
|
||||
},
|
||||
"resume": {
|
||||
"default": "mdi:play"
|
||||
},
|
||||
"start": {
|
||||
"default": "mdi:play"
|
||||
},
|
||||
"stop": {
|
||||
"default": "mdi:stop"
|
||||
}
|
||||
|
||||
@@ -1221,6 +1221,24 @@ CAPABILITY_TO_SENSORS: dict[
|
||||
)
|
||||
]
|
||||
},
|
||||
Capability.SAMSUNG_CE_MICROFIBER_FILTER_OPERATING_STATE: {
|
||||
Attribute.MICROFIBER_FILTER_JOB_STATE: [
|
||||
SmartThingsSensorEntityDescription(
|
||||
key=Attribute.MICROFIBER_FILTER_JOB_STATE,
|
||||
translation_key="microfiber_filter_job_state",
|
||||
device_class=SensorDeviceClass.ENUM,
|
||||
options_attribute=Attribute.SUPPORTED_JOB_STATES,
|
||||
)
|
||||
],
|
||||
Attribute.OPERATING_STATE: [
|
||||
SmartThingsSensorEntityDescription(
|
||||
key=Attribute.OPERATING_STATE,
|
||||
translation_key="microfiber_filter_operating_state",
|
||||
device_class=SensorDeviceClass.ENUM,
|
||||
options_attribute=Attribute.SUPPORTED_OPERATING_STATES,
|
||||
)
|
||||
],
|
||||
},
|
||||
}
|
||||
|
||||
|
||||
|
||||
@@ -93,6 +93,15 @@
|
||||
}
|
||||
},
|
||||
"button": {
|
||||
"cancel": {
|
||||
"name": "Cancel"
|
||||
},
|
||||
"cancel_and_drain": {
|
||||
"name": "Cancel and drain"
|
||||
},
|
||||
"pause": {
|
||||
"name": "[%key:common::action::pause%]"
|
||||
},
|
||||
"reset_hepa_filter": {
|
||||
"name": "Reset HEPA filter"
|
||||
},
|
||||
@@ -102,6 +111,12 @@
|
||||
"reset_water_filter": {
|
||||
"name": "Reset water filter"
|
||||
},
|
||||
"resume": {
|
||||
"name": "Resume"
|
||||
},
|
||||
"start": {
|
||||
"name": "[%key:common::action::start%]"
|
||||
},
|
||||
"stop": {
|
||||
"name": "[%key:common::action::stop%]"
|
||||
}
|
||||
@@ -570,6 +585,25 @@
|
||||
"media_playback_status": {
|
||||
"name": "Media playback status"
|
||||
},
|
||||
"microfiber_filter_job_state": {
|
||||
"name": "[%key:component::smartthings::entity::sensor::dishwasher_job_state::name%]",
|
||||
"state": {
|
||||
"bypassing": "Bypassing",
|
||||
"filtering": "Filtering",
|
||||
"none": "[%key:component::smartthings::entity::sensor::washer_job_state::state::none%]",
|
||||
"sensing": "Weight sensing",
|
||||
"stopping": "Stopping",
|
||||
"waiting": "Waiting"
|
||||
}
|
||||
},
|
||||
"microfiber_filter_operating_state": {
|
||||
"name": "[%key:component::smartthings::entity::sensor::cooktop_operating_state::name%]",
|
||||
"state": {
|
||||
"paused": "[%key:common::state::paused%]",
|
||||
"ready": "[%key:component::smartthings::entity::sensor::oven_machine_state::state::ready%]",
|
||||
"running": "[%key:component::smartthings::entity::sensor::dishwasher_machine_state::state::run%]"
|
||||
}
|
||||
},
|
||||
"odor_sensor": {
|
||||
"name": "Odor sensor"
|
||||
},
|
||||
@@ -916,6 +950,9 @@
|
||||
"bubble_soak": {
|
||||
"name": "Bubble Soak"
|
||||
},
|
||||
"bypass_mode": {
|
||||
"name": "Bypass mode"
|
||||
},
|
||||
"display_lighting": {
|
||||
"name": "Display lighting"
|
||||
},
|
||||
@@ -1009,6 +1046,9 @@
|
||||
"exceptions": {
|
||||
"oauth2_implementation_unavailable": {
|
||||
"message": "[%key:common::exceptions::oauth2_implementation_unavailable::message%]"
|
||||
},
|
||||
"remote_control_status": {
|
||||
"message": "Can only be changed when remote control is enabled"
|
||||
}
|
||||
},
|
||||
"issues": {
|
||||
|
||||
@@ -101,6 +101,15 @@ CAPABILITY_TO_COMMAND_SWITCHES: dict[
|
||||
command=Command.SET_STEAM_CLOSET_AUTO_CYCLE_LINK,
|
||||
entity_category=EntityCategory.CONFIG,
|
||||
),
|
||||
Capability.SAMSUNG_CE_MICROFIBER_FILTER_SETTINGS: SmartThingsCommandSwitchEntityDescription(
|
||||
key=Capability.SAMSUNG_CE_MICROFIBER_FILTER_SETTINGS,
|
||||
translation_key="bypass_mode",
|
||||
status_attribute=Attribute.BYPASS_MODE,
|
||||
entity_category=EntityCategory.CONFIG,
|
||||
on_key="enabled",
|
||||
off_key="disabled",
|
||||
command=Command.SET_BYPASS_MODE,
|
||||
),
|
||||
}
|
||||
CAPABILITY_TO_SWITCHES: dict[Capability | str, SmartThingsSwitchEntityDescription] = {
|
||||
Capability.SAMSUNG_CE_AIR_CONDITIONER_BEEP: SmartThingsSwitchEntityDescription(
|
||||
|
||||
@@ -290,16 +290,29 @@ class SnapcastClientDevice(SnapcastCoordinatorEntity, MediaPlayerEntity):
|
||||
and entity.unique_id != self.unique_id
|
||||
]
|
||||
|
||||
# Get unique ID prefix for this host
|
||||
unique_id_prefix = self.get_unique_id(self.coordinator.host_id, "")
|
||||
for client in clients:
|
||||
# Valid entity is a snapcast client
|
||||
# Validate entity is a snapcast client
|
||||
if not client.unique_id.startswith(CLIENT_PREFIX):
|
||||
raise ServiceValidationError(
|
||||
f"Entity '{client.entity_id}' is not a Snapcast client device."
|
||||
)
|
||||
|
||||
# Validate client belongs to the same server
|
||||
if not client.unique_id.startswith(unique_id_prefix):
|
||||
raise ServiceValidationError(
|
||||
f"Entity '{client.entity_id}' does not belong to the same Snapcast server."
|
||||
)
|
||||
|
||||
# Extract client ID and join it to the current group
|
||||
identifier = client.unique_id.split("_")[-1]
|
||||
await self._current_group.add_client(identifier)
|
||||
identifier = client.unique_id.removeprefix(unique_id_prefix)
|
||||
try:
|
||||
await self._current_group.add_client(identifier)
|
||||
except KeyError as e:
|
||||
raise ServiceValidationError(
|
||||
f"Client with identifier '{identifier}' does not exist on the server."
|
||||
) from e
|
||||
|
||||
self.async_write_ha_state()
|
||||
|
||||
|
||||
@@ -178,31 +178,24 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
connectivity=False, token=True, busy=False
|
||||
)
|
||||
except ClientConnectionError as err:
|
||||
_LOGGER.debug("Connection error during setup at %s:%s: %s", host, port, err)
|
||||
raise ConfigEntryNotReady(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="connection_error",
|
||||
translation_placeholders={
|
||||
"host": host,
|
||||
"port": str(port),
|
||||
"error": str(err),
|
||||
},
|
||||
translation_key="cannot_connect",
|
||||
translation_placeholders={"host": host, "port": str(port)},
|
||||
) from err
|
||||
except TimeoutError as err:
|
||||
_LOGGER.debug("Timeout during setup at %s:%s: %s", host, port, err)
|
||||
raise ConfigEntryNotReady(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="timeout_connect",
|
||||
translation_placeholders={"host": host, "port": str(port)},
|
||||
) from err
|
||||
except Exception as err:
|
||||
_LOGGER.exception("Unexpected error setting up Splunk")
|
||||
_LOGGER.exception("Unexpected setup error at %s:%s", host, port)
|
||||
raise ConfigEntryNotReady(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="unexpected_error",
|
||||
translation_placeholders={
|
||||
"host": host,
|
||||
"port": str(port),
|
||||
"error": str(err),
|
||||
},
|
||||
translation_key="unexpected_connect_error",
|
||||
) from err
|
||||
|
||||
if not connectivity_ok:
|
||||
|
||||
@@ -33,7 +33,7 @@
|
||||
"name": "[%key:common::config_flow::data::name%]",
|
||||
"port": "[%key:common::config_flow::data::port%]",
|
||||
"ssl": "[%key:common::config_flow::data::ssl%]",
|
||||
"token": "HTTP Event Collector token",
|
||||
"token": "[%key:component::splunk::config::step::user::data::token%]",
|
||||
"verify_ssl": "[%key:common::config_flow::data::verify_ssl%]"
|
||||
},
|
||||
"data_description": {
|
||||
@@ -72,17 +72,14 @@
|
||||
"cannot_connect": {
|
||||
"message": "Unable to connect to Splunk at {host}:{port}."
|
||||
},
|
||||
"connection_error": {
|
||||
"message": "Unable to connect to Splunk at {host}:{port}: {error}."
|
||||
},
|
||||
"invalid_auth": {
|
||||
"message": "[%key:common::config_flow::error::invalid_auth%]"
|
||||
},
|
||||
"timeout_connect": {
|
||||
"message": "Connection to Splunk at {host}:{port} timed out."
|
||||
},
|
||||
"unexpected_error": {
|
||||
"message": "Unexpected error while connecting to Splunk at {host}:{port}: {error}."
|
||||
"unexpected_connect_error": {
|
||||
"message": "Unexpected error while connecting to Splunk."
|
||||
}
|
||||
},
|
||||
"issues": {
|
||||
|
||||
@@ -397,11 +397,11 @@ def _metadata_from_header(request: web.Request) -> SpeechMetadata:
|
||||
try:
|
||||
return SpeechMetadata(
|
||||
language=args["language"],
|
||||
format=args["format"],
|
||||
codec=args["codec"],
|
||||
bit_rate=args["bit_rate"],
|
||||
sample_rate=args["sample_rate"],
|
||||
channel=args["channel"],
|
||||
format=AudioFormats(args["format"]),
|
||||
codec=AudioCodecs(args["codec"]),
|
||||
bit_rate=AudioBitRates(int(args["bit_rate"])),
|
||||
sample_rate=AudioSampleRates(int(args["sample_rate"])),
|
||||
channel=AudioChannels(int(args["channel"])),
|
||||
)
|
||||
except ValueError as err:
|
||||
raise ValueError(f"Wrong format of X-Speech-Content: {err}") from err
|
||||
|
||||
@@ -23,12 +23,6 @@ class SpeechMetadata:
|
||||
sample_rate: AudioSampleRates
|
||||
channel: AudioChannels
|
||||
|
||||
def __post_init__(self) -> None:
|
||||
"""Finish initializing the metadata."""
|
||||
self.bit_rate = AudioBitRates(int(self.bit_rate))
|
||||
self.sample_rate = AudioSampleRates(int(self.sample_rate))
|
||||
self.channel = AudioChannels(int(self.channel))
|
||||
|
||||
|
||||
@dataclass
|
||||
class SpeechResult:
|
||||
|
||||
@@ -227,6 +227,9 @@ class SwitchbotConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
# Clear saved credentials if auth failed
|
||||
self._cloud_username = None
|
||||
self._cloud_password = None
|
||||
except Exception:
|
||||
_LOGGER.exception("Unexpected error retrieving encryption key")
|
||||
errors = {"base": "unknown"}
|
||||
else:
|
||||
return await self.async_step_encrypted_key(key_details)
|
||||
|
||||
@@ -366,6 +369,9 @@ class SwitchbotConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
_LOGGER.debug("Authentication failed: %s", ex, exc_info=True)
|
||||
errors = {"base": "auth_failed"}
|
||||
description_placeholders = {"error_detail": str(ex)}
|
||||
except Exception:
|
||||
_LOGGER.exception("Unexpected error during cloud login")
|
||||
errors = {"base": "unknown"}
|
||||
else:
|
||||
# Save credentials temporarily for the duration of this flow
|
||||
# to avoid re-prompting if encrypted device auth is needed
|
||||
|
||||
@@ -9,7 +9,8 @@
|
||||
},
|
||||
"error": {
|
||||
"auth_failed": "Authentication failed: {error_detail}",
|
||||
"encryption_key_invalid": "Key ID or encryption key is invalid"
|
||||
"encryption_key_invalid": "Key ID or encryption key is invalid",
|
||||
"unknown": "[%key:common::config_flow::error::unknown%]"
|
||||
},
|
||||
"flow_title": "{name} ({address})",
|
||||
"step": {
|
||||
|
||||
@@ -13,7 +13,7 @@ from tesla_fleet_api.exceptions import (
|
||||
TeslaFleetError,
|
||||
)
|
||||
from tesla_fleet_api.tessie import Tessie
|
||||
from tessie_api import get_battery, get_state_of_all_vehicles
|
||||
from tessie_api import get_state_of_all_vehicles
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import CONF_ACCESS_TOKEN, Platform
|
||||
@@ -28,7 +28,6 @@ from homeassistant.helpers.device_registry import DeviceInfo
|
||||
|
||||
from .const import DOMAIN, MODELS
|
||||
from .coordinator import (
|
||||
TessieBatteryHealthCoordinator,
|
||||
TessieEnergyHistoryCoordinator,
|
||||
TessieEnergySiteInfoCoordinator,
|
||||
TessieEnergySiteLiveCoordinator,
|
||||
@@ -74,25 +73,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: TessieConfigEntry) -> bo
|
||||
except ClientError as e:
|
||||
raise ConfigEntryNotReady from e
|
||||
|
||||
try:
|
||||
batteries = await asyncio.gather(
|
||||
*(
|
||||
get_battery(
|
||||
session=session,
|
||||
api_key=api_key,
|
||||
vin=vehicle["vin"],
|
||||
)
|
||||
for vehicle in state_of_all_vehicles["results"]
|
||||
if vehicle["last_state"] is not None
|
||||
)
|
||||
)
|
||||
except ClientResponseError as e:
|
||||
if e.status == HTTPStatus.UNAUTHORIZED:
|
||||
raise ConfigEntryAuthFailed from e
|
||||
raise ConfigEntryError("Setup failed, unable to get battery data") from e
|
||||
except ClientError as e:
|
||||
raise ConfigEntryNotReady from e
|
||||
|
||||
vehicles = [
|
||||
TessieVehicleData(
|
||||
vin=vehicle["vin"],
|
||||
@@ -103,13 +83,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: TessieConfigEntry) -> bo
|
||||
vin=vehicle["vin"],
|
||||
data=vehicle["last_state"],
|
||||
),
|
||||
battery_coordinator=TessieBatteryHealthCoordinator(
|
||||
hass,
|
||||
entry,
|
||||
api_key=api_key,
|
||||
vin=vehicle["vin"],
|
||||
data=battery,
|
||||
),
|
||||
device=DeviceInfo(
|
||||
identifiers={(DOMAIN, vehicle["vin"])},
|
||||
manufacturer="Tesla",
|
||||
@@ -126,15 +99,8 @@ async def async_setup_entry(hass: HomeAssistant, entry: TessieConfigEntry) -> bo
|
||||
serial_number=vehicle["vin"],
|
||||
),
|
||||
)
|
||||
for vehicle, battery in zip(
|
||||
(
|
||||
v
|
||||
for v in state_of_all_vehicles["results"]
|
||||
if v["last_state"] is not None
|
||||
),
|
||||
batteries,
|
||||
strict=True,
|
||||
)
|
||||
for vehicle in state_of_all_vehicles["results"]
|
||||
if vehicle["last_state"] is not None
|
||||
]
|
||||
|
||||
# Energy Sites
|
||||
|
||||
@@ -11,7 +11,7 @@ from aiohttp import ClientResponseError
|
||||
from tesla_fleet_api.const import TeslaEnergyPeriod
|
||||
from tesla_fleet_api.exceptions import InvalidToken, MissingToken, TeslaFleetError
|
||||
from tesla_fleet_api.tessie import EnergySite
|
||||
from tessie_api import get_battery, get_state
|
||||
from tessie_api import get_state
|
||||
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import ConfigEntryAuthFailed
|
||||
@@ -87,48 +87,6 @@ class TessieStateUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]):
|
||||
return flatten(vehicle)
|
||||
|
||||
|
||||
class TessieBatteryHealthCoordinator(DataUpdateCoordinator[dict[str, Any]]):
|
||||
"""Class to manage fetching battery health data from the Tessie API."""
|
||||
|
||||
config_entry: TessieConfigEntry
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
hass: HomeAssistant,
|
||||
config_entry: TessieConfigEntry,
|
||||
api_key: str,
|
||||
vin: str,
|
||||
data: dict[str, Any],
|
||||
) -> None:
|
||||
"""Initialize Tessie Battery Health coordinator."""
|
||||
super().__init__(
|
||||
hass,
|
||||
_LOGGER,
|
||||
config_entry=config_entry,
|
||||
name="Tessie Battery Health",
|
||||
update_interval=timedelta(seconds=TESSIE_SYNC_INTERVAL),
|
||||
)
|
||||
self.api_key = api_key
|
||||
self.vin = vin
|
||||
self.session = async_get_clientsession(hass)
|
||||
self.data = data
|
||||
|
||||
async def _async_update_data(self) -> dict[str, Any]:
|
||||
"""Update battery health data using Tessie API."""
|
||||
try:
|
||||
data = await get_battery(
|
||||
session=self.session,
|
||||
api_key=self.api_key,
|
||||
vin=self.vin,
|
||||
)
|
||||
except ClientResponseError as e:
|
||||
if e.status == HTTPStatus.UNAUTHORIZED:
|
||||
raise ConfigEntryAuthFailed from e
|
||||
raise UpdateFailed from e
|
||||
|
||||
return data
|
||||
|
||||
|
||||
class TessieEnergySiteLiveCoordinator(DataUpdateCoordinator[dict[str, Any]]):
|
||||
"""Class to manage fetching energy site live status from the Tessie API."""
|
||||
|
||||
|
||||
@@ -35,7 +35,6 @@ async def async_get_config_entry_diagnostics(
|
||||
vehicles = [
|
||||
{
|
||||
"data": async_redact_data(x.data_coordinator.data, VEHICLE_REDACT),
|
||||
"battery": x.battery_coordinator.data,
|
||||
}
|
||||
for x in entry.runtime_data.vehicles
|
||||
]
|
||||
|
||||
@@ -12,7 +12,6 @@ from homeassistant.helpers.update_coordinator import CoordinatorEntity
|
||||
|
||||
from .const import DOMAIN, TRANSLATED_ERRORS
|
||||
from .coordinator import (
|
||||
TessieBatteryHealthCoordinator,
|
||||
TessieEnergyHistoryCoordinator,
|
||||
TessieEnergySiteInfoCoordinator,
|
||||
TessieEnergySiteLiveCoordinator,
|
||||
@@ -24,7 +23,6 @@ from .models import TessieEnergyData, TessieVehicleData
|
||||
class TessieBaseEntity(
|
||||
CoordinatorEntity[
|
||||
TessieStateUpdateCoordinator
|
||||
| TessieBatteryHealthCoordinator
|
||||
| TessieEnergySiteInfoCoordinator
|
||||
| TessieEnergySiteLiveCoordinator
|
||||
| TessieEnergyHistoryCoordinator
|
||||
@@ -37,15 +35,16 @@ class TessieBaseEntity(
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: TessieStateUpdateCoordinator
|
||||
| TessieBatteryHealthCoordinator
|
||||
| TessieEnergySiteInfoCoordinator
|
||||
| TessieEnergySiteLiveCoordinator
|
||||
| TessieEnergyHistoryCoordinator,
|
||||
key: str,
|
||||
data_key: str | None = None,
|
||||
) -> None:
|
||||
"""Initialize common aspects of a Tessie entity."""
|
||||
|
||||
self.key = key
|
||||
self.data_key = data_key or key
|
||||
self._attr_translation_key = key
|
||||
super().__init__(coordinator)
|
||||
self._async_update_attrs()
|
||||
@@ -53,11 +52,11 @@ class TessieBaseEntity(
|
||||
@property
|
||||
def _value(self) -> Any:
|
||||
"""Return value from coordinator data."""
|
||||
return self.coordinator.data.get(self.key)
|
||||
return self.coordinator.data.get(self.data_key)
|
||||
|
||||
def get(self, key: str | None = None, default: Any | None = None) -> Any:
|
||||
"""Return a specific value from coordinator data."""
|
||||
return self.coordinator.data.get(key or self.key, default)
|
||||
return self.coordinator.data.get(key or self.data_key, default)
|
||||
|
||||
def _handle_coordinator_update(self) -> None:
|
||||
"""Handle updated data from the coordinator."""
|
||||
@@ -76,6 +75,7 @@ class TessieEntity(TessieBaseEntity):
|
||||
self,
|
||||
vehicle: TessieVehicleData,
|
||||
key: str,
|
||||
data_key: str | None = None,
|
||||
) -> None:
|
||||
"""Initialize common aspects of a Tessie vehicle entity."""
|
||||
self.vin = vehicle.vin
|
||||
@@ -84,12 +84,7 @@ class TessieEntity(TessieBaseEntity):
|
||||
self._attr_unique_id = f"{vehicle.vin}-{key}"
|
||||
self._attr_device_info = vehicle.device
|
||||
|
||||
super().__init__(vehicle.data_coordinator, key)
|
||||
|
||||
@property
|
||||
def _value(self) -> Any:
|
||||
"""Return value from coordinator data."""
|
||||
return self.coordinator.data.get(self.key)
|
||||
super().__init__(vehicle.data_coordinator, key, data_key)
|
||||
|
||||
def set(self, *args: Any) -> None:
|
||||
"""Set a value in coordinator data."""
|
||||
@@ -133,29 +128,14 @@ class TessieEnergyEntity(TessieBaseEntity):
|
||||
data: TessieEnergyData,
|
||||
coordinator: TessieEnergySiteInfoCoordinator | TessieEnergySiteLiveCoordinator,
|
||||
key: str,
|
||||
data_key: str | None = None,
|
||||
) -> None:
|
||||
"""Initialize common aspects of a Tessie energy site entity."""
|
||||
self.api = data.api
|
||||
self._attr_unique_id = f"{data.id}-{key}"
|
||||
self._attr_device_info = data.device
|
||||
|
||||
super().__init__(coordinator, key)
|
||||
|
||||
|
||||
class TessieBatteryEntity(TessieBaseEntity):
|
||||
"""Parent class for Tessie battery health entities."""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
vehicle: TessieVehicleData,
|
||||
key: str,
|
||||
) -> None:
|
||||
"""Initialize common aspects of a Tessie battery health entity."""
|
||||
self.vin = vehicle.vin
|
||||
self._attr_unique_id = f"{vehicle.vin}-{key}"
|
||||
self._attr_device_info = vehicle.device
|
||||
|
||||
super().__init__(vehicle.battery_coordinator, key)
|
||||
super().__init__(coordinator, key, data_key)
|
||||
|
||||
|
||||
class TessieEnergyHistoryEntity(TessieBaseEntity):
|
||||
@@ -165,13 +145,14 @@ class TessieEnergyHistoryEntity(TessieBaseEntity):
|
||||
self,
|
||||
data: TessieEnergyData,
|
||||
key: str,
|
||||
data_key: str | None = None,
|
||||
) -> None:
|
||||
"""Initialize common aspects of a Tessie energy history entity."""
|
||||
self.api = data.api
|
||||
self._attr_unique_id = f"{data.id}-{key}"
|
||||
self._attr_device_info = data.device
|
||||
assert data.history_coordinator
|
||||
super().__init__(data.history_coordinator, key)
|
||||
super().__init__(data.history_coordinator, key, data_key)
|
||||
|
||||
|
||||
class TessieWallConnectorEntity(TessieBaseEntity):
|
||||
@@ -182,6 +163,7 @@ class TessieWallConnectorEntity(TessieBaseEntity):
|
||||
data: TessieEnergyData,
|
||||
din: str,
|
||||
key: str,
|
||||
data_key: str | None = None,
|
||||
) -> None:
|
||||
"""Initialize common aspects of a Teslemetry entity."""
|
||||
self.din = din
|
||||
@@ -194,7 +176,7 @@ class TessieWallConnectorEntity(TessieBaseEntity):
|
||||
serial_number=din.rsplit("-", maxsplit=1)[-1],
|
||||
)
|
||||
assert data.live_coordinator
|
||||
super().__init__(data.live_coordinator, key)
|
||||
super().__init__(data.live_coordinator, key, data_key)
|
||||
|
||||
@property
|
||||
def _value(self) -> int:
|
||||
|
||||
@@ -9,7 +9,6 @@ from tesla_fleet_api.tessie import EnergySite
|
||||
from homeassistant.helpers.device_registry import DeviceInfo
|
||||
|
||||
from .coordinator import (
|
||||
TessieBatteryHealthCoordinator,
|
||||
TessieEnergyHistoryCoordinator,
|
||||
TessieEnergySiteInfoCoordinator,
|
||||
TessieEnergySiteLiveCoordinator,
|
||||
@@ -42,6 +41,5 @@ class TessieVehicleData:
|
||||
"""Data for a Tessie vehicle."""
|
||||
|
||||
data_coordinator: TessieStateUpdateCoordinator
|
||||
battery_coordinator: TessieBatteryHealthCoordinator
|
||||
device: DeviceInfo
|
||||
vin: str
|
||||
|
||||
@@ -41,7 +41,6 @@ from .const import (
|
||||
TessieWallConnectorStates,
|
||||
)
|
||||
from .entity import (
|
||||
TessieBatteryEntity,
|
||||
TessieEnergyEntity,
|
||||
TessieEnergyHistoryEntity,
|
||||
TessieEntity,
|
||||
@@ -62,6 +61,7 @@ def minutes_to_datetime(value: StateType) -> datetime | None:
|
||||
class TessieSensorEntityDescription(SensorEntityDescription):
|
||||
"""Describes Tessie Sensor entity."""
|
||||
|
||||
data_key: str | None = None
|
||||
value_fn: Callable[[StateType], StateType | datetime] = lambda x: x
|
||||
available_fn: Callable[[StateType], bool] = lambda _: True
|
||||
|
||||
@@ -142,6 +142,14 @@ DESCRIPTIONS: tuple[TessieSensorEntityDescription, ...] = (
|
||||
suggested_display_precision=1,
|
||||
entity_registry_enabled_default=False,
|
||||
),
|
||||
TessieSensorEntityDescription(
|
||||
key="phantom_drain_percent",
|
||||
data_key="charge_state_phantom_drain",
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
native_unit_of_measurement=PERCENTAGE,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
suggested_display_precision=2,
|
||||
),
|
||||
TessieSensorEntityDescription(
|
||||
key="charge_state_energy_remaining",
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
@@ -150,6 +158,51 @@ DESCRIPTIONS: tuple[TessieSensorEntityDescription, ...] = (
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
suggested_display_precision=2,
|
||||
),
|
||||
TessieSensorEntityDescription(
|
||||
key="lifetime_energy_used",
|
||||
data_key="charge_state_lifetime_energy_used",
|
||||
state_class=SensorStateClass.TOTAL_INCREASING,
|
||||
native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
|
||||
device_class=SensorDeviceClass.ENERGY,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
suggested_display_precision=1,
|
||||
),
|
||||
TessieSensorEntityDescription(
|
||||
key="pack_current",
|
||||
data_key="charge_state_pack_current",
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
native_unit_of_measurement=UnitOfElectricCurrent.AMPERE,
|
||||
device_class=SensorDeviceClass.CURRENT,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
suggested_display_precision=1,
|
||||
),
|
||||
TessieSensorEntityDescription(
|
||||
key="pack_voltage",
|
||||
data_key="charge_state_pack_voltage",
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
native_unit_of_measurement=UnitOfElectricPotential.VOLT,
|
||||
device_class=SensorDeviceClass.VOLTAGE,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
suggested_display_precision=1,
|
||||
),
|
||||
TessieSensorEntityDescription(
|
||||
key="module_temp_min",
|
||||
data_key="charge_state_module_temp_min",
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
|
||||
device_class=SensorDeviceClass.TEMPERATURE,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
suggested_display_precision=1,
|
||||
),
|
||||
TessieSensorEntityDescription(
|
||||
key="module_temp_max",
|
||||
data_key="charge_state_module_temp_max",
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
|
||||
device_class=SensorDeviceClass.TEMPERATURE,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
suggested_display_precision=1,
|
||||
),
|
||||
TessieSensorEntityDescription(
|
||||
key="charge_state_conn_charge_cable",
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
@@ -290,57 +343,6 @@ DESCRIPTIONS: tuple[TessieSensorEntityDescription, ...] = (
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
BATTERY_DESCRIPTIONS: tuple[TessieSensorEntityDescription, ...] = (
|
||||
TessieSensorEntityDescription(
|
||||
key="phantom_drain_percent",
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
native_unit_of_measurement=PERCENTAGE,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
suggested_display_precision=2,
|
||||
),
|
||||
TessieSensorEntityDescription(
|
||||
key="lifetime_energy_used",
|
||||
state_class=SensorStateClass.TOTAL_INCREASING,
|
||||
native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
|
||||
device_class=SensorDeviceClass.ENERGY,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
suggested_display_precision=1,
|
||||
),
|
||||
TessieSensorEntityDescription(
|
||||
key="pack_current",
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
native_unit_of_measurement=UnitOfElectricCurrent.AMPERE,
|
||||
device_class=SensorDeviceClass.CURRENT,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
suggested_display_precision=1,
|
||||
),
|
||||
TessieSensorEntityDescription(
|
||||
key="pack_voltage",
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
native_unit_of_measurement=UnitOfElectricPotential.VOLT,
|
||||
device_class=SensorDeviceClass.VOLTAGE,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
suggested_display_precision=1,
|
||||
),
|
||||
TessieSensorEntityDescription(
|
||||
key="module_temp_min",
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
|
||||
device_class=SensorDeviceClass.TEMPERATURE,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
suggested_display_precision=1,
|
||||
),
|
||||
TessieSensorEntityDescription(
|
||||
key="module_temp_max",
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
|
||||
device_class=SensorDeviceClass.TEMPERATURE,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
suggested_display_precision=1,
|
||||
),
|
||||
)
|
||||
|
||||
ENERGY_LIVE_DESCRIPTIONS: tuple[TessieSensorEntityDescription, ...] = (
|
||||
TessieSensorEntityDescription(
|
||||
key="solar_power",
|
||||
@@ -494,12 +496,6 @@ async def async_setup_entry(
|
||||
for vehicle in entry.runtime_data.vehicles
|
||||
for description in DESCRIPTIONS
|
||||
),
|
||||
( # Add vehicle battery health
|
||||
TessieBatteryHealthSensorEntity(vehicle, description)
|
||||
for vehicle in entry.runtime_data.vehicles
|
||||
for description in BATTERY_DESCRIPTIONS
|
||||
if description.key in vehicle.battery_coordinator.data
|
||||
),
|
||||
( # Add energy site info
|
||||
TessieEnergyInfoSensorEntity(energysite, description)
|
||||
for energysite in entry.runtime_data.energysites
|
||||
@@ -545,7 +541,7 @@ class TessieVehicleSensorEntity(TessieEntity, SensorEntity):
|
||||
) -> None:
|
||||
"""Initialize the sensor."""
|
||||
self.entity_description = description
|
||||
super().__init__(vehicle, description.key)
|
||||
super().__init__(vehicle, description.key, description.data_key)
|
||||
|
||||
@property
|
||||
def native_value(self) -> StateType | datetime:
|
||||
@@ -558,25 +554,6 @@ class TessieVehicleSensorEntity(TessieEntity, SensorEntity):
|
||||
return super().available and self.entity_description.available_fn(self.get())
|
||||
|
||||
|
||||
class TessieBatteryHealthSensorEntity(TessieBatteryEntity, SensorEntity):
|
||||
"""Sensor entity for Tessie battery health data."""
|
||||
|
||||
entity_description: TessieSensorEntityDescription
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
vehicle: TessieVehicleData,
|
||||
description: TessieSensorEntityDescription,
|
||||
) -> None:
|
||||
"""Initialize the sensor."""
|
||||
self.entity_description = description
|
||||
super().__init__(vehicle, description.key)
|
||||
|
||||
def _async_update_attrs(self) -> None:
|
||||
"""Update the attributes of the sensor."""
|
||||
self._attr_native_value = self.entity_description.value_fn(self._value)
|
||||
|
||||
|
||||
class TessieEnergyLiveSensorEntity(TessieEnergyEntity, SensorEntity):
|
||||
"""Base class for Tessie energy site sensor entity."""
|
||||
|
||||
|
||||
@@ -20,5 +20,16 @@
|
||||
"update_item": {
|
||||
"service": "mdi:clipboard-edit"
|
||||
}
|
||||
},
|
||||
"triggers": {
|
||||
"item_added": {
|
||||
"trigger": "mdi:clipboard-plus"
|
||||
},
|
||||
"item_completed": {
|
||||
"trigger": "mdi:clipboard-check"
|
||||
},
|
||||
"item_removed": {
|
||||
"trigger": "mdi:clipboard-minus"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -98,5 +98,19 @@
|
||||
"name": "Update item"
|
||||
}
|
||||
},
|
||||
"title": "To-do list"
|
||||
"title": "To-do list",
|
||||
"triggers": {
|
||||
"item_added": {
|
||||
"description": "Triggers when a to-do item is added to a list.",
|
||||
"name": "To-do item added"
|
||||
},
|
||||
"item_completed": {
|
||||
"description": "Triggers when a to-do item is marked as done.",
|
||||
"name": "To-do item completed"
|
||||
},
|
||||
"item_removed": {
|
||||
"description": "Triggers when a to-do item is removed from a list.",
|
||||
"name": "To-do item removed"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
321
homeassistant/components/todo/trigger.py
Normal file
321
homeassistant/components/todo/trigger.py
Normal file
@@ -0,0 +1,321 @@
|
||||
"""Provides triggers for todo platform."""
|
||||
|
||||
import abc
|
||||
import asyncio
|
||||
from collections.abc import Callable
|
||||
from dataclasses import dataclass
|
||||
import functools
|
||||
import logging
|
||||
from typing import TYPE_CHECKING, cast, override
|
||||
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.const import ATTR_ENTITY_ID, CONF_TARGET
|
||||
from homeassistant.core import CALLBACK_TYPE, HomeAssistant, callback, split_entity_id
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.helpers.entity_component import EntityComponent
|
||||
from homeassistant.helpers.target import TargetEntityChangeTracker, TargetSelection
|
||||
from homeassistant.helpers.trigger import Trigger, TriggerActionRunner, TriggerConfig
|
||||
from homeassistant.helpers.typing import ConfigType
|
||||
|
||||
from . import TodoItem, TodoListEntity
|
||||
from .const import DATA_COMPONENT, DOMAIN, TodoItemStatus
|
||||
|
||||
ITEM_TRIGGER_SCHEMA = vol.Schema(
|
||||
{
|
||||
vol.Required(CONF_TARGET): cv.TARGET_FIELDS,
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def get_entity(hass: HomeAssistant, entity_id: str) -> TodoListEntity:
|
||||
"""Get the todo entity for the provided entity_id."""
|
||||
component: EntityComponent[TodoListEntity] = hass.data[DATA_COMPONENT]
|
||||
if not (entity := component.get_entity(entity_id)) or not isinstance(
|
||||
entity, TodoListEntity
|
||||
):
|
||||
raise HomeAssistantError(
|
||||
f"Entity does not exist {entity_id} or is not a todo entity"
|
||||
)
|
||||
return entity
|
||||
|
||||
|
||||
@dataclass(frozen=True, slots=True)
|
||||
class TodoItemChangeEvent:
|
||||
"""Data class for todo item change event."""
|
||||
|
||||
entity_id: str
|
||||
items: list[TodoItem]
|
||||
|
||||
|
||||
class ItemChangeListener(TargetEntityChangeTracker):
|
||||
"""Helper class to listen to todo item changes for target entities."""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
hass: HomeAssistant,
|
||||
target_selection: TargetSelection,
|
||||
listener: Callable[[TodoItemChangeEvent], None],
|
||||
entity_listener: Callable[[str, list[TodoItem] | None], None],
|
||||
) -> None:
|
||||
"""Initialize the item change tracker."""
|
||||
|
||||
def entity_filter(entities: set[str]) -> set[str]:
|
||||
return {
|
||||
entity_id
|
||||
for entity_id in entities
|
||||
if split_entity_id(entity_id)[0] == DOMAIN
|
||||
}
|
||||
|
||||
super().__init__(hass, target_selection, entity_filter)
|
||||
self._listener = listener
|
||||
self._entity_listener = entity_listener
|
||||
|
||||
self._pending_listener_task: asyncio.Task[None] | None = None
|
||||
self._unsubscribe_listeners: list[CALLBACK_TYPE] = []
|
||||
|
||||
@override
|
||||
@callback
|
||||
def _handle_entities_update(self, tracked_entities: set[str]) -> None:
|
||||
"""Restart the listeners when the list of entities of the tracked targets is updated."""
|
||||
if self._pending_listener_task:
|
||||
self._pending_listener_task.cancel()
|
||||
self._pending_listener_task = self._hass.async_create_task(
|
||||
self._start_listening(tracked_entities)
|
||||
)
|
||||
|
||||
async def _start_listening(self, tracked_entities: set[str]) -> None:
|
||||
"""Start listening for todo item changes."""
|
||||
_LOGGER.debug("Tracking items for todos: %s", tracked_entities)
|
||||
for unsub in self._unsubscribe_listeners:
|
||||
unsub()
|
||||
|
||||
def _listener_wrapper(entity_id: str, items: list[TodoItem]) -> None:
|
||||
self._listener(TodoItemChangeEvent(entity_id=entity_id, items=items))
|
||||
|
||||
self._unsubscribe_listeners = []
|
||||
for entity_id in tracked_entities:
|
||||
entity = get_entity(self._hass, entity_id)
|
||||
self._entity_listener(entity_id, entity.todo_items)
|
||||
unsub = entity.async_subscribe_updates(
|
||||
functools.partial(_listener_wrapper, entity_id)
|
||||
)
|
||||
self._unsubscribe_listeners.append(unsub)
|
||||
|
||||
@override
|
||||
@callback
|
||||
def _unsubscribe(self) -> None:
|
||||
"""Unsubscribe from all events."""
|
||||
super()._unsubscribe()
|
||||
if self._pending_listener_task:
|
||||
self._pending_listener_task.cancel()
|
||||
self._pending_listener_task = None
|
||||
for unsub in self._unsubscribe_listeners:
|
||||
unsub()
|
||||
|
||||
|
||||
class ItemTriggerBase(Trigger, abc.ABC):
|
||||
"""todo item trigger base."""
|
||||
|
||||
@classmethod
|
||||
async def async_validate_config(
|
||||
cls, hass: HomeAssistant, config: ConfigType
|
||||
) -> ConfigType:
|
||||
"""Validate config."""
|
||||
return cast(ConfigType, ITEM_TRIGGER_SCHEMA(config))
|
||||
|
||||
def __init__(self, hass: HomeAssistant, config: TriggerConfig) -> None:
|
||||
"""Initialize trigger."""
|
||||
super().__init__(hass, config)
|
||||
|
||||
if TYPE_CHECKING:
|
||||
assert config.target is not None
|
||||
self._target = config.target
|
||||
|
||||
async def async_attach_runner(
|
||||
self, run_action: TriggerActionRunner
|
||||
) -> CALLBACK_TYPE:
|
||||
"""Attach a trigger."""
|
||||
|
||||
target_selection = TargetSelection(self._target)
|
||||
if not target_selection.has_any_target:
|
||||
raise HomeAssistantError(f"No target defined in {self._target}")
|
||||
listener = ItemChangeListener(
|
||||
self._hass,
|
||||
target_selection,
|
||||
functools.partial(self._handle_item_change, run_action=run_action),
|
||||
self._handle_new_entity,
|
||||
)
|
||||
return listener.async_setup()
|
||||
|
||||
@callback
|
||||
@abc.abstractmethod
|
||||
def _handle_new_entity(self, entity_id: str, items: list[TodoItem] | None) -> None:
|
||||
"""Handle when a new entity starts being tracked.
|
||||
|
||||
Called before subscribing to updates, so the trigger can seed its
|
||||
internal tracking state.
|
||||
"""
|
||||
|
||||
@callback
|
||||
@abc.abstractmethod
|
||||
def _handle_item_change(
|
||||
self, event: TodoItemChangeEvent, run_action: TriggerActionRunner
|
||||
) -> None:
|
||||
"""Handle todo item change event."""
|
||||
|
||||
|
||||
class ItemAddedTrigger(ItemTriggerBase):
|
||||
"""todo item added trigger."""
|
||||
|
||||
def __init__(self, hass: HomeAssistant, config: TriggerConfig) -> None:
|
||||
"""Initialize trigger."""
|
||||
super().__init__(hass, config)
|
||||
self._entity_item_ids: dict[str, set[str] | None] = {}
|
||||
|
||||
@override
|
||||
@callback
|
||||
def _handle_new_entity(self, entity_id: str, items: list[TodoItem] | None) -> None:
|
||||
"""Seed item IDs for a newly tracked entity."""
|
||||
self._entity_item_ids[entity_id] = (
|
||||
{item.uid for item in items if item.uid is not None}
|
||||
if items is not None
|
||||
else None
|
||||
)
|
||||
|
||||
@override
|
||||
@callback
|
||||
def _handle_item_change(
|
||||
self, event: TodoItemChangeEvent, run_action: TriggerActionRunner
|
||||
) -> None:
|
||||
"""Listen for todo item changes."""
|
||||
old_item_ids = self._entity_item_ids.get(event.entity_id)
|
||||
current_item_ids = {item.uid for item in event.items if item.uid is not None}
|
||||
self._entity_item_ids[event.entity_id] = current_item_ids
|
||||
if old_item_ids is None:
|
||||
# Entity just became available, so no old items to compare against
|
||||
return
|
||||
added_item_ids = current_item_ids - old_item_ids
|
||||
if added_item_ids:
|
||||
_LOGGER.debug(
|
||||
"Detected added items with ids %s for entity %s",
|
||||
added_item_ids,
|
||||
event.entity_id,
|
||||
)
|
||||
payload = {
|
||||
ATTR_ENTITY_ID: event.entity_id,
|
||||
"item_ids": sorted(added_item_ids),
|
||||
}
|
||||
run_action(payload, description="todo item added trigger")
|
||||
|
||||
|
||||
class ItemRemovedTrigger(ItemTriggerBase):
|
||||
"""todo item removed trigger."""
|
||||
|
||||
def __init__(self, hass: HomeAssistant, config: TriggerConfig) -> None:
|
||||
"""Initialize trigger."""
|
||||
super().__init__(hass, config)
|
||||
self._entity_item_ids: dict[str, set[str] | None] = {}
|
||||
|
||||
@override
|
||||
@callback
|
||||
def _handle_new_entity(self, entity_id: str, items: list[TodoItem] | None) -> None:
|
||||
"""Seed item IDs for a newly tracked entity."""
|
||||
self._entity_item_ids[entity_id] = (
|
||||
{item.uid for item in items if item.uid is not None}
|
||||
if items is not None
|
||||
else None
|
||||
)
|
||||
|
||||
@override
|
||||
@callback
|
||||
def _handle_item_change(
|
||||
self, event: TodoItemChangeEvent, run_action: TriggerActionRunner
|
||||
) -> None:
|
||||
"""Listen for todo item changes."""
|
||||
old_item_ids = self._entity_item_ids.get(event.entity_id)
|
||||
current_item_ids = {item.uid for item in event.items if item.uid is not None}
|
||||
self._entity_item_ids[event.entity_id] = current_item_ids
|
||||
if old_item_ids is None:
|
||||
# Entity just became available, so no old items to compare against
|
||||
return
|
||||
removed_item_ids = old_item_ids - current_item_ids
|
||||
if removed_item_ids:
|
||||
_LOGGER.debug(
|
||||
"Detected removed items with ids %s for entity %s",
|
||||
removed_item_ids,
|
||||
event.entity_id,
|
||||
)
|
||||
payload = {
|
||||
ATTR_ENTITY_ID: event.entity_id,
|
||||
"item_ids": sorted(removed_item_ids),
|
||||
}
|
||||
run_action(payload, description="todo item removed trigger")
|
||||
|
||||
|
||||
class ItemCompletedTrigger(ItemTriggerBase):
|
||||
"""todo item completed trigger."""
|
||||
|
||||
def __init__(self, hass: HomeAssistant, config: TriggerConfig) -> None:
|
||||
"""Initialize trigger."""
|
||||
super().__init__(hass, config)
|
||||
self._entity_completed_item_ids: dict[str, set[str] | None] = {}
|
||||
|
||||
@override
|
||||
@callback
|
||||
def _handle_new_entity(self, entity_id: str, items: list[TodoItem] | None) -> None:
|
||||
"""Seed completed item IDs for a newly tracked entity."""
|
||||
self._entity_completed_item_ids[entity_id] = (
|
||||
{
|
||||
item.uid
|
||||
for item in items
|
||||
if item.uid is not None and item.status == TodoItemStatus.COMPLETED
|
||||
}
|
||||
if items is not None
|
||||
else None
|
||||
)
|
||||
|
||||
@override
|
||||
@callback
|
||||
def _handle_item_change(
|
||||
self, event: TodoItemChangeEvent, run_action: TriggerActionRunner
|
||||
) -> None:
|
||||
"""Listen for todo item changes."""
|
||||
old_item_ids = self._entity_completed_item_ids.get(event.entity_id)
|
||||
current_item_ids = {
|
||||
item.uid
|
||||
for item in event.items
|
||||
if item.uid is not None and item.status == TodoItemStatus.COMPLETED
|
||||
}
|
||||
self._entity_completed_item_ids[event.entity_id] = current_item_ids
|
||||
if old_item_ids is None:
|
||||
# Entity just became available, so no old items to compare against
|
||||
return
|
||||
new_completed_item_ids = current_item_ids - old_item_ids
|
||||
if new_completed_item_ids:
|
||||
_LOGGER.debug(
|
||||
"Detected new completed items with ids %s for entity %s",
|
||||
new_completed_item_ids,
|
||||
event.entity_id,
|
||||
)
|
||||
payload = {
|
||||
ATTR_ENTITY_ID: event.entity_id,
|
||||
"item_ids": sorted(new_completed_item_ids),
|
||||
}
|
||||
run_action(payload, description="todo item completed trigger")
|
||||
|
||||
|
||||
TRIGGERS: dict[str, type[Trigger]] = {
|
||||
"item_added": ItemAddedTrigger,
|
||||
"item_completed": ItemCompletedTrigger,
|
||||
"item_removed": ItemRemovedTrigger,
|
||||
}
|
||||
|
||||
|
||||
async def async_get_triggers(hass: HomeAssistant) -> dict[str, type[Trigger]]:
|
||||
"""Return the triggers for todo platform."""
|
||||
return TRIGGERS
|
||||
8
homeassistant/components/todo/triggers.yaml
Normal file
8
homeassistant/components/todo/triggers.yaml
Normal file
@@ -0,0 +1,8 @@
|
||||
.trigger_common: &trigger_common
|
||||
target:
|
||||
entity:
|
||||
domain: todo
|
||||
|
||||
item_added: *trigger_common
|
||||
item_completed: *trigger_common
|
||||
item_removed: *trigger_common
|
||||
@@ -4,8 +4,7 @@ from __future__ import annotations
|
||||
|
||||
from datetime import timedelta
|
||||
|
||||
from pyvlx.exception import PyVLXException
|
||||
from pyvlx.opening_device import OpeningDevice, Window
|
||||
from pyvlx import OpeningDevice, Position, PyVLXException, Window
|
||||
|
||||
from homeassistant.components.binary_sensor import (
|
||||
BinarySensorDeviceClass,
|
||||
@@ -55,7 +54,7 @@ class VeluxRainSensor(VeluxEntity, BinarySensorEntity):
|
||||
async def async_update(self) -> None:
|
||||
"""Fetch the latest state from the device."""
|
||||
try:
|
||||
limitation = await self.node.get_limitation()
|
||||
limitation: Position = await self.node.get_limitation_min()
|
||||
except (OSError, PyVLXException) as err:
|
||||
if not self._unavailable_logged:
|
||||
LOGGER.warning(
|
||||
@@ -78,4 +77,4 @@ class VeluxRainSensor(VeluxEntity, BinarySensorEntity):
|
||||
# So far we've seen 89, 91, 93 (most cases) or 100 (Velux GPU). It probably makes sense to
|
||||
# assume that any large enough limitation (we use >=89) means rain is detected.
|
||||
# Documentation on this is non-existent AFAIK.
|
||||
self._attr_is_on = limitation.min_value >= 89
|
||||
self._attr_is_on = limitation.position_percent >= 89
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
{
|
||||
"domain": "velux",
|
||||
"name": "Velux",
|
||||
"codeowners": ["@Julius2342", "@DeerMaximum", "@pawlizio", "@wollew"],
|
||||
"codeowners": ["@Julius2342", "@pawlizio", "@wollew"],
|
||||
"config_flow": true,
|
||||
"dhcp": [
|
||||
{
|
||||
@@ -14,5 +14,5 @@
|
||||
"iot_class": "local_polling",
|
||||
"loggers": ["pyvlx"],
|
||||
"quality_scale": "silver",
|
||||
"requirements": ["pyvlx==0.2.30"]
|
||||
"requirements": ["pyvlx==0.2.32"]
|
||||
}
|
||||
|
||||
@@ -12,5 +12,5 @@
|
||||
"integration_type": "hub",
|
||||
"iot_class": "cloud_polling",
|
||||
"loggers": ["PyViCare"],
|
||||
"requirements": ["PyViCare==2.58.0"]
|
||||
"requirements": ["PyViCare==2.58.1"]
|
||||
}
|
||||
|
||||
@@ -4,10 +4,12 @@ from __future__ import annotations
|
||||
|
||||
import logging
|
||||
|
||||
from sensor_state_data import SensorUpdate
|
||||
from victron_ble_ha_parser import VictronBluetoothDeviceData
|
||||
|
||||
from homeassistant.components.bluetooth import (
|
||||
BluetoothScanningMode,
|
||||
BluetoothServiceInfoBleak,
|
||||
async_rediscover_address,
|
||||
)
|
||||
from homeassistant.components.bluetooth.passive_update_processor import (
|
||||
@@ -17,6 +19,8 @@ from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import CONF_ACCESS_TOKEN, Platform
|
||||
from homeassistant.core import HomeAssistant
|
||||
|
||||
from .const import REAUTH_AFTER_FAILURES
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@@ -26,12 +30,38 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
assert address is not None
|
||||
key = entry.data[CONF_ACCESS_TOKEN]
|
||||
data = VictronBluetoothDeviceData(key)
|
||||
consecutive_failures = 0
|
||||
|
||||
def _update(
|
||||
service_info: BluetoothServiceInfoBleak,
|
||||
) -> SensorUpdate:
|
||||
nonlocal consecutive_failures
|
||||
update = data.update(service_info)
|
||||
|
||||
# If the device type was recognized (devices dict populated) but
|
||||
# only signal strength came back, decryption likely failed.
|
||||
# Unsupported devices have an empty devices dict and won't trigger this.
|
||||
if update.devices and len(update.entity_values) <= 1:
|
||||
consecutive_failures += 1
|
||||
if consecutive_failures >= REAUTH_AFTER_FAILURES:
|
||||
_LOGGER.debug(
|
||||
"Triggering reauth for %s after %d consecutive failures",
|
||||
address,
|
||||
consecutive_failures,
|
||||
)
|
||||
entry.async_start_reauth(hass)
|
||||
consecutive_failures = 0
|
||||
else:
|
||||
consecutive_failures = 0
|
||||
|
||||
return update
|
||||
|
||||
coordinator = PassiveBluetoothProcessorCoordinator(
|
||||
hass,
|
||||
_LOGGER,
|
||||
address=address,
|
||||
mode=BluetoothScanningMode.ACTIVE,
|
||||
update_method=data.update,
|
||||
update_method=_update,
|
||||
)
|
||||
entry.runtime_data = coordinator
|
||||
|
||||
|
||||
@@ -2,6 +2,7 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Mapping
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
@@ -123,3 +124,42 @@ class VictronBLEConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
{vol.Required(CONF_ADDRESS): vol.In(self._discovered_devices)}
|
||||
),
|
||||
)
|
||||
|
||||
async def async_step_reauth(
|
||||
self, _entry_data: Mapping[str, Any]
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle a flow initialized by a reauth event."""
|
||||
return await self.async_step_reauth_confirm()
|
||||
|
||||
async def async_step_reauth_confirm(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle reauth confirmation with a new encryption key."""
|
||||
reauth_entry = self._get_reauth_entry()
|
||||
errors: dict[str, str] = {}
|
||||
|
||||
if user_input is not None:
|
||||
device = VictronBluetoothDeviceData(user_input[CONF_ACCESS_TOKEN])
|
||||
|
||||
# Find the current advertisement data for this device
|
||||
for discovery_info in async_discovered_service_info(self.hass, False):
|
||||
if discovery_info.address == reauth_entry.unique_id:
|
||||
mfr_data = discovery_info.manufacturer_data.get(VICTRON_IDENTIFIER)
|
||||
if mfr_data is None or not device.validate_advertisement_key(
|
||||
mfr_data
|
||||
):
|
||||
errors["base"] = "invalid_access_token"
|
||||
break
|
||||
return self.async_update_reload_and_abort(
|
||||
reauth_entry,
|
||||
data_updates={CONF_ACCESS_TOKEN: user_input[CONF_ACCESS_TOKEN]},
|
||||
)
|
||||
else:
|
||||
errors["base"] = "no_devices_found"
|
||||
|
||||
return self.async_show_form(
|
||||
step_id="reauth_confirm",
|
||||
data_schema=STEP_ACCESS_TOKEN_DATA_SCHEMA,
|
||||
description_placeholders={"title": reauth_entry.title},
|
||||
errors=errors,
|
||||
)
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
"""Constants for the Victron Bluetooth Low Energy integration."""
|
||||
|
||||
DOMAIN = "victron_ble"
|
||||
REAUTH_AFTER_FAILURES = 3
|
||||
VICTRON_IDENTIFIER = 0x02E1
|
||||
|
||||
@@ -8,10 +8,15 @@
|
||||
"config": {
|
||||
"abort": {
|
||||
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
|
||||
"no_devices_found": "[%key:common::config_flow::abort::no_devices_found%]"
|
||||
"already_in_progress": "[%key:common::config_flow::abort::already_in_progress%]",
|
||||
"invalid_access_token": "Invalid encryption key for instant readout",
|
||||
"no_devices_found": "[%key:common::config_flow::abort::no_devices_found%]",
|
||||
"not_supported": "Device not supported",
|
||||
"reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]"
|
||||
},
|
||||
"error": {
|
||||
"invalid_access_token": "Invalid encryption key for instant readout"
|
||||
"invalid_access_token": "Invalid encryption key for instant readout",
|
||||
"no_devices_found": "[%key:common::config_flow::abort::no_devices_found%]"
|
||||
},
|
||||
"flow_title": "{title}",
|
||||
"step": {
|
||||
@@ -24,6 +29,15 @@
|
||||
},
|
||||
"title": "{title}"
|
||||
},
|
||||
"reauth_confirm": {
|
||||
"data": {
|
||||
"access_token": "[%key:component::victron_ble::config::step::access_token::data::access_token%]"
|
||||
},
|
||||
"data_description": {
|
||||
"access_token": "[%key:component::victron_ble::config::step::access_token::data_description::access_token%]"
|
||||
},
|
||||
"description": "The encryption key for {title} is invalid or has changed. Please enter the correct key."
|
||||
},
|
||||
"user": {
|
||||
"data": {
|
||||
"address": "The Bluetooth address of the Victron device."
|
||||
|
||||
@@ -3,7 +3,6 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from homeassistant.components.sensor import (
|
||||
ENTITY_ID_FORMAT,
|
||||
SensorDeviceClass,
|
||||
SensorEntity,
|
||||
SensorEntityDescription,
|
||||
@@ -19,7 +18,6 @@ from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.device_registry import DeviceInfo
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
from homeassistant.helpers.update_coordinator import CoordinatorEntity
|
||||
from homeassistant.util import slugify
|
||||
|
||||
from . import DOMAIN, WaterFurnaceConfigEntry
|
||||
from .coordinator import WaterFurnaceCoordinator
|
||||
@@ -178,10 +176,6 @@ class WaterFurnaceSensor(CoordinatorEntity[WaterFurnaceCoordinator], SensorEntit
|
||||
super().__init__(coordinator)
|
||||
self.entity_description = description
|
||||
|
||||
# This ensures that the sensors are isolated per waterfurnace unit
|
||||
self.entity_id = ENTITY_ID_FORMAT.format(
|
||||
f"wf_{slugify(coordinator.unit)}_{slugify(description.key)}"
|
||||
)
|
||||
self._attr_unique_id = f"{coordinator.unit}_{description.key}"
|
||||
|
||||
device_info = DeviceInfo(
|
||||
|
||||
128
homeassistant/components/wiim/__init__.py
Normal file
128
homeassistant/components/wiim/__init__.py
Normal file
@@ -0,0 +1,128 @@
|
||||
"""The WiiM integration."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import TYPE_CHECKING
|
||||
from urllib.parse import urlparse
|
||||
|
||||
from wiim.controller import WiimController
|
||||
from wiim.discovery import async_create_wiim_device
|
||||
from wiim.exceptions import WiimDeviceException, WiimRequestException
|
||||
|
||||
from homeassistant.const import CONF_HOST, EVENT_HOMEASSISTANT_STOP
|
||||
from homeassistant.core import Event, HomeAssistant
|
||||
from homeassistant.exceptions import ConfigEntryNotReady
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
from homeassistant.helpers.network import NoURLAvailableError, get_url
|
||||
|
||||
from .const import DATA_WIIM, DOMAIN, LOGGER, PLATFORMS, UPNP_PORT, WiimConfigEntry
|
||||
from .models import WiimData
|
||||
|
||||
DEFAULT_AVAILABILITY_POLLING_INTERVAL = 60
|
||||
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: WiimConfigEntry) -> bool:
|
||||
"""Set up WiiM from a config entry.
|
||||
|
||||
This method owns the device connect/disconnect lifecycle.
|
||||
"""
|
||||
LOGGER.debug(
|
||||
"Setting up WiiM entry: %s (UDN: %s, Source: %s)",
|
||||
entry.title,
|
||||
entry.unique_id,
|
||||
entry.source,
|
||||
)
|
||||
|
||||
# This integration maintains shared domain-level state because:
|
||||
# - Multiple config entries can be loaded simultaneously.
|
||||
# - All WiiM devices share a single WiimController instance
|
||||
# to coordinate network communication and event handling.
|
||||
# - We also maintain a global entity_id -> UDN mapping
|
||||
# used for cross-entity event routing.
|
||||
#
|
||||
# The domain data must therefore be initialized once and reused
|
||||
# across all config entries.
|
||||
session = async_get_clientsession(hass)
|
||||
|
||||
if DATA_WIIM not in hass.data:
|
||||
hass.data[DATA_WIIM] = WiimData(controller=WiimController(session))
|
||||
|
||||
wiim_domain_data = hass.data[DATA_WIIM]
|
||||
controller = wiim_domain_data.controller
|
||||
|
||||
host = entry.data[CONF_HOST]
|
||||
upnp_location = f"http://{host}:{UPNP_PORT}/description.xml"
|
||||
|
||||
try:
|
||||
base_url = get_url(hass, prefer_external=False)
|
||||
except NoURLAvailableError as err:
|
||||
raise ConfigEntryNotReady("Failed to determine Home Assistant URL") from err
|
||||
|
||||
local_host = urlparse(base_url).hostname
|
||||
if TYPE_CHECKING:
|
||||
assert local_host is not None
|
||||
|
||||
try:
|
||||
wiim_device = await async_create_wiim_device(
|
||||
upnp_location,
|
||||
session,
|
||||
host=host,
|
||||
local_host=local_host,
|
||||
polling_interval=DEFAULT_AVAILABILITY_POLLING_INTERVAL,
|
||||
)
|
||||
except WiimRequestException as err:
|
||||
raise ConfigEntryNotReady(f"HTTP API request failed for {host}: {err}") from err
|
||||
except WiimDeviceException as err:
|
||||
raise ConfigEntryNotReady(f"Device setup failed for {host}: {err}") from err
|
||||
|
||||
await controller.add_device(wiim_device)
|
||||
|
||||
entry.runtime_data = wiim_device
|
||||
LOGGER.info(
|
||||
"WiiM device %s (UDN: %s) linked to HASS. Name: '%s', HTTP: %s, UPnP Location: %s",
|
||||
entry.entry_id,
|
||||
wiim_device.udn,
|
||||
wiim_device.name,
|
||||
host,
|
||||
upnp_location or "N/A",
|
||||
)
|
||||
|
||||
async def _async_shutdown_event_handler(event: Event) -> None:
|
||||
LOGGER.info(
|
||||
"Home Assistant stopping, disconnecting WiiM device: %s",
|
||||
wiim_device.name,
|
||||
)
|
||||
await wiim_device.disconnect()
|
||||
|
||||
entry.async_on_unload(
|
||||
hass.bus.async_listen_once(
|
||||
EVENT_HOMEASSISTANT_STOP, _async_shutdown_event_handler
|
||||
)
|
||||
)
|
||||
|
||||
async def _unload_entry_cleanup():
|
||||
"""Cleanup when unloading the config entry.
|
||||
|
||||
Removes the device from the controller and disconnects it.
|
||||
"""
|
||||
LOGGER.debug("Running unload cleanup for %s", wiim_device.name)
|
||||
await controller.remove_device(wiim_device.udn)
|
||||
await wiim_device.disconnect()
|
||||
|
||||
entry.async_on_unload(_unload_entry_cleanup)
|
||||
|
||||
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
|
||||
return True
|
||||
|
||||
|
||||
async def async_unload_entry(hass: HomeAssistant, entry: WiimConfigEntry) -> bool:
|
||||
"""Unload a config entry."""
|
||||
LOGGER.info("Unloading WiiM entry: %s (UDN: %s)", entry.title, entry.unique_id)
|
||||
|
||||
if not await hass.config_entries.async_unload_platforms(entry, PLATFORMS):
|
||||
return False
|
||||
|
||||
if not hass.config_entries.async_loaded_entries(DOMAIN):
|
||||
hass.data.pop(DATA_WIIM)
|
||||
LOGGER.info("Last WiiM entry unloaded, cleaning up domain data")
|
||||
return True
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user