mirror of
https://github.com/home-assistant/core.git
synced 2025-08-06 14:15:12 +02:00
Merge branch 'dev' into dev
This commit is contained in:
@@ -6,11 +6,12 @@
|
||||
source = homeassistant
|
||||
omit =
|
||||
homeassistant/__main__.py
|
||||
homeassistant/helpers/backports/aiohttp_resolver.py
|
||||
homeassistant/helpers/signal.py
|
||||
homeassistant/scripts/__init__.py
|
||||
homeassistant/scripts/benchmark/__init__.py
|
||||
homeassistant/scripts/check_config.py
|
||||
homeassistant/scripts/ensure_config.py
|
||||
homeassistant/scripts/benchmark/__init__.py
|
||||
homeassistant/scripts/macos/__init__.py
|
||||
|
||||
# omit pieces of code that rely on external devices being present
|
||||
|
@@ -4,7 +4,10 @@
|
||||
"dockerFile": "../Dockerfile.dev",
|
||||
"postCreateCommand": "script/setup",
|
||||
"postStartCommand": "script/bootstrap",
|
||||
"containerEnv": { "DEVCONTAINER": "1" },
|
||||
"containerEnv": {
|
||||
"DEVCONTAINER": "1",
|
||||
"PYTHONASYNCIODEBUG": "1"
|
||||
},
|
||||
// Port 5683 udp is used by Shelly integration
|
||||
"appPort": ["8123:8123", "5683:5683/udp"],
|
||||
"runArgs": ["-e", "GIT_EDITOR=code --wait"],
|
||||
|
46
.github/workflows/ci.yaml
vendored
46
.github/workflows/ci.yaml
vendored
@@ -94,7 +94,7 @@ jobs:
|
||||
id: generate_python_cache_key
|
||||
run: >-
|
||||
echo "key=venv-${{ env.CACHE_VERSION }}-${{
|
||||
hashFiles('requirements_test.txt') }}-${{
|
||||
hashFiles('requirements_test.txt', 'requirements_test_pre_commit.txt') }}-${{
|
||||
hashFiles('requirements_all.txt') }}-${{
|
||||
hashFiles('homeassistant/package_constraints.txt') }}" >> $GITHUB_OUTPUT
|
||||
- name: Generate partial pre-commit restore key
|
||||
@@ -1088,25 +1088,17 @@ jobs:
|
||||
pattern: coverage-*
|
||||
- name: Upload coverage to Codecov (full coverage)
|
||||
if: needs.info.outputs.test_full_suite == 'true'
|
||||
uses: Wandalen/wretry.action@v3.1.0
|
||||
uses: codecov/codecov-action@v4.3.0
|
||||
with:
|
||||
action: codecov/codecov-action@v4.3.0
|
||||
with: |
|
||||
fail_ci_if_error: true
|
||||
flags: full-suite
|
||||
token: ${{ env.CODECOV_TOKEN }}
|
||||
attempt_limit: 5
|
||||
attempt_delay: 30000
|
||||
fail_ci_if_error: true
|
||||
flags: full-suite
|
||||
token: ${{ secrets.CODECOV_TOKEN }}
|
||||
- name: Upload coverage to Codecov (partial coverage)
|
||||
if: needs.info.outputs.test_full_suite == 'false'
|
||||
uses: Wandalen/wretry.action@v3.1.0
|
||||
uses: codecov/codecov-action@v4.3.0
|
||||
with:
|
||||
action: codecov/codecov-action@v4.3.0
|
||||
with: |
|
||||
fail_ci_if_error: true
|
||||
token: ${{ env.CODECOV_TOKEN }}
|
||||
attempt_limit: 5
|
||||
attempt_delay: 30000
|
||||
fail_ci_if_error: true
|
||||
token: ${{ secrets.CODECOV_TOKEN }}
|
||||
|
||||
pytest-partial:
|
||||
runs-on: ubuntu-22.04
|
||||
@@ -1234,22 +1226,14 @@ jobs:
|
||||
pattern: coverage-*
|
||||
- name: Upload coverage to Codecov (full coverage)
|
||||
if: needs.info.outputs.test_full_suite == 'true'
|
||||
uses: Wandalen/wretry.action@v3.1.0
|
||||
uses: codecov/codecov-action@v4.3.0
|
||||
with:
|
||||
action: codecov/codecov-action@v4.3.0
|
||||
with: |
|
||||
fail_ci_if_error: true
|
||||
flags: full-suite
|
||||
token: ${{ env.CODECOV_TOKEN }}
|
||||
attempt_limit: 5
|
||||
attempt_delay: 30000
|
||||
fail_ci_if_error: true
|
||||
flags: full-suite
|
||||
token: ${{ secrets.CODECOV_TOKEN }}
|
||||
- name: Upload coverage to Codecov (partial coverage)
|
||||
if: needs.info.outputs.test_full_suite == 'false'
|
||||
uses: Wandalen/wretry.action@v3.1.0
|
||||
uses: codecov/codecov-action@v4.3.0
|
||||
with:
|
||||
action: codecov/codecov-action@v4.3.0
|
||||
with: |
|
||||
fail_ci_if_error: true
|
||||
token: ${{ env.CODECOV_TOKEN }}
|
||||
attempt_limit: 5
|
||||
attempt_delay: 30000
|
||||
fail_ci_if_error: true
|
||||
token: ${{ secrets.CODECOV_TOKEN }}
|
||||
|
@@ -1,6 +1,6 @@
|
||||
repos:
|
||||
- repo: https://github.com/astral-sh/ruff-pre-commit
|
||||
rev: v0.3.5
|
||||
rev: v0.3.7
|
||||
hooks:
|
||||
- id: ruff
|
||||
args:
|
||||
@@ -15,7 +15,7 @@ repos:
|
||||
- --ignore-words-list=additionals,alle,alot,astroid,bund,caf,convencional,currenty,datas,farenheit,falsy,fo,frequence,haa,hass,iif,incomfort,ines,ist,nam,nd,pres,pullrequests,resset,rime,ser,serie,te,technik,ue,unsecure,vor,withing,zar
|
||||
- --skip="./.*,*.csv,*.json,*.ambr"
|
||||
- --quiet-level=2
|
||||
exclude_types: [csv, json]
|
||||
exclude_types: [csv, json, html]
|
||||
exclude: ^tests/fixtures/|homeassistant/generated/|tests/components/.*/snapshots/
|
||||
- repo: https://github.com/pre-commit/pre-commit-hooks
|
||||
rev: v4.4.0
|
||||
|
@@ -363,6 +363,7 @@ homeassistant.components.rest_command.*
|
||||
homeassistant.components.rfxtrx.*
|
||||
homeassistant.components.rhasspy.*
|
||||
homeassistant.components.ridwell.*
|
||||
homeassistant.components.ring.*
|
||||
homeassistant.components.rituals_perfume_genie.*
|
||||
homeassistant.components.roku.*
|
||||
homeassistant.components.romy.*
|
||||
|
@@ -28,6 +28,7 @@ from .const import ACCESS_TOKEN_EXPIRATION, GROUP_ID_ADMIN, REFRESH_TOKEN_EXPIRA
|
||||
from .mfa_modules import MultiFactorAuthModule, auth_mfa_module_from_config
|
||||
from .models import AuthFlowResult
|
||||
from .providers import AuthProvider, LoginFlow, auth_provider_from_config
|
||||
from .session import SessionManager
|
||||
|
||||
EVENT_USER_ADDED = "user_added"
|
||||
EVENT_USER_UPDATED = "user_updated"
|
||||
@@ -85,7 +86,7 @@ async def auth_manager_from_config(
|
||||
module_hash[module.id] = module
|
||||
|
||||
manager = AuthManager(hass, store, provider_hash, module_hash)
|
||||
manager.async_setup()
|
||||
await manager.async_setup()
|
||||
return manager
|
||||
|
||||
|
||||
@@ -180,9 +181,9 @@ class AuthManager:
|
||||
self._remove_expired_job = HassJob(
|
||||
self._async_remove_expired_refresh_tokens, job_type=HassJobType.Callback
|
||||
)
|
||||
self.session = SessionManager(hass, self)
|
||||
|
||||
@callback
|
||||
def async_setup(self) -> None:
|
||||
async def async_setup(self) -> None:
|
||||
"""Set up the auth manager."""
|
||||
hass = self.hass
|
||||
hass.async_add_shutdown_job(
|
||||
@@ -191,6 +192,7 @@ class AuthManager:
|
||||
)
|
||||
)
|
||||
self._async_track_next_refresh_token_expiration()
|
||||
await self.session.async_setup()
|
||||
|
||||
@property
|
||||
def auth_providers(self) -> list[AuthProvider]:
|
||||
|
205
homeassistant/auth/session.py
Normal file
205
homeassistant/auth/session.py
Normal file
@@ -0,0 +1,205 @@
|
||||
"""Session auth module."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import datetime, timedelta
|
||||
import secrets
|
||||
from typing import TYPE_CHECKING, Final, TypedDict
|
||||
|
||||
from aiohttp.web import Request
|
||||
from aiohttp_session import Session, get_session, new_session
|
||||
from cryptography.fernet import Fernet
|
||||
|
||||
from homeassistant.core import CALLBACK_TYPE, HomeAssistant, callback
|
||||
from homeassistant.helpers.event import async_call_later
|
||||
from homeassistant.helpers.storage import Store
|
||||
from homeassistant.util import dt as dt_util
|
||||
|
||||
from .models import RefreshToken
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from . import AuthManager
|
||||
|
||||
|
||||
TEMP_TIMEOUT = timedelta(minutes=5)
|
||||
TEMP_TIMEOUT_SECONDS = TEMP_TIMEOUT.total_seconds()
|
||||
|
||||
SESSION_ID = "id"
|
||||
STORAGE_VERSION = 1
|
||||
STORAGE_KEY = "auth.session"
|
||||
|
||||
|
||||
class StrictConnectionTempSessionData:
|
||||
"""Data for accessing unauthorized resources for a short period of time."""
|
||||
|
||||
__slots__ = ("cancel_remove", "absolute_expiry")
|
||||
|
||||
def __init__(self, cancel_remove: CALLBACK_TYPE) -> None:
|
||||
"""Initialize the temp session data."""
|
||||
self.cancel_remove: Final[CALLBACK_TYPE] = cancel_remove
|
||||
self.absolute_expiry: Final[datetime] = dt_util.utcnow() + TEMP_TIMEOUT
|
||||
|
||||
|
||||
class StoreData(TypedDict):
|
||||
"""Data to store."""
|
||||
|
||||
unauthorized_sessions: dict[str, str]
|
||||
key: str
|
||||
|
||||
|
||||
class SessionManager:
|
||||
"""Session manager."""
|
||||
|
||||
def __init__(self, hass: HomeAssistant, auth: AuthManager) -> None:
|
||||
"""Initialize the strict connection manager."""
|
||||
self._auth = auth
|
||||
self._hass = hass
|
||||
self._temp_sessions: dict[str, StrictConnectionTempSessionData] = {}
|
||||
self._strict_connection_sessions: dict[str, str] = {}
|
||||
self._store = Store[StoreData](
|
||||
hass, STORAGE_VERSION, STORAGE_KEY, private=True, atomic_writes=True
|
||||
)
|
||||
self._key: str | None = None
|
||||
self._refresh_token_revoke_callbacks: dict[str, CALLBACK_TYPE] = {}
|
||||
|
||||
@property
|
||||
def key(self) -> str:
|
||||
"""Return the encryption key."""
|
||||
if self._key is None:
|
||||
self._key = Fernet.generate_key().decode()
|
||||
self._async_schedule_save()
|
||||
return self._key
|
||||
|
||||
async def async_validate_request_for_strict_connection_session(
|
||||
self,
|
||||
request: Request,
|
||||
) -> bool:
|
||||
"""Check if a request has a valid strict connection session."""
|
||||
session = await get_session(request)
|
||||
if session.new or session.empty:
|
||||
return False
|
||||
result = self.async_validate_strict_connection_session(session)
|
||||
if result is False:
|
||||
session.invalidate()
|
||||
return result
|
||||
|
||||
@callback
|
||||
def async_validate_strict_connection_session(
|
||||
self,
|
||||
session: Session,
|
||||
) -> bool:
|
||||
"""Validate a strict connection session."""
|
||||
if not (session_id := session.get(SESSION_ID)):
|
||||
return False
|
||||
|
||||
if token_id := self._strict_connection_sessions.get(session_id):
|
||||
if self._auth.async_get_refresh_token(token_id):
|
||||
return True
|
||||
# refresh token is invalid, delete entry
|
||||
self._strict_connection_sessions.pop(session_id)
|
||||
self._async_schedule_save()
|
||||
|
||||
if data := self._temp_sessions.get(session_id):
|
||||
if dt_util.utcnow() <= data.absolute_expiry:
|
||||
return True
|
||||
# session expired, delete entry
|
||||
self._temp_sessions.pop(session_id).cancel_remove()
|
||||
|
||||
return False
|
||||
|
||||
@callback
|
||||
def _async_register_revoke_token_callback(self, refresh_token_id: str) -> None:
|
||||
"""Register a callback to revoke all sessions for a refresh token."""
|
||||
if refresh_token_id in self._refresh_token_revoke_callbacks:
|
||||
return
|
||||
|
||||
@callback
|
||||
def async_invalidate_auth_sessions() -> None:
|
||||
"""Invalidate all sessions for a refresh token."""
|
||||
self._strict_connection_sessions = {
|
||||
session_id: token_id
|
||||
for session_id, token_id in self._strict_connection_sessions.items()
|
||||
if token_id != refresh_token_id
|
||||
}
|
||||
self._async_schedule_save()
|
||||
|
||||
self._refresh_token_revoke_callbacks[refresh_token_id] = (
|
||||
self._auth.async_register_revoke_token_callback(
|
||||
refresh_token_id, async_invalidate_auth_sessions
|
||||
)
|
||||
)
|
||||
|
||||
async def async_create_session(
|
||||
self,
|
||||
request: Request,
|
||||
refresh_token: RefreshToken,
|
||||
) -> None:
|
||||
"""Create new session for given refresh token.
|
||||
|
||||
Caller needs to make sure that the refresh token is valid.
|
||||
By creating a session, we are implicitly revoking all other
|
||||
sessions for the given refresh token as there is one refresh
|
||||
token per device/user case.
|
||||
"""
|
||||
self._strict_connection_sessions = {
|
||||
session_id: token_id
|
||||
for session_id, token_id in self._strict_connection_sessions.items()
|
||||
if token_id != refresh_token.id
|
||||
}
|
||||
|
||||
self._async_register_revoke_token_callback(refresh_token.id)
|
||||
session_id = await self._async_create_new_session(request)
|
||||
self._strict_connection_sessions[session_id] = refresh_token.id
|
||||
self._async_schedule_save()
|
||||
|
||||
async def async_create_temp_unauthorized_session(self, request: Request) -> None:
|
||||
"""Create a temporary unauthorized session."""
|
||||
session_id = await self._async_create_new_session(
|
||||
request, max_age=int(TEMP_TIMEOUT_SECONDS)
|
||||
)
|
||||
|
||||
@callback
|
||||
def remove(_: datetime) -> None:
|
||||
self._temp_sessions.pop(session_id, None)
|
||||
|
||||
self._temp_sessions[session_id] = StrictConnectionTempSessionData(
|
||||
async_call_later(self._hass, TEMP_TIMEOUT_SECONDS, remove)
|
||||
)
|
||||
|
||||
async def _async_create_new_session(
|
||||
self,
|
||||
request: Request,
|
||||
*,
|
||||
max_age: int | None = None,
|
||||
) -> str:
|
||||
session_id = secrets.token_hex(64)
|
||||
|
||||
session = await new_session(request)
|
||||
session[SESSION_ID] = session_id
|
||||
if max_age is not None:
|
||||
session.max_age = max_age
|
||||
return session_id
|
||||
|
||||
@callback
|
||||
def _async_schedule_save(self, delay: float = 1) -> None:
|
||||
"""Save sessions."""
|
||||
self._store.async_delay_save(self._data_to_save, delay)
|
||||
|
||||
@callback
|
||||
def _data_to_save(self) -> StoreData:
|
||||
"""Return the data to store."""
|
||||
return StoreData(
|
||||
unauthorized_sessions=self._strict_connection_sessions,
|
||||
key=self.key,
|
||||
)
|
||||
|
||||
async def async_setup(self) -> None:
|
||||
"""Set up session manager."""
|
||||
data = await self._store.async_load()
|
||||
if data is None:
|
||||
return
|
||||
|
||||
self._key = data["key"]
|
||||
self._strict_connection_sessions = data["unauthorized_sessions"]
|
||||
for token_id in self._strict_connection_sessions.values():
|
||||
self._async_register_revoke_token_callback(token_id)
|
@@ -284,7 +284,8 @@ class APIEntityStateView(HomeAssistantView):
|
||||
|
||||
# Read the state back for our response
|
||||
status_code = HTTPStatus.CREATED if is_new_state else HTTPStatus.OK
|
||||
assert (state := hass.states.get(entity_id))
|
||||
state = hass.states.get(entity_id)
|
||||
assert state
|
||||
resp = self.json(state.as_dict(), status_code)
|
||||
|
||||
resp.headers.add("Location", f"/api/states/{entity_id}")
|
||||
|
@@ -162,6 +162,7 @@ from homeassistant.util import dt as dt_util
|
||||
from . import indieauth, login_flow, mfa_setup_flow
|
||||
|
||||
DOMAIN = "auth"
|
||||
STRICT_CONNECTION_URL = "/auth/strict_connection/temp_token"
|
||||
|
||||
StoreResultType = Callable[[str, Credentials], str]
|
||||
RetrieveResultType = Callable[[str, str], Credentials | None]
|
||||
@@ -187,6 +188,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
hass.http.register_view(RevokeTokenView())
|
||||
hass.http.register_view(LinkUserView(retrieve_result))
|
||||
hass.http.register_view(OAuth2AuthorizeCallbackView())
|
||||
hass.http.register_view(StrictConnectionTempTokenView())
|
||||
|
||||
websocket_api.async_register_command(hass, websocket_current_user)
|
||||
websocket_api.async_register_command(hass, websocket_create_long_lived_access_token)
|
||||
@@ -195,8 +197,8 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
websocket_api.async_register_command(hass, websocket_delete_all_refresh_tokens)
|
||||
websocket_api.async_register_command(hass, websocket_sign_path)
|
||||
|
||||
await login_flow.async_setup(hass, store_result)
|
||||
await mfa_setup_flow.async_setup(hass)
|
||||
login_flow.async_setup(hass, store_result)
|
||||
mfa_setup_flow.async_setup(hass)
|
||||
|
||||
return True
|
||||
|
||||
@@ -260,10 +262,10 @@ class TokenView(HomeAssistantView):
|
||||
return await RevokeTokenView.post(self, request) # type: ignore[arg-type]
|
||||
|
||||
if grant_type == "authorization_code":
|
||||
return await self._async_handle_auth_code(hass, data, request.remote)
|
||||
return await self._async_handle_auth_code(hass, data, request)
|
||||
|
||||
if grant_type == "refresh_token":
|
||||
return await self._async_handle_refresh_token(hass, data, request.remote)
|
||||
return await self._async_handle_refresh_token(hass, data, request)
|
||||
|
||||
return self.json(
|
||||
{"error": "unsupported_grant_type"}, status_code=HTTPStatus.BAD_REQUEST
|
||||
@@ -273,7 +275,7 @@ class TokenView(HomeAssistantView):
|
||||
self,
|
||||
hass: HomeAssistant,
|
||||
data: MultiDictProxy[str],
|
||||
remote_addr: str | None,
|
||||
request: web.Request,
|
||||
) -> web.Response:
|
||||
"""Handle authorization code request."""
|
||||
client_id = data.get("client_id")
|
||||
@@ -313,7 +315,7 @@ class TokenView(HomeAssistantView):
|
||||
)
|
||||
try:
|
||||
access_token = hass.auth.async_create_access_token(
|
||||
refresh_token, remote_addr
|
||||
refresh_token, request.remote
|
||||
)
|
||||
except InvalidAuthError as exc:
|
||||
return self.json(
|
||||
@@ -321,6 +323,7 @@ class TokenView(HomeAssistantView):
|
||||
status_code=HTTPStatus.FORBIDDEN,
|
||||
)
|
||||
|
||||
await hass.auth.session.async_create_session(request, refresh_token)
|
||||
return self.json(
|
||||
{
|
||||
"access_token": access_token,
|
||||
@@ -341,9 +344,9 @@ class TokenView(HomeAssistantView):
|
||||
self,
|
||||
hass: HomeAssistant,
|
||||
data: MultiDictProxy[str],
|
||||
remote_addr: str | None,
|
||||
request: web.Request,
|
||||
) -> web.Response:
|
||||
"""Handle authorization code request."""
|
||||
"""Handle refresh token request."""
|
||||
client_id = data.get("client_id")
|
||||
if client_id is not None and not indieauth.verify_client_id(client_id):
|
||||
return self.json(
|
||||
@@ -381,7 +384,7 @@ class TokenView(HomeAssistantView):
|
||||
|
||||
try:
|
||||
access_token = hass.auth.async_create_access_token(
|
||||
refresh_token, remote_addr
|
||||
refresh_token, request.remote
|
||||
)
|
||||
except InvalidAuthError as exc:
|
||||
return self.json(
|
||||
@@ -389,6 +392,7 @@ class TokenView(HomeAssistantView):
|
||||
status_code=HTTPStatus.FORBIDDEN,
|
||||
)
|
||||
|
||||
await hass.auth.session.async_create_session(request, refresh_token)
|
||||
return self.json(
|
||||
{
|
||||
"access_token": access_token,
|
||||
@@ -437,6 +441,20 @@ class LinkUserView(HomeAssistantView):
|
||||
return self.json_message("User linked")
|
||||
|
||||
|
||||
class StrictConnectionTempTokenView(HomeAssistantView):
|
||||
"""View to get temporary strict connection token."""
|
||||
|
||||
url = STRICT_CONNECTION_URL
|
||||
name = "api:auth:strict_connection:temp_token"
|
||||
requires_auth = False
|
||||
|
||||
async def get(self, request: web.Request) -> web.Response:
|
||||
"""Get a temporary token and redirect to main page."""
|
||||
hass = request.app[KEY_HASS]
|
||||
await hass.auth.session.async_create_temp_unauthorized_session(request)
|
||||
raise web.HTTPSeeOther(location="/")
|
||||
|
||||
|
||||
@callback
|
||||
def _create_auth_code_store() -> tuple[StoreResultType, RetrieveResultType]:
|
||||
"""Create an in memory store."""
|
||||
|
@@ -91,7 +91,7 @@ from homeassistant.components.http.ban import (
|
||||
)
|
||||
from homeassistant.components.http.data_validator import RequestDataValidator
|
||||
from homeassistant.components.http.view import HomeAssistantView
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers.network import is_cloud_connection
|
||||
from homeassistant.util.network import is_local
|
||||
|
||||
@@ -105,7 +105,8 @@ if TYPE_CHECKING:
|
||||
from . import StoreResultType
|
||||
|
||||
|
||||
async def async_setup(
|
||||
@callback
|
||||
def async_setup(
|
||||
hass: HomeAssistant, store_result: Callable[[str, Credentials], str]
|
||||
) -> None:
|
||||
"""Component to allow users to login."""
|
||||
|
@@ -62,7 +62,8 @@ class MfaFlowManager(data_entry_flow.FlowManager):
|
||||
return result
|
||||
|
||||
|
||||
async def async_setup(hass: HomeAssistant) -> None:
|
||||
@callback
|
||||
def async_setup(hass: HomeAssistant) -> None:
|
||||
"""Init mfa setup flow manager."""
|
||||
hass.data[DATA_SETUP_FLOW_MGR] = MfaFlowManager(hass)
|
||||
|
||||
|
@@ -604,18 +604,20 @@ class AutomationEntity(BaseAutomationEntity, RestoreEntity):
|
||||
)
|
||||
|
||||
if enable_automation:
|
||||
await self.async_enable()
|
||||
await self._async_enable()
|
||||
|
||||
async def async_turn_on(self, **kwargs: Any) -> None:
|
||||
"""Turn the entity on and update the state."""
|
||||
await self.async_enable()
|
||||
await self._async_enable()
|
||||
self.async_write_ha_state()
|
||||
|
||||
async def async_turn_off(self, **kwargs: Any) -> None:
|
||||
"""Turn the entity off."""
|
||||
if CONF_STOP_ACTIONS in kwargs:
|
||||
await self.async_disable(kwargs[CONF_STOP_ACTIONS])
|
||||
await self._async_disable(kwargs[CONF_STOP_ACTIONS])
|
||||
else:
|
||||
await self.async_disable()
|
||||
await self._async_disable()
|
||||
self.async_write_ha_state()
|
||||
|
||||
async def async_trigger(
|
||||
self,
|
||||
@@ -743,7 +745,7 @@ class AutomationEntity(BaseAutomationEntity, RestoreEntity):
|
||||
async def async_will_remove_from_hass(self) -> None:
|
||||
"""Remove listeners when removing automation from Home Assistant."""
|
||||
await super().async_will_remove_from_hass()
|
||||
await self.async_disable()
|
||||
await self._async_disable()
|
||||
|
||||
async def _async_enable_automation(self, event: Event) -> None:
|
||||
"""Start automation on startup."""
|
||||
@@ -752,31 +754,34 @@ class AutomationEntity(BaseAutomationEntity, RestoreEntity):
|
||||
return
|
||||
|
||||
self._async_detach_triggers = await self._async_attach_triggers(True)
|
||||
self.async_write_ha_state()
|
||||
|
||||
async def async_enable(self) -> None:
|
||||
async def _async_enable(self) -> None:
|
||||
"""Enable this automation entity.
|
||||
|
||||
This method is a coroutine.
|
||||
This method is not expected to write state to the
|
||||
state machine.
|
||||
"""
|
||||
if self._is_enabled:
|
||||
return
|
||||
|
||||
self._is_enabled = True
|
||||
|
||||
# HomeAssistant is starting up
|
||||
if self.hass.state is not CoreState.not_running:
|
||||
self._async_detach_triggers = await self._async_attach_triggers(False)
|
||||
self.async_write_ha_state()
|
||||
return
|
||||
|
||||
self.hass.bus.async_listen_once(
|
||||
EVENT_HOMEASSISTANT_STARTED,
|
||||
self._async_enable_automation,
|
||||
)
|
||||
self.async_write_ha_state()
|
||||
|
||||
async def async_disable(self, stop_actions: bool = DEFAULT_STOP_ACTIONS) -> None:
|
||||
"""Disable the automation entity."""
|
||||
async def _async_disable(self, stop_actions: bool = DEFAULT_STOP_ACTIONS) -> None:
|
||||
"""Disable the automation entity.
|
||||
|
||||
This method is not expected to write state to the
|
||||
state machine.
|
||||
"""
|
||||
if not self._is_enabled and not self.action_script.runs:
|
||||
return
|
||||
|
||||
@@ -789,18 +794,31 @@ class AutomationEntity(BaseAutomationEntity, RestoreEntity):
|
||||
if stop_actions:
|
||||
await self.action_script.async_stop()
|
||||
|
||||
self.async_write_ha_state()
|
||||
|
||||
def _log_callback(self, level: int, msg: str, **kwargs: Any) -> None:
|
||||
"""Log helper callback."""
|
||||
self._logger.log(level, "%s %s", msg, self.name, **kwargs)
|
||||
|
||||
async def _async_trigger_if_enabled(
|
||||
self,
|
||||
run_variables: dict[str, Any],
|
||||
context: Context | None = None,
|
||||
skip_condition: bool = False,
|
||||
) -> ScriptRunResult | None:
|
||||
"""Trigger automation if enabled.
|
||||
|
||||
If the trigger starts but has a delay, the automation will be triggered
|
||||
when the delay has passed so we need to make sure its still enabled before
|
||||
executing the action.
|
||||
"""
|
||||
if not self._is_enabled:
|
||||
return None
|
||||
return await self.async_trigger(run_variables, context, skip_condition)
|
||||
|
||||
async def _async_attach_triggers(
|
||||
self, home_assistant_start: bool
|
||||
) -> Callable[[], None] | None:
|
||||
"""Set up the triggers."""
|
||||
this = None
|
||||
self.async_write_ha_state()
|
||||
if state := self.hass.states.get(self.entity_id):
|
||||
this = state.as_dict()
|
||||
variables = {"this": this}
|
||||
@@ -818,7 +836,7 @@ class AutomationEntity(BaseAutomationEntity, RestoreEntity):
|
||||
return await async_initialize_triggers(
|
||||
self.hass,
|
||||
self._trigger_config,
|
||||
self.async_trigger,
|
||||
self._async_trigger_if_enabled,
|
||||
DOMAIN,
|
||||
str(self.name),
|
||||
self._log_callback,
|
||||
|
@@ -118,8 +118,8 @@ class AzureDevOpsEntity(CoordinatorEntity[DataUpdateCoordinator[list[DevOpsBuild
|
||||
"""Initialize the Azure DevOps entity."""
|
||||
super().__init__(coordinator)
|
||||
self.entity_description = entity_description
|
||||
self._attr_unique_id: str = "_".join(
|
||||
[entity_description.organization, entity_description.key]
|
||||
self._attr_unique_id: str = (
|
||||
f"{entity_description.organization}_{entity_description.key}"
|
||||
)
|
||||
self._organization: str = entity_description.organization
|
||||
self._project_name: str = entity_description.project.name
|
||||
|
@@ -18,7 +18,13 @@ from .const import CONF_SYNC_TIME, DEFAULT_SYNC_TIME, DOMAIN
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
PLATFORMS = [Platform.BINARY_SENSOR, Platform.CLIMATE, Platform.FAN, Platform.LIGHT]
|
||||
PLATFORMS = [
|
||||
Platform.BINARY_SENSOR,
|
||||
Platform.CLIMATE,
|
||||
Platform.FAN,
|
||||
Platform.LIGHT,
|
||||
Platform.SELECT,
|
||||
]
|
||||
|
||||
|
||||
KEEP_ALIVE_INTERVAL = timedelta(minutes=1)
|
||||
|
@@ -27,6 +27,11 @@
|
||||
"off": "mdi:pump-off"
|
||||
}
|
||||
}
|
||||
},
|
||||
"select": {
|
||||
"temperature_range": {
|
||||
"default": "mdi:thermometer-lines"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
52
homeassistant/components/balboa/select.py
Normal file
52
homeassistant/components/balboa/select.py
Normal file
@@ -0,0 +1,52 @@
|
||||
"""Support for Spa Client selects."""
|
||||
|
||||
from pybalboa import SpaClient, SpaControl
|
||||
from pybalboa.enums import LowHighRange
|
||||
|
||||
from homeassistant.components.select import SelectEntity
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
|
||||
from .const import DOMAIN
|
||||
from .entity import BalboaEntity
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback
|
||||
) -> None:
|
||||
"""Set up the spa select entity."""
|
||||
spa: SpaClient = hass.data[DOMAIN][entry.entry_id]
|
||||
async_add_entities([BalboaTempRangeSelectEntity(spa.temperature_range)])
|
||||
|
||||
|
||||
class BalboaTempRangeSelectEntity(BalboaEntity, SelectEntity):
|
||||
"""Representation of a Temperature Range select."""
|
||||
|
||||
_attr_icon = "mdi:thermometer-lines"
|
||||
_attr_name = "Temperature range"
|
||||
_attr_unique_id = "temperature_range"
|
||||
_attr_translation_key = "temperature_range"
|
||||
_attr_options = [
|
||||
LowHighRange.LOW.name.lower(),
|
||||
LowHighRange.HIGH.name.lower(),
|
||||
]
|
||||
|
||||
def __init__(self, control: SpaControl) -> None:
|
||||
"""Initialise the select."""
|
||||
super().__init__(control.client, "TempHiLow")
|
||||
self._control = control
|
||||
|
||||
@property
|
||||
def current_option(self) -> str | None:
|
||||
"""Return current select option."""
|
||||
if self._control.state == LowHighRange.HIGH:
|
||||
return LowHighRange.HIGH.name.lower()
|
||||
return LowHighRange.LOW.name.lower()
|
||||
|
||||
async def async_select_option(self, option: str) -> None:
|
||||
"""Select temperature range high/low mode."""
|
||||
if option == LowHighRange.HIGH.name.lower():
|
||||
await self._client.set_temperature_range(LowHighRange.HIGH)
|
||||
else:
|
||||
await self._client.set_temperature_range(LowHighRange.LOW)
|
@@ -65,6 +65,15 @@
|
||||
"only_light": {
|
||||
"name": "Light"
|
||||
}
|
||||
},
|
||||
"select": {
|
||||
"temperature_range": {
|
||||
"name": "Temperature range",
|
||||
"state": {
|
||||
"low": "Low",
|
||||
"high": "High"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@@ -201,9 +201,9 @@ async def async_setup_platform(
|
||||
|
||||
if radius > dist or stations_list.intersection((station_id, station_uid)):
|
||||
if name:
|
||||
uid = "_".join([network.network_id, name, station_id])
|
||||
uid = f"{network.network_id}_{name}_{station_id}"
|
||||
else:
|
||||
uid = "_".join([network.network_id, station_id])
|
||||
uid = f"{network.network_id}_{station_id}"
|
||||
entity_id = async_generate_entity_id(ENTITY_ID_FORMAT, uid, hass=hass)
|
||||
devices.append(CityBikesStation(network, station_id, entity_id))
|
||||
|
||||
|
@@ -33,7 +33,7 @@ class HVACMode(StrEnum):
|
||||
# Device is in Dry/Humidity mode
|
||||
DRY = "dry"
|
||||
|
||||
# Only the fan is on, not fan and another mode like cool
|
||||
# Only the fan is on, not fan and another mode like cool
|
||||
FAN_ONLY = "fan_only"
|
||||
|
||||
|
||||
|
@@ -18,10 +18,6 @@ class ColorExtractorConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle a flow initialized by the user."""
|
||||
if self._async_current_entries():
|
||||
return self.async_abort(reason="single_instance_allowed")
|
||||
|
||||
if user_input is not None:
|
||||
return self.async_create_entry(title=DEFAULT_NAME, data={})
|
||||
|
||||
return self.async_show_form(step_id="user")
|
||||
|
@@ -4,5 +4,6 @@
|
||||
"codeowners": ["@GenericStudent"],
|
||||
"config_flow": true,
|
||||
"documentation": "https://www.home-assistant.io/integrations/color_extractor",
|
||||
"requirements": ["colorthief==0.2.1"]
|
||||
"requirements": ["colorthief==0.2.1"],
|
||||
"single_config_entry": true
|
||||
}
|
||||
|
@@ -4,9 +4,6 @@
|
||||
"user": {
|
||||
"description": "[%key:common::config_flow::description::confirm_setup%]"
|
||||
}
|
||||
},
|
||||
"abort": {
|
||||
"single_instance_allowed": "[%key:common::config_flow::abort::single_instance_allowed%]"
|
||||
}
|
||||
},
|
||||
"services": {
|
||||
|
@@ -136,6 +136,9 @@ class DeviceConsumption(TypedDict):
|
||||
# This is an ever increasing value
|
||||
stat_consumption: str
|
||||
|
||||
# An optional custom name for display in energy graphs
|
||||
name: str | None
|
||||
|
||||
|
||||
class EnergyPreferences(TypedDict):
|
||||
"""Dictionary holding the energy data."""
|
||||
@@ -287,6 +290,7 @@ ENERGY_SOURCE_SCHEMA = vol.All(
|
||||
DEVICE_CONSUMPTION_SCHEMA = vol.Schema(
|
||||
{
|
||||
vol.Required("stat_consumption"): str,
|
||||
vol.Optional("name"): str,
|
||||
}
|
||||
)
|
||||
|
||||
|
@@ -20,5 +20,5 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/frontend",
|
||||
"integration_type": "system",
|
||||
"quality_scale": "internal",
|
||||
"requirements": ["home-assistant-frontend==20240404.1"]
|
||||
"requirements": ["home-assistant-frontend==20240404.2"]
|
||||
}
|
||||
|
@@ -2,6 +2,7 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Mapping
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
@@ -13,7 +14,7 @@ from fyta_cli.fyta_exceptions import (
|
||||
)
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
|
||||
from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult
|
||||
from homeassistant.const import CONF_PASSWORD, CONF_USERNAME
|
||||
|
||||
from .const import DOMAIN
|
||||
@@ -30,36 +31,70 @@ class FytaConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
"""Handle a config flow for Fyta."""
|
||||
|
||||
VERSION = 1
|
||||
_entry: ConfigEntry | None = None
|
||||
|
||||
async def async_auth(self, user_input: Mapping[str, Any]) -> dict[str, str]:
|
||||
"""Reusable Auth Helper."""
|
||||
fyta = FytaConnector(user_input[CONF_USERNAME], user_input[CONF_PASSWORD])
|
||||
|
||||
try:
|
||||
await fyta.login()
|
||||
except FytaConnectionError:
|
||||
return {"base": "cannot_connect"}
|
||||
except FytaAuthentificationError:
|
||||
return {"base": "invalid_auth"}
|
||||
except FytaPasswordError:
|
||||
return {"base": "invalid_auth", CONF_PASSWORD: "password_error"}
|
||||
except Exception as e: # pylint: disable=broad-except
|
||||
_LOGGER.error(e)
|
||||
return {"base": "unknown"}
|
||||
finally:
|
||||
await fyta.client.close()
|
||||
|
||||
return {}
|
||||
|
||||
async def async_step_user(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle the initial step."""
|
||||
|
||||
errors = {}
|
||||
if user_input:
|
||||
self._async_abort_entries_match({CONF_USERNAME: user_input[CONF_USERNAME]})
|
||||
|
||||
fyta = FytaConnector(user_input[CONF_USERNAME], user_input[CONF_PASSWORD])
|
||||
|
||||
try:
|
||||
await fyta.login()
|
||||
except FytaConnectionError:
|
||||
errors["base"] = "cannot_connect"
|
||||
except FytaAuthentificationError:
|
||||
errors["base"] = "invalid_auth"
|
||||
except FytaPasswordError:
|
||||
errors["base"] = "invalid_auth"
|
||||
errors[CONF_PASSWORD] = "password_error"
|
||||
except Exception: # pylint: disable=broad-except
|
||||
errors["base"] = "unknown"
|
||||
else:
|
||||
if not (errors := await self.async_auth(user_input)):
|
||||
return self.async_create_entry(
|
||||
title=user_input[CONF_USERNAME], data=user_input
|
||||
)
|
||||
finally:
|
||||
await fyta.client.close()
|
||||
|
||||
return self.async_show_form(
|
||||
step_id="user", data_schema=DATA_SCHEMA, errors=errors
|
||||
)
|
||||
|
||||
async def async_step_reauth(
|
||||
self, entry_data: Mapping[str, Any]
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle flow upon an API authentication error."""
|
||||
self._entry = self.hass.config_entries.async_get_entry(self.context["entry_id"])
|
||||
return await self.async_step_reauth_confirm()
|
||||
|
||||
async def async_step_reauth_confirm(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle reauthorization flow."""
|
||||
errors = {}
|
||||
assert self._entry is not None
|
||||
|
||||
if user_input and not (errors := await self.async_auth(user_input)):
|
||||
return self.async_update_reload_and_abort(
|
||||
self._entry, data={**self._entry.data, **user_input}
|
||||
)
|
||||
|
||||
data_schema = self.add_suggested_values_to_schema(
|
||||
DATA_SCHEMA,
|
||||
{CONF_USERNAME: self._entry.data[CONF_USERNAME], **(user_input or {})},
|
||||
)
|
||||
return self.async_show_form(
|
||||
step_id="reauth_confirm",
|
||||
data_schema=data_schema,
|
||||
errors=errors,
|
||||
)
|
||||
|
@@ -13,7 +13,7 @@ from fyta_cli.fyta_exceptions import (
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import ConfigEntryError, ConfigEntryNotReady
|
||||
from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
|
||||
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
@@ -52,4 +52,4 @@ class FytaCoordinator(DataUpdateCoordinator[dict[int, dict[str, Any]]]):
|
||||
except FytaConnectionError as ex:
|
||||
raise ConfigEntryNotReady from ex
|
||||
except (FytaAuthentificationError, FytaPasswordError) as ex:
|
||||
raise ConfigEntryError from ex
|
||||
raise ConfigEntryAuthFailed from ex
|
||||
|
@@ -8,8 +8,19 @@
|
||||
"username": "[%key:common::config_flow::data::username%]",
|
||||
"password": "[%key:common::config_flow::data::password%]"
|
||||
}
|
||||
},
|
||||
"reauth_confirm": {
|
||||
"description": "Update your credentials for FYTA API",
|
||||
"data": {
|
||||
"username": "[%key:common::config_flow::data::username%]",
|
||||
"password": "[%key:common::config_flow::data::password%]"
|
||||
}
|
||||
}
|
||||
},
|
||||
"abort": {
|
||||
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
|
||||
"reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]"
|
||||
},
|
||||
"error": {
|
||||
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
|
||||
"invalid_auth": "[%key:common::config_flow::error::invalid_auth%]",
|
||||
|
@@ -196,7 +196,7 @@ SCHEMA_BACKUP_PARTIAL = SCHEMA_BACKUP_FULL.extend(
|
||||
{
|
||||
vol.Optional(ATTR_HOMEASSISTANT): cv.boolean,
|
||||
vol.Optional(ATTR_FOLDERS): vol.All(cv.ensure_list, [cv.string]),
|
||||
vol.Optional(ATTR_ADDONS): vol.All(cv.ensure_list, [cv.slug]),
|
||||
vol.Optional(ATTR_ADDONS): vol.All(cv.ensure_list, [VALID_ADDON_SLUG]),
|
||||
}
|
||||
)
|
||||
|
||||
@@ -211,7 +211,7 @@ SCHEMA_RESTORE_PARTIAL = SCHEMA_RESTORE_FULL.extend(
|
||||
{
|
||||
vol.Optional(ATTR_HOMEASSISTANT): cv.boolean,
|
||||
vol.Optional(ATTR_FOLDERS): vol.All(cv.ensure_list, [cv.string]),
|
||||
vol.Optional(ATTR_ADDONS): vol.All(cv.ensure_list, [cv.slug]),
|
||||
vol.Optional(ATTR_ADDONS): vol.All(cv.ensure_list, [VALID_ADDON_SLUG]),
|
||||
}
|
||||
)
|
||||
|
||||
|
@@ -197,7 +197,6 @@ class HassIOIngress(HomeAssistantView):
|
||||
content_type or simple_response.content_type
|
||||
):
|
||||
simple_response.enable_compression()
|
||||
await simple_response.prepare(request)
|
||||
return simple_response
|
||||
|
||||
# Stream response
|
||||
|
@@ -22,7 +22,7 @@ from .const import (
|
||||
from .handler import async_apply_suggestion
|
||||
from .issues import Issue, Suggestion
|
||||
|
||||
SUGGESTION_CONFIRMATION_REQUIRED = {"system_execute_reboot"}
|
||||
SUGGESTION_CONFIRMATION_REQUIRED = {"system_adopt_data_disk", "system_execute_reboot"}
|
||||
|
||||
EXTRA_PLACEHOLDERS = {
|
||||
"issue_mount_mount_failed": {
|
||||
|
@@ -51,8 +51,15 @@
|
||||
"title": "Multiple data disks detected",
|
||||
"fix_flow": {
|
||||
"step": {
|
||||
"system_rename_data_disk": {
|
||||
"description": "`{reference}` is a filesystem with the name hassos-data and is not the active data disk. This can cause Home Assistant to choose the wrong data disk at system reboot.\n\nUse the fix option to rename the filesystem to prevent this. Alternatively you can move the data disk to the drive (overwriting its contents) or remove the drive from the system."
|
||||
"fix_menu": {
|
||||
"description": "`{reference}` is a filesystem with the name hassos-data and is not the active data disk. This can cause Home Assistant to choose the wrong data disk at system reboot.\n\nUse the 'Rename' option to rename the filesystem to prevent this. Use the 'Adopt' option to make that your data disk and rename the existing one. Alternatively you can move the data disk to the drive (overwriting its contents) or remove the drive from the system.",
|
||||
"menu_options": {
|
||||
"system_rename_data_disk": "Rename",
|
||||
"system_adopt_data_disk": "Adopt"
|
||||
}
|
||||
},
|
||||
"system_adopt_data_disk": {
|
||||
"description": "This fix will initiate a system reboot which will make Home Assistant and all the Add-ons inaccessible for a brief period. After the reboot `{reference}` will be the data disk of Home Assistant and your existing data disk will be renamed and ignored."
|
||||
}
|
||||
},
|
||||
"abort": {
|
||||
|
@@ -2,15 +2,36 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from functools import partial
|
||||
|
||||
from holidays import country_holidays
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import Platform
|
||||
from homeassistant.const import CONF_COUNTRY, Platform
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.setup import SetupPhases, async_pause_setup
|
||||
|
||||
from .const import CONF_PROVINCE
|
||||
|
||||
PLATFORMS: list[Platform] = [Platform.CALENDAR]
|
||||
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
"""Set up Holiday from a config entry."""
|
||||
country: str = entry.data[CONF_COUNTRY]
|
||||
province: str | None = entry.data.get(CONF_PROVINCE)
|
||||
|
||||
# We only import here to ensure that that its not imported later
|
||||
# in the event loop since the platforms will call country_holidays
|
||||
# which loads python code from disk.
|
||||
with async_pause_setup(hass, SetupPhases.WAIT_IMPORT_PACKAGES):
|
||||
# import executor job is used here because multiple integrations use
|
||||
# the holidays library and it is not thread safe to import it in parallel
|
||||
# https://github.com/python/cpython/issues/83065
|
||||
await hass.async_add_import_executor_job(
|
||||
partial(country_holidays, country, subdiv=province)
|
||||
)
|
||||
|
||||
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
|
||||
|
||||
return True
|
||||
|
@@ -10,7 +10,8 @@ import os
|
||||
import socket
|
||||
import ssl
|
||||
from tempfile import NamedTemporaryFile
|
||||
from typing import Any, Final, TypedDict, cast
|
||||
from typing import Any, Final, Required, TypedDict, cast
|
||||
from urllib.parse import quote_plus, urljoin
|
||||
|
||||
from aiohttp import web
|
||||
from aiohttp.abc import AbstractStreamWriter
|
||||
@@ -30,8 +31,20 @@ from yarl import URL
|
||||
|
||||
from homeassistant.components.network import async_get_source_ip
|
||||
from homeassistant.const import EVENT_HOMEASSISTANT_STOP, SERVER_PORT
|
||||
from homeassistant.core import Event, HomeAssistant
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.core import (
|
||||
Event,
|
||||
HomeAssistant,
|
||||
ServiceCall,
|
||||
ServiceResponse,
|
||||
SupportsResponse,
|
||||
callback,
|
||||
)
|
||||
from homeassistant.exceptions import (
|
||||
HomeAssistantError,
|
||||
ServiceValidationError,
|
||||
Unauthorized,
|
||||
UnknownUser,
|
||||
)
|
||||
from homeassistant.helpers import storage
|
||||
import homeassistant.helpers.config_validation as cv
|
||||
from homeassistant.helpers.http import (
|
||||
@@ -53,9 +66,13 @@ from homeassistant.util import dt as dt_util, ssl as ssl_util
|
||||
from homeassistant.util.async_ import create_eager_task
|
||||
from homeassistant.util.json import json_loads
|
||||
|
||||
from .auth import async_setup_auth
|
||||
from .auth import async_setup_auth, async_sign_path
|
||||
from .ban import setup_bans
|
||||
from .const import KEY_HASS_REFRESH_TOKEN_ID, KEY_HASS_USER # noqa: F401
|
||||
from .const import ( # noqa: F401
|
||||
KEY_HASS_REFRESH_TOKEN_ID,
|
||||
KEY_HASS_USER,
|
||||
StrictConnectionMode,
|
||||
)
|
||||
from .cors import setup_cors
|
||||
from .decorators import require_admin # noqa: F401
|
||||
from .forwarded import async_setup_forwarded
|
||||
@@ -80,6 +97,7 @@ CONF_TRUSTED_PROXIES: Final = "trusted_proxies"
|
||||
CONF_LOGIN_ATTEMPTS_THRESHOLD: Final = "login_attempts_threshold"
|
||||
CONF_IP_BAN_ENABLED: Final = "ip_ban_enabled"
|
||||
CONF_SSL_PROFILE: Final = "ssl_profile"
|
||||
CONF_STRICT_CONNECTION: Final = "strict_connection"
|
||||
|
||||
SSL_MODERN: Final = "modern"
|
||||
SSL_INTERMEDIATE: Final = "intermediate"
|
||||
@@ -129,6 +147,9 @@ HTTP_SCHEMA: Final = vol.All(
|
||||
[SSL_INTERMEDIATE, SSL_MODERN]
|
||||
),
|
||||
vol.Optional(CONF_USE_X_FRAME_OPTIONS, default=True): cv.boolean,
|
||||
vol.Optional(
|
||||
CONF_STRICT_CONNECTION, default=StrictConnectionMode.DISABLED
|
||||
): vol.In([e.value for e in StrictConnectionMode]),
|
||||
}
|
||||
),
|
||||
)
|
||||
@@ -152,6 +173,7 @@ class ConfData(TypedDict, total=False):
|
||||
login_attempts_threshold: int
|
||||
ip_ban_enabled: bool
|
||||
ssl_profile: str
|
||||
strict_connection: Required[StrictConnectionMode]
|
||||
|
||||
|
||||
@bind_hass
|
||||
@@ -218,6 +240,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
login_threshold=login_threshold,
|
||||
is_ban_enabled=is_ban_enabled,
|
||||
use_x_frame_options=use_x_frame_options,
|
||||
strict_connection_non_cloud=conf[CONF_STRICT_CONNECTION],
|
||||
)
|
||||
|
||||
async def stop_server(event: Event) -> None:
|
||||
@@ -247,6 +270,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
local_ip, host, server_port, ssl_certificate is not None
|
||||
)
|
||||
|
||||
_setup_services(hass, conf)
|
||||
return True
|
||||
|
||||
|
||||
@@ -331,6 +355,7 @@ class HomeAssistantHTTP:
|
||||
login_threshold: int,
|
||||
is_ban_enabled: bool,
|
||||
use_x_frame_options: bool,
|
||||
strict_connection_non_cloud: StrictConnectionMode,
|
||||
) -> None:
|
||||
"""Initialize the server."""
|
||||
self.app[KEY_HASS] = self.hass
|
||||
@@ -347,7 +372,7 @@ class HomeAssistantHTTP:
|
||||
if is_ban_enabled:
|
||||
setup_bans(self.hass, self.app, login_threshold)
|
||||
|
||||
await async_setup_auth(self.hass, self.app)
|
||||
await async_setup_auth(self.hass, self.app, strict_connection_non_cloud)
|
||||
|
||||
setup_headers(self.app, use_x_frame_options)
|
||||
setup_cors(self.app, cors_origins)
|
||||
@@ -577,3 +602,59 @@ async def start_http_server_and_save_config(
|
||||
]
|
||||
|
||||
store.async_delay_save(lambda: conf, SAVE_DELAY)
|
||||
|
||||
|
||||
@callback
|
||||
def _setup_services(hass: HomeAssistant, conf: ConfData) -> None:
|
||||
"""Set up services for HTTP component."""
|
||||
|
||||
async def create_temporary_strict_connection_url(
|
||||
call: ServiceCall,
|
||||
) -> ServiceResponse:
|
||||
"""Create a strict connection url and return it."""
|
||||
# Copied form homeassistant/helpers/service.py#_async_admin_handler
|
||||
# as the helper supports no responses yet
|
||||
if call.context.user_id:
|
||||
user = await hass.auth.async_get_user(call.context.user_id)
|
||||
if user is None:
|
||||
raise UnknownUser(context=call.context)
|
||||
if not user.is_admin:
|
||||
raise Unauthorized(context=call.context)
|
||||
|
||||
if conf[CONF_STRICT_CONNECTION] is StrictConnectionMode.DISABLED:
|
||||
raise ServiceValidationError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="strict_connection_not_enabled_non_cloud",
|
||||
)
|
||||
|
||||
try:
|
||||
url = get_url(hass, prefer_external=True, allow_internal=False)
|
||||
except NoURLAvailableError as ex:
|
||||
raise ServiceValidationError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="no_external_url_available",
|
||||
) from ex
|
||||
|
||||
# to avoid circular import
|
||||
# pylint: disable-next=import-outside-toplevel
|
||||
from homeassistant.components.auth import STRICT_CONNECTION_URL
|
||||
|
||||
path = async_sign_path(
|
||||
hass,
|
||||
STRICT_CONNECTION_URL,
|
||||
datetime.timedelta(hours=1),
|
||||
use_content_user=True,
|
||||
)
|
||||
url = urljoin(url, path)
|
||||
|
||||
return {
|
||||
"url": f"https://login.home-assistant.io?u={quote_plus(url)}",
|
||||
"direct_url": url,
|
||||
}
|
||||
|
||||
hass.services.async_register(
|
||||
DOMAIN,
|
||||
"create_temporary_strict_connection_url",
|
||||
create_temporary_strict_connection_url,
|
||||
supports_response=SupportsResponse.ONLY,
|
||||
)
|
||||
|
@@ -4,14 +4,18 @@ from __future__ import annotations
|
||||
|
||||
from collections.abc import Awaitable, Callable
|
||||
from datetime import timedelta
|
||||
from http import HTTPStatus
|
||||
from ipaddress import ip_address
|
||||
import logging
|
||||
import os
|
||||
import secrets
|
||||
import time
|
||||
from typing import Any, Final
|
||||
|
||||
from aiohttp import hdrs
|
||||
from aiohttp.web import Application, Request, StreamResponse, middleware
|
||||
from aiohttp.web import Application, Request, Response, StreamResponse, middleware
|
||||
from aiohttp.web_exceptions import HTTPBadRequest
|
||||
from aiohttp_session import session_middleware
|
||||
import jwt
|
||||
from jwt import api_jws
|
||||
from yarl import URL
|
||||
@@ -27,7 +31,13 @@ from homeassistant.helpers.network import is_cloud_connection
|
||||
from homeassistant.helpers.storage import Store
|
||||
from homeassistant.util.network import is_local
|
||||
|
||||
from .const import KEY_AUTHENTICATED, KEY_HASS_REFRESH_TOKEN_ID, KEY_HASS_USER
|
||||
from .const import (
|
||||
KEY_AUTHENTICATED,
|
||||
KEY_HASS_REFRESH_TOKEN_ID,
|
||||
KEY_HASS_USER,
|
||||
StrictConnectionMode,
|
||||
)
|
||||
from .session import HomeAssistantCookieStorage
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
@@ -39,6 +49,10 @@ SAFE_QUERY_PARAMS: Final = ["height", "width"]
|
||||
STORAGE_VERSION = 1
|
||||
STORAGE_KEY = "http.auth"
|
||||
CONTENT_USER_NAME = "Home Assistant Content"
|
||||
STRICT_CONNECTION_EXCLUDED_PATH = "/api/webhook/"
|
||||
STRICT_CONNECTION_STATIC_PAGE = os.path.join(
|
||||
os.path.dirname(__file__), "strict_connection_static_page.html"
|
||||
)
|
||||
|
||||
|
||||
@callback
|
||||
@@ -48,13 +62,16 @@ def async_sign_path(
|
||||
expiration: timedelta,
|
||||
*,
|
||||
refresh_token_id: str | None = None,
|
||||
use_content_user: bool = False,
|
||||
) -> str:
|
||||
"""Sign a path for temporary access without auth header."""
|
||||
if (secret := hass.data.get(DATA_SIGN_SECRET)) is None:
|
||||
secret = hass.data[DATA_SIGN_SECRET] = secrets.token_hex()
|
||||
|
||||
if refresh_token_id is None:
|
||||
if connection := websocket_api.current_connection.get():
|
||||
if use_content_user:
|
||||
refresh_token_id = hass.data[STORAGE_KEY]
|
||||
elif connection := websocket_api.current_connection.get():
|
||||
refresh_token_id = connection.refresh_token_id
|
||||
elif (
|
||||
request := current_request.get()
|
||||
@@ -114,7 +131,11 @@ def async_user_not_allowed_do_auth(
|
||||
return "User cannot authenticate remotely"
|
||||
|
||||
|
||||
async def async_setup_auth(hass: HomeAssistant, app: Application) -> None:
|
||||
async def async_setup_auth(
|
||||
hass: HomeAssistant,
|
||||
app: Application,
|
||||
strict_connection_mode_non_cloud: StrictConnectionMode,
|
||||
) -> None:
|
||||
"""Create auth middleware for the app."""
|
||||
store = Store[dict[str, Any]](hass, STORAGE_VERSION, STORAGE_KEY)
|
||||
if (data := await store.async_load()) is None:
|
||||
@@ -135,6 +156,16 @@ async def async_setup_auth(hass: HomeAssistant, app: Application) -> None:
|
||||
await store.async_save(data)
|
||||
|
||||
hass.data[STORAGE_KEY] = refresh_token.id
|
||||
strict_connection_static_file_content = None
|
||||
if strict_connection_mode_non_cloud is StrictConnectionMode.STATIC_PAGE:
|
||||
|
||||
def read_static_page() -> str:
|
||||
with open(STRICT_CONNECTION_STATIC_PAGE, encoding="utf-8") as file:
|
||||
return file.read()
|
||||
|
||||
strict_connection_static_file_content = await hass.async_add_executor_job(
|
||||
read_static_page
|
||||
)
|
||||
|
||||
@callback
|
||||
def async_validate_auth_header(request: Request) -> bool:
|
||||
@@ -224,6 +255,22 @@ async def async_setup_auth(hass: HomeAssistant, app: Application) -> None:
|
||||
authenticated = True
|
||||
auth_type = "signed request"
|
||||
|
||||
if (
|
||||
not authenticated
|
||||
and strict_connection_mode_non_cloud is not StrictConnectionMode.DISABLED
|
||||
and not request.path.startswith(STRICT_CONNECTION_EXCLUDED_PATH)
|
||||
and not await hass.auth.session.async_validate_request_for_strict_connection_session(
|
||||
request
|
||||
)
|
||||
and (
|
||||
resp := _async_perform_action_on_non_local(
|
||||
request, strict_connection_static_file_content
|
||||
)
|
||||
)
|
||||
is not None
|
||||
):
|
||||
return resp
|
||||
|
||||
if authenticated and _LOGGER.isEnabledFor(logging.DEBUG):
|
||||
_LOGGER.debug(
|
||||
"Authenticated %s for %s using %s",
|
||||
@@ -235,4 +282,43 @@ async def async_setup_auth(hass: HomeAssistant, app: Application) -> None:
|
||||
request[KEY_AUTHENTICATED] = authenticated
|
||||
return await handler(request)
|
||||
|
||||
app.middlewares.append(session_middleware(HomeAssistantCookieStorage(hass)))
|
||||
app.middlewares.append(auth_middleware)
|
||||
|
||||
|
||||
@callback
|
||||
def _async_perform_action_on_non_local(
|
||||
request: Request,
|
||||
strict_connection_static_file_content: str | None,
|
||||
) -> StreamResponse | None:
|
||||
"""Perform strict connection mode action if the request is not local.
|
||||
|
||||
The function does the following:
|
||||
- Try to get the IP address of the request. If it fails, assume it's not local
|
||||
- If the request is local, return None (allow the request to continue)
|
||||
- If strict_connection_static_file_content is set, return a response with the content
|
||||
- Otherwise close the connection and raise an exception
|
||||
"""
|
||||
try:
|
||||
ip_address_ = ip_address(request.remote) # type: ignore[arg-type]
|
||||
except ValueError:
|
||||
_LOGGER.debug("Invalid IP address: %s", request.remote)
|
||||
ip_address_ = None
|
||||
|
||||
if ip_address_ and is_local(ip_address_):
|
||||
return None
|
||||
|
||||
_LOGGER.debug("Perform strict connection action for %s", ip_address_)
|
||||
if strict_connection_static_file_content:
|
||||
return Response(
|
||||
text=strict_connection_static_file_content,
|
||||
content_type="text/html",
|
||||
status=HTTPStatus.IM_A_TEAPOT,
|
||||
)
|
||||
|
||||
if transport := request.transport:
|
||||
# it should never happen that we don't have a transport
|
||||
transport.close()
|
||||
|
||||
# We need to raise an exception to stop processing the request
|
||||
raise HTTPBadRequest
|
||||
|
@@ -1,8 +1,17 @@
|
||||
"""HTTP specific constants."""
|
||||
|
||||
from enum import StrEnum
|
||||
from typing import Final
|
||||
|
||||
from homeassistant.helpers.http import KEY_AUTHENTICATED, KEY_HASS # noqa: F401
|
||||
|
||||
KEY_HASS_USER: Final = "hass_user"
|
||||
KEY_HASS_REFRESH_TOKEN_ID: Final = "hass_refresh_token_id"
|
||||
|
||||
|
||||
class StrictConnectionMode(StrEnum):
|
||||
"""Enum for strict connection mode."""
|
||||
|
||||
DISABLED = "disabled"
|
||||
STATIC_PAGE = "static_page"
|
||||
DROP_CONNECTION = "drop_connection"
|
||||
|
5
homeassistant/components/http/icons.json
Normal file
5
homeassistant/components/http/icons.json
Normal file
@@ -0,0 +1,5 @@
|
||||
{
|
||||
"services": {
|
||||
"create_temporary_strict_connection_url": "mdi:login-variant"
|
||||
}
|
||||
}
|
1
homeassistant/components/http/services.yaml
Normal file
1
homeassistant/components/http/services.yaml
Normal file
@@ -0,0 +1 @@
|
||||
create_temporary_strict_connection_url: ~
|
160  homeassistant/components/http/session.py  Normal file
@@ -0,0 +1,160 @@
"""Session http module."""

from functools import lru_cache
import logging

from aiohttp.web import Request, StreamResponse
from aiohttp_session import Session, SessionData
from aiohttp_session.cookie_storage import EncryptedCookieStorage
from cryptography.fernet import InvalidToken

from homeassistant.auth.const import REFRESH_TOKEN_EXPIRATION
from homeassistant.core import HomeAssistant
from homeassistant.helpers.json import json_dumps
from homeassistant.helpers.network import is_cloud_connection
from homeassistant.util.json import JSON_DECODE_EXCEPTIONS, json_loads

from .ban import process_wrong_login

_LOGGER = logging.getLogger(__name__)

COOKIE_NAME = "SC"
PREFIXED_COOKIE_NAME = f"__Host-{COOKIE_NAME}"
SESSION_CACHE_SIZE = 16


def _get_cookie_name(is_secure: bool) -> str:
    """Return the cookie name."""
    return PREFIXED_COOKIE_NAME if is_secure else COOKIE_NAME


class HomeAssistantCookieStorage(EncryptedCookieStorage):
    """Home Assistant cookie storage.

    Own class is required:
        - to set the secure flag based on the connection type
        - to use a LRU cache for session decryption
    """

    def __init__(self, hass: HomeAssistant) -> None:
        """Initialize the cookie storage."""
        super().__init__(
            hass.auth.session.key,
            cookie_name=PREFIXED_COOKIE_NAME,
            max_age=int(REFRESH_TOKEN_EXPIRATION),
            httponly=True,
            samesite="Lax",
            secure=True,
            encoder=json_dumps,
            decoder=json_loads,
        )
        self._hass = hass

    def _secure_connection(self, request: Request) -> bool:
        """Return if the connection is secure (https)."""
        return is_cloud_connection(self._hass) or request.secure

    def load_cookie(self, request: Request) -> str | None:
        """Load cookie."""
        is_secure = self._secure_connection(request)
        cookie_name = _get_cookie_name(is_secure)
        return request.cookies.get(cookie_name)

    @lru_cache(maxsize=SESSION_CACHE_SIZE)
    def _decrypt_cookie(self, cookie: str) -> Session | None:
        """Decrypt and validate cookie."""
        try:
            data = SessionData(  # type: ignore[misc]
                self._decoder(
                    self._fernet.decrypt(
                        cookie.encode("utf-8"), ttl=self.max_age
                    ).decode("utf-8")
                )
            )
        except (InvalidToken, TypeError, ValueError, *JSON_DECODE_EXCEPTIONS):
            _LOGGER.warning("Cannot decrypt/parse cookie value")
            return None

        session = Session(None, data=data, new=data is None, max_age=self.max_age)

        # Validate session if not empty
        if (
            not session.empty
            and not self._hass.auth.session.async_validate_strict_connection_session(
                session
            )
        ):
            # Invalidate session as it is not valid
            session.invalidate()

        return session

    async def new_session(self) -> Session:
        """Create a new session and mark it as changed."""
        session = Session(None, data=None, new=True, max_age=self.max_age)
        session.changed()
        return session

    async def load_session(self, request: Request) -> Session:
        """Load session."""
        # Split parent function to use lru_cache
        if (cookie := self.load_cookie(request)) is None:
            return await self.new_session()

        if (session := self._decrypt_cookie(cookie)) is None:
            # Decrypting/parsing failed, log wrong login and create a new session
            await process_wrong_login(request)
            session = await self.new_session()

        return session

    async def save_session(
        self, request: Request, response: StreamResponse, session: Session
    ) -> None:
        """Save session."""
        is_secure = self._secure_connection(request)
        cookie_name = _get_cookie_name(is_secure)

        if session.empty:
            response.del_cookie(cookie_name)
        else:
            params = self.cookie_params.copy()
            params["secure"] = is_secure
            params["max_age"] = session.max_age

            cookie_data = self._encoder(self._get_session_data(session)).encode("utf-8")
            response.set_cookie(
                cookie_name,
                self._fernet.encrypt(cookie_data).decode("utf-8"),
                **params,
            )
            # Add Cache-Control header to not cache the cookie as it
            # is used for session management
            self._add_cache_control_header(response)

    @staticmethod
    def _add_cache_control_header(response: StreamResponse) -> None:
        """Add/set cache control header to no-cache="Set-Cookie"."""
        # Structure of the Cache-Control header defined in
        # https://datatracker.ietf.org/doc/html/rfc2068#section-14.9
        if header := response.headers.get("Cache-Control"):
            directives = []
            for directive in header.split(","):
                directive = directive.strip()
                directive_lowered = directive.lower()
                if directive_lowered.startswith("no-cache"):
                    if "set-cookie" in directive_lowered or directive.find("=") == -1:
                        # Set-Cookie is already in the no-cache directive or
                        # the whole request should not be cached -> Nothing to do
                        return

                    # Add Set-Cookie to the no-cache
                    # [:-1] to remove the " at the end of the directive
                    directive = f"{directive[:-1]}, Set-Cookie"

                directives.append(directive)
            header = ", ".join(directives)
        else:
            header = 'no-cache="Set-Cookie"'
        response.headers["Cache-Control"] = header
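A minimal sketch of the cookie-name selection above, runnable on its own (constants copied from the new module); the "__Host-" prefix is only honoured by browsers on secure (HTTPS) connections, which is why the bare name is used otherwise:

# Sketch only - standalone copy of the constants and helper defined above.
COOKIE_NAME = "SC"
PREFIXED_COOKIE_NAME = f"__Host-{COOKIE_NAME}"


def _get_cookie_name(is_secure: bool) -> str:
    """Return the session cookie name for a secure or insecure connection."""
    return PREFIXED_COOKIE_NAME if is_secure else COOKIE_NAME


assert _get_cookie_name(True) == "__Host-SC"
assert _get_cookie_name(False) == "SC"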
140  homeassistant/components/http/strict_connection_static_page.html  Normal file
File diff suppressed because one or more lines are too long
16  homeassistant/components/http/strings.json  Normal file
@@ -0,0 +1,16 @@
{
  "exceptions": {
    "strict_connection_not_enabled_non_cloud": {
      "message": "Strict connection is not enabled for non-cloud requests"
    },
    "no_external_url_available": {
      "message": "No external URL available"
    }
  },
  "services": {
    "create_temporary_strict_connection_url": {
      "name": "Create a temporary strict connection URL",
      "description": "Create a temporary strict connection URL, which can be used to login on another device."
    }
  }
}
@@ -443,23 +443,24 @@ class ImapPushDataUpdateCoordinator(ImapDataUpdateCoordinator):
        _LOGGER.debug("Connected to server %s using IMAP push", entry.data[CONF_SERVER])
        super().__init__(hass, imap_client, entry, None)
        self._push_wait_task: asyncio.Task[None] | None = None
        self.number_of_messages: int | None = None

    async def _async_update_data(self) -> int | None:
        """Update the number of unread emails."""
        await self.async_start()
        return None
        return self.number_of_messages

    async def async_start(self) -> None:
        """Start coordinator."""
        self._push_wait_task = self.hass.async_create_background_task(
            self._async_wait_push_loop(), "Wait for IMAP data push", eager_start=False
            self._async_wait_push_loop(), "Wait for IMAP data push"
        )

    async def _async_wait_push_loop(self) -> None:
        """Wait for data push from server."""
        while True:
            try:
                number_of_messages = await self._async_fetch_number_of_messages()
                self.number_of_messages = await self._async_fetch_number_of_messages()
            except InvalidAuth as ex:
                self.auth_errors += 1
                await self._cleanup()
@@ -489,7 +490,7 @@ class ImapPushDataUpdateCoordinator(ImapDataUpdateCoordinator):
                    continue
            else:
                self.auth_errors = 0
                self.async_set_updated_data(number_of_messages)
                self.async_set_updated_data(self.number_of_messages)
            try:
                idle: asyncio.Future = await self.imap_client.idle_start()
                await self.imap_client.wait_server_push()
@@ -32,6 +32,7 @@ COMPONENTS_WITH_DEMO_PLATFORM = [
    Platform.IMAGE,
    Platform.LAWN_MOWER,
    Platform.LOCK,
    Platform.NOTIFY,
    Platform.SENSOR,
    Platform.SWITCH,
    Platform.WEATHER,
@@ -70,7 +71,7 @@ async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> b
    return True


def _create_issues(hass):
def _create_issues(hass: HomeAssistant) -> None:
    """Create some issue registry issues."""
    async_create_issue(
        hass,
54  homeassistant/components/kitchen_sink/notify.py  Normal file
@@ -0,0 +1,54 @@
"""Demo platform that offers a fake notify entity."""

from __future__ import annotations

from homeassistant.components import persistent_notification
from homeassistant.components.notify import NotifyEntity
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.entity_platform import AddEntitiesCallback

from . import DOMAIN


async def async_setup_entry(
    hass: HomeAssistant,
    config_entry: ConfigEntry,
    async_add_entities: AddEntitiesCallback,
) -> None:
    """Set up the demo notify entity platform."""
    async_add_entities(
        [
            DemoNotify(
                unique_id="just_notify_me",
                device_name="MyBox",
                entity_name="Personal notifier",
            ),
        ]
    )


class DemoNotify(NotifyEntity):
    """Representation of a demo notify entity."""

    _attr_has_entity_name = True
    _attr_should_poll = False

    def __init__(
        self,
        unique_id: str,
        device_name: str,
        entity_name: str | None,
    ) -> None:
        """Initialize the Demo button entity."""
        self._attr_unique_id = unique_id
        self._attr_device_info = DeviceInfo(
            identifiers={(DOMAIN, unique_id)},
            name=device_name,
        )
        self._attr_name = entity_name

    async def async_send_message(self, message: str) -> None:
        """Send out a persistent notification."""
        persistent_notification.async_create(self.hass, message, "Demo notification")
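A usage sketch (not part of the change) showing how the demo entity above could be exercised once the new notify.send_message service is registered; it assumes a running Home Assistant instance in an async context, and the entity_id is a guess derived from the device and entity names used in DemoNotify:

# Sketch only - entity_id and surrounding context are assumptions.
await hass.services.async_call(
    "notify",
    "send_message",
    {
        "entity_id": "notify.mybox_personal_notifier",
        "message": "Hello from kitchen_sink",
    },
    blocking=True,
)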
@@ -517,13 +517,13 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:  # noqa:
                params[ATTR_COLOR_TEMP_KELVIN]
            )
        elif ATTR_RGB_COLOR in params and ColorMode.RGB not in supported_color_modes:
            assert (rgb_color := params.pop(ATTR_RGB_COLOR)) is not None
            rgb_color = params.pop(ATTR_RGB_COLOR)
            assert rgb_color is not None
            if ColorMode.RGBW in supported_color_modes:
                params[ATTR_RGBW_COLOR] = color_util.color_rgb_to_rgbw(*rgb_color)
            elif ColorMode.RGBWW in supported_color_modes:
                # https://github.com/python/mypy/issues/13673
                params[ATTR_RGBWW_COLOR] = color_util.color_rgb_to_rgbww(
                    *rgb_color,  # type: ignore[call-arg]
                    *rgb_color,
                    light.min_color_temp_kelvin,
                    light.max_color_temp_kelvin,
                )
@@ -584,9 +584,9 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:  # noqa:
        elif (
            ATTR_RGBWW_COLOR in params and ColorMode.RGBWW not in supported_color_modes
        ):
            assert (rgbww_color := params.pop(ATTR_RGBWW_COLOR)) is not None
            # https://github.com/python/mypy/issues/13673
            rgb_color = color_util.color_rgbww_to_rgb(  # type: ignore[call-arg]
            rgbww_color = params.pop(ATTR_RGBWW_COLOR)
            assert rgbww_color is not None
            rgb_color = color_util.color_rgbww_to_rgb(
                *rgbww_color, light.min_color_temp_kelvin, light.max_color_temp_kelvin
            )
            if ColorMode.RGB in supported_color_modes:
@@ -463,6 +463,9 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:

async def async_reset_platform(hass: HomeAssistant, integration_name: str) -> None:
    """Release modbus resources."""
    if DOMAIN not in hass.data:
        _LOGGER.error("Modbus cannot reload, because it was never loaded")
        return
    _LOGGER.info("Modbus reloading")
    hubs = hass.data[DOMAIN]
    for name in hubs:
@@ -268,7 +268,7 @@ async def async_start(  # noqa: C901
                    availability_conf[CONF_TOPIC] = f"{topic[:-1]}{base}"

        # If present, the node_id will be included in the discovered object id
        discovery_id = " ".join((node_id, object_id)) if node_id else object_id
        discovery_id = f"{node_id} {object_id}" if node_id else object_id
        discovery_hash = (component, discovery_id)

        if discovery_payload:
@@ -57,7 +57,8 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:

    async def _state_publisher(evt: Event[EventStateChangedData]) -> None:
        entity_id = evt.data["entity_id"]
        assert (new_state := evt.data["new_state"])
        new_state = evt.data["new_state"]
        assert new_state

        payload = new_state.state

@@ -12,12 +12,15 @@
    "abort": {
      "already_configured": "[%key:common::config_flow::abort::already_configured_account%]",
      "already_in_progress": "[%key:common::config_flow::abort::already_in_progress%]",
      "oauth_error": "[%key:common::config_flow::abort::oauth2_error%]",
      "missing_configuration": "[%key:common::config_flow::abort::oauth2_missing_configuration%]",
      "authorize_url_timeout": "[%key:common::config_flow::abort::oauth2_authorize_url_timeout%]",
      "missing_configuration": "[%key:common::config_flow::abort::oauth2_missing_configuration%]",
      "no_url_available": "[%key:common::config_flow::abort::oauth2_no_url_available%]",
      "user_rejected_authorize": "[%key:common::config_flow::abort::oauth2_user_rejected_authorize%]",
      "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]"
      "oauth_error": "[%key:common::config_flow::abort::oauth2_error%]",
      "oauth_failed": "[%key:common::config_flow::abort::oauth2_failed%]",
      "oauth_timeout": "[%key:common::config_flow::abort::oauth2_timeout%]",
      "oauth_unauthorized": "[%key:common::config_flow::abort::oauth2_unauthorized%]",
      "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]",
      "user_rejected_authorize": "[%key:common::config_flow::abort::oauth2_user_rejected_authorize%]"
    },
    "create_entry": {
      "default": "[%key:common::config_flow::create_entry::authenticated%]"
@@ -7,5 +7,5 @@
  "documentation": "https://www.home-assistant.io/integrations/neato",
  "iot_class": "cloud_polling",
  "loggers": ["pybotvac"],
  "requirements": ["pybotvac==0.0.24"]
  "requirements": ["pybotvac==0.0.25"]
}
60  homeassistant/components/netatmo/binary_sensor.py  Normal file
@@ -0,0 +1,60 @@
"""Support for Netatmo binary sensors."""

from homeassistant.components.binary_sensor import (
    BinarySensorDeviceClass,
    BinarySensorEntity,
    BinarySensorEntityDescription,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.entity_platform import AddEntitiesCallback

from .const import NETATMO_CREATE_WEATHER_SENSOR
from .data_handler import NetatmoDevice
from .entity import NetatmoWeatherModuleEntity

BINARY_SENSOR_TYPES: tuple[BinarySensorEntityDescription, ...] = (
    BinarySensorEntityDescription(
        key="reachable",
        device_class=BinarySensorDeviceClass.CONNECTIVITY,
    ),
)


async def async_setup_entry(
    hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback
) -> None:
    """Set up Netatmo binary sensors based on a config entry."""

    @callback
    def _create_weather_binary_sensor_entity(netatmo_device: NetatmoDevice) -> None:
        async_add_entities(
            NetatmoWeatherBinarySensor(netatmo_device, description)
            for description in BINARY_SENSOR_TYPES
            if description.key in netatmo_device.device.features
        )

    entry.async_on_unload(
        async_dispatcher_connect(
            hass, NETATMO_CREATE_WEATHER_SENSOR, _create_weather_binary_sensor_entity
        )
    )


class NetatmoWeatherBinarySensor(NetatmoWeatherModuleEntity, BinarySensorEntity):
    """Implementation of a Netatmo binary sensor."""

    def __init__(
        self, device: NetatmoDevice, description: BinarySensorEntityDescription
    ) -> None:
        """Initialize a Netatmo binary sensor."""
        super().__init__(device)
        self.entity_description = description
        self._attr_unique_id = f"{self.device.entity_id}-{description.key}"

    @callback
    def async_update_callback(self) -> None:
        """Update the entity's state."""
        self._attr_is_on = self.device.reachable
        self.async_write_ha_state()
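A small sketch of the filtering done in the setup callback above: one entity is created per description whose key appears in the device's feature set. The feature names below are invented for illustration; only "reachable" is defined in BINARY_SENSOR_TYPES.

# Sketch only - feature names are made up; the import targets the module added above.
from homeassistant.components.netatmo.binary_sensor import BINARY_SENSOR_TYPES

features = {"temperature", "humidity", "reachable"}
matching = [
    description.key for description in BINARY_SENSOR_TYPES if description.key in features
]
assert matching == ["reachable"]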
@@ -9,6 +9,7 @@ MANUFACTURER = "Netatmo"
DEFAULT_ATTRIBUTION = f"Data provided by {MANUFACTURER}"

PLATFORMS = [
    Platform.BINARY_SENSOR,
    Platform.CAMERA,
    Platform.CLIMATE,
    Platform.COVER,
@@ -3,12 +3,13 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from abc import abstractmethod
|
||||
from typing import Any
|
||||
from typing import Any, cast
|
||||
|
||||
from pyatmo import DeviceType, Home, Module, Room
|
||||
from pyatmo.modules.base_class import NetatmoBase
|
||||
from pyatmo.modules.base_class import NetatmoBase, Place
|
||||
from pyatmo.modules.device_types import DEVICE_DESCRIPTION_MAP
|
||||
|
||||
from homeassistant.const import ATTR_LATITUDE, ATTR_LONGITUDE
|
||||
from homeassistant.core import callback
|
||||
from homeassistant.helpers import device_registry as dr
|
||||
from homeassistant.helpers.device_registry import DeviceInfo
|
||||
@@ -16,6 +17,7 @@ from homeassistant.helpers.entity import Entity
|
||||
|
||||
from .const import (
|
||||
CONF_URL_ENERGY,
|
||||
CONF_URL_WEATHER,
|
||||
DATA_DEVICE_IDS,
|
||||
DEFAULT_ATTRIBUTION,
|
||||
DOMAIN,
|
||||
@@ -166,3 +168,39 @@ class NetatmoModuleEntity(NetatmoDeviceEntity):
|
||||
def device_type(self) -> DeviceType:
|
||||
"""Return the device type."""
|
||||
return self.device.device_type
|
||||
|
||||
|
||||
class NetatmoWeatherModuleEntity(NetatmoModuleEntity):
|
||||
"""Netatmo weather module entity base class."""
|
||||
|
||||
_attr_configuration_url = CONF_URL_WEATHER
|
||||
|
||||
def __init__(self, device: NetatmoDevice) -> None:
|
||||
"""Set up a Netatmo weather module entity."""
|
||||
super().__init__(device)
|
||||
category = getattr(self.device.device_category, "name")
|
||||
self._publishers.extend(
|
||||
[
|
||||
{
|
||||
"name": category,
|
||||
SIGNAL_NAME: category,
|
||||
},
|
||||
]
|
||||
)
|
||||
|
||||
if hasattr(self.device, "place"):
|
||||
place = cast(Place, getattr(self.device, "place"))
|
||||
if hasattr(place, "location") and place.location is not None:
|
||||
self._attr_extra_state_attributes.update(
|
||||
{
|
||||
ATTR_LATITUDE: place.location.latitude,
|
||||
ATTR_LONGITUDE: place.location.longitude,
|
||||
}
|
||||
)
|
||||
|
||||
@property
|
||||
def device_type(self) -> DeviceType:
|
||||
"""Return the Netatmo device type."""
|
||||
if "." not in self.device.device_type:
|
||||
return super().device_type
|
||||
return DeviceType(self.device.device_type.partition(".")[2])
|
||||
|
@@ -8,7 +8,6 @@ import logging
|
||||
from typing import Any, cast
|
||||
|
||||
import pyatmo
|
||||
from pyatmo import DeviceType
|
||||
from pyatmo.modules import PublicWeatherArea
|
||||
|
||||
from homeassistant.components.sensor import (
|
||||
@@ -48,7 +47,6 @@ from homeassistant.helpers.typing import StateType
|
||||
from .const import (
|
||||
CONF_URL_ENERGY,
|
||||
CONF_URL_PUBLIC_WEATHER,
|
||||
CONF_URL_WEATHER,
|
||||
CONF_WEATHER_AREAS,
|
||||
DATA_HANDLER,
|
||||
DOMAIN,
|
||||
@@ -59,25 +57,38 @@ from .const import (
|
||||
SIGNAL_NAME,
|
||||
)
|
||||
from .data_handler import HOME, PUBLIC, NetatmoDataHandler, NetatmoDevice, NetatmoRoom
|
||||
from .entity import NetatmoBaseEntity, NetatmoModuleEntity, NetatmoRoomEntity
|
||||
from .entity import (
|
||||
NetatmoBaseEntity,
|
||||
NetatmoModuleEntity,
|
||||
NetatmoRoomEntity,
|
||||
NetatmoWeatherModuleEntity,
|
||||
)
|
||||
from .helper import NetatmoArea
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
DIRECTION_OPTIONS = [
|
||||
"n",
|
||||
"ne",
|
||||
"e",
|
||||
"se",
|
||||
"s",
|
||||
"sw",
|
||||
"w",
|
||||
"nw",
|
||||
]
|
||||
|
||||
|
||||
def process_health(health: StateType) -> str | None:
|
||||
"""Process health index and return string for display."""
|
||||
if not isinstance(health, int):
|
||||
return None
|
||||
if health == 0:
|
||||
return "Healthy"
|
||||
if health == 1:
|
||||
return "Fine"
|
||||
if health == 2:
|
||||
return "Fair"
|
||||
if health == 3:
|
||||
return "Poor"
|
||||
return "Unhealthy"
|
||||
return {
|
||||
0: "healthy",
|
||||
1: "fine",
|
||||
2: "fair",
|
||||
3: "poor",
|
||||
}.get(health, "unhealthy")
|
||||
|
||||
|
||||
def process_rf(strength: StateType) -> str | None:
|
||||
@@ -196,6 +207,9 @@ SENSOR_TYPES: tuple[NetatmoSensorEntityDescription, ...] = (
|
||||
NetatmoSensorEntityDescription(
|
||||
key="windangle",
|
||||
netatmo_name="wind_direction",
|
||||
device_class=SensorDeviceClass.ENUM,
|
||||
options=DIRECTION_OPTIONS,
|
||||
value_fn=lambda x: x.lower() if isinstance(x, str) else None,
|
||||
),
|
||||
NetatmoSensorEntityDescription(
|
||||
key="windangle_value",
|
||||
@@ -215,6 +229,9 @@ SENSOR_TYPES: tuple[NetatmoSensorEntityDescription, ...] = (
|
||||
key="gustangle",
|
||||
netatmo_name="gust_direction",
|
||||
entity_registry_enabled_default=False,
|
||||
device_class=SensorDeviceClass.ENUM,
|
||||
options=DIRECTION_OPTIONS,
|
||||
value_fn=lambda x: x.lower() if isinstance(x, str) else None,
|
||||
),
|
||||
NetatmoSensorEntityDescription(
|
||||
key="gustangle_value",
|
||||
@@ -254,6 +271,8 @@ SENSOR_TYPES: tuple[NetatmoSensorEntityDescription, ...] = (
|
||||
NetatmoSensorEntityDescription(
|
||||
key="health_idx",
|
||||
netatmo_name="health_idx",
|
||||
device_class=SensorDeviceClass.ENUM,
|
||||
options=["healthy", "fine", "fair", "poor", "unhealthy"],
|
||||
value_fn=process_health,
|
||||
),
|
||||
NetatmoSensorEntityDescription(
|
||||
@@ -491,11 +510,10 @@ async def async_setup_entry(
|
||||
await add_public_entities(False)
|
||||
|
||||
|
||||
class NetatmoWeatherSensor(NetatmoModuleEntity, SensorEntity):
|
||||
class NetatmoWeatherSensor(NetatmoWeatherModuleEntity, SensorEntity):
|
||||
"""Implementation of a Netatmo weather/home coach sensor."""
|
||||
|
||||
entity_description: NetatmoSensorEntityDescription
|
||||
_attr_configuration_url = CONF_URL_WEATHER
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
@@ -506,34 +524,8 @@ class NetatmoWeatherSensor(NetatmoModuleEntity, SensorEntity):
|
||||
super().__init__(netatmo_device)
|
||||
self.entity_description = description
|
||||
self._attr_translation_key = description.netatmo_name
|
||||
category = getattr(self.device.device_category, "name")
|
||||
self._publishers.extend(
|
||||
[
|
||||
{
|
||||
"name": category,
|
||||
SIGNAL_NAME: category,
|
||||
},
|
||||
]
|
||||
)
|
||||
self._attr_unique_id = f"{self.device.entity_id}-{description.key}"
|
||||
|
||||
if hasattr(self.device, "place"):
|
||||
place = cast(pyatmo.modules.base_class.Place, getattr(self.device, "place"))
|
||||
if hasattr(place, "location") and place.location is not None:
|
||||
self._attr_extra_state_attributes.update(
|
||||
{
|
||||
ATTR_LATITUDE: place.location.latitude,
|
||||
ATTR_LONGITUDE: place.location.longitude,
|
||||
}
|
||||
)
|
||||
|
||||
@property
|
||||
def device_type(self) -> DeviceType:
|
||||
"""Return the Netatmo device type."""
|
||||
if "." not in self.device.device_type:
|
||||
return super().device_type
|
||||
return DeviceType(self.device.device_type.partition(".")[2])
|
||||
|
||||
@property
|
||||
def available(self) -> bool:
|
||||
"""Return True if entity is available."""
|
||||
|
@@ -185,13 +185,33 @@
      "name": "Precipitation today"
    },
    "wind_direction": {
      "name": "Wind direction"
      "name": "Wind direction",
      "state": {
        "n": "North",
        "ne": "North-east",
        "e": "East",
        "se": "South-east",
        "s": "South",
        "sw": "South-west",
        "w": "West",
        "nw": "North-west"
      }
    },
    "wind_angle": {
      "name": "Wind angle"
    },
    "gust_direction": {
      "name": "Gust direction"
      "name": "Gust direction",
      "state": {
        "n": "[%key:component::netatmo::entity::sensor::wind_direction::state::n%]",
        "ne": "[%key:component::netatmo::entity::sensor::wind_direction::state::ne%]",
        "e": "[%key:component::netatmo::entity::sensor::wind_direction::state::e%]",
        "se": "[%key:component::netatmo::entity::sensor::wind_direction::state::se%]",
        "s": "[%key:component::netatmo::entity::sensor::wind_direction::state::s%]",
        "sw": "[%key:component::netatmo::entity::sensor::wind_direction::state::sw%]",
        "w": "[%key:component::netatmo::entity::sensor::wind_direction::state::w%]",
        "nw": "[%key:component::netatmo::entity::sensor::wind_direction::state::nw%]"
      }
    },
    "gust_angle": {
      "name": "Gust angle"
@@ -209,7 +229,14 @@
        "name": "Wi-Fi"
      },
      "health_idx": {
        "name": "Health index"
        "name": "Health index",
        "state": {
          "healthy": "Healthy",
          "fine": "Fine",
          "fair": "Fair",
          "poor": "Poor",
          "unhealthy": "Unhealthy"
        }
      }
    }
  }
}
@@ -82,11 +82,13 @@ class NextBusDepartureSensor(

    def _log_debug(self, message, *args):
        """Log debug message with prefix."""
        _LOGGER.debug(":".join((self.agency, self.route, self.stop, message)), *args)
        msg = f"{self.agency}:{self.route}:{self.stop}:{message}"
        _LOGGER.debug(msg, *args)

    def _log_err(self, message, *args):
        """Log error message with prefix."""
        _LOGGER.error(":".join((self.agency, self.route, self.stop, message)), *args)
        msg = f"{self.agency}:{self.route}:{self.stop}:{message}"
        _LOGGER.error(msg, *args)

    async def async_added_to_hass(self) -> None:
        """Read data from coordinator after adding to hass."""
@@ -62,7 +62,7 @@ def _normalize_ips_and_network(hosts_str: str) -> list[str] | None:
            start, end = host.split("-", 1)
            if "." not in end:
                ip_1, ip_2, ip_3, _ = start.split(".", 3)
                end = ".".join([ip_1, ip_2, ip_3, end])
                end = f"{ip_1}.{ip_2}.{ip_3}.{end}"
            summarize_address_range(ip_address(start), ip_address(end))
        except ValueError:
            pass
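A worked example of the rewritten shorthand-range expansion above (sample addresses, not taken from the change): a range like "192.168.1.5-150" keeps the first three octets of the start address and substitutes the short end.

# Sketch only - sample values for illustration.
start, end = "192.168.1.5", "150"
ip_1, ip_2, ip_3, _ = start.split(".", 3)
assert f"{ip_1}.{ip_2}.{ip_3}.{end}" == "192.168.1.150"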
@@ -2,24 +2,36 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import timedelta
|
||||
from functools import cached_property, partial
|
||||
import logging
|
||||
from typing import Any, final, override
|
||||
|
||||
import voluptuous as vol
|
||||
|
||||
import homeassistant.components.persistent_notification as pn
|
||||
from homeassistant.const import CONF_NAME, CONF_PLATFORM
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import CONF_NAME, CONF_PLATFORM, STATE_UNAVAILABLE
|
||||
from homeassistant.core import HomeAssistant, ServiceCall
|
||||
import homeassistant.helpers.config_validation as cv
|
||||
from homeassistant.helpers.entity import EntityDescription
|
||||
from homeassistant.helpers.entity_component import EntityComponent
|
||||
from homeassistant.helpers.restore_state import RestoreEntity
|
||||
from homeassistant.helpers.template import Template
|
||||
from homeassistant.helpers.typing import ConfigType
|
||||
from homeassistant.util import dt as dt_util
|
||||
|
||||
from .const import ( # noqa: F401
|
||||
ATTR_DATA,
|
||||
ATTR_MESSAGE,
|
||||
ATTR_RECIPIENTS,
|
||||
ATTR_TARGET,
|
||||
ATTR_TITLE,
|
||||
DOMAIN,
|
||||
NOTIFY_SERVICE_SCHEMA,
|
||||
SERVICE_NOTIFY,
|
||||
SERVICE_PERSISTENT_NOTIFICATION,
|
||||
SERVICE_SEND_MESSAGE,
|
||||
)
|
||||
from .legacy import ( # noqa: F401
|
||||
BaseNotificationService,
|
||||
@@ -29,9 +41,17 @@ from .legacy import ( # noqa: F401
|
||||
check_templates_warn,
|
||||
)
|
||||
|
||||
# mypy: disallow-any-generics
|
||||
|
||||
# Platform specific data
|
||||
ATTR_TITLE_DEFAULT = "Home Assistant"
|
||||
|
||||
ENTITY_ID_FORMAT = DOMAIN + ".{}"
|
||||
|
||||
MIN_TIME_BETWEEN_SCANS = timedelta(seconds=10)
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
PLATFORM_SCHEMA = vol.Schema(
|
||||
{vol.Required(CONF_PLATFORM): cv.string, vol.Optional(CONF_NAME): cv.string},
|
||||
extra=vol.ALLOW_EXTRA,
|
||||
@@ -50,6 +70,13 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
# legacy platforms to finish setting up.
|
||||
hass.async_create_task(setup, eager_start=True)
|
||||
|
||||
component = hass.data[DOMAIN] = EntityComponent[NotifyEntity](_LOGGER, DOMAIN, hass)
|
||||
component.async_register_entity_service(
|
||||
SERVICE_SEND_MESSAGE,
|
||||
{vol.Required(ATTR_MESSAGE): cv.string},
|
||||
"_async_send_message",
|
||||
)
|
||||
|
||||
async def persistent_notification(service: ServiceCall) -> None:
|
||||
"""Send notification via the built-in persistent_notify integration."""
|
||||
message: Template = service.data[ATTR_MESSAGE]
|
||||
@@ -79,3 +106,66 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
)
|
||||
|
||||
return True
|
||||
|
||||
|
||||
class NotifyEntityDescription(EntityDescription, frozen_or_thawed=True):
|
||||
"""A class that describes button entities."""
|
||||
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
"""Set up a config entry."""
|
||||
component: EntityComponent[NotifyEntity] = hass.data[DOMAIN]
|
||||
return await component.async_setup_entry(entry)
|
||||
|
||||
|
||||
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
"""Unload a config entry."""
|
||||
component: EntityComponent[NotifyEntity] = hass.data[DOMAIN]
|
||||
return await component.async_unload_entry(entry)
|
||||
|
||||
|
||||
class NotifyEntity(RestoreEntity):
|
||||
"""Representation of a notify entity."""
|
||||
|
||||
entity_description: NotifyEntityDescription
|
||||
_attr_should_poll = False
|
||||
_attr_device_class: None
|
||||
_attr_state: None = None
|
||||
__last_notified_isoformat: str | None = None
|
||||
|
||||
@cached_property
|
||||
@final
|
||||
@override
|
||||
def state(self) -> str | None:
|
||||
"""Return the entity state."""
|
||||
return self.__last_notified_isoformat
|
||||
|
||||
def __set_state(self, state: str | None) -> None:
|
||||
"""Invalidate the cache of the cached property."""
|
||||
self.__dict__.pop("state", None)
|
||||
self.__last_notified_isoformat = state
|
||||
|
||||
async def async_internal_added_to_hass(self) -> None:
|
||||
"""Call when the notify entity is added to hass."""
|
||||
await super().async_internal_added_to_hass()
|
||||
state = await self.async_get_last_state()
|
||||
if state is not None and state.state not in (STATE_UNAVAILABLE, None):
|
||||
self.__set_state(state.state)
|
||||
|
||||
@final
|
||||
async def _async_send_message(self, **kwargs: Any) -> None:
|
||||
"""Send a notification message (from e.g., service call).
|
||||
|
||||
Should not be overridden, handle setting last notification timestamp.
|
||||
"""
|
||||
self.__set_state(dt_util.utcnow().isoformat())
|
||||
self.async_write_ha_state()
|
||||
await self.async_send_message(**kwargs)
|
||||
|
||||
def send_message(self, message: str) -> None:
|
||||
"""Send a message."""
|
||||
raise NotImplementedError
|
||||
|
||||
async def async_send_message(self, message: str) -> None:
|
||||
"""Send a message."""
|
||||
await self.hass.async_add_executor_job(partial(self.send_message, message))
|
||||
|
@@ -11,9 +11,12 @@ ATTR_DATA = "data"
# Text to notify user of
ATTR_MESSAGE = "message"

# Target of the notification (user, device, etc)
# Target of the (legacy) notification (user, device, etc)
ATTR_TARGET = "target"

# Recipients for a notification
ATTR_RECIPIENTS = "recipients"

# Title of notification
ATTR_TITLE = "title"

@@ -22,6 +25,7 @@ DOMAIN = "notify"
LOGGER = logging.getLogger(__package__)

SERVICE_NOTIFY = "notify"
SERVICE_SEND_MESSAGE = "send_message"
SERVICE_PERSISTENT_NOTIFICATION = "persistent_notification"

NOTIFY_SERVICE_SCHEMA = vol.Schema(
@@ -1,6 +1,12 @@
{
  "entity_component": {
    "_": {
      "default": "mdi:message"
    }
  },
  "services": {
    "notify": "mdi:bell-ring",
    "persistent_notification": "mdi:bell-badge"
    "persistent_notification": "mdi:bell-badge",
    "send_message": "mdi:message-arrow-right"
  }
}
@@ -20,6 +20,16 @@ notify:
      selector:
        object:

send_message:
  target:
    entity:
      domain: notify
  fields:
    message:
      required: true
      selector:
        text:

persistent_notification:
  fields:
    message:
@@ -1,5 +1,10 @@
{
  "title": "Notifications",
  "entity_component": {
    "_": {
      "name": "[%key:component::notify::title%]"
    }
  },
  "services": {
    "notify": {
      "name": "Send a notification",
@@ -23,6 +28,16 @@
        }
      }
    },
    "send_message": {
      "name": "Send a notification message",
      "description": "Sends a notification message.",
      "fields": {
        "message": {
          "name": "Message",
          "description": "Your notification message."
        }
      }
    },
    "persistent_notification": {
      "name": "Send a persistent notification",
      "description": "Sends a notification that is visible in the **Notifications** panel.",
@@ -159,13 +159,9 @@ class OpowerCoordinator(DataUpdateCoordinator[dict[str, Forecast]]):
                )
            )

            name_prefix = " ".join(
                (
                    "Opower",
                    self.api.utility.subdomain(),
                    account.meter_type.name.lower(),
                    account.utility_account_id,
                )
            name_prefix = (
                f"Opower {self.api.utility.subdomain()} "
                f"{account.meter_type.name.lower()} {account.utility_account_id}"
            )
            cost_metadata = StatisticMetaData(
                has_mean=False,
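A quick check (sample values only) that the f-string form above produces the same prefix the removed " ".join(...) call built:

# Sketch only - utility and account values are invented.
subdomain, meter_type, account_id = "pge", "elec", "123"
joined = " ".join(("Opower", subdomain, meter_type, account_id))
formatted = f"Opower {subdomain} {meter_type} {account_id}"
assert joined == formatted == "Opower pge elec 123"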
@@ -123,7 +123,8 @@ class RainBirdSwitch(CoordinatorEntity[RainbirdUpdateCoordinator], SwitchEntity)

        # The device reflects the old state for a few moments. Update the
        # state manually and trigger a refresh after a short debounced delay.
        self.coordinator.data.active_zones.remove(self._zone)
        if self.is_on:
            self.coordinator.data.active_zones.remove(self._zone)
        self.async_write_ha_state()
        await self.coordinator.async_request_refresh()

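A short sketch of why the new is_on guard matters: if the zone is no longer in active_zones, an unconditional remove() raises (ValueError for a list, KeyError for a set). The concrete container type of active_zones is not visible in this hunk, so a plain list stands in for it here.

# Sketch only - container and values are placeholders.
active_zones = [1, 2]
zone = 3
if zone in active_zones:  # mirrors the is_on check above
    active_zones.remove(zone)
assert active_zones == [1, 2]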
@@ -2,10 +2,12 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from dataclasses import dataclass
|
||||
from functools import partial
|
||||
import logging
|
||||
from typing import Any, cast
|
||||
|
||||
from ring_doorbell import Auth, Ring
|
||||
from ring_doorbell import Auth, Ring, RingDevices
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import APPLICATION_NAME, CONF_TOKEN, __version__
|
||||
@@ -13,23 +15,26 @@ from homeassistant.core import HomeAssistant, ServiceCall, callback
|
||||
from homeassistant.helpers import device_registry as dr, entity_registry as er
|
||||
from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue
|
||||
|
||||
from .const import (
|
||||
DOMAIN,
|
||||
PLATFORMS,
|
||||
RING_API,
|
||||
RING_DEVICES,
|
||||
RING_DEVICES_COORDINATOR,
|
||||
RING_NOTIFICATIONS_COORDINATOR,
|
||||
)
|
||||
from .const import DOMAIN, PLATFORMS
|
||||
from .coordinator import RingDataCoordinator, RingNotificationsCoordinator
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@dataclass
|
||||
class RingData:
|
||||
"""Class to support type hinting of ring data collection."""
|
||||
|
||||
api: Ring
|
||||
devices: RingDevices
|
||||
devices_coordinator: RingDataCoordinator
|
||||
notifications_coordinator: RingNotificationsCoordinator
|
||||
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
"""Set up a config entry."""
|
||||
|
||||
def token_updater(token):
|
||||
def token_updater(token: dict[str, Any]) -> None:
|
||||
"""Handle from sync context when token is updated."""
|
||||
hass.loop.call_soon_threadsafe(
|
||||
partial(
|
||||
@@ -51,12 +56,12 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
await devices_coordinator.async_config_entry_first_refresh()
|
||||
await notifications_coordinator.async_config_entry_first_refresh()
|
||||
|
||||
hass.data.setdefault(DOMAIN, {})[entry.entry_id] = {
|
||||
RING_API: ring,
|
||||
RING_DEVICES: ring.devices(),
|
||||
RING_DEVICES_COORDINATOR: devices_coordinator,
|
||||
RING_NOTIFICATIONS_COORDINATOR: notifications_coordinator,
|
||||
}
|
||||
hass.data.setdefault(DOMAIN, {})[entry.entry_id] = RingData(
|
||||
api=ring,
|
||||
devices=ring.devices(),
|
||||
devices_coordinator=devices_coordinator,
|
||||
notifications_coordinator=notifications_coordinator,
|
||||
)
|
||||
|
||||
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
|
||||
|
||||
@@ -83,8 +88,9 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
)
|
||||
|
||||
for info in hass.data[DOMAIN].values():
|
||||
await info[RING_DEVICES_COORDINATOR].async_refresh()
|
||||
await info[RING_NOTIFICATIONS_COORDINATOR].async_refresh()
|
||||
ring_data = cast(RingData, info)
|
||||
await ring_data.devices_coordinator.async_refresh()
|
||||
await ring_data.notifications_coordinator.async_refresh()
|
||||
|
||||
# register service
|
||||
hass.services.async_register(DOMAIN, "update", async_refresh_all)
|
||||
@@ -121,8 +127,9 @@ async def _migrate_old_unique_ids(hass: HomeAssistant, entry_id: str) -> None:
|
||||
@callback
|
||||
def _async_migrator(entity_entry: er.RegistryEntry) -> dict[str, str] | None:
|
||||
# Old format for camera and light was int
|
||||
if isinstance(entity_entry.unique_id, int):
|
||||
new_unique_id = str(entity_entry.unique_id)
|
||||
unique_id = cast(str | int, entity_entry.unique_id)
|
||||
if isinstance(unique_id, int):
|
||||
new_unique_id = str(unique_id)
|
||||
if existing_entity_id := entity_registry.async_get_entity_id(
|
||||
entity_entry.domain, entity_entry.platform, new_unique_id
|
||||
):
|
||||
|
@@ -2,10 +2,13 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Callable, Mapping
|
||||
from dataclasses import dataclass
|
||||
from datetime import datetime
|
||||
from typing import Any
|
||||
|
||||
from ring_doorbell import Ring, RingEvent, RingGeneric
|
||||
|
||||
from homeassistant.components.binary_sensor import (
|
||||
BinarySensorDeviceClass,
|
||||
BinarySensorEntity,
|
||||
@@ -15,29 +18,32 @@ from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
|
||||
from .const import DOMAIN, RING_API, RING_DEVICES, RING_NOTIFICATIONS_COORDINATOR
|
||||
from . import RingData
|
||||
from .const import DOMAIN
|
||||
from .coordinator import RingNotificationsCoordinator
|
||||
from .entity import RingEntity
|
||||
from .entity import RingBaseEntity
|
||||
|
||||
|
||||
@dataclass(frozen=True, kw_only=True)
|
||||
class RingBinarySensorEntityDescription(BinarySensorEntityDescription):
|
||||
"""Describes Ring binary sensor entity."""
|
||||
|
||||
category: list[str]
|
||||
exists_fn: Callable[[RingGeneric], bool]
|
||||
|
||||
|
||||
BINARY_SENSOR_TYPES: tuple[RingBinarySensorEntityDescription, ...] = (
|
||||
RingBinarySensorEntityDescription(
|
||||
key="ding",
|
||||
translation_key="ding",
|
||||
category=["doorbots", "authorized_doorbots", "other"],
|
||||
device_class=BinarySensorDeviceClass.OCCUPANCY,
|
||||
exists_fn=lambda device: device.family
|
||||
in {"doorbots", "authorized_doorbots", "other"},
|
||||
),
|
||||
RingBinarySensorEntityDescription(
|
||||
key="motion",
|
||||
category=["doorbots", "authorized_doorbots", "stickup_cams"],
|
||||
device_class=BinarySensorDeviceClass.MOTION,
|
||||
exists_fn=lambda device: device.family
|
||||
in {"doorbots", "authorized_doorbots", "stickup_cams"},
|
||||
),
|
||||
)
|
||||
|
||||
@@ -48,34 +54,36 @@ async def async_setup_entry(
|
||||
async_add_entities: AddEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up the Ring binary sensors from a config entry."""
|
||||
ring = hass.data[DOMAIN][config_entry.entry_id][RING_API]
|
||||
devices = hass.data[DOMAIN][config_entry.entry_id][RING_DEVICES]
|
||||
notifications_coordinator: RingNotificationsCoordinator = hass.data[DOMAIN][
|
||||
config_entry.entry_id
|
||||
][RING_NOTIFICATIONS_COORDINATOR]
|
||||
ring_data: RingData = hass.data[DOMAIN][config_entry.entry_id]
|
||||
|
||||
entities = [
|
||||
RingBinarySensor(ring, device, notifications_coordinator, description)
|
||||
for device_type in ("doorbots", "authorized_doorbots", "stickup_cams", "other")
|
||||
RingBinarySensor(
|
||||
ring_data.api,
|
||||
device,
|
||||
ring_data.notifications_coordinator,
|
||||
description,
|
||||
)
|
||||
for description in BINARY_SENSOR_TYPES
|
||||
if device_type in description.category
|
||||
for device in devices[device_type]
|
||||
for device in ring_data.devices.all_devices
|
||||
if description.exists_fn(device)
|
||||
]
|
||||
|
||||
async_add_entities(entities)
|
||||
|
||||
|
||||
class RingBinarySensor(RingEntity, BinarySensorEntity):
|
||||
class RingBinarySensor(
|
||||
RingBaseEntity[RingNotificationsCoordinator], BinarySensorEntity
|
||||
):
|
||||
"""A binary sensor implementation for Ring device."""
|
||||
|
||||
_active_alert: dict[str, Any] | None = None
|
||||
_active_alert: RingEvent | None = None
|
||||
entity_description: RingBinarySensorEntityDescription
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
ring,
|
||||
device,
|
||||
coordinator,
|
||||
ring: Ring,
|
||||
device: RingGeneric,
|
||||
coordinator: RingNotificationsCoordinator,
|
||||
description: RingBinarySensorEntityDescription,
|
||||
) -> None:
|
||||
"""Initialize a sensor for Ring device."""
|
||||
@@ -89,13 +97,13 @@ class RingBinarySensor(RingEntity, BinarySensorEntity):
|
||||
self._update_alert()
|
||||
|
||||
@callback
|
||||
def _handle_coordinator_update(self, _=None):
|
||||
def _handle_coordinator_update(self, _: Any = None) -> None:
|
||||
"""Call update method."""
|
||||
self._update_alert()
|
||||
super()._handle_coordinator_update()
|
||||
|
||||
@callback
|
||||
def _update_alert(self):
|
||||
def _update_alert(self) -> None:
|
||||
"""Update active alert."""
|
||||
self._active_alert = next(
|
||||
(
|
||||
@@ -108,21 +116,23 @@ class RingBinarySensor(RingEntity, BinarySensorEntity):
|
||||
)
|
||||
|
||||
@property
|
||||
def is_on(self):
|
||||
def is_on(self) -> bool:
|
||||
"""Return True if the binary sensor is on."""
|
||||
return self._active_alert is not None
|
||||
|
||||
@property
|
||||
def extra_state_attributes(self):
|
||||
def extra_state_attributes(self) -> Mapping[str, Any] | None:
|
||||
"""Return the state attributes."""
|
||||
attrs = super().extra_state_attributes
|
||||
|
||||
if self._active_alert is None:
|
||||
return attrs
|
||||
|
||||
assert isinstance(attrs, dict)
|
||||
attrs["state"] = self._active_alert["state"]
|
||||
attrs["expires_at"] = datetime.fromtimestamp(
|
||||
self._active_alert.get("now") + self._active_alert.get("expires_in")
|
||||
).isoformat()
|
||||
now = self._active_alert.get("now")
|
||||
expires_in = self._active_alert.get("expires_in")
|
||||
assert now and expires_in
|
||||
attrs["expires_at"] = datetime.fromtimestamp(now + expires_in).isoformat()
|
||||
|
||||
return attrs
|
||||
|
@@ -2,12 +2,15 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from ring_doorbell import RingOther
|
||||
|
||||
from homeassistant.components.button import ButtonEntity, ButtonEntityDescription
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
|
||||
from .const import DOMAIN, RING_DEVICES, RING_DEVICES_COORDINATOR
|
||||
from . import RingData
|
||||
from .const import DOMAIN
|
||||
from .coordinator import RingDataCoordinator
|
||||
from .entity import RingEntity, exception_wrap
|
||||
|
||||
@@ -22,25 +25,23 @@ async def async_setup_entry(
|
||||
async_add_entities: AddEntitiesCallback,
|
||||
) -> None:
|
||||
"""Create the buttons for the Ring devices."""
|
||||
devices = hass.data[DOMAIN][config_entry.entry_id][RING_DEVICES]
|
||||
devices_coordinator: RingDataCoordinator = hass.data[DOMAIN][config_entry.entry_id][
|
||||
RING_DEVICES_COORDINATOR
|
||||
]
|
||||
ring_data: RingData = hass.data[DOMAIN][config_entry.entry_id]
|
||||
devices_coordinator = ring_data.devices_coordinator
|
||||
|
||||
async_add_entities(
|
||||
RingDoorButton(device, devices_coordinator, BUTTON_DESCRIPTION)
|
||||
for device in devices["other"]
|
||||
for device in ring_data.devices.other
|
||||
if device.has_capability("open")
|
||||
)
|
||||
|
||||
|
||||
class RingDoorButton(RingEntity, ButtonEntity):
|
||||
class RingDoorButton(RingEntity[RingOther], ButtonEntity):
|
||||
"""Creates a button to open the ring intercom door."""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
device,
|
||||
coordinator,
|
||||
device: RingOther,
|
||||
coordinator: RingDataCoordinator,
|
||||
description: ButtonEntityDescription,
|
||||
) -> None:
|
||||
"""Initialize the button."""
|
||||
@@ -52,6 +53,6 @@ class RingDoorButton(RingEntity, ButtonEntity):
|
||||
self._attr_unique_id = f"{device.id}-{description.key}"
|
||||
|
||||
@exception_wrap
|
||||
def press(self):
|
||||
def press(self) -> None:
|
||||
"""Open the door."""
|
||||
self._device.open_door()
|
||||
|
@@ -3,11 +3,12 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import timedelta
|
||||
from itertools import chain
|
||||
import logging
|
||||
from typing import Optional
|
||||
from typing import Any
|
||||
|
||||
from aiohttp import web
|
||||
from haffmpeg.camera import CameraMjpeg
|
||||
from ring_doorbell import RingDoorBell
|
||||
|
||||
from homeassistant.components import ffmpeg
|
||||
from homeassistant.components.camera import Camera
|
||||
@@ -17,7 +18,8 @@ from homeassistant.helpers.aiohttp_client import async_aiohttp_proxy_stream
|
||||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
from homeassistant.util import dt as dt_util
|
||||
|
||||
from .const import DOMAIN, RING_DEVICES, RING_DEVICES_COORDINATOR
|
||||
from . import RingData
|
||||
from .const import DOMAIN
|
||||
from .coordinator import RingDataCoordinator
|
||||
from .entity import RingEntity, exception_wrap
|
||||
|
||||
@@ -33,50 +35,50 @@ async def async_setup_entry(
|
||||
async_add_entities: AddEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up a Ring Door Bell and StickUp Camera."""
|
||||
devices = hass.data[DOMAIN][config_entry.entry_id][RING_DEVICES]
|
||||
devices_coordinator: RingDataCoordinator = hass.data[DOMAIN][config_entry.entry_id][
|
||||
RING_DEVICES_COORDINATOR
|
||||
]
|
||||
ring_data: RingData = hass.data[DOMAIN][config_entry.entry_id]
|
||||
devices_coordinator = ring_data.devices_coordinator
|
||||
ffmpeg_manager = ffmpeg.get_ffmpeg_manager(hass)
|
||||
|
||||
cams = []
|
||||
for camera in chain(
|
||||
devices["doorbots"], devices["authorized_doorbots"], devices["stickup_cams"]
|
||||
):
|
||||
if not camera.has_subscription:
|
||||
continue
|
||||
|
||||
cams.append(RingCam(camera, devices_coordinator, ffmpeg_manager))
|
||||
cams = [
|
||||
RingCam(camera, devices_coordinator, ffmpeg_manager)
|
||||
for camera in ring_data.devices.video_devices
|
||||
if camera.has_subscription
|
||||
]
|
||||
|
||||
async_add_entities(cams)
|
||||
|
||||
|
||||
class RingCam(RingEntity, Camera):
|
||||
class RingCam(RingEntity[RingDoorBell], Camera):
|
||||
"""An implementation of a Ring Door Bell camera."""
|
||||
|
||||
_attr_name = None
|
||||
|
||||
def __init__(self, device, coordinator, ffmpeg_manager):
|
||||
def __init__(
|
||||
self,
|
||||
device: RingDoorBell,
|
||||
coordinator: RingDataCoordinator,
|
||||
ffmpeg_manager: ffmpeg.FFmpegManager,
|
||||
) -> None:
|
||||
"""Initialize a Ring Door Bell camera."""
|
||||
super().__init__(device, coordinator)
|
||||
Camera.__init__(self)
|
||||
|
||||
self._ffmpeg_manager = ffmpeg_manager
|
||||
self._last_event = None
|
||||
self._last_video_id = None
|
||||
self._video_url = None
|
||||
self._image = None
|
||||
self._last_event: dict[str, Any] | None = None
|
||||
self._last_video_id: int | None = None
|
||||
self._video_url: str | None = None
|
||||
self._image: bytes | None = None
|
||||
self._expires_at = dt_util.utcnow() - FORCE_REFRESH_INTERVAL
|
||||
self._attr_unique_id = str(device.id)
|
||||
if device.has_capability(MOTION_DETECTION_CAPABILITY):
|
||||
self._attr_motion_detection_enabled = device.motion_detection
|
||||
|
||||
@callback
|
||||
def _handle_coordinator_update(self):
|
||||
def _handle_coordinator_update(self) -> None:
|
||||
"""Call update method."""
|
||||
history_data: Optional[list]
|
||||
if not (history_data := self._get_coordinator_history()):
|
||||
return
|
||||
self._device = self._get_coordinator_data().get_video_device(
|
||||
self._device.device_api_id
|
||||
)
|
||||
history_data = self._device.last_history
|
||||
if history_data:
|
||||
self._last_event = history_data[0]
|
||||
self.async_schedule_update_ha_state(True)
|
||||
@@ -89,7 +91,7 @@ class RingCam(RingEntity, Camera):
|
||||
self.async_write_ha_state()
|
||||
|
||||
@property
|
||||
def extra_state_attributes(self):
|
||||
def extra_state_attributes(self) -> dict[str, Any]:
|
||||
"""Return the state attributes."""
|
||||
return {
|
||||
"video_url": self._video_url,
|
||||
@@ -100,7 +102,7 @@ class RingCam(RingEntity, Camera):
|
||||
self, width: int | None = None, height: int | None = None
|
||||
) -> bytes | None:
|
||||
"""Return a still image response from the camera."""
|
||||
if self._image is None and self._video_url:
|
||||
if self._image is None and self._video_url is not None:
|
||||
image = await ffmpeg.async_get_image(
|
||||
self.hass,
|
||||
self._video_url,
|
||||
@@ -113,10 +115,12 @@ class RingCam(RingEntity, Camera):
|
||||
|
||||
return self._image
|
||||
|
||||
async def handle_async_mjpeg_stream(self, request):
|
||||
async def handle_async_mjpeg_stream(
|
||||
self, request: web.Request
|
||||
) -> web.StreamResponse | None:
|
||||
"""Generate an HTTP MJPEG stream from the camera."""
|
||||
if self._video_url is None:
|
||||
return
|
||||
return None
|
||||
|
||||
stream = CameraMjpeg(self._ffmpeg_manager.binary)
|
||||
await stream.open_camera(self._video_url)
|
||||
@@ -132,7 +136,7 @@ class RingCam(RingEntity, Camera):
|
||||
finally:
|
||||
await stream.close()
|
||||
|
||||
async def async_update(self):
|
||||
async def async_update(self) -> None:
|
||||
"""Update camera entity and refresh attributes."""
|
||||
if (
|
||||
self._device.has_capability(MOTION_DETECTION_CAPABILITY)
|
||||
@@ -160,11 +164,15 @@ class RingCam(RingEntity, Camera):
|
||||
self._expires_at = FORCE_REFRESH_INTERVAL + utcnow
|
||||
|
||||
@exception_wrap
|
||||
def _get_video(self):
|
||||
return self._device.recording_url(self._last_event["id"])
|
||||
def _get_video(self) -> str | None:
|
||||
if self._last_event is None:
|
||||
return None
|
||||
event_id = self._last_event.get("id")
|
||||
assert event_id and isinstance(event_id, int)
|
||||
return self._device.recording_url(event_id)
|
||||
|
||||
@exception_wrap
|
||||
def _set_motion_detection_enabled(self, new_state):
|
||||
def _set_motion_detection_enabled(self, new_state: bool) -> None:
|
||||
if not self._device.has_capability(MOTION_DETECTION_CAPABILITY):
|
||||
_LOGGER.error(
|
||||
"Entity %s does not have motion detection capability", self.entity_id
|
||||
|
@@ -28,7 +28,7 @@ STEP_USER_DATA_SCHEMA = vol.Schema(
|
||||
STEP_REAUTH_DATA_SCHEMA = vol.Schema({vol.Required(CONF_PASSWORD): str})
|
||||
|
||||
|
||||
async def validate_input(hass: HomeAssistant, data):
|
||||
async def validate_input(hass: HomeAssistant, data: dict[str, str]) -> dict[str, Any]:
|
||||
"""Validate the user input allows us to connect."""
|
||||
|
||||
auth = Auth(f"{APPLICATION_NAME}/{ha_version}")
|
||||
@@ -56,9 +56,11 @@ class RingConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
user_pass: dict[str, Any] = {}
|
||||
reauth_entry: ConfigEntry | None = None
|
||||
|
||||
async def async_step_user(self, user_input=None):
|
||||
async def async_step_user(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle the initial step."""
|
||||
errors = {}
|
||||
errors: dict[str, str] = {}
|
||||
if user_input is not None:
|
||||
try:
|
||||
token = await validate_input(self.hass, user_input)
|
||||
@@ -82,7 +84,9 @@ class RingConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=errors
|
||||
)
|
||||
|
||||
async def async_step_2fa(self, user_input=None):
|
||||
async def async_step_2fa(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle 2fa step."""
|
||||
if user_input:
|
||||
if self.reauth_entry:
|
||||
@@ -110,7 +114,7 @@ class RingConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Dialog that informs the user that reauth is required."""
|
||||
errors = {}
|
||||
errors: dict[str, str] = {}
|
||||
assert self.reauth_entry is not None
|
||||
|
||||
if user_input:
|
||||
|
@@ -28,10 +28,4 @@ PLATFORMS = [
SCAN_INTERVAL = timedelta(minutes=1)
NOTIFICATIONS_SCAN_INTERVAL = timedelta(seconds=5)

RING_API = "api"
RING_DEVICES = "devices"

RING_DEVICES_COORDINATOR = "device_data"
RING_NOTIFICATIONS_COORDINATOR = "dings_data"

CONF_2FA = "2fa"
@@ -2,11 +2,10 @@
|
||||
|
||||
from asyncio import TaskGroup
|
||||
from collections.abc import Callable
|
||||
from dataclasses import dataclass
|
||||
import logging
|
||||
from typing import Any, Optional
|
||||
from typing import TypeVar, TypeVarTuple
|
||||
|
||||
from ring_doorbell import AuthenticationError, Ring, RingError, RingGeneric, RingTimeout
|
||||
from ring_doorbell import AuthenticationError, Ring, RingDevices, RingError, RingTimeout
|
||||
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import ConfigEntryAuthFailed
|
||||
@@ -16,10 +15,13 @@ from .const import NOTIFICATIONS_SCAN_INTERVAL, SCAN_INTERVAL
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
_R = TypeVar("_R")
|
||||
_Ts = TypeVarTuple("_Ts")
|
||||
|
||||
|
||||
async def _call_api(
|
||||
hass: HomeAssistant, target: Callable[..., Any], *args, msg_suffix: str = ""
|
||||
):
|
||||
hass: HomeAssistant, target: Callable[[*_Ts], _R], *args: *_Ts, msg_suffix: str = ""
|
||||
) -> _R:
|
||||
try:
|
||||
return await hass.async_add_executor_job(target, *args)
|
||||
except AuthenticationError as err:
|
||||
@@ -34,15 +36,7 @@ async def _call_api(
|
||||
raise UpdateFailed(f"Error communicating with API{msg_suffix}: {err}") from err
|
||||
|
||||
|
||||
@dataclass
|
||||
class RingDeviceData:
|
||||
"""RingDeviceData."""
|
||||
|
||||
device: RingGeneric
|
||||
history: Optional[list] = None
|
||||
|
||||
|
||||
class RingDataCoordinator(DataUpdateCoordinator[dict[int, RingDeviceData]]):
|
||||
class RingDataCoordinator(DataUpdateCoordinator[RingDevices]):
|
||||
"""Base class for device coordinators."""
|
||||
|
||||
def __init__(
|
||||
@@ -60,45 +54,39 @@ class RingDataCoordinator(DataUpdateCoordinator[dict[int, RingDeviceData]]):
|
||||
self.ring_api: Ring = ring_api
|
||||
self.first_call: bool = True
|
||||
|
||||
async def _async_update_data(self):
|
||||
async def _async_update_data(self) -> RingDevices:
|
||||
"""Fetch data from API endpoint."""
|
||||
update_method: str = "update_data" if self.first_call else "update_devices"
|
||||
await _call_api(self.hass, getattr(self.ring_api, update_method))
|
||||
self.first_call = False
|
||||
data: dict[str, RingDeviceData] = {}
|
||||
devices: dict[str : list[RingGeneric]] = self.ring_api.devices()
|
||||
devices: RingDevices = self.ring_api.devices()
|
||||
subscribed_device_ids = set(self.async_contexts())
|
||||
for device_type in devices:
for device in devices[device_type]:
# Don't update all devices in the ring api, only those that set
# their device id as context when they subscribed.
if device.id in subscribed_device_ids:
data[device.id] = RingDeviceData(device=device)
try:
history_task = None
async with TaskGroup() as tg:
if device.has_capability("history"):
history_task = tg.create_task(
_call_api(
self.hass,
lambda device: device.history(limit=10),
device,
msg_suffix=f" for device {device.name}", # device_id is the mac
)
)
for device in devices.all_devices:
# Don't update all devices in the ring api, only those that set
# their device id as context when they subscribed.
if device.id in subscribed_device_ids:
try:
async with TaskGroup() as tg:
if device.has_capability("history"):
tg.create_task(
_call_api(
self.hass,
device.update_health_data,
msg_suffix=f" for device {device.name}",
lambda device: device.history(limit=10),
device,
msg_suffix=f" for device {device.name}", # device_id is the mac
)
)
if history_task:
data[device.id].history = history_task.result()
except ExceptionGroup as eg:
raise eg.exceptions[0] # noqa: B904
tg.create_task(
_call_api(
self.hass,
device.update_health_data,
msg_suffix=f" for device {device.name}",
)
)
except ExceptionGroup as eg:
raise eg.exceptions[0] # noqa: B904

return data
return devices
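Editor's note: the rewritten loop above fans out per-device calls with asyncio.TaskGroup and, on failure, re-raises the first exception from the resulting ExceptionGroup so the coordinator reports a single error. A stripped-down sketch of that control flow, with invented names rather than the integration's code:

import asyncio


async def fetch(device: str) -> str:
    if device == "bad":
        raise RuntimeError(f"update failed for {device}")
    return f"{device}: ok"


async def refresh(devices: list[str]) -> list[str]:
    results: list[asyncio.Task[str]] = []
    try:
        async with asyncio.TaskGroup() as tg:
            # All tasks are awaited when the block exits; any failure cancels the rest.
            results = [tg.create_task(fetch(device)) for device in devices]
    except ExceptionGroup as eg:
        # Surface a single exception to the caller, mirroring the coordinator above.
        raise eg.exceptions[0] from None
    return [task.result() for task in results]


print(asyncio.run(refresh(["doorbell", "chime"])))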
class RingNotificationsCoordinator(DataUpdateCoordinator[None]):
@@ -114,6 +102,6 @@ class RingNotificationsCoordinator(DataUpdateCoordinator[None]):
)
self.ring_api: Ring = ring_api

async def _async_update_data(self):
async def _async_update_data(self) -> None:
"""Fetch data from API endpoint."""
await _call_api(self.hass, self.ring_api.update_dings)
@@ -4,12 +4,11 @@ from __future__ import annotations

from typing import Any

from ring_doorbell import Ring

from homeassistant.components.diagnostics import async_redact_data
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant

from . import RingData
from .const import DOMAIN

TO_REDACT = {
@@ -33,11 +32,12 @@ async def async_get_config_entry_diagnostics(
hass: HomeAssistant, entry: ConfigEntry
) -> dict[str, Any]:
"""Return diagnostics for a config entry."""
ring: Ring = hass.data[DOMAIN][entry.entry_id]["api"]
ring_data: RingData = hass.data[DOMAIN][entry.entry_id]
devices_data = ring_data.api.devices_data
devices_raw = [
ring.devices_data[device_type][device_id]
for device_type in ring.devices_data
for device_id in ring.devices_data[device_type]
devices_data[device_type][device_id]
for device_type in devices_data
for device_id in devices_data[device_type]
]
return async_redact_data(
{"device_data": devices_raw},
@@ -1,9 +1,16 @@
|
||||
"""Base class for Ring entity."""
|
||||
|
||||
from collections.abc import Callable
|
||||
from typing import Any, Concatenate, ParamSpec, TypeVar
|
||||
from typing import Any, Concatenate, Generic, ParamSpec, cast
|
||||
|
||||
from ring_doorbell import AuthenticationError, RingError, RingGeneric, RingTimeout
|
||||
from ring_doorbell import (
|
||||
AuthenticationError,
|
||||
RingDevices,
|
||||
RingError,
|
||||
RingGeneric,
|
||||
RingTimeout,
|
||||
)
|
||||
from typing_extensions import TypeVar
|
||||
|
||||
from homeassistant.core import callback
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
@@ -11,26 +18,25 @@ from homeassistant.helpers.device_registry import DeviceInfo
|
||||
from homeassistant.helpers.update_coordinator import CoordinatorEntity
|
||||
|
||||
from .const import ATTRIBUTION, DOMAIN
|
||||
from .coordinator import (
|
||||
RingDataCoordinator,
|
||||
RingDeviceData,
|
||||
RingNotificationsCoordinator,
|
||||
)
|
||||
from .coordinator import RingDataCoordinator, RingNotificationsCoordinator
|
||||
|
||||
RingDeviceT = TypeVar("RingDeviceT", bound=RingGeneric, default=RingGeneric)

_RingCoordinatorT = TypeVar(
"_RingCoordinatorT",
bound=(RingDataCoordinator | RingNotificationsCoordinator),
)
_T = TypeVar("_T", bound="RingEntity")
_RingBaseEntityT = TypeVar("_RingBaseEntityT", bound="RingBaseEntity[Any, Any]")
_R = TypeVar("_R")
_P = ParamSpec("_P")


def exception_wrap(
func: Callable[Concatenate[_T, _P], Any],
) -> Callable[Concatenate[_T, _P], Any]:
func: Callable[Concatenate[_RingBaseEntityT, _P], _R],
) -> Callable[Concatenate[_RingBaseEntityT, _P], _R]:
"""Define a wrapper to catch exceptions and raise HomeAssistant errors."""

def _wrap(self: _T, *args: _P.args, **kwargs: _P.kwargs) -> None:
def _wrap(self: _RingBaseEntityT, *args: _P.args, **kwargs: _P.kwargs) -> _R:
try:
return func(self, *args, **kwargs)
except AuthenticationError as err:
@@ -50,7 +56,9 @@ def exception_wrap(
return _wrap
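Editor's note: exception_wrap above now carries the wrapped method's parameters and return type through Concatenate/ParamSpec instead of erasing them to Any. A small generic sketch of that decorator shape; wrap_errors, Client and ServiceError are illustrative names only:

from collections.abc import Callable
from functools import wraps
from typing import Concatenate, ParamSpec, TypeVar


class ServiceError(Exception):
    """Error surfaced to the caller instead of the low-level one."""


_SelfT = TypeVar("_SelfT", bound="Client")
_R = TypeVar("_R")
_P = ParamSpec("_P")


def wrap_errors(
    func: Callable[Concatenate[_SelfT, _P], _R],
) -> Callable[Concatenate[_SelfT, _P], _R]:
    """Re-raise TimeoutError as ServiceError, keeping func's exact signature."""

    @wraps(func)
    def _wrap(self: _SelfT, *args: _P.args, **kwargs: _P.kwargs) -> _R:
        try:
            return func(self, *args, **kwargs)
        except TimeoutError as err:
            raise ServiceError(f"timeout in {func.__name__}") from err

    return _wrap


class Client:
    @wrap_errors
    def fetch(self, path: str) -> str:
        raise TimeoutError(path)


try:
    Client().fetch("/devices")
except ServiceError as err:
    print(err)  # -> timeout in fetch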
class RingEntity(CoordinatorEntity[_RingCoordinatorT]):
|
||||
class RingBaseEntity(
|
||||
CoordinatorEntity[_RingCoordinatorT], Generic[_RingCoordinatorT, RingDeviceT]
|
||||
):
|
||||
"""Base implementation for Ring device."""
|
||||
|
||||
_attr_attribution = ATTRIBUTION
|
||||
@@ -59,7 +67,7 @@ class RingEntity(CoordinatorEntity[_RingCoordinatorT]):
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
device: RingGeneric,
|
||||
device: RingDeviceT,
|
||||
coordinator: _RingCoordinatorT,
|
||||
) -> None:
|
||||
"""Initialize a sensor for Ring device."""
|
||||
@@ -73,29 +81,17 @@ class RingEntity(CoordinatorEntity[_RingCoordinatorT]):
|
||||
name=device.name,
|
||||
)
|
||||
|
||||
def _get_coordinator_device_data(self) -> RingDeviceData | None:
|
||||
if (data := self.coordinator.data) and (
|
||||
device_data := data.get(self._device.id)
|
||||
):
|
||||
return device_data
|
||||
return None
|
||||
|
||||
def _get_coordinator_device(self) -> RingGeneric | None:
|
||||
if (device_data := self._get_coordinator_device_data()) and (
|
||||
device := device_data.device
|
||||
):
|
||||
return device
|
||||
return None
|
||||
class RingEntity(RingBaseEntity[RingDataCoordinator, RingDeviceT]):
|
||||
"""Implementation for Ring devices."""
|
||||
|
||||
def _get_coordinator_history(self) -> list | None:
|
||||
if (device_data := self._get_coordinator_device_data()) and (
|
||||
history := device_data.history
|
||||
):
|
||||
return history
|
||||
return None
|
||||
def _get_coordinator_data(self) -> RingDevices:
|
||||
return self.coordinator.data
|
||||
|
||||
@callback
|
||||
def _handle_coordinator_update(self) -> None:
|
||||
if device := self._get_coordinator_device():
|
||||
self._device = device
|
||||
self._device = cast(
|
||||
RingDeviceT,
|
||||
self._get_coordinator_data().get_device(self._device.device_api_id),
|
||||
)
|
||||
super()._handle_coordinator_update()
|
||||
|
@@ -1,6 +1,7 @@
|
||||
"""Component providing HA switch support for Ring Door Bell/Chimes."""
|
||||
|
||||
from datetime import timedelta
|
||||
from enum import StrEnum, auto
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
@@ -12,7 +13,8 @@ from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
import homeassistant.util.dt as dt_util
|
||||
|
||||
from .const import DOMAIN, RING_DEVICES, RING_DEVICES_COORDINATOR
|
||||
from . import RingData
|
||||
from .const import DOMAIN
|
||||
from .coordinator import RingDataCoordinator
|
||||
from .entity import RingEntity, exception_wrap
|
||||
|
||||
@@ -26,8 +28,12 @@ _LOGGER = logging.getLogger(__name__)
|
||||
|
||||
SKIP_UPDATES_DELAY = timedelta(seconds=5)
|
||||
|
||||
ON_STATE = "on"
|
||||
OFF_STATE = "off"
|
||||
|
||||
class OnOffState(StrEnum):
|
||||
"""Enum for allowed on off states."""
|
||||
|
||||
ON = auto()
|
||||
OFF = auto()
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
@@ -36,56 +42,56 @@ async def async_setup_entry(
|
||||
async_add_entities: AddEntitiesCallback,
|
||||
) -> None:
|
||||
"""Create the lights for the Ring devices."""
|
||||
devices = hass.data[DOMAIN][config_entry.entry_id][RING_DEVICES]
|
||||
devices_coordinator: RingDataCoordinator = hass.data[DOMAIN][config_entry.entry_id][
|
||||
RING_DEVICES_COORDINATOR
|
||||
]
|
||||
ring_data: RingData = hass.data[DOMAIN][config_entry.entry_id]
|
||||
devices_coordinator = ring_data.devices_coordinator
|
||||
|
||||
async_add_entities(
|
||||
RingLight(device, devices_coordinator)
|
||||
for device in devices["stickup_cams"]
|
||||
for device in ring_data.devices.stickup_cams
|
||||
if device.has_capability("light")
|
||||
)
|
||||
|
||||
|
||||
class RingLight(RingEntity, LightEntity):
|
||||
class RingLight(RingEntity[RingStickUpCam], LightEntity):
|
||||
"""Creates a switch to turn the ring cameras light on and off."""
|
||||
|
||||
_attr_color_mode = ColorMode.ONOFF
|
||||
_attr_supported_color_modes = {ColorMode.ONOFF}
|
||||
_attr_translation_key = "light"
|
||||
|
||||
def __init__(self, device, coordinator):
|
||||
def __init__(
|
||||
self, device: RingStickUpCam, coordinator: RingDataCoordinator
|
||||
) -> None:
|
||||
"""Initialize the light."""
|
||||
super().__init__(device, coordinator)
|
||||
self._attr_unique_id = str(device.id)
|
||||
self._attr_is_on = device.lights == ON_STATE
|
||||
self._attr_is_on = device.lights == OnOffState.ON
|
||||
self._no_updates_until = dt_util.utcnow()
|
||||
|
||||
@callback
|
||||
def _handle_coordinator_update(self):
|
||||
def _handle_coordinator_update(self) -> None:
|
||||
"""Call update method."""
|
||||
if self._no_updates_until > dt_util.utcnow():
|
||||
return
|
||||
if (device := self._get_coordinator_device()) and isinstance(
|
||||
device, RingStickUpCam
|
||||
):
|
||||
self._attr_is_on = device.lights == ON_STATE
|
||||
device = self._get_coordinator_data().get_stickup_cam(
|
||||
self._device.device_api_id
|
||||
)
|
||||
self._attr_is_on = device.lights == OnOffState.ON
|
||||
super()._handle_coordinator_update()
|
||||
|
||||
@exception_wrap
|
||||
def _set_light(self, new_state):
|
||||
def _set_light(self, new_state: OnOffState) -> None:
|
||||
"""Update light state, and causes Home Assistant to correctly update."""
|
||||
self._device.lights = new_state
|
||||
|
||||
self._attr_is_on = new_state == ON_STATE
|
||||
self._attr_is_on = new_state == OnOffState.ON
|
||||
self._no_updates_until = dt_util.utcnow() + SKIP_UPDATES_DELAY
|
||||
self.schedule_update_ha_state()
|
||||
|
||||
def turn_on(self, **kwargs: Any) -> None:
|
||||
"""Turn the light on for 30 seconds."""
|
||||
self._set_light(ON_STATE)
|
||||
self._set_light(OnOffState.ON)
|
||||
|
||||
def turn_off(self, **kwargs: Any) -> None:
|
||||
"""Turn the light off."""
|
||||
self._set_light(OFF_STATE)
|
||||
self._set_light(OnOffState.OFF)
|
||||
|
@@ -2,10 +2,18 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Callable
|
||||
from dataclasses import dataclass
|
||||
from typing import Any
|
||||
from typing import Any, Generic, cast
|
||||
|
||||
from ring_doorbell import RingGeneric
|
||||
from ring_doorbell import (
|
||||
RingCapability,
|
||||
RingChime,
|
||||
RingDoorBell,
|
||||
RingEventKind,
|
||||
RingGeneric,
|
||||
RingOther,
|
||||
)
|
||||
|
||||
from homeassistant.components.sensor import (
|
||||
SensorDeviceClass,
|
||||
@@ -21,10 +29,12 @@ from homeassistant.const import (
|
||||
)
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
from homeassistant.helpers.typing import StateType
|
||||
|
||||
from .const import DOMAIN, RING_DEVICES, RING_DEVICES_COORDINATOR
|
||||
from . import RingData
|
||||
from .const import DOMAIN
|
||||
from .coordinator import RingDataCoordinator
|
||||
from .entity import RingEntity
|
||||
from .entity import RingDeviceT, RingEntity
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
@@ -33,209 +43,192 @@ async def async_setup_entry(
|
||||
async_add_entities: AddEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up a sensor for a Ring device."""
|
||||
devices = hass.data[DOMAIN][config_entry.entry_id][RING_DEVICES]
|
||||
devices_coordinator: RingDataCoordinator = hass.data[DOMAIN][config_entry.entry_id][
|
||||
RING_DEVICES_COORDINATOR
|
||||
]
|
||||
ring_data: RingData = hass.data[DOMAIN][config_entry.entry_id]
|
||||
devices_coordinator = ring_data.devices_coordinator
|
||||
|
||||
entities = [
|
||||
description.cls(device, devices_coordinator, description)
|
||||
for device_type in (
|
||||
"chimes",
|
||||
"doorbots",
|
||||
"authorized_doorbots",
|
||||
"stickup_cams",
|
||||
"other",
|
||||
)
|
||||
RingSensor(device, devices_coordinator, description)
|
||||
for description in SENSOR_TYPES
|
||||
if device_type in description.category
|
||||
for device in devices[device_type]
|
||||
if not (device_type == "battery" and device.battery_life is None)
|
||||
for device in ring_data.devices.all_devices
|
||||
if description.exists_fn(device)
|
||||
]
|
||||
|
||||
async_add_entities(entities)
|
||||
|
||||
|
||||
class RingSensor(RingEntity, SensorEntity):
|
||||
class RingSensor(RingEntity[RingDeviceT], SensorEntity):
|
||||
"""A sensor implementation for Ring device."""
|
||||
|
||||
entity_description: RingSensorEntityDescription
|
||||
entity_description: RingSensorEntityDescription[RingDeviceT]
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
device: RingGeneric,
|
||||
device: RingDeviceT,
|
||||
coordinator: RingDataCoordinator,
|
||||
description: RingSensorEntityDescription,
|
||||
description: RingSensorEntityDescription[RingDeviceT],
|
||||
) -> None:
|
||||
"""Initialize a sensor for Ring device."""
|
||||
super().__init__(device, coordinator)
|
||||
self.entity_description = description
|
||||
self._attr_unique_id = f"{device.id}-{description.key}"
|
||||
|
||||
@property
|
||||
def native_value(self):
|
||||
"""Return the state of the sensor."""
|
||||
sensor_type = self.entity_description.key
|
||||
if sensor_type == "volume":
|
||||
return self._device.volume
|
||||
if sensor_type == "doorbell_volume":
|
||||
return self._device.doorbell_volume
|
||||
if sensor_type == "mic_volume":
|
||||
return self._device.mic_volume
|
||||
if sensor_type == "voice_volume":
|
||||
return self._device.voice_volume
|
||||
|
||||
if sensor_type == "battery":
|
||||
return self._device.battery_life
|
||||
|
||||
|
||||
class HealthDataRingSensor(RingSensor):
|
||||
"""Ring sensor that relies on health data."""
|
||||
|
||||
# These sensors are data hungry and not useful. Disable by default.
|
||||
_attr_entity_registry_enabled_default = False
|
||||
|
||||
@property
|
||||
def native_value(self):
|
||||
"""Return the state of the sensor."""
|
||||
sensor_type = self.entity_description.key
|
||||
if sensor_type == "wifi_signal_category":
|
||||
return self._device.wifi_signal_category
|
||||
|
||||
if sensor_type == "wifi_signal_strength":
|
||||
return self._device.wifi_signal_strength
|
||||
|
||||
|
||||
class HistoryRingSensor(RingSensor):
|
||||
"""Ring sensor that relies on history data."""
|
||||
|
||||
_latest_event: dict[str, Any] | None = None
|
||||
self._attr_entity_registry_enabled_default = (
|
||||
description.entity_registry_enabled_default
|
||||
)
|
||||
self._attr_native_value = self.entity_description.value_fn(self._device)
|
||||
|
||||
@callback
|
||||
def _handle_coordinator_update(self):
|
||||
def _handle_coordinator_update(self) -> None:
|
||||
"""Call update method."""
|
||||
if not (history_data := self._get_coordinator_history()):
|
||||
return
|
||||
|
||||
kind = self.entity_description.kind
|
||||
found = None
|
||||
if kind is None:
|
||||
found = history_data[0]
|
||||
else:
|
||||
for entry in history_data:
|
||||
if entry["kind"] == kind:
|
||||
found = entry
|
||||
break
|
||||
|
||||
if not found:
|
||||
return
|
||||
|
||||
self._latest_event = found
|
||||
self._device = cast(
|
||||
RingDeviceT,
|
||||
self._get_coordinator_data().get_device(self._device.device_api_id),
|
||||
)
|
||||
# History values can drop off the last 10 events so only update
|
||||
# the value if it's not None
|
||||
if native_value := self.entity_description.value_fn(self._device):
|
||||
self._attr_native_value = native_value
|
||||
if extra_attrs := self.entity_description.extra_state_attributes_fn(
|
||||
self._device
|
||||
):
|
||||
self._attr_extra_state_attributes = extra_attrs
|
||||
super()._handle_coordinator_update()
|
||||
|
||||
@property
|
||||
def native_value(self):
|
||||
"""Return the state of the sensor."""
|
||||
if self._latest_event is None:
|
||||
return None
|
||||
|
||||
return self._latest_event["created_at"]
|
||||
def _get_last_event(
|
||||
history_data: list[dict[str, Any]], kind: RingEventKind | None
|
||||
) -> dict[str, Any] | None:
|
||||
if not history_data:
|
||||
return None
|
||||
if kind is None:
|
||||
return history_data[0]
|
||||
for entry in history_data:
|
||||
if entry["kind"] == kind.value:
|
||||
return entry
|
||||
return None
|
||||
|
||||
@property
|
||||
def extra_state_attributes(self):
|
||||
"""Return the state attributes."""
|
||||
attrs = super().extra_state_attributes
|
||||
|
||||
if self._latest_event:
|
||||
attrs["created_at"] = self._latest_event["created_at"]
|
||||
attrs["answered"] = self._latest_event["answered"]
|
||||
attrs["recording_status"] = self._latest_event["recording"]["status"]
|
||||
attrs["category"] = self._latest_event["kind"]
|
||||
|
||||
return attrs
|
||||
def _get_last_event_attrs(
|
||||
history_data: list[dict[str, Any]], kind: RingEventKind | None
|
||||
) -> dict[str, Any] | None:
|
||||
if last_event := _get_last_event(history_data, kind):
|
||||
return {
|
||||
"created_at": last_event.get("created_at"),
|
||||
"answered": last_event.get("answered"),
|
||||
"recording_status": last_event.get("recording", {}).get("status"),
|
||||
"category": last_event.get("kind"),
|
||||
}
|
||||
return None
|
||||
|
||||
|
||||
@dataclass(frozen=True, kw_only=True)
|
||||
class RingSensorEntityDescription(SensorEntityDescription):
|
||||
class RingSensorEntityDescription(SensorEntityDescription, Generic[RingDeviceT]):
|
||||
"""Describes Ring sensor entity."""
|
||||
|
||||
category: list[str]
|
||||
cls: type[RingSensor]
|
||||
|
||||
kind: str | None = None
|
||||
value_fn: Callable[[RingDeviceT], StateType] = lambda _: True
|
||||
exists_fn: Callable[[RingGeneric], bool] = lambda _: True
|
||||
extra_state_attributes_fn: Callable[[RingDeviceT], dict[str, Any] | None] = (
|
||||
lambda _: None
|
||||
)
|
||||
|
||||
|
||||
SENSOR_TYPES: tuple[RingSensorEntityDescription, ...] = (
|
||||
RingSensorEntityDescription(
|
||||
# For some reason mypy doesn't properly type check the default TypeVar value here
|
||||
# so for now the [RingGeneric] subscript needs to be specified.
|
||||
# Once https://github.com/python/mypy/issues/14851 is closed this should hopefully
|
||||
# be fixed and the [RingGeneric] subscript can be removed.
|
||||
# https://github.com/home-assistant/core/pull/115276#discussion_r1560106576
|
||||
SENSOR_TYPES: tuple[RingSensorEntityDescription[Any], ...] = (
|
||||
RingSensorEntityDescription[RingGeneric](
|
||||
key="battery",
|
||||
category=["doorbots", "authorized_doorbots", "stickup_cams", "other"],
|
||||
native_unit_of_measurement=PERCENTAGE,
|
||||
device_class=SensorDeviceClass.BATTERY,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
cls=RingSensor,
|
||||
value_fn=lambda device: device.battery_life,
|
||||
exists_fn=lambda device: device.family != "chimes",
|
||||
),
|
||||
RingSensorEntityDescription(
|
||||
RingSensorEntityDescription[RingGeneric](
|
||||
key="last_activity",
|
||||
translation_key="last_activity",
|
||||
category=["doorbots", "authorized_doorbots", "stickup_cams", "other"],
|
||||
device_class=SensorDeviceClass.TIMESTAMP,
|
||||
cls=HistoryRingSensor,
|
||||
value_fn=lambda device: last_event.get("created_at")
|
||||
if (last_event := _get_last_event(device.last_history, None))
|
||||
else None,
|
||||
extra_state_attributes_fn=lambda device: last_event_attrs
|
||||
if (last_event_attrs := _get_last_event_attrs(device.last_history, None))
|
||||
else None,
|
||||
exists_fn=lambda device: device.has_capability(RingCapability.HISTORY),
|
||||
),
|
||||
RingSensorEntityDescription(
|
||||
RingSensorEntityDescription[RingGeneric](
|
||||
key="last_ding",
|
||||
translation_key="last_ding",
|
||||
category=["doorbots", "authorized_doorbots", "other"],
|
||||
kind="ding",
|
||||
device_class=SensorDeviceClass.TIMESTAMP,
|
||||
cls=HistoryRingSensor,
|
||||
value_fn=lambda device: last_event.get("created_at")
|
||||
if (last_event := _get_last_event(device.last_history, RingEventKind.DING))
|
||||
else None,
|
||||
extra_state_attributes_fn=lambda device: last_event_attrs
|
||||
if (
|
||||
last_event_attrs := _get_last_event_attrs(
|
||||
device.last_history, RingEventKind.DING
|
||||
)
|
||||
)
|
||||
else None,
|
||||
exists_fn=lambda device: device.has_capability(RingCapability.HISTORY),
|
||||
),
|
||||
RingSensorEntityDescription(
|
||||
RingSensorEntityDescription[RingGeneric](
|
||||
key="last_motion",
|
||||
translation_key="last_motion",
|
||||
category=["doorbots", "authorized_doorbots", "stickup_cams"],
|
||||
kind="motion",
|
||||
device_class=SensorDeviceClass.TIMESTAMP,
|
||||
cls=HistoryRingSensor,
|
||||
value_fn=lambda device: last_event.get("created_at")
|
||||
if (last_event := _get_last_event(device.last_history, RingEventKind.MOTION))
|
||||
else None,
|
||||
extra_state_attributes_fn=lambda device: last_event_attrs
|
||||
if (
|
||||
last_event_attrs := _get_last_event_attrs(
|
||||
device.last_history, RingEventKind.MOTION
|
||||
)
|
||||
)
|
||||
else None,
|
||||
exists_fn=lambda device: device.has_capability(RingCapability.HISTORY),
|
||||
),
|
||||
RingSensorEntityDescription(
|
||||
RingSensorEntityDescription[RingDoorBell | RingChime](
|
||||
key="volume",
|
||||
translation_key="volume",
|
||||
category=["chimes", "doorbots", "authorized_doorbots", "stickup_cams"],
|
||||
cls=RingSensor,
|
||||
value_fn=lambda device: device.volume,
|
||||
exists_fn=lambda device: isinstance(device, (RingDoorBell, RingChime)),
|
||||
),
|
||||
RingSensorEntityDescription(
|
||||
RingSensorEntityDescription[RingOther](
|
||||
key="doorbell_volume",
|
||||
translation_key="doorbell_volume",
|
||||
category=["other"],
|
||||
cls=RingSensor,
|
||||
value_fn=lambda device: device.doorbell_volume,
|
||||
exists_fn=lambda device: isinstance(device, RingOther),
|
||||
),
|
||||
RingSensorEntityDescription(
|
||||
RingSensorEntityDescription[RingOther](
|
||||
key="mic_volume",
|
||||
translation_key="mic_volume",
|
||||
category=["other"],
|
||||
cls=RingSensor,
|
||||
value_fn=lambda device: device.mic_volume,
|
||||
exists_fn=lambda device: isinstance(device, RingOther),
|
||||
),
|
||||
RingSensorEntityDescription(
|
||||
RingSensorEntityDescription[RingOther](
|
||||
key="voice_volume",
|
||||
translation_key="voice_volume",
|
||||
category=["other"],
|
||||
cls=RingSensor,
|
||||
value_fn=lambda device: device.voice_volume,
|
||||
exists_fn=lambda device: isinstance(device, RingOther),
|
||||
),
|
||||
RingSensorEntityDescription(
|
||||
RingSensorEntityDescription[RingGeneric](
|
||||
key="wifi_signal_category",
|
||||
translation_key="wifi_signal_category",
|
||||
category=["chimes", "doorbots", "authorized_doorbots", "stickup_cams", "other"],
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
cls=HealthDataRingSensor,
|
||||
entity_registry_enabled_default=False,
|
||||
value_fn=lambda device: device.wifi_signal_category,
|
||||
),
|
||||
RingSensorEntityDescription(
|
||||
RingSensorEntityDescription[RingGeneric](
|
||||
key="wifi_signal_strength",
|
||||
translation_key="wifi_signal_strength",
|
||||
category=["chimes", "doorbots", "authorized_doorbots", "stickup_cams", "other"],
|
||||
native_unit_of_measurement=SIGNAL_STRENGTH_DECIBELS_MILLIWATT,
|
||||
device_class=SensorDeviceClass.SIGNAL_STRENGTH,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
cls=HealthDataRingSensor,
|
||||
entity_registry_enabled_default=False,
|
||||
value_fn=lambda device: device.wifi_signal_strength,
|
||||
),
|
||||
)
|
||||
|
@@ -1,15 +1,17 @@
|
||||
"""Component providing HA Siren support for Ring Chimes."""
|
||||
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
from ring_doorbell.const import CHIME_TEST_SOUND_KINDS, KIND_DING
|
||||
from ring_doorbell import RingChime, RingEventKind
|
||||
|
||||
from homeassistant.components.siren import ATTR_TONE, SirenEntity, SirenEntityFeature
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
|
||||
from .const import DOMAIN, RING_DEVICES, RING_DEVICES_COORDINATOR
|
||||
from . import RingData
|
||||
from .const import DOMAIN
|
||||
from .coordinator import RingDataCoordinator
|
||||
from .entity import RingEntity, exception_wrap
|
||||
|
||||
@@ -22,32 +24,31 @@ async def async_setup_entry(
|
||||
async_add_entities: AddEntitiesCallback,
|
||||
) -> None:
|
||||
"""Create the sirens for the Ring devices."""
|
||||
devices = hass.data[DOMAIN][config_entry.entry_id][RING_DEVICES]
|
||||
coordinator: RingDataCoordinator = hass.data[DOMAIN][config_entry.entry_id][
|
||||
RING_DEVICES_COORDINATOR
|
||||
]
|
||||
ring_data: RingData = hass.data[DOMAIN][config_entry.entry_id]
|
||||
devices_coordinator = ring_data.devices_coordinator
|
||||
|
||||
async_add_entities(
|
||||
RingChimeSiren(device, coordinator) for device in devices["chimes"]
|
||||
RingChimeSiren(device, devices_coordinator)
|
||||
for device in ring_data.devices.chimes
|
||||
)
|
||||
|
||||
|
||||
class RingChimeSiren(RingEntity, SirenEntity):
|
||||
class RingChimeSiren(RingEntity[RingChime], SirenEntity):
|
||||
"""Creates a siren to play the test chimes of a Chime device."""
|
||||
|
||||
_attr_available_tones = list(CHIME_TEST_SOUND_KINDS)
|
||||
_attr_available_tones = [RingEventKind.DING.value, RingEventKind.MOTION.value]
|
||||
_attr_supported_features = SirenEntityFeature.TURN_ON | SirenEntityFeature.TONES
|
||||
_attr_translation_key = "siren"
|
||||
|
||||
def __init__(self, device, coordinator: RingDataCoordinator) -> None:
|
||||
def __init__(self, device: RingChime, coordinator: RingDataCoordinator) -> None:
|
||||
"""Initialize a Ring Chime siren."""
|
||||
super().__init__(device, coordinator)
|
||||
# Entity class attributes
|
||||
self._attr_unique_id = f"{self._device.id}-siren"
|
||||
|
||||
@exception_wrap
|
||||
def turn_on(self, **kwargs):
|
||||
def turn_on(self, **kwargs: Any) -> None:
|
||||
"""Play the test sound on a Ring Chime device."""
|
||||
tone = kwargs.get(ATTR_TONE) or KIND_DING
|
||||
tone = kwargs.get(ATTR_TONE) or RingEventKind.DING.value
|
||||
|
||||
self._device.test_sound(kind=tone)
|
||||
|
@@ -4,7 +4,7 @@ from datetime import timedelta
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
from ring_doorbell import RingGeneric, RingStickUpCam
|
||||
from ring_doorbell import RingStickUpCam
|
||||
|
||||
from homeassistant.components.switch import SwitchEntity
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
@@ -12,7 +12,8 @@ from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
import homeassistant.util.dt as dt_util
|
||||
|
||||
from .const import DOMAIN, RING_DEVICES, RING_DEVICES_COORDINATOR
|
||||
from . import RingData
|
||||
from .const import DOMAIN
|
||||
from .coordinator import RingDataCoordinator
|
||||
from .entity import RingEntity, exception_wrap
|
||||
|
||||
@@ -33,23 +34,21 @@ async def async_setup_entry(
|
||||
async_add_entities: AddEntitiesCallback,
|
||||
) -> None:
|
||||
"""Create the switches for the Ring devices."""
|
||||
devices = hass.data[DOMAIN][config_entry.entry_id][RING_DEVICES]
|
||||
coordinator: RingDataCoordinator = hass.data[DOMAIN][config_entry.entry_id][
|
||||
RING_DEVICES_COORDINATOR
|
||||
]
|
||||
ring_data: RingData = hass.data[DOMAIN][config_entry.entry_id]
|
||||
devices_coordinator = ring_data.devices_coordinator
|
||||
|
||||
async_add_entities(
|
||||
SirenSwitch(device, coordinator)
|
||||
for device in devices["stickup_cams"]
|
||||
SirenSwitch(device, devices_coordinator)
|
||||
for device in ring_data.devices.stickup_cams
|
||||
if device.has_capability("siren")
|
||||
)
|
||||
|
||||
|
||||
class BaseRingSwitch(RingEntity, SwitchEntity):
|
||||
class BaseRingSwitch(RingEntity[RingStickUpCam], SwitchEntity):
|
||||
"""Represents a switch for controlling an aspect of a ring device."""
|
||||
|
||||
def __init__(
|
||||
self, device: RingGeneric, coordinator: RingDataCoordinator, device_type: str
|
||||
self, device: RingStickUpCam, coordinator: RingDataCoordinator, device_type: str
|
||||
) -> None:
|
||||
"""Initialize the switch."""
|
||||
super().__init__(device, coordinator)
|
||||
@@ -62,26 +61,27 @@ class SirenSwitch(BaseRingSwitch):
|
||||
|
||||
_attr_translation_key = "siren"
|
||||
|
||||
def __init__(self, device, coordinator: RingDataCoordinator) -> None:
|
||||
def __init__(
|
||||
self, device: RingStickUpCam, coordinator: RingDataCoordinator
|
||||
) -> None:
|
||||
"""Initialize the switch for a device with a siren."""
|
||||
super().__init__(device, coordinator, "siren")
|
||||
self._no_updates_until = dt_util.utcnow()
|
||||
self._attr_is_on = device.siren > 0
|
||||
|
||||
@callback
|
||||
def _handle_coordinator_update(self):
|
||||
def _handle_coordinator_update(self) -> None:
|
||||
"""Call update method."""
|
||||
if self._no_updates_until > dt_util.utcnow():
|
||||
return
|
||||
|
||||
if (device := self._get_coordinator_device()) and isinstance(
|
||||
device, RingStickUpCam
|
||||
):
|
||||
self._attr_is_on = device.siren > 0
|
||||
device = self._get_coordinator_data().get_stickup_cam(
|
||||
self._device.device_api_id
|
||||
)
|
||||
self._attr_is_on = device.siren > 0
|
||||
super()._handle_coordinator_update()
|
||||
|
||||
@exception_wrap
|
||||
def _set_switch(self, new_state):
|
||||
def _set_switch(self, new_state: int) -> None:
|
||||
"""Update switch state, and causes Home Assistant to correctly update."""
|
||||
self._device.siren = new_state
|
||||
|
||||
|
@@ -101,7 +101,7 @@ async def _async_setup_local_entry(hass: HomeAssistant, entry: ConfigEntry) -> b
return False

async def _error(error: Exception) -> None:
_LOGGER.error("Error in Risco library: %s", error)
_LOGGER.error("Error in Risco library", exc_info=error)

entry.async_on_unload(risco.add_error_handler(_error))
@@ -7,7 +7,7 @@
"iot_class": "local_polling",
"loggers": ["roborock"],
"requirements": [
"python-roborock==1.0.0",
"python-roborock==2.0.0",
"vacuum-map-parser-roborock==0.1.1"
]
}
46
homeassistant/components/teslemetry/diagnostics.py
Normal file
@@ -0,0 +1,46 @@
|
||||
"""Provides diagnostics for Teslemetry."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Any
|
||||
|
||||
from homeassistant.components.diagnostics import async_redact_data
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.core import HomeAssistant
|
||||
|
||||
from .const import DOMAIN
|
||||
|
||||
VEHICLE_REDACT = [
|
||||
"id",
|
||||
"user_id",
|
||||
"vehicle_id",
|
||||
"vin",
|
||||
"tokens",
|
||||
"id_s",
|
||||
"drive_state_active_route_latitude",
|
||||
"drive_state_active_route_longitude",
|
||||
"drive_state_latitude",
|
||||
"drive_state_longitude",
|
||||
"drive_state_native_latitude",
|
||||
"drive_state_native_longitude",
|
||||
]
|
||||
|
||||
ENERGY_REDACT = ["vin"]
|
||||
|
||||
|
||||
async def async_get_config_entry_diagnostics(
|
||||
hass: HomeAssistant, config_entry: ConfigEntry
|
||||
) -> dict[str, Any]:
|
||||
"""Return diagnostics for a config entry."""
|
||||
vehicles = [
|
||||
x.coordinator.data for x in hass.data[DOMAIN][config_entry.entry_id].vehicles
|
||||
]
|
||||
energysites = [
|
||||
x.coordinator.data for x in hass.data[DOMAIN][config_entry.entry_id].energysites
|
||||
]
|
||||
|
||||
# Return only the relevant children
|
||||
return {
|
||||
"vehicles": async_redact_data(vehicles, VEHICLE_REDACT),
|
||||
"energysites": async_redact_data(energysites, ENERGY_REDACT),
|
||||
}
|
@@ -7,5 +7,5 @@
"integration_type": "hub",
"iot_class": "cloud_push",
"loggers": ["whirlpool"],
"requirements": ["whirlpool-sixth-sense==0.18.7"]
"requirements": ["whirlpool-sixth-sense==0.18.8"]
}
@@ -34,14 +34,8 @@ class WithingsFlowHandler(
|
||||
def extra_authorize_data(self) -> dict[str, str]:
|
||||
"""Extra data that needs to be appended to the authorize url."""
|
||||
return {
|
||||
"scope": ",".join(
|
||||
[
|
||||
AuthScope.USER_INFO,
|
||||
AuthScope.USER_METRICS,
|
||||
AuthScope.USER_ACTIVITY,
|
||||
AuthScope.USER_SLEEP_EVENTS,
|
||||
]
|
||||
)
|
||||
"scope": f"{AuthScope.USER_INFO},{AuthScope.USER_METRICS},"
|
||||
f"{AuthScope.USER_ACTIVITY},{AuthScope.USER_SLEEP_EVENTS}"
|
||||
}
|
||||
|
||||
async def async_step_reauth(
|
||||
|
@@ -11,6 +11,7 @@ from homeassistant.const import CONF_COUNTRY, CONF_LANGUAGE
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import ConfigEntryError
|
||||
from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue
|
||||
from homeassistant.setup import SetupPhases, async_pause_setup
|
||||
|
||||
from .const import CONF_PROVINCE, DOMAIN, PLATFORMS
|
||||
|
||||
@@ -23,7 +24,11 @@ async def _async_validate_country_and_province(
|
||||
if not country:
|
||||
return
|
||||
try:
|
||||
await hass.async_add_executor_job(country_holidays, country)
|
||||
with async_pause_setup(hass, SetupPhases.WAIT_IMPORT_PACKAGES):
|
||||
# import executor job is used here because multiple integrations use
|
||||
# the holidays library and it is not thread safe to import it in parallel
|
||||
# https://github.com/python/cpython/issues/83065
|
||||
await hass.async_add_import_executor_job(country_holidays, country)
|
||||
except NotImplementedError as ex:
|
||||
async_create_issue(
|
||||
hass,
|
||||
@@ -41,9 +46,13 @@ async def _async_validate_country_and_province(
|
||||
if not province:
|
||||
return
|
||||
try:
|
||||
await hass.async_add_executor_job(
|
||||
partial(country_holidays, country, subdiv=province)
|
||||
)
|
||||
with async_pause_setup(hass, SetupPhases.WAIT_IMPORT_PACKAGES):
|
||||
# import executor job is used here because multiple integrations use
|
||||
# the holidays library and it is not thread safe to import it in parallel
|
||||
# https://github.com/python/cpython/issues/83065
|
||||
await hass.async_add_import_executor_job(
|
||||
partial(country_holidays, country, subdiv=province)
|
||||
)
|
||||
except NotImplementedError as ex:
|
||||
async_create_issue(
|
||||
hass,
|
||||
@@ -73,9 +82,13 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
await _async_validate_country_and_province(hass, entry, country, province)
|
||||
|
||||
if country and CONF_LANGUAGE not in entry.options:
|
||||
cls: HolidayBase = await hass.async_add_executor_job(
|
||||
partial(country_holidays, country, subdiv=province)
|
||||
)
|
||||
with async_pause_setup(hass, SetupPhases.WAIT_IMPORT_PACKAGES):
|
||||
# import executor job is used here because multiple integrations use
|
||||
# the holidays library and it is not thread safe to import it in parallel
|
||||
# https://github.com/python/cpython/issues/83065
|
||||
cls: HolidayBase = await hass.async_add_import_executor_job(
|
||||
partial(country_holidays, country, subdiv=province)
|
||||
)
|
||||
default_language = cls.default_language
|
||||
new_options = entry.options.copy()
|
||||
new_options[CONF_LANGUAGE] = default_language
|
||||
|
@@ -428,7 +428,6 @@ class ZeroconfDiscovery:
|
||||
zeroconf, async_service_info, service_type, name
|
||||
),
|
||||
name=f"zeroconf lookup {name}.{service_type}",
|
||||
eager_start=False,
|
||||
)
|
||||
|
||||
async def _async_lookup_and_process_service_update(
|
||||
|
@@ -8,5 +8,5 @@
"iot_class": "local_push",
"loggers": ["zeroconf"],
"quality_scale": "internal",
"requirements": ["zeroconf==0.132.0"]
"requirements": ["zeroconf==0.132.2"]
}
@@ -151,7 +151,8 @@ async def async_get_device_diagnostics(
client: Client = hass.data[DOMAIN][config_entry.entry_id][DATA_CLIENT]
identifiers = get_home_and_node_id_from_device_entry(device)
node_id = identifiers[1] if identifiers else None
assert (driver := client.driver)
driver = client.driver
assert driver
if node_id is None or node_id not in driver.controller.nodes:
raise ValueError(f"Node for device {device.id} can't be found")
node = driver.controller.nodes[node_id]
@@ -282,7 +282,23 @@ class ConfigEntry:
|
||||
pref_disable_new_entities: bool
|
||||
pref_disable_polling: bool
|
||||
version: int
|
||||
source: str
|
||||
minor_version: int
|
||||
disabled_by: ConfigEntryDisabler | None
|
||||
supports_unload: bool | None
|
||||
supports_remove_device: bool | None
|
||||
_supports_options: bool | None
|
||||
_supports_reconfigure: bool | None
|
||||
update_listeners: list[UpdateListenerType]
|
||||
_async_cancel_retry_setup: Callable[[], Any] | None
|
||||
_on_unload: list[Callable[[], Coroutine[Any, Any, None] | None]] | None
|
||||
reload_lock: asyncio.Lock
|
||||
_reauth_lock: asyncio.Lock
|
||||
_reconfigure_lock: asyncio.Lock
|
||||
_tasks: set[asyncio.Future[Any]]
|
||||
_background_tasks: set[asyncio.Future[Any]]
|
||||
_integration_for_domain: loader.Integration | None
|
||||
_tries: int
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
@@ -334,7 +350,7 @@ class ConfigEntry:
|
||||
_setter(self, "pref_disable_polling", pref_disable_polling)
|
||||
|
||||
# Source of the configuration (user, discovery, cloud)
|
||||
self.source = source
|
||||
_setter(self, "source", source)
|
||||
|
||||
# State of the entry (LOADED, NOT_LOADED)
|
||||
_setter(self, "state", state)
|
||||
@@ -355,22 +371,22 @@ class ConfigEntry:
|
||||
error_if_core=False,
|
||||
)
|
||||
disabled_by = ConfigEntryDisabler(disabled_by)
|
||||
self.disabled_by = disabled_by
|
||||
_setter(self, "disabled_by", disabled_by)
|
||||
|
||||
# Supports unload
|
||||
self.supports_unload: bool | None = None
|
||||
_setter(self, "supports_unload", None)
|
||||
|
||||
# Supports remove device
|
||||
self.supports_remove_device: bool | None = None
|
||||
_setter(self, "supports_remove_device", None)
|
||||
|
||||
# Supports options
|
||||
self._supports_options: bool | None = None
|
||||
_setter(self, "_supports_options", None)
|
||||
|
||||
# Supports reconfigure
|
||||
self._supports_reconfigure: bool | None = None
|
||||
_setter(self, "_supports_reconfigure", None)
|
||||
|
||||
# Listeners to call on update
|
||||
self.update_listeners: list[UpdateListenerType] = []
|
||||
_setter(self, "update_listeners", [])
|
||||
|
||||
# Reason why config entry is in a failed state
|
||||
_setter(self, "reason", None)
|
||||
@@ -378,25 +394,23 @@ class ConfigEntry:
|
||||
_setter(self, "error_reason_translation_placeholders", None)
|
||||
|
||||
# Function to cancel a scheduled retry
|
||||
self._async_cancel_retry_setup: Callable[[], Any] | None = None
|
||||
_setter(self, "_async_cancel_retry_setup", None)
|
||||
|
||||
# Hold list for actions to call on unload.
|
||||
self._on_unload: list[Callable[[], Coroutine[Any, Any, None] | None]] | None = (
|
||||
None
|
||||
)
|
||||
_setter(self, "_on_unload", None)
|
||||
|
||||
# Reload lock to prevent conflicting reloads
|
||||
self.reload_lock = asyncio.Lock()
|
||||
_setter(self, "reload_lock", asyncio.Lock())
|
||||
# Reauth lock to prevent concurrent reauth flows
|
||||
self._reauth_lock = asyncio.Lock()
|
||||
_setter(self, "_reauth_lock", asyncio.Lock())
|
||||
# Reconfigure lock to prevent concurrent reconfigure flows
|
||||
self._reconfigure_lock = asyncio.Lock()
|
||||
_setter(self, "_reconfigure_lock", asyncio.Lock())
|
||||
|
||||
self._tasks: set[asyncio.Future[Any]] = set()
|
||||
self._background_tasks: set[asyncio.Future[Any]] = set()
|
||||
_setter(self, "_tasks", set())
|
||||
_setter(self, "_background_tasks", set())
|
||||
|
||||
self._integration_for_domain: loader.Integration | None = None
|
||||
self._tries = 0
|
||||
_setter(self, "_integration_for_domain", None)
|
||||
_setter(self, "_tries", 0)
|
||||
|
||||
def __repr__(self) -> str:
|
||||
"""Representation of ConfigEntry."""
|
||||
|
@@ -4,7 +4,7 @@ from __future__ import annotations
|
||||
|
||||
import abc
|
||||
import asyncio
|
||||
from collections.abc import Callable, Iterable, Mapping
|
||||
from collections.abc import Callable, Container, Iterable, Mapping
|
||||
from contextlib import suppress
|
||||
import copy
|
||||
from dataclasses import dataclass
|
||||
@@ -153,7 +153,7 @@ class FlowResult(TypedDict, Generic[_HandlerT], total=False):
|
||||
flow_id: Required[str]
|
||||
handler: Required[_HandlerT]
|
||||
last_step: bool | None
|
||||
menu_options: list[str] | dict[str, str]
|
||||
menu_options: Container[str]
|
||||
options: Mapping[str, Any]
|
||||
preview: str | None
|
||||
progress_action: str
|
||||
@@ -843,7 +843,7 @@ class FlowHandler(Generic[_FlowResultT, _HandlerT]):
|
||||
self,
|
||||
*,
|
||||
step_id: str | None = None,
|
||||
menu_options: list[str] | dict[str, str],
|
||||
menu_options: Container[str],
|
||||
description_placeholders: Mapping[str, str] | None = None,
|
||||
) -> _FlowResultT:
|
||||
"""Show a navigation menu to the user.
|
||||
|
@@ -957,7 +957,8 @@
|
||||
"color_extractor": {
|
||||
"name": "ColorExtractor",
|
||||
"integration_type": "hub",
|
||||
"config_flow": true
|
||||
"config_flow": true,
|
||||
"single_config_entry": true
|
||||
},
|
||||
"comed": {
|
||||
"name": "Commonwealth Edison (ComEd)",
|
||||
|
@@ -801,7 +801,7 @@ class EntityPlatform:
|
||||
get_initial_options=entity.get_initial_entity_options,
|
||||
has_entity_name=entity.has_entity_name,
|
||||
hidden_by=hidden_by,
|
||||
known_object_ids=self.entities.keys(),
|
||||
known_object_ids=self.entities,
|
||||
original_device_class=entity.device_class,
|
||||
original_icon=entity.icon,
|
||||
original_name=entity_name,
|
||||
@@ -839,11 +839,13 @@ class EntityPlatform:
|
||||
if self.entity_namespace is not None:
|
||||
suggested_object_id = f"{self.entity_namespace} {suggested_object_id}"
|
||||
entity.entity_id = entity_registry.async_generate_entity_id(
|
||||
self.domain, suggested_object_id, self.entities.keys()
|
||||
self.domain, suggested_object_id, self.entities
|
||||
)
|
||||
|
||||
# Make sure it is valid in case an entity set the value themselves
|
||||
if not valid_entity_id(entity.entity_id):
|
||||
# Avoid calling valid_entity_id if we already know it is valid
|
||||
# since it already made it in the registry
|
||||
if not entity.registry_entry and not valid_entity_id(entity.entity_id):
|
||||
entity.add_to_platform_abort()
|
||||
raise HomeAssistantError(f"Invalid entity ID: {entity.entity_id}")
|
||||
|
||||
|
@@ -10,7 +10,7 @@ timer.
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Callable, Hashable, Iterable, KeysView, Mapping
|
||||
from collections.abc import Callable, Container, Hashable, KeysView, Mapping
|
||||
from datetime import datetime, timedelta
|
||||
from enum import StrEnum
|
||||
from functools import cached_property
|
||||
@@ -714,7 +714,7 @@ class EntityRegistry(BaseRegistry):
|
||||
return list(self.entities.get_device_ids())
|
||||
|
||||
def _entity_id_available(
|
||||
self, entity_id: str, known_object_ids: Iterable[str] | None
|
||||
self, entity_id: str, known_object_ids: Container[str] | None
|
||||
) -> bool:
|
||||
"""Return True if the entity_id is available.
|
||||
|
||||
@@ -740,7 +740,7 @@ class EntityRegistry(BaseRegistry):
|
||||
self,
|
||||
domain: str,
|
||||
suggested_object_id: str,
|
||||
known_object_ids: Iterable[str] | None = None,
|
||||
known_object_ids: Container[str] | None = None,
|
||||
) -> str:
|
||||
"""Generate an entity ID that does not conflict.
|
||||
|
||||
@@ -753,7 +753,7 @@ class EntityRegistry(BaseRegistry):
|
||||
|
||||
test_string = preferred_string[:MAX_LENGTH_STATE_ENTITY_ID]
|
||||
if known_object_ids is None:
|
||||
known_object_ids = {}
|
||||
known_object_ids = set()
|
||||
|
||||
tries = 1
|
||||
while not self._entity_id_available(test_string, known_object_ids):
|
||||
@@ -773,7 +773,7 @@ class EntityRegistry(BaseRegistry):
|
||||
unique_id: str,
|
||||
*,
|
||||
# To influence entity ID generation
|
||||
known_object_ids: Iterable[str] | None = None,
|
||||
known_object_ids: Container[str] | None = None,
|
||||
suggested_object_id: str | None = None,
|
||||
# To disable or hide an entity if it gets created
|
||||
disabled_by: RegistryEntryDisabler | None = None,
|
||||
|
@@ -30,11 +30,9 @@ async def async_import_module(hass: HomeAssistant, name: str) -> ModuleType:
if module := cache.get(name):
return module

failure_cache: dict[str, BaseException] = hass.data.setdefault(
DATA_IMPORT_FAILURES, {}
)
if exception := failure_cache.get(name):
raise exception
failure_cache: dict[str, bool] = hass.data.setdefault(DATA_IMPORT_FAILURES, {})
if name in failure_cache:
raise ModuleNotFoundError(f"{name} not found", name=name)

import_futures: dict[str, asyncio.Future[ModuleType]]
import_futures = hass.data.setdefault(DATA_IMPORT_FUTURES, {})
@@ -51,7 +49,8 @@ async def async_import_module(hass: HomeAssistant, name: str) -> ModuleType:
module = await hass.async_add_import_executor_job(_get_module, cache, name)
import_future.set_result(module)
except BaseException as ex:
failure_cache[name] = ex
if isinstance(ex, ModuleNotFoundError):
failure_cache[name] = True
import_future.set_exception(ex)
with suppress(BaseException):
# Set the exception retrieved flag on the future since
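Editor's note: the hunk above narrows the import failure cache so that only ModuleNotFoundError is remembered as a permanent failure; other import errors are retried on the next call. A tiny sketch of that caching rule; import_once and _missing are invented helper names:

import importlib
from types import ModuleType

_missing: set[str] = set()  # names known not to exist; transient failures are not cached


def import_once(name: str) -> ModuleType:
    """Import name, remembering only 'module does not exist' as permanent."""
    if name in _missing:
        raise ModuleNotFoundError(f"{name} not found", name=name)
    try:
        return importlib.import_module(name)
    except ModuleNotFoundError:
        _missing.add(name)
        raise
    # Any other ImportError (e.g. a broken dependency) propagates without being
    # cached, so a later call can succeed once the environment is fixed.


print(import_once("json").__name__)  # -> json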
@@ -3,7 +3,7 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from abc import ABC, abstractmethod
|
||||
from collections.abc import Callable, Coroutine, Mapping
|
||||
from collections.abc import Callable, Container, Coroutine, Mapping
|
||||
import copy
|
||||
from dataclasses import dataclass
|
||||
import types
|
||||
@@ -102,7 +102,7 @@ class SchemaFlowMenuStep(SchemaFlowStep):
|
||||
"""Define a config or options flow menu step."""
|
||||
|
||||
# Menu options
|
||||
options: list[str] | dict[str, str]
|
||||
options: Container[str]
|
||||
|
||||
|
||||
class SchemaCommonFlowHandler:
|
||||
|
@@ -1250,7 +1250,7 @@ async def _async_stop_scripts_after_shutdown(
|
||||
_LOGGER.warning("Stopping scripts running too long after shutdown: %s", names)
|
||||
await asyncio.gather(
|
||||
*(
|
||||
script["instance"].async_stop(update_state=False)
|
||||
create_eager_task(script["instance"].async_stop(update_state=False))
|
||||
for script in running_scripts
|
||||
)
|
||||
)
|
||||
@@ -1269,7 +1269,10 @@ async def _async_stop_scripts_at_shutdown(hass: HomeAssistant, event: Event) ->
|
||||
names = ", ".join([script["instance"].name for script in running_scripts])
|
||||
_LOGGER.debug("Stopping scripts running at shutdown: %s", names)
|
||||
await asyncio.gather(
|
||||
*(script["instance"].async_stop() for script in running_scripts)
|
||||
*(
|
||||
create_eager_task(script["instance"].async_stop())
|
||||
for script in running_scripts
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
@@ -1695,6 +1698,9 @@ class Script:
|
||||
# return false after the other script runs were stopped until our task
|
||||
# resumes running.
|
||||
self._log("Restarting")
|
||||
# Important: yield to the event loop to allow the script to start in case
|
||||
# the script is restarting itself.
|
||||
await asyncio.sleep(0)
|
||||
await self.async_stop(update_state=False, spare=run)
|
||||
|
||||
if started_action:
|
||||
@@ -1724,11 +1730,13 @@ class Script:
|
||||
# asyncio.shield as asyncio.shield yields to the event loop, which would cause
|
||||
# us to wait for script runs added after the call to async_stop.
|
||||
aws = [
|
||||
asyncio.create_task(run.async_stop()) for run in self._runs if run != spare
|
||||
create_eager_task(run.async_stop()) for run in self._runs if run != spare
|
||||
]
|
||||
if not aws:
|
||||
return
|
||||
await asyncio.shield(self._async_stop(aws, update_state, spare))
|
||||
await asyncio.shield(
|
||||
create_eager_task(self._async_stop(aws, update_state, spare))
|
||||
)
|
||||
|
||||
async def _async_get_condition(self, config):
|
||||
if isinstance(config, template.Template):
|
||||
|
@@ -93,6 +93,7 @@ def _base_components() -> dict[str, ModuleType]:
|
||||
light,
|
||||
lock,
|
||||
media_player,
|
||||
notify,
|
||||
remote,
|
||||
siren,
|
||||
todo,
|
||||
@@ -112,6 +113,7 @@ def _base_components() -> dict[str, ModuleType]:
|
||||
"light": light,
|
||||
"lock": lock,
|
||||
"media_player": media_player,
|
||||
"notify": notify,
|
||||
"remote": remote,
|
||||
"siren": siren,
|
||||
"todo": todo,
|
||||
|
@@ -976,6 +976,8 @@ class Integration:
|
||||
comp = await self.hass.async_add_import_executor_job(
|
||||
self._get_component, True
|
||||
)
|
||||
except ModuleNotFoundError:
|
||||
raise
|
||||
except ImportError as ex:
|
||||
load_executor = False
|
||||
_LOGGER.debug(
|
||||
@@ -1115,6 +1117,8 @@ class Integration:
|
||||
self._load_platforms, platform_names
|
||||
)
|
||||
)
|
||||
except ModuleNotFoundError:
|
||||
raise
|
||||
except ImportError as ex:
|
||||
_LOGGER.debug(
|
||||
"Failed to import %s platforms %s in executor",
|
||||
|
@@ -5,8 +5,9 @@ aiodiscover==2.0.0
|
||||
aiodns==3.2.0
|
||||
aiohttp-fast-url-dispatcher==0.3.0
|
||||
aiohttp-zlib-ng==0.3.1
|
||||
aiohttp==3.9.3
|
||||
aiohttp==3.9.4
|
||||
aiohttp_cors==0.7.0
|
||||
aiohttp_session==2.12.0
|
||||
astral==2.2
|
||||
async-interrupt==1.1.1
|
||||
async-upnp-client==0.38.3
|
||||
@@ -31,7 +32,7 @@ habluetooth==2.4.2
|
||||
hass-nabucasa==0.78.0
|
||||
hassil==1.6.1
|
||||
home-assistant-bluetooth==1.12.0
|
||||
home-assistant-frontend==20240404.1
|
||||
home-assistant-frontend==20240404.2
|
||||
home-assistant-intents==2024.4.3
|
||||
httpx==0.27.0
|
||||
ifaddr==0.2.0
|
||||
@@ -61,7 +62,7 @@ voluptuous-serialize==2.6.0
|
||||
voluptuous==0.13.1
|
||||
webrtc-noise-gain==1.2.3
|
||||
yarl==1.9.4
|
||||
zeroconf==0.132.0
|
||||
zeroconf==0.132.2
|
||||
|
||||
# Constrain pycryptodome to avoid vulnerability
|
||||
# see https://github.com/home-assistant/core/pull/16238
|
||||
|
0
homeassistant/py.typed
Normal file
10
mypy.ini
@@ -3391,6 +3391,16 @@ disallow_untyped_defs = true
|
||||
warn_return_any = true
|
||||
warn_unreachable = true
|
||||
|
||||
[mypy-homeassistant.components.ring.*]
|
||||
check_untyped_defs = true
|
||||
disallow_incomplete_defs = true
|
||||
disallow_subclassing_any = true
|
||||
disallow_untyped_calls = true
|
||||
disallow_untyped_decorators = true
|
||||
disallow_untyped_defs = true
|
||||
warn_return_any = true
|
||||
warn_unreachable = true
|
||||
|
||||
[mypy-homeassistant.components.rituals_perfume_genie.*]
|
||||
check_untyped_defs = true
|
||||
disallow_incomplete_defs = true
|
||||
|
@@ -24,8 +24,9 @@ classifiers = [
|
||||
requires-python = ">=3.12.0"
|
||||
dependencies = [
|
||||
"aiodns==3.2.0",
|
||||
"aiohttp==3.9.3",
|
||||
"aiohttp==3.9.4",
|
||||
"aiohttp_cors==0.7.0",
|
||||
"aiohttp_session==2.12.0",
|
||||
"aiohttp-fast-url-dispatcher==0.3.0",
|
||||
"aiohttp-zlib-ng==0.3.1",
|
||||
"astral==2.2",
|
||||
@@ -488,6 +489,8 @@ filterwarnings = [
|
||||
"ignore:datetime.*utcfromtimestamp\\(\\) is deprecated and scheduled for removal:DeprecationWarning:aiopurpleair.helpers.validators",
|
||||
# https://github.com/DataDog/datadogpy/pull/290 - >=0.23.0
|
||||
"ignore:invalid escape sequence:SyntaxWarning:.*datadog.dogstatsd.base",
|
||||
# https://github.com/DataDog/datadogpy/pull/566/files - >=0.37.0
|
||||
"ignore:pkg_resources is deprecated as an API:DeprecationWarning:datadog.util.compat",
|
||||
# https://github.com/fwestenberg/devialet/pull/6 - >1.4.5
|
||||
"ignore:datetime.*utcnow\\(\\) is deprecated and scheduled for removal:DeprecationWarning:devialet.devialet_api",
|
||||
# https://github.com/jaraco/jaraco.abode/commit/9e3e789efc96cddcaa15f920686bbeb79a7469e0 - update jaraco.abode to >=5.1.0
|
||||
@@ -504,13 +507,23 @@ filterwarnings = [
|
||||
"ignore:datetime.*utcnow\\(\\) is deprecated and scheduled for removal:DeprecationWarning:miio.miioprotocol",
|
||||
# https://github.com/hunterjm/python-onvif-zeep-async/pull/51 - >3.1.12
|
||||
"ignore:datetime.*utcnow\\(\\) is deprecated and scheduled for removal:DeprecationWarning:onvif.client",
|
||||
# https://github.com/googleapis/python-pubsub/commit/060f00bcea5cd129be3a2d37078535cc97b4f5e8 - >=2.13.12
|
||||
"ignore:pkg_resources is deprecated as an API:DeprecationWarning:google.pubsub_v1.services.publisher.client",
|
||||
# https://github.com/okunishinishi/python-stringcase/commit/6a5c5bbd3fe5337862abc7fd0853a0f36e18b2e1 - >1.2.0
|
||||
"ignore:invalid escape sequence:SyntaxWarning:.*stringcase",
|
||||
# https://github.com/grahamwetzler/smart-meter-texas/pull/143 - >0.5.3
|
||||
"ignore:ssl.OP_NO_SSL\\*/ssl.OP_NO_TLS\\* options are deprecated:DeprecationWarning:smart_meter_texas",
|
||||
# https://github.com/timmo001/system-bridge-connector/pull/27 - >= 4.1.0
|
||||
"ignore:pkg_resources is deprecated as an API:DeprecationWarning:systembridgeconnector.version",
|
||||
# https://github.com/jschlyter/ttls/commit/d64f1251397b8238cf6a35bea64784de25e3386c - >=1.8.1
|
||||
"ignore:pkg_resources is deprecated as an API:DeprecationWarning:ttls",
|
||||
# https://github.com/mvantellingen/python-zeep/pull/1364 - >4.2.1
|
||||
"ignore:'cgi' is deprecated and slated for removal in Python 3.13:DeprecationWarning:zeep.utils",
|
||||
|
||||
# -- fixed for Python 3.13
|
||||
# https://github.com/rhasspy/wyoming/commit/e34af30d455b6f2bb9e5cfb25fad8d276914bc54 - >=1.4.2
|
||||
"ignore:'audioop' is deprecated and slated for removal in Python 3.13:DeprecationWarning:wyoming.audio",
|
||||
|
||||
# -- other
|
||||
# Locale changes might take some time to resolve upstream
|
||||
"ignore:'locale.getdefaultlocale' is deprecated and slated for removal in Python 3.15:DeprecationWarning:micloud.micloud",
|
||||
@@ -537,15 +550,47 @@ filterwarnings = [
# https://pypi.org/project/pyblackbird/ - v0.6 - 2023-03-15
# https://github.com/koolsb/pyblackbird/pull/9 -> closed
"ignore:invalid escape sequence:SyntaxWarning:.*pyblackbird",
# https://pypi.org/project/pybotvac/ - v0.0.24 - 2023-01-02
# https://github.com/stianaske/pybotvac/pull/81 -> closed
"ignore:invalid escape sequence:SyntaxWarning:.*pybotvac.robot",
# https://github.com/pkkid/python-plexapi/pull/1244 - v4.15.10 -> new issue same file
# https://github.com/pkkid/python-plexapi/pull/1244 - v4.15.11 -> new issue same file
# https://github.com/pkkid/python-plexapi/pull/1370 -> Not fixed here
"ignore:invalid escape sequence:SyntaxWarning:.*plexapi.base",
# https://pypi.org/project/pyws66i/ - v1.1 - 2022-04-05
"ignore:invalid escape sequence:SyntaxWarning:.*pyws66i",
# https://pypi.org/project/sleekxmppfs/ - v1.4.1 - 2022-08-18
"ignore:invalid escape sequence:SyntaxWarning:.*sleekxmppfs.thirdparty.mini_dateutil",
# - pkg_resources
# https://pypi.org/project/aiomusiccast/ - v0.14.8 - 2023-03-20
"ignore:pkg_resources is deprecated as an API:DeprecationWarning:aiomusiccast",
# https://github.com/eavanvalkenburg/pysiaalarm/blob/v3.1.1/src/pysiaalarm/data/data.py#L7 - v3.1.1 - 2023-04-17
"ignore:pkg_resources is deprecated as an API:DeprecationWarning:pysiaalarm.data.data",
# https://pypi.org/project/pybotvac/ - v0.0.25 - 2024-04-11
"ignore:pkg_resources is deprecated as an API:DeprecationWarning:pybotvac.version",
# https://github.com/home-assistant-ecosystem/python-mystrom/blob/2.2.0/pymystrom/__init__.py#L10 - v2.2.0 - 2023-05-21
"ignore:pkg_resources is deprecated as an API:DeprecationWarning:pymystrom",
# https://pypi.org/project/velbus-aio/ - v2024.4.0
# https://github.com/Cereal2nd/velbus-aio/blob/2024.4.0/velbusaio/handler.py#L13
"ignore:pkg_resources is deprecated as an API:DeprecationWarning:velbusaio.handler",

# -- Python 3.13
# HomeAssistant
"ignore:'audioop' is deprecated and slated for removal in Python 3.13:DeprecationWarning:homeassistant.components.assist_pipeline.websocket_api",
# https://pypi.org/project/pylutron/ - v0.2.12 - 2024-02-12
# https://github.com/thecynic/pylutron/issues/89
"ignore:'telnetlib' is deprecated and slated for removal in Python 3.13:DeprecationWarning:pylutron",
# https://pypi.org/project/SpeechRecognition/ - v3.10.3 - 2024-03-30
# https://github.com/Uberi/speech_recognition/blob/3.10.3/speech_recognition/__init__.py#L7
"ignore:'aifc' is deprecated and slated for removal in Python 3.13:DeprecationWarning:speech_recognition",
# https://pypi.org/project/voip-utils/ - v0.1.0 - 2023-06-28
# https://github.com/home-assistant-libs/voip-utils/blob/v0.1.0/voip_utils/rtp_audio.py#L2
"ignore:'audioop' is deprecated and slated for removal in Python 3.13:DeprecationWarning:voip_utils.rtp_audio",

# -- Python 3.13 - unmaintained projects, last release about 2+ years
# https://pypi.org/project/pydub/ - v0.25.1 - 2021-03-10
"ignore:'audioop' is deprecated and slated for removal in Python 3.13:DeprecationWarning:pydub.utils",
# https://github.com/heathbar/plum-lightpad-python/issues/7 - v0.0.11 - 2018-10-16
"ignore:'telnetlib' is deprecated and slated for removal in Python 3.13:DeprecationWarning:plumlightpad.lightpad",
# https://pypi.org/project/pyws66i/ - v1.1 - 2022-04-05
# https://github.com/ssaenger/pyws66i/blob/v1.1/pyws66i/__init__.py#L2
"ignore:'telnetlib' is deprecated and slated for removal in Python 3.13:DeprecationWarning:pyws66i",

# -- unmaintained projects, last release about 2+ years
# https://pypi.org/project/agent-py/ - v0.0.23 - 2020-06-04
@@ -559,6 +604,10 @@ filterwarnings = [
"ignore:datetime.*utcnow\\(\\) is deprecated and scheduled for removal:DeprecationWarning:directv.models",
# https://pypi.org/project/foobot_async/ - v1.0.0 - 2020-11-24
"ignore:with timeout\\(\\) is deprecated:DeprecationWarning:foobot_async",
# https://pypi.org/project/habitipy/ - v0.3.0 - 2019-01-14
"ignore:pkg_resources is deprecated as an API:DeprecationWarning:habitipy.api",
# https://pypi.org/project/httpsig/ - v1.3.0 - 2018-11-28
"ignore:pkg_resources is deprecated as an API:DeprecationWarning:httpsig",
# https://pypi.org/project/influxdb/ - v5.3.1 - 2020-11-11 (archived)
"ignore:datetime.*utcfromtimestamp\\(\\) is deprecated and scheduled for removal:DeprecationWarning:influxdb.line_protocol",
# https://pypi.org/project/lark-parser/ - v0.12.0 - 2021-08-30 -> moved to `lark`
@@ -575,6 +624,8 @@ filterwarnings = [
"ignore:\"is not\" with 'int' literal. Did you mean \"!=\"?:SyntaxWarning:.*opuslib.api.decoder",
# https://pypi.org/project/passlib/ - v1.7.4 - 2020-10-08
"ignore:'crypt' is deprecated and slated for removal in Python 3.13:DeprecationWarning:passlib.utils",
# https://pypi.org/project/pilight/ - v0.1.1 - 2016-10-19
"ignore:pkg_resources is deprecated as an API:DeprecationWarning:pilight",
# https://pypi.org/project/plumlightpad/ - v0.0.11 - 2018-10-16
"ignore:invalid escape sequence:SyntaxWarning:.*plumlightpad.plumdiscovery",
"ignore:\"is\" with 'int' literal. Did you mean \"==\"?:SyntaxWarning:.*plumlightpad.(lightpad|logicalload)",
@@ -586,6 +637,10 @@ filterwarnings = [
"ignore:\"is\" with 'int' literal. Did you mean \"==\"?:SyntaxWarning:.*pyiss",
# https://pypi.org/project/PyMetEireann/ - v2021.8.0 - 2021-08-16
"ignore:datetime.*utcnow\\(\\) is deprecated and scheduled for removal:DeprecationWarning:meteireann",
# https://pypi.org/project/pyowm/ - v3.3.0 - 2022-02-14
# https://github.com/csparpa/pyowm/issues/435
# https://github.com/csparpa/pyowm/blob/3.3.0/pyowm/commons/cityidregistry.py#L7
"ignore:pkg_resources is deprecated as an API:DeprecationWarning:pyowm.commons.cityidregistry",
# https://pypi.org/project/PyPasser/ - v0.0.5 - 2021-10-21
"ignore:invalid escape sequence:SyntaxWarning:.*pypasser.utils",
# https://pypi.org/project/pyqwikswitch/ - v0.94 - 2019-08-19
@@ -604,7 +659,7 @@ filterwarnings = [
]
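Each string in the `filterwarnings` list above uses the `action:message:category:module` form that pytest hands to Python's warnings machinery, with the message and module parts treated as regular expressions; the comment above each entry tracks the upstream release or pull request that would make the filter unnecessary. As a rough sketch of what a single entry amounts to at the `warnings` level (the entry reused here is the `habitipy.api` one from the list; this is an approximation, not a description of pytest internals):

```python
import warnings

# Roughly what the pyproject entry
#   "ignore:pkg_resources is deprecated as an API:DeprecationWarning:habitipy.api"
# asks for: silence that DeprecationWarning, but only when it is raised
# from the habitipy.api module.
warnings.filterwarnings(
    "ignore",
    message="pkg_resources is deprecated as an API",
    category=DeprecationWarning,
    module="habitipy.api",
)
```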
[tool.ruff]
required-version = ">=0.3.4"
required-version = ">=0.3.7"

[tool.ruff.lint]
select = [
@@ -625,6 +680,7 @@ select = [
"DTZ004", # Use datetime.fromtimestamp(ts, tz=) instead of datetime.utcfromtimestamp(ts)
"E", # pycodestyle
"F", # pyflakes/autoflake
"FLY", # flynt
"G", # flake8-logging-format
"I", # isort
"INP", # flake8-no-pep420
@@ -645,6 +701,7 @@ select = [
"RUF005", # Consider iterable unpacking instead of concatenation
"RUF006", # Store a reference to the return value of asyncio.create_task
"RUF013", # PEP 484 prohibits implicit Optional
"RUF018", # Avoid assignment expressions in assert statements
# "RUF100", # Unused `noqa` directive; temporarily every now and then to clean them up
"S102", # Use of exec detected
"S103", # bad-file-permissions
@@ -694,6 +751,10 @@ ignore = [
"PT011", # pytest.raises({exception}) is too broad, set the `match` parameter or use a more specific exception
"PT012", # `pytest.raises()` block should contain a single simple statement
"PT018", # Assertion should be broken down into multiple parts
"RUF001", # String contains ambiguous unicode character.
"RUF002", # Docstring contains ambiguous unicode character.
"RUF003", # Comment contains ambiguous unicode character.
"RUF015", # Prefer next(...) over single element slice
"SIM102", # Use a single if statement instead of nested if statements
"SIM108", # Use ternary operator {contents} instead of if-else-block
"SIM115", # Use context handler for opening files
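To make one of the rules selected above a bit more concrete: RUF018 ("Avoid assignment expressions in assert statements") flags walrus assignments inside `assert`, because the binding silently disappears when assertions are stripped with `python -O`. A minimal hypothetical sketch of the pattern it rejects and the preferred rewrite (the function name is made up purely for illustration):

```python
def load_config() -> dict | None:
    """Hypothetical loader, used only to illustrate the rule."""
    return {"host": "localhost"}


# Flagged by RUF018: under `python -O` the assert is stripped, and with it
# the walrus assignment, so `config` would never be bound.
assert (config := load_config()) is not None

# Preferred: bind first, assert afterwards, so the assignment always runs.
config = load_config()
assert config is not None
```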
@@ -4,8 +4,9 @@

# Home Assistant Core
aiodns==3.2.0
aiohttp==3.9.3
aiohttp==3.9.4
aiohttp_cors==0.7.0
aiohttp_session==2.12.0
aiohttp-fast-url-dispatcher==0.3.0
aiohttp-zlib-ng==0.3.1
astral==2.2
Some files were not shown because too many files have changed in this diff