Compare commits

..

58 Commits

Author SHA1 Message Date
Franck Nijhof 43a1eb043b Bumped version to 2023.5.0b6 2023-05-01 22:55:49 +02:00
Bram Kragten 6b77775ed5 Update frontend to 20230501.0 (#92339) 2023-05-01 22:55:34 +02:00
Michael Hansen 7077d23127 Bump voip-utils to 0.0.6 (#92334) 2023-05-01 22:55:31 +02:00
J. Nick Koston c7eac0ebbb Avoid starting ONVIF PullPoint if the camera reports its unsupported (#92333) 2023-05-01 22:55:27 +02:00
David F. Mulcahey 7f13033f69 Don't poll ZHA electrical measurement sensors unnecessarily (#92330) 2023-05-01 22:55:23 +02:00
Paulus Schoutsen eba201e71b Add voip configuration url (#92326) 2023-05-01 22:55:20 +02:00
G Johansson 1e9d777201 Fix db_url issue in SQL (#92324)
* db_url fix

* Add test

* assert entry.options
2023-05-01 22:55:16 +02:00
J. Nick Koston 030b7f8a37 Bump sqlalchemy to 2.0.12 (#92315)
changelog: https://docs.sqlalchemy.org/en/20/changelog/changelog_20.html#change-2.0.12
2023-05-01 22:55:12 +02:00
J. Nick Koston 8cbc69fc92 Retry onvif setup when it is unexpectedly cancelled (#92313)
* Retry onvif setup when it is unexpectedly cancelled

fixes #92308

* Retry onvif setup when it is unexpectedly cancelled

fixes #92308
2023-05-01 22:55:08 +02:00
J. Nick Koston 2a5f5ea039 Reduce size of migration transactions to accommodate slow/busy systems (#92312)
* Reduce size of migration transactions to accommodate slow/busy systems

related issue #91489

* handle overloaded RPIs better
2023-05-01 22:55:04 +02:00
Michael Hansen 0ba662e7bc Allow configuring SIP port in VoIP (#92210)
Co-authored-by: Franck Nijhof <git@frenck.dev>
2023-05-01 22:54:59 +02:00
Franck Nijhof 05530d656a Bumped version to 2023.5.0b5 2023-04-30 20:16:39 +02:00
Jan Bouwhuis 2b2be6a333 Fix mqtt not available when starting snips (#92296) 2023-04-30 20:16:28 +02:00
J. Nick Koston 5bd54490ea Ensure onvif webhook can be registered (#92295) 2023-04-30 20:16:25 +02:00
J. Nick Koston 00a28caa6d Bump bleak to 0.20.2 (#92294) 2023-04-30 20:16:21 +02:00
J. Nick Koston c4aa6ba262 Bump beacontools to fix conflict with construct<2.10 and >=2.8.16 (#92293) 2023-04-30 20:16:18 +02:00
J. Nick Koston 7a90db903b Prevent pysnmp from being installed as it does not work with newer python (#92292) 2023-04-30 20:16:14 +02:00
Robert Hillis fe279c8593 Add missing fstrings in Local Calendar (#92288) 2023-04-30 20:16:10 +02:00
Maximilian ddf5a9fbcc Bump pynina to 0.3.0 (#92286) 2023-04-30 20:16:07 +02:00
J. Nick Koston 093d5d6176 Fix august lock state when API reports locking and locked with the same timestamp (#92276) 2023-04-30 20:16:01 +02:00
Paulus Schoutsen eb586c7144 Bumped version to 2023.5.0b4 2023-04-29 21:23:22 -04:00
J. Nick Koston ec15a03706 Handle AttributeError from wrong port in ONVIF config flow (#92272)
* Handle AttributeError from wrong port in ONVIF config flow

fixes
```
2023-04-29 19:17:22.289 ERROR (MainThread) [aiohttp.server] Error handling request
Traceback (most recent call last):
  File "/Users/bdraco/home-assistant/venv/lib/python3.10/site-packages/aiohttp/web_protocol.py", line 433, in _handle_request
    resp = await request_handler(request)
  File "/Users/bdraco/home-assistant/venv/lib/python3.10/site-packages/aiohttp/web_app.py", line 504, in _handle
    resp = await handler(request)
  File "/Users/bdraco/home-assistant/venv/lib/python3.10/site-packages/aiohttp/web_middlewares.py", line 117, in impl
    return await handler(request)
  File "/Users/bdraco/home-assistant/homeassistant/components/http/security_filter.py", line 85, in security_filter_middleware
    return await handler(request)
  File "/Users/bdraco/home-assistant/homeassistant/components/http/forwarded.py", line 100, in forwarded_middleware
    return await handler(request)
  File "/Users/bdraco/home-assistant/homeassistant/components/http/request_context.py", line 28, in request_context_middleware
    return await handler(request)
  File "/Users/bdraco/home-assistant/homeassistant/components/http/ban.py", line 80, in ban_middleware
    return await handler(request)
  File "/Users/bdraco/home-assistant/homeassistant/components/http/auth.py", line 235, in auth_middleware
    return await handler(request)
  File "/Users/bdraco/home-assistant/homeassistant/components/http/view.py", line 146, in handle
    result = await result
  File "/Users/bdraco/home-assistant/homeassistant/components/config/config_entries.py", line 180, in post
    return await super().post(request, flow_id)
  File "/Users/bdraco/home-assistant/homeassistant/components/http/data_validator.py", line 72, in wrapper
    result = await method(view, request, data, *args, **kwargs)
  File "/Users/bdraco/home-assistant/homeassistant/helpers/data_entry_flow.py", line 110, in post
    result = await self._flow_mgr.async_configure(flow_id, data)
  File "/Users/bdraco/home-assistant/homeassistant/data_entry_flow.py", line 271, in async_configure
    result = await self._async_handle_step(
  File "/Users/bdraco/home-assistant/homeassistant/data_entry_flow.py", line 367, in _async_handle_step
    result: FlowResult = await getattr(flow, method)(user_input)
  File "/Users/bdraco/home-assistant/homeassistant/components/onvif/config_flow.py", line 233, in async_step_configure
    errors, description_placeholders = await self.async_setup_profiles()
  File "/Users/bdraco/home-assistant/homeassistant/components/onvif/config_flow.py", line 277, in async_setup_profiles
    await device.update_xaddrs()
  File "/Users/bdraco/home-assistant/venv/lib/python3.10/site-packages/onvif/client.py", line 433, in update_xaddrs
    capabilities = await devicemgmt.GetCapabilities({"Category": "All"})
  File "/Users/bdraco/home-assistant/venv/lib/python3.10/site-packages/zeep/proxy.py", line 64, in __call__
    return await self._proxy._binding.send_async(
  File "/Users/bdraco/home-assistant/venv/lib/python3.10/site-packages/zeep/wsdl/bindings/soap.py", line 164, in send_async
    return self.process_reply(client, operation_obj, response)
  File "/Users/bdraco/home-assistant/venv/lib/python3.10/site-packages/zeep/wsdl/bindings/soap.py", line 204, in process_reply
    doc = parse_xml(content, self.transport, settings=client.settings)
  File "/Users/bdraco/home-assistant/venv/lib/python3.10/site-packages/zeep/loader.py", line 51, in parse_xml
    docinfo = elementtree.getroottree().docinfo
AttributeError: 'NoneType' object has no attribute 'getroottree'
```

* port

* Revert "port"

This reverts commit 4693f3f33a.

* misfire
2023-04-29 21:23:16 -04:00
J. Nick Koston 24b851c184 Auto repair incorrect collation on MySQL schema (#92270)
* Auto repair incorrect collation on MySQL schema

As we do more union queries in 2023.5.x if there is a mismatch
between collations on tables, they will fail with an error
that is hard for the user to figure out how to fix

`Error executing query: (MySQLdb.OperationalError) (1271, "Illegal mix of collations for operation UNION")`

This was reported in the #beta channel and by PM from others
so the problem is not isolated to a single user

https://discord.com/channels/330944238910963714/427516175237382144/1100908739910963272

* test with ascii since older mariadb versions may not work otherwise

* Revert "test with ascii since older mariadb versions may not work otherwise"

This reverts commit 787fda1aefcd8418a28a8a8f430e7e7232218ef8.

* older version need to check collation_server because the collation is not reflected if its the default
2023-04-29 21:23:15 -04:00
Michael a8539b89e8 Fix call deflection update in Fritz!Tools (#92267)
fix
2023-04-29 21:23:14 -04:00
Jan Bouwhuis 8cf1ed81a8 Fix MQTT certificate files setup (#92266) 2023-04-29 21:23:13 -04:00
Robert Hillis fe452452e6 Fix Google Mail Sensor key error (#92262)
Fix Google Mail key error
2023-04-29 21:23:13 -04:00
Michael Hansen c632d27197 Add VoIP error tone (#92260)
* Play error tone when pipeline error occurs

* Play listening tone at the start of each cycle
2023-04-29 21:23:12 -04:00
J. Nick Koston 6a6eba1ca3 Handle onvif errors when detail is returned as bytes (#92259) 2023-04-29 21:23:11 -04:00
J. Nick Koston a5241b3118 Pin pyasn1 and pysnmplib since pyasn1 0.5.0 has breaking changes and pysnmp-pyasn1 and pyasn1 are both using the pyasn1 namespace (#92254) 2023-04-29 21:23:10 -04:00
Franck Nijhof 3bab40753d Bumped version to 2023.5.0b3 2023-04-29 19:03:08 +02:00
J. Nick Koston 546c68196e Bump pyunifiprotect to 4.8.3 (#92251) 2023-04-29 19:02:59 +02:00
Bouwe Westerdijk 379db033af Bump plugwise to v0.31.1 (#92249) 2023-04-29 19:02:55 +02:00
Franck Nijhof 4b9355e1ca Fix unknown/unavailable source sensor in Filter entities (#92241) 2023-04-29 19:02:52 +02:00
Franck Nijhof 89eca22b93 Fix history YAML deprecation (#92238) 2023-04-29 19:02:48 +02:00
Allen Porter 2cb665a1d9 Add more detail to invalid rrule calendar error message (#92222)
Co-authored-by: Martin Hjelmare <marhje52@gmail.com>
2023-04-29 19:02:45 +02:00
Michael Davie 1d54a0ed3d Bump env_canada to 0.5.34 (#92216)
Bump env_canada to v.0.5.34
2023-04-29 19:02:41 +02:00
jjlawren 7af1521812 Bump sonos-websocket to 0.1.0 (#92209)
Bump sonos-websocket to 0.1.0
2023-04-29 19:02:38 +02:00
Tom Harris c8cc6bfbb7 Fix Insteon scenes with disabled entities (#92137) 2023-04-29 19:02:34 +02:00
Rajeevan 401e61588c Fix solaredge-local protobuf exception (#92090) 2023-04-29 19:02:31 +02:00
Michael 3f948da2af Turn AVM FRITZ!Box Tools call deflection switches into coordinator entities (#91913)
Co-authored-by: Franck Nijhof <frenck@frenck.nl>
2023-04-29 19:02:27 +02:00
Mick Vleeshouwer aafbc64e02 Revert "Add silent option for DynamicShutter (ogp:Shutter) in Overkiz" (#91354) 2023-04-29 19:02:24 +02:00
rikroe e460bc7ecb Move BMW Target SoC to number platform (#91081)
Co-authored-by: Franck Nijhof <frenck@frenck.nl>
Co-authored-by: rikroe <rikroe@users.noreply.github.com>
2023-04-29 19:02:19 +02:00
Franck Nijhof 1b39abe3bc Bumped version to 2023.5.0b2 2023-04-28 21:42:27 +02:00
J. Nick Koston 29bff59707 Fix missing preset_mode feature in bond fans (#92202) 2023-04-28 21:42:18 +02:00
Jean-François Roy faa8f38fa8 Add missing PRESET_MODE feature to BAF fans (#92200) 2023-04-28 21:42:15 +02:00
Paul Bottein 1f6dbe96f6 Update frontend to 20230428.0 (#92190) 2023-04-28 21:42:12 +02:00
Jan Bouwhuis 98075da069 Fix mqtt subscribe debouncer initial delay too long when birth message is disabled (#92188)
Fix mqtt subscribe debouncer initial delay
2023-04-28 21:42:08 +02:00
David F. Mulcahey 652bb8ef95 Fix ZHA device triggers (#92186)
* Fix missing endpoint data on ZHA events

* revert to flat structure

* update test
2023-04-28 21:42:05 +02:00
Nolan Gilley 96d2b53798 Upgrade lakeside to 0.13 (#92173) 2023-04-28 21:42:01 +02:00
Raman Gupta 25d621ab94 Bump pyvizio to 0.1.61 (#92161) 2023-04-28 21:41:58 +02:00
Erik Montnemery fa3f19e7bf Keep expose setting in sync for assist (#92158)
* Keep expose setting in sync for assist

* Fix initialization, add test

* Fix tests

* Add AgentManager.async_setup

* Fix typo

---------

Co-authored-by: Martin Hjelmare <marhje52@gmail.com>
2023-04-28 21:41:53 +02:00
jjlawren 412ea937ff Properly resolve media_source URLs for Sonos announcements (#92154)
Properly resolve media_source URLs for Sonos announcements
2023-04-28 21:41:46 +02:00
Luke b7f5c144a8 Bump Roborock to 0.8.3 (#92151) 2023-04-28 21:41:42 +02:00
J. Nick Koston 658128c892 Fix ignored apple tvs being scanned over and over (#92150) 2023-04-28 21:41:38 +02:00
J. Nick Koston ff2f6029ce Ensure purge can cleanup old format detached states in the database (#92145) 2023-04-28 21:41:35 +02:00
puddly 8017a04efe Fix ZHA startup failure with the Konke button (#92144)
* Ensure devices with bad cluster subclasses do not prevent startup

* Explicitly unit test an affected SML001 device

* Do not use invalid `hue_occupancy` attribute name

* Actually remove `hue_occupancy`

* Bump ZHA dependencies
2023-04-28 21:41:31 +02:00
G Johansson ef350949fd Fix options flow Workday (#92140)
* Fix options flow workday

* simpler
2023-04-28 21:41:26 +02:00
Luke 7b1b3970b1 Bump roborock to 0.8.1 for beta fixes (#92131)
* bump to 0.8.1

* add tests for new config flow errors

* removed logs for known errors
2023-04-28 21:40:35 +02:00
124 changed files with 2119 additions and 432 deletions
@@ -324,18 +324,29 @@ class AppleTVConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
all_identifiers = set(self.atv.all_identifiers)
discovered_ip_address = str(self.atv.address)
for entry in self._async_current_entries():
if not all_identifiers.intersection(
existing_identifiers = set(
entry.data.get(CONF_IDENTIFIERS, [entry.unique_id])
):
)
if not all_identifiers.intersection(existing_identifiers):
continue
if entry.data.get(CONF_ADDRESS) != discovered_ip_address:
combined_identifiers = existing_identifiers | all_identifiers
if entry.data.get(
CONF_ADDRESS
) != discovered_ip_address or combined_identifiers != set(
entry.data.get(CONF_IDENTIFIERS, [])
):
self.hass.config_entries.async_update_entry(
entry,
data={**entry.data, CONF_ADDRESS: discovered_ip_address},
)
self.hass.async_create_task(
self.hass.config_entries.async_reload(entry.entry_id)
data={
**entry.data,
CONF_ADDRESS: discovered_ip_address,
CONF_IDENTIFIERS: list(combined_identifiers),
},
)
if entry.source != config_entries.SOURCE_IGNORE:
self.hass.async_create_task(
self.hass.config_entries.async_reload(entry.entry_id)
)
if not allow_exist:
raise DeviceAlreadyConfigured()
+1 -1
View File
@@ -4,7 +4,7 @@ from __future__ import annotations
import logging
from typing import Any
from atenpdu import AtenPE, AtenPEError
from atenpdu import AtenPE, AtenPEError # pylint: disable=import-error
import voluptuous as vol
from homeassistant.components.switch import (
+5 -5
View File
@@ -3,6 +3,7 @@ import asyncio
import logging
from aiohttp import ClientError
from yalexs.util import get_latest_activity
from homeassistant.core import callback
from homeassistant.helpers.debounce import Debouncer
@@ -169,12 +170,11 @@ class ActivityStream(AugustSubscriberMixin):
device_id = activity.device_id
activity_type = activity.activity_type
device_activities = self._latest_activities.setdefault(device_id, {})
lastest_activity = device_activities.get(activity_type)
# Ignore activities that are older than the latest one
# Ignore activities that are older than the latest one unless it is a non
# locking or unlocking activity with the exact same start time.
if (
lastest_activity
and lastest_activity.activity_start_time >= activity.activity_start_time
get_latest_activity(activity, device_activities.get(activity_type))
!= activity
):
continue
+18 -9
View File
@@ -5,7 +5,7 @@ from typing import Any
from aiohttp import ClientResponseError
from yalexs.activity import SOURCE_PUBNUB, ActivityType
from yalexs.lock import LockStatus
from yalexs.util import update_lock_detail_from_activity
from yalexs.util import get_latest_activity, update_lock_detail_from_activity
from homeassistant.components.lock import ATTR_CHANGED_BY, LockEntity
from homeassistant.config_entries import ConfigEntry
@@ -90,17 +90,26 @@ class AugustLock(AugustEntityMixin, RestoreEntity, LockEntity):
@callback
def _update_from_data(self):
"""Get the latest state of the sensor and update activity."""
lock_activity = self._data.activity_stream.get_latest_device_activity(
self._device_id,
{ActivityType.LOCK_OPERATION, ActivityType.LOCK_OPERATION_WITHOUT_OPERATOR},
activity_stream = self._data.activity_stream
device_id = self._device_id
if lock_activity := activity_stream.get_latest_device_activity(
device_id,
{ActivityType.LOCK_OPERATION},
):
self._attr_changed_by = lock_activity.operated_by
lock_activity_without_operator = activity_stream.get_latest_device_activity(
device_id,
{ActivityType.LOCK_OPERATION_WITHOUT_OPERATOR},
)
if lock_activity is not None:
self._attr_changed_by = lock_activity.operated_by
update_lock_detail_from_activity(self._detail, lock_activity)
# If the source is pubnub the lock must be online since its a live update
if lock_activity.source == SOURCE_PUBNUB:
if latest_activity := get_latest_activity(
lock_activity_without_operator, lock_activity
):
if latest_activity.source == SOURCE_PUBNUB:
# If the source is pubnub the lock must be online since its a live update
self._detail.set_online(True)
update_lock_detail_from_activity(self._detail, latest_activity)
bridge_activity = self._data.activity_stream.get_latest_device_activity(
self._device_id, {ActivityType.BRIDGE_OPERATION}
@@ -28,5 +28,5 @@
"documentation": "https://www.home-assistant.io/integrations/august",
"iot_class": "cloud_push",
"loggers": ["pubnub", "yalexs"],
"requirements": ["yalexs==1.3.2", "yalexs-ble==2.1.16"]
"requirements": ["yalexs==1.3.3", "yalexs-ble==2.1.16"]
}
+5 -1
View File
@@ -39,7 +39,11 @@ async def async_setup_entry(
class BAFFan(BAFEntity, FanEntity):
"""BAF ceiling fan component."""
_attr_supported_features = FanEntityFeature.SET_SPEED | FanEntityFeature.DIRECTION
_attr_supported_features = (
FanEntityFeature.SET_SPEED
| FanEntityFeature.DIRECTION
| FanEntityFeature.PRESET_MODE
)
_attr_preset_modes = [PRESET_MODE_AUTO]
_attr_speed_count = SPEED_COUNT
@@ -15,7 +15,7 @@
],
"quality_scale": "internal",
"requirements": [
"bleak==0.20.1",
"bleak==0.20.2",
"bleak-retry-connector==3.0.2",
"bluetooth-adapters==0.15.3",
"bluetooth-auto-recovery==1.0.3",
@@ -41,6 +41,7 @@ PLATFORMS = [
Platform.DEVICE_TRACKER,
Platform.LOCK,
Platform.NOTIFY,
Platform.NUMBER,
Platform.SELECT,
Platform.SENSOR,
]
@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/bmw_connected_drive",
"iot_class": "cloud_polling",
"loggers": ["bimmer_connected"],
"requirements": ["bimmer_connected==0.13.0"]
"requirements": ["bimmer_connected==0.13.2"]
}
@@ -0,0 +1,120 @@
"""Number platform for BMW."""
from collections.abc import Callable, Coroutine
from dataclasses import dataclass
import logging
from typing import Any
from bimmer_connected.models import MyBMWAPIError
from bimmer_connected.vehicle import MyBMWVehicle
from homeassistant.components.number import (
NumberDeviceClass,
NumberEntity,
NumberEntityDescription,
NumberMode,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from . import BMWBaseEntity
from .const import DOMAIN
from .coordinator import BMWDataUpdateCoordinator
_LOGGER = logging.getLogger(__name__)
@dataclass
class BMWRequiredKeysMixin:
"""Mixin for required keys."""
value_fn: Callable[[MyBMWVehicle], float | int | None]
remote_service: Callable[[MyBMWVehicle, float | int], Coroutine[Any, Any, Any]]
@dataclass
class BMWNumberEntityDescription(NumberEntityDescription, BMWRequiredKeysMixin):
"""Describes BMW number entity."""
is_available: Callable[[MyBMWVehicle], bool] = lambda _: False
dynamic_options: Callable[[MyBMWVehicle], list[str]] | None = None
mode: NumberMode = NumberMode.AUTO
NUMBER_TYPES: list[BMWNumberEntityDescription] = [
BMWNumberEntityDescription(
key="target_soc",
name="Target SoC",
device_class=NumberDeviceClass.BATTERY,
is_available=lambda v: v.is_remote_set_target_soc_enabled,
native_max_value=100.0,
native_min_value=20.0,
native_step=5.0,
mode=NumberMode.SLIDER,
value_fn=lambda v: v.fuel_and_battery.charging_target,
remote_service=lambda v, o: v.remote_services.trigger_charging_settings_update(
target_soc=int(o)
),
icon="mdi:battery-charging-medium",
),
]
async def async_setup_entry(
hass: HomeAssistant,
config_entry: ConfigEntry,
async_add_entities: AddEntitiesCallback,
) -> None:
"""Set up the MyBMW number from config entry."""
coordinator: BMWDataUpdateCoordinator = hass.data[DOMAIN][config_entry.entry_id]
entities: list[BMWNumber] = []
for vehicle in coordinator.account.vehicles:
if not coordinator.read_only:
entities.extend(
[
BMWNumber(coordinator, vehicle, description)
for description in NUMBER_TYPES
if description.is_available(vehicle)
]
)
async_add_entities(entities)
class BMWNumber(BMWBaseEntity, NumberEntity):
"""Representation of BMW Number entity."""
entity_description: BMWNumberEntityDescription
def __init__(
self,
coordinator: BMWDataUpdateCoordinator,
vehicle: MyBMWVehicle,
description: BMWNumberEntityDescription,
) -> None:
"""Initialize an BMW Number."""
super().__init__(coordinator, vehicle)
self.entity_description = description
self._attr_unique_id = f"{vehicle.vin}-{description.key}"
self._attr_mode = description.mode
@property
def native_value(self) -> float | None:
"""Return the entity value to represent the entity state."""
return self.entity_description.value_fn(self.vehicle)
async def async_set_native_value(self, value: float) -> None:
"""Update to the vehicle."""
_LOGGER.debug(
"Executing '%s' on vehicle '%s' to value '%s'",
self.entity_description.key,
self.vehicle.vin,
value,
)
try:
await self.entity_description.remote_service(self.vehicle, value)
except MyBMWAPIError as ex:
raise HomeAssistantError(ex) from ex
@@ -9,7 +9,7 @@ from bimmer_connected.vehicle.charging_profile import ChargingMode
from homeassistant.components.select import SelectEntity, SelectEntityDescription
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import PERCENTAGE, UnitOfElectricCurrent
from homeassistant.const import UnitOfElectricCurrent
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.entity_platform import AddEntitiesCallback
@@ -37,19 +37,6 @@ class BMWSelectEntityDescription(SelectEntityDescription, BMWRequiredKeysMixin):
SELECT_TYPES: dict[str, BMWSelectEntityDescription] = {
# --- Generic ---
"target_soc": BMWSelectEntityDescription(
key="target_soc",
name="Target SoC",
is_available=lambda v: v.is_remote_set_target_soc_enabled,
options=[str(i * 5 + 20) for i in range(17)],
current_option=lambda v: str(v.fuel_and_battery.charging_target),
remote_service=lambda v, o: v.remote_services.trigger_charging_settings_update(
target_soc=int(o)
),
icon="mdi:battery-charging-medium",
unit_of_measurement=PERCENTAGE,
),
"ac_limit": BMWSelectEntityDescription(
key="ac_limit",
name="AC Charging Limit",
+2 -1
View File
@@ -89,7 +89,8 @@ class BondFan(BondEntity, FanEntity):
features |= FanEntityFeature.SET_SPEED
if self._device.supports_direction():
features |= FanEntityFeature.DIRECTION
if self._device.has_action(Action.BREEZE_ON):
features |= FanEntityFeature.PRESET_MODE
return features
@property
@@ -164,7 +164,7 @@ def _validate_rrule(value: Any) -> str:
try:
rrulestr(value)
except ValueError as err:
raise vol.Invalid(f"Invalid rrule: {str(err)}") from err
raise vol.Invalid(f"Invalid rrule '{value}': {err}") from err
# Example format: FREQ=DAILY;UNTIL=...
rule_parts = dict(s.split("=", 1) for s in value.split(";"))
@@ -23,7 +23,7 @@ from homeassistant.util import language as language_util
from .agent import AbstractConversationAgent, ConversationInput, ConversationResult
from .const import HOME_ASSISTANT_AGENT
from .default_agent import DefaultAgent
from .default_agent import DefaultAgent, async_setup as async_setup_default_agent
__all__ = [
"DOMAIN",
@@ -93,7 +93,9 @@ CONFIG_SCHEMA = vol.Schema(
@core.callback
def _get_agent_manager(hass: HomeAssistant) -> AgentManager:
"""Get the active agent."""
return AgentManager(hass)
manager = AgentManager(hass)
manager.async_setup()
return manager
@core.callback
@@ -389,7 +391,11 @@ class AgentManager:
"""Initialize the conversation agents."""
self.hass = hass
self._agents: dict[str, AbstractConversationAgent] = {}
self._default_agent_init_lock = asyncio.Lock()
self._builtin_agent_init_lock = asyncio.Lock()
def async_setup(self) -> None:
"""Set up the conversation agents."""
async_setup_default_agent(self.hass)
async def async_get_agent(
self, agent_id: str | None = None
@@ -402,7 +408,7 @@ class AgentManager:
if self._builtin_agent is not None:
return self._builtin_agent
async with self._default_agent_init_lock:
async with self._builtin_agent_init_lock:
if self._builtin_agent is not None:
return self._builtin_agent
@@ -73,6 +73,26 @@ def _get_language_variations(language: str) -> Iterable[str]:
yield lang
@core.callback
def async_setup(hass: core.HomeAssistant) -> None:
"""Set up entity registry listener for the default agent."""
entity_registry = er.async_get(hass)
for entity_id in entity_registry.entities:
async_should_expose(hass, DOMAIN, entity_id)
@core.callback
def async_handle_entity_registry_changed(event: core.Event) -> None:
"""Set expose flag on newly created entities."""
if event.data["action"] == "create":
async_should_expose(hass, DOMAIN, event.data["entity_id"])
hass.bus.async_listen(
er.EVENT_ENTITY_REGISTRY_UPDATED,
async_handle_entity_registry_changed,
run_immediately=True,
)
class DefaultAgent(AbstractConversationAgent):
"""Default agent for conversation agent."""
@@ -472,10 +492,10 @@ class DefaultAgent(AbstractConversationAgent):
return self._slot_lists
area_ids_with_entities: set[str] = set()
all_entities = er.async_get(self.hass)
entity_registry = er.async_get(self.hass)
entities = [
entity
for entity in all_entities.entities.values()
for entity in entity_registry.entities.values()
if async_should_expose(self.hass, DOMAIN, entity.entity_id)
]
devices = dr.async_get(self.hass)
@@ -5,5 +5,5 @@
"documentation": "https://www.home-assistant.io/integrations/eddystone_temperature",
"iot_class": "local_polling",
"loggers": ["beacontools"],
"requirements": ["beacontools[scan]==1.2.3", "construct==2.10.56"]
"requirements": ["beacontools[scan]==2.1.0", "construct==2.10.56"]
}
@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/environment_canada",
"iot_class": "cloud_polling",
"loggers": ["env_canada"],
"requirements": ["env_canada==0.5.33"]
"requirements": ["env_canada==0.5.34"]
}
+1 -1
View File
@@ -5,5 +5,5 @@
"documentation": "https://www.home-assistant.io/integrations/eufy",
"iot_class": "local_polling",
"loggers": ["lakeside"],
"requirements": ["lakeside==0.12"]
"requirements": ["lakeside==0.13"]
}
+9 -2
View File
@@ -236,11 +236,18 @@ class SensorFilter(SensorEntity):
self.async_write_ha_state()
return
if new_state.state in (STATE_UNKNOWN, STATE_UNAVAILABLE):
self._state = new_state.state
if new_state.state == STATE_UNKNOWN:
self._state = None
self.async_write_ha_state()
return
if new_state.state == STATE_UNAVAILABLE:
self._attr_available = False
self.async_write_ha_state()
return
self._attr_available = True
temp_state = _State(new_state.last_updated, new_state.state)
try:
@@ -80,7 +80,10 @@ class FritzBoxBinarySensor(FritzBoxBaseCoordinatorEntity, BinarySensorEntity):
def is_on(self) -> bool | None:
"""Return true if the binary sensor is on."""
if isinstance(
state := self.coordinator.data.get(self.entity_description.key), bool
state := self.coordinator.data["entity_states"].get(
self.entity_description.key
),
bool,
):
return state
return None
+44 -18
View File
@@ -19,6 +19,7 @@ from fritzconnection.core.exceptions import (
from fritzconnection.lib.fritzhosts import FritzHosts
from fritzconnection.lib.fritzstatus import FritzStatus
from fritzconnection.lib.fritzwlan import DEFAULT_PASSWORD_LENGTH, FritzGuestWLAN
import xmltodict
from homeassistant.components.device_tracker import (
CONF_CONSIDER_HOME,
@@ -137,8 +138,15 @@ class HostInfo(TypedDict):
status: bool
class UpdateCoordinatorDataType(TypedDict):
"""Update coordinator data type."""
call_deflections: dict[int, dict]
entity_states: dict[str, StateType | bool]
class FritzBoxTools(
update_coordinator.DataUpdateCoordinator[dict[str, bool | StateType]]
update_coordinator.DataUpdateCoordinator[UpdateCoordinatorDataType]
):
"""FritzBoxTools class."""
@@ -173,6 +181,7 @@ class FritzBoxTools(
self.password = password
self.port = port
self.username = username
self.has_call_deflections: bool = False
self._model: str | None = None
self._current_firmware: str | None = None
self._latest_firmware: str | None = None
@@ -243,6 +252,8 @@ class FritzBoxTools(
)
self.device_is_router = self.fritz_status.has_wan_enabled
self.has_call_deflections = "X_AVM-DE_OnTel1" in self.connection.services
def register_entity_updates(
self, key: str, update_fn: Callable[[FritzStatus, StateType], Any]
) -> Callable[[], None]:
@@ -259,20 +270,30 @@ class FritzBoxTools(
self._entity_update_functions[key] = update_fn
return unregister_entity_updates
async def _async_update_data(self) -> dict[str, bool | StateType]:
async def _async_update_data(self) -> UpdateCoordinatorDataType:
"""Update FritzboxTools data."""
enity_data: dict[str, bool | StateType] = {}
entity_data: UpdateCoordinatorDataType = {
"call_deflections": {},
"entity_states": {},
}
try:
await self.async_scan_devices()
for key, update_fn in self._entity_update_functions.items():
_LOGGER.debug("update entity %s", key)
enity_data[key] = await self.hass.async_add_executor_job(
entity_data["entity_states"][
key
] = await self.hass.async_add_executor_job(
update_fn, self.fritz_status, self.data.get(key)
)
if self.has_call_deflections:
entity_data[
"call_deflections"
] = await self.async_update_call_deflections()
except FRITZ_EXCEPTIONS as ex:
raise update_coordinator.UpdateFailed(ex) from ex
_LOGGER.debug("enity_data: %s", enity_data)
return enity_data
_LOGGER.debug("enity_data: %s", entity_data)
return entity_data
@property
def unique_id(self) -> str:
@@ -354,6 +375,23 @@ class FritzBoxTools(
"""Retrieve latest device information from the FRITZ!Box."""
return await self.hass.async_add_executor_job(self._update_device_info)
async def async_update_call_deflections(
self,
) -> dict[int, dict[str, Any]]:
"""Call GetDeflections action from X_AVM-DE_OnTel service."""
raw_data = await self.hass.async_add_executor_job(
partial(self.connection.call_action, "X_AVM-DE_OnTel1", "GetDeflections")
)
if not raw_data:
return {}
xml_data = xmltodict.parse(raw_data["NewDeflectionList"])
if xml_data.get("List") and (items := xml_data["List"].get("Item")) is not None:
if not isinstance(items, list):
items = [items]
return {int(item["DeflectionId"]): item for item in items}
return {}
async def _async_get_wan_access(self, ip_address: str) -> bool | None:
"""Get WAN access rule for given IP address."""
try:
@@ -772,18 +810,6 @@ class AvmWrapper(FritzBoxTools):
"WLANConfiguration", str(index), "GetInfo"
)
async def async_get_ontel_num_deflections(self) -> dict[str, Any]:
"""Call GetNumberOfDeflections action from X_AVM-DE_OnTel service."""
return await self._async_service_call(
"X_AVM-DE_OnTel", "1", "GetNumberOfDeflections"
)
async def async_get_ontel_deflections(self) -> dict[str, Any]:
"""Call GetDeflections action from X_AVM-DE_OnTel service."""
return await self._async_service_call("X_AVM-DE_OnTel", "1", "GetDeflections")
async def async_set_wlan_configuration(
self, index: int, turn_on: bool
) -> dict[str, Any]:
+1 -1
View File
@@ -309,4 +309,4 @@ class FritzBoxSensor(FritzBoxBaseCoordinatorEntity, SensorEntity):
@property
def native_value(self) -> StateType:
"""Return the value reported by the sensor."""
return self.coordinator.data.get(self.entity_description.key)
return self.coordinator.data["entity_states"].get(self.entity_description.key)
+92 -72
View File
@@ -4,10 +4,8 @@ from __future__ import annotations
import logging
from typing import Any
import xmltodict
from homeassistant.components.network import async_get_source_ip
from homeassistant.components.switch import SwitchEntity
from homeassistant.components.switch import SwitchEntity, SwitchEntityDescription
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import EntityCategory
from homeassistant.core import HomeAssistant, callback
@@ -15,6 +13,7 @@ from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.entity import DeviceInfo, Entity
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.update_coordinator import CoordinatorEntity
from homeassistant.util import slugify
from .common import (
@@ -47,31 +46,15 @@ async def _async_deflection_entities_list(
_LOGGER.debug("Setting up %s switches", SWITCH_TYPE_DEFLECTION)
deflections_response = await avm_wrapper.async_get_ontel_num_deflections()
if not deflections_response:
if (
call_deflections := avm_wrapper.data.get("call_deflections")
) is None or not isinstance(call_deflections, dict):
_LOGGER.debug("The FRITZ!Box has no %s options", SWITCH_TYPE_DEFLECTION)
return []
_LOGGER.debug(
"Specific %s response: GetNumberOfDeflections=%s",
SWITCH_TYPE_DEFLECTION,
deflections_response,
)
if deflections_response["NewNumberOfDeflections"] == 0:
_LOGGER.debug("The FRITZ!Box has no %s options", SWITCH_TYPE_DEFLECTION)
return []
if not (deflection_list := await avm_wrapper.async_get_ontel_deflections()):
return []
items = xmltodict.parse(deflection_list["NewDeflectionList"])["List"]["Item"]
if not isinstance(items, list):
items = [items]
return [
FritzBoxDeflectionSwitch(avm_wrapper, device_friendly_name, dict_of_deflection)
for dict_of_deflection in items
FritzBoxDeflectionSwitch(avm_wrapper, device_friendly_name, cd_id)
for cd_id in call_deflections
]
@@ -273,6 +256,61 @@ async def async_setup_entry(
)
class FritzBoxBaseCoordinatorSwitch(CoordinatorEntity, SwitchEntity):
"""Fritz switch coordinator base class."""
coordinator: AvmWrapper
entity_description: SwitchEntityDescription
_attr_has_entity_name = True
def __init__(
self,
avm_wrapper: AvmWrapper,
device_name: str,
description: SwitchEntityDescription,
) -> None:
"""Init device info class."""
super().__init__(avm_wrapper)
self.entity_description = description
self._device_name = device_name
self._attr_unique_id = f"{avm_wrapper.unique_id}-{description.key}"
@property
def device_info(self) -> DeviceInfo:
"""Return the device information."""
return DeviceInfo(
configuration_url=f"http://{self.coordinator.host}",
connections={(CONNECTION_NETWORK_MAC, self.coordinator.mac)},
identifiers={(DOMAIN, self.coordinator.unique_id)},
manufacturer="AVM",
model=self.coordinator.model,
name=self._device_name,
sw_version=self.coordinator.current_firmware,
)
@property
def data(self) -> dict[str, Any]:
"""Return entity data from coordinator data."""
raise NotImplementedError()
@property
def available(self) -> bool:
"""Return availability based on data availability."""
return super().available and bool(self.data)
async def _async_handle_turn_on_off(self, turn_on: bool) -> None:
"""Handle switch state change request."""
raise NotImplementedError()
async def async_turn_on(self, **kwargs: Any) -> None:
"""Turn on switch."""
await self._async_handle_turn_on_off(turn_on=True)
async def async_turn_off(self, **kwargs: Any) -> None:
"""Turn off switch."""
await self._async_handle_turn_on_off(turn_on=False)
class FritzBoxBaseSwitch(FritzBoxBaseEntity):
"""Fritz switch base class."""
@@ -417,69 +455,51 @@ class FritzBoxPortSwitch(FritzBoxBaseSwitch, SwitchEntity):
return bool(resp is not None)
class FritzBoxDeflectionSwitch(FritzBoxBaseSwitch, SwitchEntity):
class FritzBoxDeflectionSwitch(FritzBoxBaseCoordinatorSwitch):
"""Defines a FRITZ!Box Tools PortForward switch."""
_attr_entity_category = EntityCategory.CONFIG
def __init__(
self,
avm_wrapper: AvmWrapper,
device_friendly_name: str,
dict_of_deflection: Any,
deflection_id: int,
) -> None:
"""Init Fritxbox Deflection class."""
self._avm_wrapper = avm_wrapper
self.dict_of_deflection = dict_of_deflection
self._attributes = {}
self.id = int(self.dict_of_deflection["DeflectionId"])
self._attr_entity_category = EntityCategory.CONFIG
switch_info = SwitchInfo(
description=f"Call deflection {self.id}",
friendly_name=device_friendly_name,
self.deflection_id = deflection_id
description = SwitchEntityDescription(
key=f"call_deflection_{self.deflection_id}",
name=f"Call deflection {self.deflection_id}",
icon="mdi:phone-forward",
type=SWITCH_TYPE_DEFLECTION,
callback_update=self._async_fetch_update,
callback_switch=self._async_switch_on_off_executor,
)
super().__init__(self._avm_wrapper, device_friendly_name, switch_info)
super().__init__(avm_wrapper, device_friendly_name, description)
async def _async_fetch_update(self) -> None:
"""Fetch updates."""
@property
def data(self) -> dict[str, Any]:
"""Return call deflection data."""
return self.coordinator.data["call_deflections"].get(self.deflection_id, {})
resp = await self._avm_wrapper.async_get_ontel_deflections()
if not resp:
self._is_available = False
return
@property
def extra_state_attributes(self) -> dict[str, str]:
"""Return device attributes."""
return {
"type": self.data["Type"],
"number": self.data["Number"],
"deflection_to_number": self.data["DeflectionToNumber"],
"mode": self.data["Mode"][1:],
"outgoing": self.data["Outgoing"],
"phonebook_id": self.data["PhonebookID"],
}
self.dict_of_deflection = xmltodict.parse(resp["NewDeflectionList"])["List"][
"Item"
]
if isinstance(self.dict_of_deflection, list):
self.dict_of_deflection = self.dict_of_deflection[self.id]
@property
def is_on(self) -> bool | None:
"""Switch status."""
return self.data.get("Enable") == "1"
_LOGGER.debug(
"Specific %s response: NewDeflectionList=%s",
SWITCH_TYPE_DEFLECTION,
self.dict_of_deflection,
)
self._attr_is_on = self.dict_of_deflection["Enable"] == "1"
self._is_available = True
self._attributes["type"] = self.dict_of_deflection["Type"]
self._attributes["number"] = self.dict_of_deflection["Number"]
self._attributes["deflection_to_number"] = self.dict_of_deflection[
"DeflectionToNumber"
]
# Return mode sample: "eImmediately"
self._attributes["mode"] = self.dict_of_deflection["Mode"][1:]
self._attributes["outgoing"] = self.dict_of_deflection["Outgoing"]
self._attributes["phonebook_id"] = self.dict_of_deflection["PhonebookID"]
async def _async_switch_on_off_executor(self, turn_on: bool) -> None:
async def _async_handle_turn_on_off(self, turn_on: bool) -> None:
"""Handle deflection switch."""
await self._avm_wrapper.async_set_deflection_enable(self.id, turn_on)
await self.coordinator.async_set_deflection_enable(self.deflection_id, turn_on)
class FritzBoxProfileSwitch(FritzDeviceBase, SwitchEntity):
@@ -20,5 +20,5 @@
"documentation": "https://www.home-assistant.io/integrations/frontend",
"integration_type": "system",
"quality_scale": "internal",
"requirements": ["home-assistant-frontend==20230427.0"]
"requirements": ["home-assistant-frontend==20230501.0"]
}
@@ -43,10 +43,10 @@ class GoogleMailSensor(GoogleMailEntity, SensorEntity):
"""Get the vacation data."""
service = await self.auth.get_resource()
settings: HttpRequest = service.users().settings().getVacation(userId="me")
data = await self.hass.async_add_executor_job(settings.execute)
data: dict = await self.hass.async_add_executor_job(settings.execute)
if data["enableAutoReply"]:
value = datetime.fromtimestamp(int(data["endTime"]) / 1000, tz=timezone.utc)
if data["enableAutoReply"] and (end := data.get("endTime")):
value = datetime.fromtimestamp(int(end) / 1000, tz=timezone.utc)
else:
value = None
self._attr_native_value = value
+11 -10
View File
@@ -12,6 +12,7 @@ from homeassistant.components import frontend
from homeassistant.components.http import HomeAssistantView
from homeassistant.components.recorder import get_instance, history
from homeassistant.components.recorder.util import session_scope
from homeassistant.const import CONF_EXCLUDE, CONF_INCLUDE
from homeassistant.core import HomeAssistant, valid_entity_id
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entityfilter import INCLUDE_EXCLUDE_BASE_FILTER_SCHEMA
@@ -27,16 +28,16 @@ CONF_ORDER = "use_include_order"
_ONE_DAY = timedelta(days=1)
CONFIG_SCHEMA = vol.Schema(
vol.All(
cv.deprecated(DOMAIN),
{
DOMAIN: vol.All(
INCLUDE_EXCLUDE_BASE_FILTER_SCHEMA.extend(
{vol.Optional(CONF_ORDER, default=False): cv.boolean}
),
)
},
),
{
DOMAIN: vol.All(
cv.deprecated(CONF_INCLUDE),
cv.deprecated(CONF_EXCLUDE),
cv.deprecated(CONF_ORDER),
INCLUDE_EXCLUDE_BASE_FILTER_SCHEMA.extend(
{vol.Optional(CONF_ORDER, default=False): cv.boolean}
),
)
},
extra=vol.ALLOW_EXTRA,
)
@@ -18,7 +18,7 @@
"loggers": ["pyinsteon", "pypubsub"],
"requirements": [
"pyinsteon==1.4.2",
"insteon-frontend-home-assistant==0.3.4"
"insteon-frontend-home-assistant==0.3.5"
],
"usb": [
{
@@ -129,7 +129,7 @@ class LocalCalendarEntity(CalendarEntity):
recurrence_range=range_value,
)
except EventStoreError as err:
raise HomeAssistantError("Error while deleting event: {err}") from err
raise HomeAssistantError(f"Error while deleting event: {err}") from err
await self._async_store()
await self.async_update_ha_state(force_refresh=True)
@@ -153,7 +153,7 @@ class LocalCalendarEntity(CalendarEntity):
recurrence_range=range_value,
)
except EventStoreError as err:
raise HomeAssistantError("Error while updating event: {err}") from err
raise HomeAssistantError(f"Error while updating event: {err}") from err
await self._async_store()
await self.async_update_ha_state(force_refresh=True)
+1 -1
View File
@@ -194,6 +194,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
conf = dict(entry.data)
hass_config = await conf_util.async_hass_config_yaml(hass)
mqtt_yaml = PLATFORM_CONFIG_SCHEMA_BASE(hass_config.get(DOMAIN, {}))
await async_create_certificate_temp_files(hass, conf)
client = MQTT(hass, entry, conf)
if DOMAIN in hass.data:
mqtt_data = get_mqtt_data(hass)
@@ -206,7 +207,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
hass.data[DATA_MQTT] = mqtt_data = MqttData(config=mqtt_yaml, client=client)
client.start(mqtt_data)
await async_create_certificate_temp_files(hass, dict(entry.data))
# Restore saved subscriptions
if mqtt_data.subscriptions_to_restore:
mqtt_data.client.async_restore_tracked_subscriptions(
+3
View File
@@ -740,6 +740,9 @@ class MQTT:
asyncio.run_coroutine_threadsafe(
publish_birth_message(birth_message), self.hass.loop
)
else:
# Update subscribe cooldown period to a shorter time
self._subscribe_debouncer.set_timeout(SUBSCRIBE_COOLDOWN)
async def _async_resubscribe(self) -> None:
"""Resubscribe on reconnect."""
+1 -1
View File
@@ -7,5 +7,5 @@
"documentation": "https://www.home-assistant.io/integrations/nina",
"iot_class": "cloud_polling",
"loggers": ["pynina"],
"requirements": ["pynina==0.2.0"]
"requirements": ["pynina==0.3.0"]
}
@@ -1,4 +1,5 @@
"""The ONVIF integration."""
import asyncio
import logging
from httpx import RequestError
@@ -57,6 +58,11 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
raise ConfigEntryNotReady(
f"Could not setup camera {device.device.host}:{device.device.port}: {err}"
) from err
except asyncio.CancelledError as err:
# After https://github.com/agronholm/anyio/issues/374 is resolved
# this may be able to be removed
await device.device.close()
raise ConfigEntryNotReady(f"Setup was unexpectedly canceled: {err}") from err
if not device.available:
raise ConfigEntryNotReady()
@@ -316,6 +316,15 @@ class OnvifFlowHandler(config_entries.ConfigFlow, domain=DOMAIN):
# Verify there is an H264 profile
media_service = device.create_media_service()
profiles = await media_service.GetProfiles()
except AttributeError: # Likely an empty document or 404 from the wrong port
LOGGER.debug(
"%s: No ONVIF service found at %s:%s",
self.onvif_config[CONF_NAME],
self.onvif_config[CONF_HOST],
self.onvif_config[CONF_PORT],
exc_info=True,
)
return {CONF_PORT: "no_onvif_service"}, {}
except Fault as err:
stringified_error = stringify_onvif_error(err)
description_placeholders = {"error": stringified_error}
+20 -3
View File
@@ -6,6 +6,7 @@ from contextlib import suppress
import datetime as dt
import os
import time
from typing import Any
from httpx import RequestError
import onvif
@@ -55,6 +56,7 @@ class ONVIFDevice:
self.info: DeviceInfo = DeviceInfo()
self.capabilities: Capabilities = Capabilities()
self.onvif_capabilities: dict[str, Any] | None = None
self.profiles: list[Profile] = []
self.max_resolution: int = 0
self.platforms: list[Platform] = []
@@ -98,6 +100,10 @@ class ONVIFDevice:
# Get all device info
await self.device.update_xaddrs()
# Get device capabilities
self.onvif_capabilities = await self.device.get_capabilities()
await self.async_check_date_and_time()
# Create event manager
@@ -107,8 +113,9 @@ class ONVIFDevice:
# Fetch basic device info and capabilities
self.info = await self.async_get_device_info()
LOGGER.debug("Camera %s info = %s", self.name, self.info)
self.capabilities = await self.async_get_capabilities()
LOGGER.debug("Camera %s capabilities = %s", self.name, self.capabilities)
# Check profiles before capabilities since the camera may be slow to respond
# once the event manager is started in async_get_capabilities.
self.profiles = await self.async_get_profiles()
LOGGER.debug("Camera %s profiles = %s", self.name, self.profiles)
@@ -116,6 +123,9 @@ class ONVIFDevice:
if not self.profiles:
raise ONVIFError("No camera profiles found")
self.capabilities = await self.async_get_capabilities()
LOGGER.debug("Camera %s capabilities = %s", self.name, self.capabilities)
if self.capabilities.ptz:
self.device.create_ptz_service()
@@ -299,7 +309,14 @@ class ONVIFDevice:
events = False
with suppress(*GET_CAPABILITIES_EXCEPTIONS, XMLParseError):
events = await self.events.async_start()
onvif_capabilities = self.onvif_capabilities or {}
pull_point_support = onvif_capabilities.get("Events", {}).get(
"WSPullPointSupport"
)
LOGGER.debug("%s: WSPullPointSupport: %s", self.name, pull_point_support)
events = await self.events.async_start(
pull_point_support is not False, True
)
return Capabilities(snapshot, events, ptz, imaging)
+6 -3
View File
@@ -123,11 +123,13 @@ class EventManager:
if not self._listeners:
self.pullpoint_manager.async_cancel_pull_messages()
async def async_start(self) -> bool:
async def async_start(self, try_pullpoint: bool, try_webhook: bool) -> bool:
"""Start polling events."""
# Always start pull point first, since it will populate the event list
event_via_pull_point = await self.pullpoint_manager.async_start()
events_via_webhook = await self.webhook_manager.async_start()
event_via_pull_point = (
try_pullpoint and await self.pullpoint_manager.async_start()
)
events_via_webhook = try_webhook and await self.webhook_manager.async_start()
return events_via_webhook or event_via_pull_point
async def async_stop(self) -> None:
@@ -769,6 +771,7 @@ class WebHookManager:
return
webhook_id = self._webhook_unique_id
self._async_unregister_webhook()
webhook.async_register(
self._hass, DOMAIN, webhook_id, webhook_id, self._async_handle_webhook
)
+1 -1
View File
@@ -8,5 +8,5 @@
"documentation": "https://www.home-assistant.io/integrations/onvif",
"iot_class": "local_push",
"loggers": ["onvif", "wsdiscovery", "zeep"],
"requirements": ["onvif-zeep-async==1.3.0", "WSDiscovery==2.0.0"]
"requirements": ["onvif-zeep-async==1.3.1", "WSDiscovery==2.0.0"]
}
@@ -11,6 +11,7 @@
"error": {
"onvif_error": "Error setting up ONVIF device: {error}. Check logs for more information.",
"auth_failed": "Could not authenticate: {error}",
"no_onvif_service": "No ONVIF service found. Check that the port number is correct.",
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]"
},
"step": {
+6 -1
View File
@@ -18,7 +18,12 @@ def stringify_onvif_error(error: Exception) -> str:
if isinstance(error, Fault):
message = error.message
if error.detail:
message += ": " + error.detail
# Detail may be a bytes object, so we need to convert it to string
if isinstance(error.detail, bytes):
detail = error.detail.decode("utf-8", "replace")
else:
detail = str(error.detail)
message += ": " + detail
if error.code:
message += f" (code:{error.code})"
if error.subcodes:
+4 -23
View File
@@ -3,7 +3,7 @@ from __future__ import annotations
from collections.abc import Callable
from dataclasses import dataclass
from typing import Any, cast
from typing import Any
from pyoverkiz.enums import OverkizCommand, OverkizCommandParam, OverkizState
from pyoverkiz.enums.ui import UIClass, UIWidget
@@ -15,12 +15,12 @@ from homeassistant.components.switch import (
SwitchEntityDescription,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import EntityCategory
from homeassistant.const import EntityCategory, Platform
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from . import HomeAssistantOverkizData
from .const import DOMAIN, IGNORED_OVERKIZ_DEVICES
from .const import DOMAIN
from .entity import OverkizDescriptiveEntity
@@ -107,19 +107,6 @@ SWITCH_DESCRIPTIONS: list[OverkizSwitchDescription] = [
),
entity_category=EntityCategory.CONFIG,
),
OverkizSwitchDescription(
key=UIWidget.DYNAMIC_SHUTTER,
name="Silent mode",
turn_on=OverkizCommand.ACTIVATE_OPTION,
turn_on_args=OverkizCommandParam.SILENCE,
turn_off=OverkizCommand.DEACTIVATE_OPTION,
turn_off_args=OverkizCommandParam.SILENCE,
is_on=lambda select_state: (
OverkizCommandParam.SILENCE
in cast(list, select_state(OverkizState.CORE_ACTIVATED_OPTIONS))
),
icon="mdi:feather",
),
]
SUPPORTED_DEVICES = {
@@ -136,13 +123,7 @@ async def async_setup_entry(
data: HomeAssistantOverkizData = hass.data[DOMAIN][entry.entry_id]
entities: list[OverkizSwitch] = []
for device in data.coordinator.data.values():
if (
device.widget in IGNORED_OVERKIZ_DEVICES
or device.ui_class in IGNORED_OVERKIZ_DEVICES
):
continue
for device in data.platforms[Platform.SWITCH]:
if description := SUPPORTED_DEVICES.get(device.widget) or SUPPORTED_DEVICES.get(
device.ui_class
):
@@ -7,6 +7,6 @@
"integration_type": "hub",
"iot_class": "local_polling",
"loggers": ["crcmod", "plugwise"],
"requirements": ["plugwise==0.31.0"],
"requirements": ["plugwise==0.31.1"],
"zeroconf": ["_plugwise._tcp.local."]
}
@@ -8,6 +8,7 @@ from ..schema import (
correct_db_schema_precision,
correct_db_schema_utf8,
validate_db_schema_precision,
validate_table_schema_has_correct_collation,
validate_table_schema_supports_utf8,
)
@@ -17,9 +18,12 @@ if TYPE_CHECKING:
def validate_db_schema(instance: Recorder) -> set[str]:
"""Do some basic checks for common schema errors caused by manual migration."""
return validate_table_schema_supports_utf8(
schema_errors = validate_table_schema_supports_utf8(
instance, EventData, (EventData.shared_data,)
) | validate_db_schema_precision(instance, Events)
for table in (Events, EventData):
schema_errors |= validate_table_schema_has_correct_collation(instance, table)
return schema_errors
def correct_db_schema(
@@ -27,5 +31,6 @@ def correct_db_schema(
schema_errors: set[str],
) -> None:
"""Correct issues detected by validate_db_schema."""
correct_db_schema_utf8(instance, EventData, schema_errors)
for table in (Events, EventData):
correct_db_schema_utf8(instance, table, schema_errors)
correct_db_schema_precision(instance, Events, schema_errors)
@@ -5,6 +5,7 @@ from collections.abc import Iterable, Mapping
import logging
from typing import TYPE_CHECKING
from sqlalchemy import MetaData
from sqlalchemy.exc import OperationalError
from sqlalchemy.orm import DeclarativeBase
from sqlalchemy.orm.attributes import InstrumentedAttribute
@@ -60,6 +61,60 @@ def validate_table_schema_supports_utf8(
return schema_errors
def validate_table_schema_has_correct_collation(
instance: Recorder,
table_object: type[DeclarativeBase],
) -> set[str]:
"""Verify the table has the correct collation."""
schema_errors: set[str] = set()
# Lack of full utf8 support is only an issue for MySQL / MariaDB
if instance.dialect_name != SupportedDialect.MYSQL:
return schema_errors
try:
schema_errors = _validate_table_schema_has_correct_collation(
instance, table_object
)
except Exception as exc: # pylint: disable=broad-except
_LOGGER.exception("Error when validating DB schema: %s", exc)
_log_schema_errors(table_object, schema_errors)
return schema_errors
def _validate_table_schema_has_correct_collation(
instance: Recorder,
table_object: type[DeclarativeBase],
) -> set[str]:
"""Ensure the table has the correct collation to avoid union errors with mixed collations."""
schema_errors: set[str] = set()
# Mark the session as read_only to ensure that the test data is not committed
# to the database and we always rollback when the scope is exited
with session_scope(session=instance.get_session(), read_only=True) as session:
table = table_object.__tablename__
metadata_obj = MetaData()
connection = session.connection()
metadata_obj.reflect(bind=connection)
dialect_kwargs = metadata_obj.tables[table].dialect_kwargs
# Check if the table has a collation set, if its not set than its
# using the server default collation for the database
collate = (
dialect_kwargs.get("mysql_collate")
or dialect_kwargs.get(
"mariadb_collate"
) # pylint: disable-next=protected-access
or connection.dialect._fetch_setting(connection, "collation_server") # type: ignore[attr-defined]
)
if collate and collate != "utf8mb4_unicode_ci":
_LOGGER.debug(
"Database %s collation is not utf8mb4_unicode_ci",
table,
)
schema_errors.add(f"{table}.utf8mb4_unicode_ci")
return schema_errors
def _validate_table_schema_supports_utf8(
instance: Recorder,
table_object: type[DeclarativeBase],
@@ -184,7 +239,10 @@ def correct_db_schema_utf8(
) -> None:
"""Correct utf8 issues detected by validate_db_schema."""
table_name = table_object.__tablename__
if f"{table_name}.4-byte UTF-8" in schema_errors:
if (
f"{table_name}.4-byte UTF-8" in schema_errors
or f"{table_name}.utf8mb4_unicode_ci" in schema_errors
):
from ..migration import ( # pylint: disable=import-outside-toplevel
_correct_table_character_set_and_collation,
)
@@ -8,6 +8,7 @@ from ..schema import (
correct_db_schema_precision,
correct_db_schema_utf8,
validate_db_schema_precision,
validate_table_schema_has_correct_collation,
validate_table_schema_supports_utf8,
)
@@ -26,6 +27,8 @@ def validate_db_schema(instance: Recorder) -> set[str]:
for table, columns in TABLE_UTF8_COLUMNS.items():
schema_errors |= validate_table_schema_supports_utf8(instance, table, columns)
schema_errors |= validate_db_schema_precision(instance, States)
for table in (States, StateAttributes):
schema_errors |= validate_table_schema_has_correct_collation(instance, table)
return schema_errors
@@ -9,6 +9,7 @@ from ..schema import (
correct_db_schema_precision,
correct_db_schema_utf8,
validate_db_schema_precision,
validate_table_schema_has_correct_collation,
validate_table_schema_supports_utf8,
)
@@ -26,6 +27,7 @@ def validate_db_schema(instance: Recorder) -> set[str]:
)
for table in (Statistics, StatisticsShortTerm):
schema_errors |= validate_db_schema_precision(instance, table)
schema_errors |= validate_table_schema_has_correct_collation(instance, table)
if schema_errors:
_LOGGER.debug(
"Detected statistics schema errors: %s", ", ".join(sorted(schema_errors))
@@ -41,3 +43,4 @@ def correct_db_schema(
correct_db_schema_utf8(instance, StatisticsMeta, schema_errors)
for table in (Statistics, StatisticsShortTerm):
correct_db_schema_precision(instance, table, schema_errors)
correct_db_schema_utf8(instance, table, schema_errors)
@@ -7,7 +7,7 @@
"iot_class": "local_push",
"quality_scale": "internal",
"requirements": [
"sqlalchemy==2.0.11",
"sqlalchemy==2.0.12",
"fnv-hash-fast==0.3.1",
"psutil-home-assistant==0.0.1"
]
+10 -10
View File
@@ -1158,23 +1158,23 @@ def _wipe_old_string_time_columns(
elif engine.dialect.name == SupportedDialect.MYSQL:
#
# Since this is only to save space we limit the number of rows we update
# to 10,000,000 per table since we do not want to block the database for too long
# to 100,000 per table since we do not want to block the database for too long
# or run out of innodb_buffer_pool_size on MySQL. The old data will eventually
# be cleaned up by the recorder purge if we do not do it now.
#
session.execute(text("UPDATE events set time_fired=NULL LIMIT 10000000;"))
session.execute(text("UPDATE events set time_fired=NULL LIMIT 100000;"))
session.commit()
session.execute(
text(
"UPDATE states set last_updated=NULL, last_changed=NULL "
" LIMIT 10000000;"
" LIMIT 100000;"
)
)
session.commit()
elif engine.dialect.name == SupportedDialect.POSTGRESQL:
#
# Since this is only to save space we limit the number of rows we update
# to 250,000 per table since we do not want to block the database for too long
# to 100,000 per table since we do not want to block the database for too long
# or run out ram with postgresql. The old data will eventually
# be cleaned up by the recorder purge if we do not do it now.
#
@@ -1182,7 +1182,7 @@ def _wipe_old_string_time_columns(
text(
"UPDATE events set time_fired=NULL "
"where event_id in "
"(select event_id from events where time_fired_ts is NOT NULL LIMIT 250000);"
"(select event_id from events where time_fired_ts is NOT NULL LIMIT 100000);"
)
)
session.commit()
@@ -1190,7 +1190,7 @@ def _wipe_old_string_time_columns(
text(
"UPDATE states set last_updated=NULL, last_changed=NULL "
"where state_id in "
"(select state_id from states where last_updated_ts is NOT NULL LIMIT 250000);"
"(select state_id from states where last_updated_ts is NOT NULL LIMIT 100000);"
)
)
session.commit()
@@ -1236,7 +1236,7 @@ def _migrate_columns_to_timestamp(
"UNIX_TIMESTAMP(time_fired)"
") "
"where time_fired_ts is NULL "
"LIMIT 250000;"
"LIMIT 100000;"
)
)
result = None
@@ -1251,7 +1251,7 @@ def _migrate_columns_to_timestamp(
"last_changed_ts="
"UNIX_TIMESTAMP(last_changed) "
"where last_updated_ts is NULL "
"LIMIT 250000;"
"LIMIT 100000;"
)
)
elif engine.dialect.name == SupportedDialect.POSTGRESQL:
@@ -1266,7 +1266,7 @@ def _migrate_columns_to_timestamp(
"time_fired_ts= "
"(case when time_fired is NULL then 0 else EXTRACT(EPOCH FROM time_fired::timestamptz) end) "
"WHERE event_id IN ( "
"SELECT event_id FROM events where time_fired_ts is NULL LIMIT 250000 "
"SELECT event_id FROM events where time_fired_ts is NULL LIMIT 100000 "
" );"
)
)
@@ -1279,7 +1279,7 @@ def _migrate_columns_to_timestamp(
"(case when last_updated is NULL then 0 else EXTRACT(EPOCH FROM last_updated::timestamptz) end), "
"last_changed_ts=EXTRACT(EPOCH FROM last_changed::timestamptz) "
"where state_id IN ( "
"SELECT state_id FROM states where last_updated_ts is NULL LIMIT 250000 "
"SELECT state_id FROM states where last_updated_ts is NULL LIMIT 100000 "
" );"
)
)
+54 -7
View File
@@ -34,6 +34,7 @@ from .queries import (
find_event_types_to_purge,
find_events_to_purge,
find_latest_statistics_runs_run_id,
find_legacy_detached_states_and_attributes_to_purge,
find_legacy_event_state_and_attributes_and_data_ids_to_purge,
find_legacy_row,
find_short_term_statistics_to_purge,
@@ -146,7 +147,28 @@ def _purge_legacy_format(
_purge_unused_attributes_ids(instance, session, attributes_ids)
_purge_event_ids(session, event_ids)
_purge_unused_data_ids(instance, session, data_ids)
return bool(event_ids or state_ids or attributes_ids or data_ids)
# The database may still have some rows that have an event_id but are not
# linked to any event. These rows are not linked to any event because the
# event was deleted. We need to purge these rows as well or we will never
# switch to the new format which will prevent us from purging any events
# that happened after the detached states.
(
detached_state_ids,
detached_attributes_ids,
) = _select_legacy_detached_state_and_attributes_and_data_ids_to_purge(
session, purge_before
)
_purge_state_ids(instance, session, detached_state_ids)
_purge_unused_attributes_ids(instance, session, detached_attributes_ids)
return bool(
event_ids
or state_ids
or attributes_ids
or data_ids
or detached_state_ids
or detached_attributes_ids
)
def _purge_states_and_attributes_ids(
@@ -412,6 +434,31 @@ def _select_short_term_statistics_to_purge(
return [statistic.id for statistic in statistics]
def _select_legacy_detached_state_and_attributes_and_data_ids_to_purge(
session: Session, purge_before: datetime
) -> tuple[set[int], set[int]]:
"""Return a list of state, and attribute ids to purge.
We do not link these anymore since state_change events
do not exist in the events table anymore, however we
still need to be able to purge them.
"""
states = session.execute(
find_legacy_detached_states_and_attributes_to_purge(
dt_util.utc_to_timestamp(purge_before)
)
).all()
_LOGGER.debug("Selected %s state ids to remove", len(states))
state_ids = set()
attributes_ids = set()
for state in states:
if state_id := state.state_id:
state_ids.add(state_id)
if attributes_id := state.attributes_id:
attributes_ids.add(attributes_id)
return state_ids, attributes_ids
def _select_legacy_event_state_and_attributes_and_data_ids_to_purge(
session: Session, purge_before: datetime
) -> tuple[set[int], set[int], set[int], set[int]]:
@@ -433,12 +480,12 @@ def _select_legacy_event_state_and_attributes_and_data_ids_to_purge(
data_ids = set()
for event in events:
event_ids.add(event.event_id)
if event.state_id:
state_ids.add(event.state_id)
if event.attributes_id:
attributes_ids.add(event.attributes_id)
if event.data_id:
data_ids.add(event.data_id)
if state_id := event.state_id:
state_ids.add(state_id)
if attributes_id := event.attributes_id:
attributes_ids.add(attributes_id)
if data_id := event.data_id:
data_ids.add(data_id)
return event_ids, state_ids, attributes_ids, data_ids
@@ -678,6 +678,22 @@ def find_legacy_event_state_and_attributes_and_data_ids_to_purge(
)
def find_legacy_detached_states_and_attributes_to_purge(
purge_before: float,
) -> StatementLambdaElement:
"""Find states rows with event_id set but not linked event_id in Events."""
return lambda_stmt(
lambda: select(States.state_id, States.attributes_id)
.outerjoin(Events, States.event_id == Events.event_id)
.filter(States.event_id.isnot(None))
.filter(
(States.last_updated_ts < purge_before) | States.last_updated_ts.is_(None)
)
.filter(Events.event_id.is_(None))
.limit(SQLITE_MAX_BIND_VARS)
)
def find_legacy_row() -> StatementLambdaElement:
"""Check if there are still states in the table with an event_id."""
# https://github.com/sqlalchemy/sqlalchemy/issues/9189
@@ -2314,7 +2314,7 @@ def cleanup_statistics_timestamp_migration(instance: Recorder) -> bool:
session.connection()
.execute(
text(
f"UPDATE {table} set start=NULL, created=NULL, last_reset=NULL where start is not NULL LIMIT 250000;"
f"UPDATE {table} set start=NULL, created=NULL, last_reset=NULL where start is not NULL LIMIT 100000;"
)
)
.rowcount
@@ -2330,7 +2330,7 @@ def cleanup_statistics_timestamp_migration(instance: Recorder) -> bool:
.execute(
text(
f"UPDATE {table} set start=NULL, created=NULL, last_reset=NULL " # nosec
f"where id in (select id from {table} where start is not NULL LIMIT 250000)"
f"where id in (select id from {table} where start is not NULL LIMIT 100000)"
)
)
.rowcount
@@ -6,7 +6,13 @@ from typing import Any
from roborock.api import RoborockApiClient
from roborock.containers import UserData
from roborock.exceptions import RoborockException
from roborock.exceptions import (
RoborockAccountDoesNotExist,
RoborockException,
RoborockInvalidCode,
RoborockInvalidEmail,
RoborockUrlException,
)
import voluptuous as vol
from homeassistant import config_entries
@@ -43,9 +49,15 @@ class RoborockFlowHandler(config_entries.ConfigFlow, domain=DOMAIN):
self._client = RoborockApiClient(username)
try:
await self._client.request_code()
except RoborockAccountDoesNotExist:
errors["base"] = "invalid_email"
except RoborockUrlException:
errors["base"] = "unknown_url"
except RoborockInvalidEmail:
errors["base"] = "invalid_email_format"
except RoborockException as ex:
_LOGGER.exception(ex)
errors["base"] = "invalid_email"
errors["base"] = "unknown_roborock"
except Exception as ex: # pylint: disable=broad-except
_LOGGER.exception(ex)
errors["base"] = "unknown"
@@ -70,9 +82,11 @@ class RoborockFlowHandler(config_entries.ConfigFlow, domain=DOMAIN):
_LOGGER.debug("Logging into Roborock account using email provided code")
try:
login_data = await self._client.code_login(code)
except RoborockInvalidCode:
errors["base"] = "invalid_code"
except RoborockException as ex:
_LOGGER.exception(ex)
errors["base"] = "invalid_code"
errors["base"] = "unknown_roborock"
except Exception as ex: # pylint: disable=broad-except
_LOGGER.exception(ex)
errors["base"] = "unknown"
@@ -13,7 +13,7 @@ from roborock.containers import (
)
from roborock.exceptions import RoborockException
from roborock.local_api import RoborockLocalClient
from roborock.typing import RoborockDeviceProp
from roborock.typing import DeviceProp
from homeassistant.core import HomeAssistant
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
@@ -26,9 +26,7 @@ SCAN_INTERVAL = timedelta(seconds=30)
_LOGGER = logging.getLogger(__name__)
class RoborockDataUpdateCoordinator(
DataUpdateCoordinator[dict[str, RoborockDeviceProp]]
):
class RoborockDataUpdateCoordinator(DataUpdateCoordinator[dict[str, DeviceProp]]):
"""Class to manage fetching data from the API."""
def __init__(
@@ -50,7 +48,7 @@ class RoborockDataUpdateCoordinator(
device,
networking,
product_info[device.product_id],
RoborockDeviceProp(),
DeviceProp(),
)
local_devices_info[device.duid] = RoborockLocalDeviceInfo(
device, networking
@@ -71,7 +69,7 @@ class RoborockDataUpdateCoordinator(
else:
device_info.props = device_prop
async def _async_update_data(self) -> dict[str, RoborockDeviceProp]:
async def _async_update_data(self) -> dict[str, DeviceProp]:
"""Update data via library."""
try:
await asyncio.gather(
@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/roborock",
"iot_class": "local_polling",
"loggers": ["roborock"],
"requirements": ["python-roborock==0.6.5"]
"requirements": ["python-roborock==0.8.3"]
}
+2 -2
View File
@@ -2,7 +2,7 @@
from dataclasses import dataclass
from roborock.containers import HomeDataDevice, HomeDataProduct, NetworkInfo
from roborock.typing import RoborockDeviceProp
from roborock.typing import DeviceProp
@dataclass
@@ -12,4 +12,4 @@ class RoborockHassDeviceInfo:
device: HomeDataDevice
network_info: NetworkInfo
product: HomeDataProduct
props: RoborockDeviceProp
props: DeviceProp
@@ -17,6 +17,9 @@
"error": {
"invalid_code": "The code you entered was incorrect, please check it and try again.",
"invalid_email": "There is no account associated with the email you entered, please try again.",
"invalid_email_format": "There is an issue with the formatting of your email - please try again.",
"unknown_roborock": "There was an unknown roborock exception - please check your logs.",
"unknown_url": "There was an issue determining the correct url for your roborock account - please check your logs.",
"unknown": "[%key:common::config_flow::error::unknown%]"
},
"abort": {
@@ -91,6 +91,11 @@ SERVICE_SCHEMA_FEEDBACK = vol.Schema(
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
"""Activate Snips component."""
# Make sure MQTT integration is enabled and the client is available
if not await mqtt.async_wait_for_mqtt_client(hass):
_LOGGER.error("MQTT integration is not available")
return False
async def async_set_feedback(site_ids, state):
"""Set Feedback sound state."""
site_ids = site_ids if site_ids else config[DOMAIN].get(CONF_SITE_IDS)
@@ -5,5 +5,5 @@
"documentation": "https://www.home-assistant.io/integrations/solaredge_local",
"iot_class": "local_polling",
"loggers": ["solaredge_local"],
"requirements": ["solaredge-local==0.2.0"]
"requirements": ["solaredge-local==0.2.3"]
}
@@ -290,7 +290,7 @@ class SolarEdgeSensor(SensorEntity):
"""Return the state attributes."""
if extra_attr := self.entity_description.extra_attribute:
try:
return {extra_attr: self._data.info[self.entity_description.key]}
return {extra_attr: self._data.info.get(self.entity_description.key)}
except KeyError:
pass
return None
@@ -298,7 +298,7 @@ class SolarEdgeSensor(SensorEntity):
def update(self) -> None:
"""Get the latest data from the sensor and update the state."""
self._data.update()
self._attr_native_value = self._data.data[self.entity_description.key]
self._attr_native_value = self._data.data.get(self.entity_description.key)
class SolarEdgeData:
+1 -1
View File
@@ -8,7 +8,7 @@
"documentation": "https://www.home-assistant.io/integrations/sonos",
"iot_class": "local_push",
"loggers": ["soco"],
"requirements": ["soco==0.29.1", "sonos-websocket==0.0.5"],
"requirements": ["soco==0.29.1", "sonos-websocket==0.1.0"],
"ssdp": [
{
"st": "urn:schemas-upnp-org:device:ZonePlayer:1"
+11 -11
View File
@@ -506,13 +506,23 @@ class SonosMediaPlayerEntity(SonosEntity, MediaPlayerEntity):
If media_type is "playlist", media_id should be a Sonos
Playlist name. Otherwise, media_id should be a URI.
"""
is_radio = False
if media_source.is_media_source_id(media_id):
is_radio = media_id.startswith("media-source://radio_browser/")
media_type = MediaType.MUSIC
media = await media_source.async_resolve_media(
self.hass, media_id, self.entity_id
)
media_id = async_process_play_media_url(self.hass, media.url)
if kwargs.get(ATTR_MEDIA_ANNOUNCE):
volume = kwargs.get("extra", {}).get("volume")
_LOGGER.debug("Playing %s using websocket audioclip", media_id)
try:
assert self.speaker.websocket
response, _ = await self.speaker.websocket.play_clip(
media_id,
async_process_play_media_url(self.hass, media_id),
volume=volume,
)
except SonosWebsocketError as exc:
@@ -526,16 +536,6 @@ class SonosMediaPlayerEntity(SonosEntity, MediaPlayerEntity):
media_type = spotify.resolve_spotify_media_type(media_type)
media_id = spotify.spotify_uri_from_media_browser_url(media_id)
is_radio = False
if media_source.is_media_source_id(media_id):
is_radio = media_id.startswith("media-source://radio_browser/")
media_type = MediaType.MUSIC
media = await media_source.async_resolve_media(
self.hass, media_id, self.entity_id
)
media_id = media.url
await self.hass.async_add_executor_job(
partial(self._play_media, media_type, media_id, is_radio, **kwargs)
)
+10
View File
@@ -1,6 +1,8 @@
"""The sql component."""
from __future__ import annotations
import logging
import voluptuous as vol
from homeassistant.components.recorder import CONF_DB_URL, get_instance
@@ -24,6 +26,9 @@ import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.typing import ConfigType
from .const import CONF_COLUMN_NAME, CONF_QUERY, DOMAIN, PLATFORMS
from .util import redact_credentials
_LOGGER = logging.getLogger(__name__)
def validate_sql_select(value: str) -> str:
@@ -85,6 +90,11 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Set up SQL from a config entry."""
_LOGGER.debug(
"Comparing %s and %s",
redact_credentials(entry.options.get(CONF_DB_URL)),
redact_credentials(get_instance(hass).db_url),
)
if entry.options.get(CONF_DB_URL) == get_instance(hass).db_url:
remove_configured_db_url_if_not_needed(hass, entry)
+14 -13
View File
@@ -11,7 +11,7 @@ from sqlalchemy.orm import Session, scoped_session, sessionmaker
import voluptuous as vol
from homeassistant import config_entries
from homeassistant.components.recorder import CONF_DB_URL
from homeassistant.components.recorder import CONF_DB_URL, get_instance
from homeassistant.const import CONF_NAME, CONF_UNIT_OF_MEASUREMENT, CONF_VALUE_TEMPLATE
from homeassistant.core import callback
from homeassistant.data_entry_flow import FlowResult
@@ -159,13 +159,9 @@ class SQLConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
)
class SQLOptionsFlowHandler(config_entries.OptionsFlow):
class SQLOptionsFlowHandler(config_entries.OptionsFlowWithConfigEntry):
"""Handle SQL options."""
def __init__(self, entry: config_entries.ConfigEntry) -> None:
"""Initialize SQL options flow."""
self.entry = entry
async def async_step_init(
self, user_input: dict[str, Any] | None = None
) -> FlowResult:
@@ -177,7 +173,7 @@ class SQLOptionsFlowHandler(config_entries.OptionsFlow):
db_url = user_input.get(CONF_DB_URL)
query = user_input[CONF_QUERY]
column = user_input[CONF_COLUMN_NAME]
name = self.entry.options.get(CONF_NAME, self.entry.title)
name = self.options.get(CONF_NAME, self.config_entry.title)
try:
validate_sql_select(query)
@@ -193,21 +189,26 @@ class SQLOptionsFlowHandler(config_entries.OptionsFlow):
except ValueError:
errors["query"] = "query_invalid"
else:
new_user_input = user_input
if new_user_input.get(CONF_DB_URL) and db_url == db_url_for_validation:
new_user_input.pop(CONF_DB_URL)
recorder_db = get_instance(self.hass).db_url
_LOGGER.debug(
"db_url: %s, resolved db_url: %s, recorder: %s",
db_url,
db_url_for_validation,
recorder_db,
)
if db_url and db_url_for_validation == recorder_db:
user_input.pop(CONF_DB_URL)
return self.async_create_entry(
title="",
data={
CONF_NAME: name,
**new_user_input,
**user_input,
},
)
return self.async_show_form(
step_id="init",
data_schema=self.add_suggested_values_to_schema(
OPTIONS_SCHEMA, user_input or self.entry.options
OPTIONS_SCHEMA, user_input or self.options
),
errors=errors,
description_placeholders=description_placeholders,
+1 -1
View File
@@ -5,5 +5,5 @@
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/sql",
"iot_class": "local_polling",
"requirements": ["sqlalchemy==2.0.11"]
"requirements": ["sqlalchemy==2.0.12"]
}
+2 -7
View File
@@ -42,20 +42,15 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.template import Template
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
from .const import CONF_COLUMN_NAME, CONF_QUERY, DB_URL_RE, DOMAIN
from .const import CONF_COLUMN_NAME, CONF_QUERY, DOMAIN
from .models import SQLData
from .util import resolve_db_url
from .util import redact_credentials, resolve_db_url
_LOGGER = logging.getLogger(__name__)
_SQL_LAMBDA_CACHE: LRUCache = LRUCache(1000)
def redact_credentials(data: str) -> str:
"""Redact credentials from string data."""
return DB_URL_RE.sub("//****:****@", data)
async def async_setup_platform(
hass: HomeAssistant,
config: ConfigType,
+14
View File
@@ -1,12 +1,26 @@
"""Utils for sql."""
from __future__ import annotations
import logging
from homeassistant.components.recorder import get_instance
from homeassistant.core import HomeAssistant
from .const import DB_URL_RE
_LOGGER = logging.getLogger(__name__)
def redact_credentials(data: str | None) -> str:
"""Redact credentials from string data."""
if not data:
return "none"
return DB_URL_RE.sub("//****:****@", data)
def resolve_db_url(hass: HomeAssistant, db_url: str | None) -> str:
"""Return the db_url provided if not empty, otherwise return the recorder db_url."""
_LOGGER.debug("db_url: %s", redact_credentials(db_url))
if db_url and not db_url.isspace():
return db_url
return get_instance(hass).db_url
@@ -41,7 +41,7 @@
"iot_class": "local_push",
"loggers": ["pyunifiprotect", "unifi_discovery"],
"quality_scale": "platinum",
"requirements": ["pyunifiprotect==4.8.2", "unifi-discovery==1.1.7"],
"requirements": ["pyunifiprotect==4.8.3", "unifi-discovery==1.1.7"],
"ssdp": [
{
"manufacturer": "Ubiquiti Networks",
@@ -161,8 +161,9 @@ async def set_chime_paired_doorbells(hass: HomeAssistant, call: ServiceCall) ->
camera = instance.bootstrap.get_device_from_mac(doorbell_mac)
assert camera is not None
doorbell_ids.add(camera.id)
data_before_changed = chime.dict_with_excludes()
chime.camera_ids = sorted(doorbell_ids)
await chime.save_device()
await chime.save_device(data_before_changed)
def async_setup_services(hass: HomeAssistant) -> None:
+1 -1
View File
@@ -8,6 +8,6 @@
"iot_class": "local_polling",
"loggers": ["pyvizio"],
"quality_scale": "platinum",
"requirements": ["pyvizio==0.1.60"],
"requirements": ["pyvizio==0.1.61"],
"zeroconf": ["_viziocast._tcp.local."]
}
+27 -10
View File
@@ -14,7 +14,7 @@ from homeassistant.const import Platform
from homeassistant.core import HomeAssistant
from homeassistant.helpers import device_registry as dr
from .const import DOMAIN
from .const import CONF_SIP_PORT, DOMAIN
from .devices import VoIPDevices
from .voip import HassVoipDatagramProtocol
@@ -39,6 +39,7 @@ class DomainData:
"""Domain data."""
transport: asyncio.DatagramTransport
protocol: HassVoipDatagramProtocol
devices: VoIPDevices
@@ -56,41 +57,57 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
entry, data={**entry.data, "user": voip_user.id}
)
sip_port = entry.options.get(CONF_SIP_PORT, SIP_PORT)
devices = VoIPDevices(hass, entry)
devices.async_setup()
transport = await _create_sip_server(
transport, protocol = await _create_sip_server(
hass,
lambda: HassVoipDatagramProtocol(hass, devices),
sip_port,
)
_LOGGER.debug("Listening for VoIP calls on port %s", SIP_PORT)
_LOGGER.debug("Listening for VoIP calls on port %s", sip_port)
hass.data[DOMAIN] = DomainData(transport, devices)
hass.data[DOMAIN] = DomainData(transport, protocol, devices)
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
entry.async_on_unload(entry.add_update_listener(update_listener))
return True
async def update_listener(hass: HomeAssistant, entry: ConfigEntry):
"""Handle options update."""
await hass.config_entries.async_reload(entry.entry_id)
async def _create_sip_server(
hass: HomeAssistant,
protocol_factory: Callable[
[],
asyncio.DatagramProtocol,
],
) -> asyncio.DatagramTransport:
transport, _protocol = await hass.loop.create_datagram_endpoint(
sip_port: int,
) -> tuple[asyncio.DatagramTransport, HassVoipDatagramProtocol]:
transport, protocol = await hass.loop.create_datagram_endpoint(
protocol_factory,
local_addr=(_IP_WILDCARD, SIP_PORT),
local_addr=(_IP_WILDCARD, sip_port),
)
return transport
if not isinstance(protocol, HassVoipDatagramProtocol):
raise TypeError(f"Expected HassVoipDatagramProtocol, got {protocol}")
return transport, protocol
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Unload VoIP."""
if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS):
_LOGGER.debug("Shut down VoIP server")
hass.data.pop(DOMAIN).transport.close()
_LOGGER.debug("Shutting down VoIP server")
data = hass.data.pop(DOMAIN)
data.transport.close()
await data.protocol.wait_closed()
_LOGGER.debug("VoIP server shut down successfully")
return unload_ok
+49 -4
View File
@@ -3,10 +3,15 @@ from __future__ import annotations
from typing import Any
from homeassistant import config_entries
from homeassistant.data_entry_flow import FlowResult
from voip_utils import SIP_PORT
import voluptuous as vol
from .const import DOMAIN
from homeassistant import config_entries
from homeassistant.core import callback
from homeassistant.data_entry_flow import FlowResult
from homeassistant.helpers import config_validation as cv
from .const import CONF_SIP_PORT, DOMAIN
class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
@@ -22,9 +27,49 @@ class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
return self.async_abort(reason="single_instance_allowed")
if user_input is None:
return self.async_show_form(step_id="user")
return self.async_show_form(
step_id="user",
)
return self.async_create_entry(
title="Voice over IP",
data=user_input,
)
@staticmethod
@callback
def async_get_options_flow(
config_entry: config_entries.ConfigEntry,
) -> config_entries.OptionsFlow:
"""Create the options flow."""
return VoipOptionsFlowHandler(config_entry)
class VoipOptionsFlowHandler(config_entries.OptionsFlow):
"""Handle VoIP options."""
def __init__(self, config_entry: config_entries.ConfigEntry) -> None:
"""Initialize options flow."""
self.config_entry = config_entry
async def async_step_init(
self, user_input: dict[str, Any] | None = None
) -> FlowResult:
"""Manage the options."""
if user_input is not None:
return self.async_create_entry(title="", data=user_input)
return self.async_show_form(
step_id="init",
data_schema=vol.Schema(
{
vol.Required(
CONF_SIP_PORT,
default=self.config_entry.options.get(
CONF_SIP_PORT,
SIP_PORT,
),
): cv.port
}
),
)
+2
View File
@@ -11,3 +11,5 @@ RTP_AUDIO_SETTINGS = {
"channels": CHANNELS,
"sleep_ratio": 0.99,
}
CONF_SIP_PORT = "sip_port"
+1
View File
@@ -139,6 +139,7 @@ class VoIPDevices:
manufacturer=manuf,
model=model,
sw_version=fw_version,
configuration_url=f"http://{call_info.caller_ip}",
)
voip_device = self.devices[voip_id] = VoIPDevice(
voip_id=voip_id,
Binary file not shown.
+1 -1
View File
@@ -7,5 +7,5 @@
"documentation": "https://www.home-assistant.io/integrations/voip",
"iot_class": "local_push",
"quality_scale": "internal",
"requirements": ["voip-utils==0.0.5"]
"requirements": ["voip-utils==0.0.6"]
}
@@ -28,5 +28,14 @@
}
}
}
},
"options": {
"step": {
"init": {
"data": {
"sip_port": "SIP port"
}
}
}
}
}
+47 -7
View File
@@ -84,12 +84,21 @@ class HassVoipDatagramProtocol(VoipDatagramProtocol):
)
self.hass = hass
self.devices = devices
self._closed_event = asyncio.Event()
def is_valid_call(self, call_info: CallInfo) -> bool:
"""Filter calls."""
device = self.devices.async_get_or_create(call_info)
return device.async_allow_call(self.hass)
def connection_lost(self, exc):
"""Signal wait_closed when transport is completely closed."""
self.hass.loop.call_soon_threadsafe(self._closed_event.set)
async def wait_closed(self) -> None:
"""Wait for connection_lost to be called."""
await self._closed_event.wait()
class PipelineRtpDatagramProtocol(RtpDatagramProtocol):
"""Run a voice assistant pipeline in a loop for a VoIP call."""
@@ -105,6 +114,7 @@ class PipelineRtpDatagramProtocol(RtpDatagramProtocol):
buffered_chunks_before_speech: int = 100,
listening_tone_enabled: bool = True,
processing_tone_enabled: bool = True,
error_tone_enabled: bool = True,
tone_delay: float = 0.2,
tts_extra_timeout: float = 1.0,
) -> None:
@@ -120,6 +130,7 @@ class PipelineRtpDatagramProtocol(RtpDatagramProtocol):
self.buffered_chunks_before_speech = buffered_chunks_before_speech
self.listening_tone_enabled = listening_tone_enabled
self.processing_tone_enabled = processing_tone_enabled
self.error_tone_enabled = error_tone_enabled
self.tone_delay = tone_delay
self.tts_extra_timeout = tts_extra_timeout
@@ -131,6 +142,8 @@ class PipelineRtpDatagramProtocol(RtpDatagramProtocol):
self._session_id: str | None = None
self._tone_bytes: bytes | None = None
self._processing_bytes: bytes | None = None
self._error_bytes: bytes | None = None
self._pipeline_error: bool = False
def connection_made(self, transport):
"""Server is ready."""
@@ -161,8 +174,10 @@ class PipelineRtpDatagramProtocol(RtpDatagramProtocol):
"""Forward audio to pipeline STT and handle TTS."""
if self._session_id is None:
self._session_id = ulid()
if self.listening_tone_enabled:
await self._play_listening_tone()
# Play listening tone at the start of each cycle
if self.listening_tone_enabled:
await self._play_listening_tone()
try:
# Wait for speech before starting pipeline
@@ -221,11 +236,16 @@ class PipelineRtpDatagramProtocol(RtpDatagramProtocol):
tts_audio_output="raw",
)
# Block until TTS is done speaking.
#
# This is set in _send_tts and has a timeout that's based on the
# length of the TTS audio.
await self._tts_done.wait()
if self._pipeline_error:
self._pipeline_error = False
if self.error_tone_enabled:
await self._play_error_tone()
else:
# Block until TTS is done speaking.
#
# This is set in _send_tts and has a timeout that's based on the
# length of the TTS audio.
await self._tts_done.wait()
_LOGGER.debug("Pipeline finished")
except asyncio.TimeoutError:
@@ -307,6 +327,9 @@ class PipelineRtpDatagramProtocol(RtpDatagramProtocol):
self._send_tts(media_id),
"voip_pipeline_tts",
)
elif event.type == PipelineEventType.ERROR:
# Play error tone instead of wait for TTS
self._pipeline_error = True
async def _send_tts(self, media_id: str) -> None:
"""Send TTS audio to caller via RTP."""
@@ -372,6 +395,23 @@ class PipelineRtpDatagramProtocol(RtpDatagramProtocol):
)
)
async def _play_error_tone(self) -> None:
"""Play a tone to indicate a pipeline error occurred."""
if self._error_bytes is None:
# Do I/O in executor
self._error_bytes = await self.hass.async_add_executor_job(
self._load_pcm,
"error.pcm",
)
await self.hass.async_add_executor_job(
partial(
self.send_audio,
self._error_bytes,
**RTP_AUDIO_SETTINGS,
)
)
def _load_pcm(self, file_name: str) -> bytes:
"""Load raw audio (16Khz, 16-bit mono)."""
return (Path(__file__).parent / file_name).read_bytes()
@@ -281,13 +281,13 @@ class WorkdayOptionsFlowHandler(OptionsFlowWithConfigEntry):
else:
return self.async_create_entry(data=combined_input)
saved_options = self.options.copy()
if saved_options[CONF_PROVINCE] is None:
saved_options[CONF_PROVINCE] = NONE_SENTINEL
schema: vol.Schema = await self.hass.async_add_executor_job(
add_province_to_schema, DATA_SCHEMA_OPT, self.options
)
new_schema = self.add_suggested_values_to_schema(schema, user_input)
new_schema = self.add_suggested_values_to_schema(
schema, user_input or self.options
)
return self.async_show_form(
step_id="init",
@@ -22,6 +22,7 @@ from .core import discovery
from .core.const import (
CLUSTER_HANDLER_ACCELEROMETER,
CLUSTER_HANDLER_BINARY_INPUT,
CLUSTER_HANDLER_HUE_OCCUPANCY,
CLUSTER_HANDLER_OCCUPANCY,
CLUSTER_HANDLER_ON_OFF,
CLUSTER_HANDLER_ZONE,
@@ -130,6 +131,11 @@ class Occupancy(BinarySensor):
_attr_device_class: BinarySensorDeviceClass = BinarySensorDeviceClass.OCCUPANCY
@MULTI_MATCH(cluster_handler_names=CLUSTER_HANDLER_HUE_OCCUPANCY)
class HueOccupancy(Occupancy):
"""ZHA Hue occupancy."""
@STRICT_MATCH(cluster_handler_names=CLUSTER_HANDLER_ON_OFF)
class Opening(BinarySensor):
"""ZHA OnOff BinarySensor."""
@@ -424,13 +424,13 @@ class ClusterHandler(LogMixin):
else:
raise TypeError(f"Unexpected zha_send_event {command!r} argument: {arg!r}")
self._endpoint.device.zha_send_event(
self._endpoint.send_event(
{
ATTR_UNIQUE_ID: self.unique_id,
ATTR_CLUSTER_ID: self.cluster.cluster_id,
ATTR_COMMAND: command,
# Maintain backwards compatibility with the old zigpy response format
ATTR_ARGS: args, # type: ignore[dict-item]
ATTR_ARGS: args,
ATTR_PARAMS: params,
}
)
@@ -347,7 +347,7 @@ class OnOffClientClusterHandler(ClientClusterHandler):
class OnOffClusterHandler(ClusterHandler):
"""Cluster handler for the OnOff Zigbee cluster."""
ON_OFF = 0
ON_OFF = general.OnOff.attributes_by_name["on_off"].id
REPORT_CONFIG = (AttrReportConfig(attr="on_off", config=REPORT_CONFIG_IMMEDIATE),)
ZCL_INIT_ATTRS = {
"start_up_on_off": True,
@@ -374,6 +374,15 @@ class OnOffClusterHandler(ClusterHandler):
if self.cluster.endpoint.model == "TS011F":
self.ZCL_INIT_ATTRS["child_lock"] = True
@classmethod
def matches(cls, cluster: zigpy.zcl.Cluster, endpoint: Endpoint) -> bool:
"""Filter the cluster match for specific devices."""
return not (
cluster.endpoint.device.manufacturer == "Konke"
and cluster.endpoint.device.model
in ("3AFE280100510001", "3AFE170100510001")
)
@property
def on_off(self) -> bool | None:
"""Return cached value of on/off attribute."""
@@ -78,6 +78,7 @@ CLUSTER_HANDLER_ELECTRICAL_MEASUREMENT = "electrical_measurement"
CLUSTER_HANDLER_EVENT_RELAY = "event_relay"
CLUSTER_HANDLER_FAN = "fan"
CLUSTER_HANDLER_HUMIDITY = "humidity"
CLUSTER_HANDLER_HUE_OCCUPANCY = "philips_occupancy"
CLUSTER_HANDLER_SOIL_MOISTURE = "soil_moisture"
CLUSTER_HANDLER_LEAF_WETNESS = "leaf_wetness"
CLUSTER_HANDLER_IAS_ACE = "ias_ace"
@@ -205,11 +205,13 @@ class Endpoint:
def send_event(self, signal: dict[str, Any]) -> None:
"""Broadcast an event from this endpoint."""
signal["endpoint"] = {
"id": self.id,
"unique_id": self.unique_id,
}
self.device.zha_send_event(signal)
self.device.zha_send_event(
{
const.ATTR_UNIQUE_ID: self.unique_id,
const.ATTR_ENDPOINT_ID: self.id,
**signal,
}
)
def claim_cluster_handlers(self, cluster_handlers: list[ClusterHandler]) -> None:
"""Claim cluster handlers."""
+2 -2
View File
@@ -20,10 +20,10 @@
"zigpy_znp"
],
"requirements": [
"bellows==0.35.1",
"bellows==0.35.2",
"pyserial==3.5",
"pyserial-asyncio==0.6",
"zha-quirks==0.0.97",
"zha-quirks==0.0.98",
"zigpy-deconz==0.21.0",
"zigpy==0.55.0",
"zigpy-xbee==0.18.0",
+3 -3
View File
@@ -20,9 +20,9 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback
from .core import discovery
from .core.const import (
CLUSTER_HANDLER_HUE_OCCUPANCY,
CLUSTER_HANDLER_IAS_WD,
CLUSTER_HANDLER_INOVELLI,
CLUSTER_HANDLER_OCCUPANCY,
CLUSTER_HANDLER_ON_OFF,
DATA_ZHA,
SIGNAL_ADD_ENTITIES,
@@ -367,7 +367,7 @@ class HueV1MotionSensitivities(types.enum8):
@CONFIG_DIAGNOSTIC_MATCH(
cluster_handler_names=CLUSTER_HANDLER_OCCUPANCY,
cluster_handler_names=CLUSTER_HANDLER_HUE_OCCUPANCY,
manufacturers={"Philips", "Signify Netherlands B.V."},
models={"SML001"},
)
@@ -390,7 +390,7 @@ class HueV2MotionSensitivities(types.enum8):
@CONFIG_DIAGNOSTIC_MATCH(
cluster_handler_names=CLUSTER_HANDLER_OCCUPANCY,
cluster_handler_names=CLUSTER_HANDLER_HUE_OCCUPANCY,
manufacturers={"Philips", "Signify Netherlands B.V."},
models={"SML002", "SML003", "SML004"},
)
+15 -7
View File
@@ -248,13 +248,16 @@ class Battery(Sensor):
return state_attrs
@MULTI_MATCH(cluster_handler_names=CLUSTER_HANDLER_ELECTRICAL_MEASUREMENT)
@MULTI_MATCH(
cluster_handler_names=CLUSTER_HANDLER_ELECTRICAL_MEASUREMENT,
stop_on_match_group=CLUSTER_HANDLER_ELECTRICAL_MEASUREMENT,
models={"VZM31-SN", "SP 234", "outletv4"},
)
class ElectricalMeasurement(Sensor):
"""Active power measurement."""
SENSOR_ATTR = "active_power"
_attr_device_class: SensorDeviceClass = SensorDeviceClass.POWER
_attr_should_poll = True # BaseZhaEntity defaults to False
_attr_state_class: SensorStateClass = SensorStateClass.MEASUREMENT
_attr_name: str = "Active power"
_attr_native_unit_of_measurement: str = UnitOfPower.WATT
@@ -284,6 +287,16 @@ class ElectricalMeasurement(Sensor):
return round(value, self._decimals)
return round(value)
@MULTI_MATCH(
cluster_handler_names=CLUSTER_HANDLER_ELECTRICAL_MEASUREMENT,
stop_on_match_group=CLUSTER_HANDLER_ELECTRICAL_MEASUREMENT,
)
class PolledElectricalMeasurement(ElectricalMeasurement):
"""Polled active power measurement."""
_attr_should_poll = True # BaseZhaEntity defaults to False
async def async_update(self) -> None:
"""Retrieve latest state."""
if not self.available:
@@ -299,7 +312,6 @@ class ElectricalMeasurementApparentPower(
SENSOR_ATTR = "apparent_power"
_attr_device_class: SensorDeviceClass = SensorDeviceClass.APPARENT_POWER
_attr_should_poll = False # Poll indirectly by ElectricalMeasurementSensor
_attr_name: str = "Apparent power"
_attr_native_unit_of_measurement = UnitOfApparentPower.VOLT_AMPERE
_div_mul_prefix = "ac_power"
@@ -311,7 +323,6 @@ class ElectricalMeasurementRMSCurrent(ElectricalMeasurement, id_suffix="rms_curr
SENSOR_ATTR = "rms_current"
_attr_device_class: SensorDeviceClass = SensorDeviceClass.CURRENT
_attr_should_poll = False # Poll indirectly by ElectricalMeasurementSensor
_attr_name: str = "RMS current"
_attr_native_unit_of_measurement = UnitOfElectricCurrent.AMPERE
_div_mul_prefix = "ac_current"
@@ -323,7 +334,6 @@ class ElectricalMeasurementRMSVoltage(ElectricalMeasurement, id_suffix="rms_volt
SENSOR_ATTR = "rms_voltage"
_attr_device_class: SensorDeviceClass = SensorDeviceClass.VOLTAGE
_attr_should_poll = False # Poll indirectly by ElectricalMeasurementSensor
_attr_name: str = "RMS voltage"
_attr_native_unit_of_measurement = UnitOfElectricPotential.VOLT
_div_mul_prefix = "ac_voltage"
@@ -335,7 +345,6 @@ class ElectricalMeasurementFrequency(ElectricalMeasurement, id_suffix="ac_freque
SENSOR_ATTR = "ac_frequency"
_attr_device_class: SensorDeviceClass = SensorDeviceClass.FREQUENCY
_attr_should_poll = False # Poll indirectly by ElectricalMeasurementSensor
_attr_name: str = "AC frequency"
_attr_native_unit_of_measurement = UnitOfFrequency.HERTZ
_div_mul_prefix = "ac_frequency"
@@ -347,7 +356,6 @@ class ElectricalMeasurementPowerFactor(ElectricalMeasurement, id_suffix="power_f
SENSOR_ATTR = "power_factor"
_attr_device_class: SensorDeviceClass = SensorDeviceClass.POWER_FACTOR
_attr_should_poll = False # Poll indirectly by ElectricalMeasurementSensor
_attr_name: str = "Power factor"
_attr_native_unit_of_measurement = PERCENTAGE
+1 -1
View File
@@ -8,7 +8,7 @@ from .backports.enum import StrEnum
APPLICATION_NAME: Final = "HomeAssistant"
MAJOR_VERSION: Final = 2023
MINOR_VERSION: Final = 5
PATCH_VERSION: Final = "0b1"
PATCH_VERSION: Final = "0b6"
__short_version__: Final = f"{MAJOR_VERSION}.{MINOR_VERSION}"
__version__: Final = f"{__short_version__}.{PATCH_VERSION}"
REQUIRED_PYTHON_VER: Final[tuple[int, int, int]] = (3, 10, 0)
+14 -3
View File
@@ -12,7 +12,7 @@ attrs==22.2.0
awesomeversion==22.9.0
bcrypt==4.0.1
bleak-retry-connector==3.0.2
bleak==0.20.1
bleak==0.20.2
bluetooth-adapters==0.15.3
bluetooth-auto-recovery==1.0.3
bluetooth-data-tools==0.4.0
@@ -25,7 +25,7 @@ ha-av==10.0.0
hass-nabucasa==0.66.2
hassil==1.0.6
home-assistant-bluetooth==1.10.0
home-assistant-frontend==20230427.0
home-assistant-frontend==20230501.0
home-assistant-intents==2023.4.26
httpx==0.24.0
ifaddr==0.1.7
@@ -45,7 +45,7 @@ pyudev==0.23.2
pyyaml==6.0
requests==2.28.2
scapy==2.5.0
sqlalchemy==2.0.11
sqlalchemy==2.0.12
typing-extensions>=4.5.0,<5.0
ulid-transform==0.7.0
voluptuous-serialize==2.6.0
@@ -168,3 +168,14 @@ faust-cchardet>=2.1.18
# which break wheel builds so we need at least 11.0.1
# https://github.com/aaugustin/websockets/issues/1329
websockets>=11.0.1
# pyasn1 0.5.0 has breaking changes which cause pysnmplib to fail
# until they are resolved, we need to pin pyasn1 to 0.4.8 and
# pysnmplib to 5.0.21 to avoid the issue.
# https://github.com/pyasn1/pyasn1/pull/30#issuecomment-1517564335
# https://github.com/pysnmp/pysnmp/issues/51
pyasn1==0.4.8
pysnmplib==5.0.21
# pysnmp is no longer maintained and does not work with newer
# python
pysnmp==1000000000.0.0
+1 -1
View File
@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
[project]
name = "homeassistant"
version = "2023.5.0b1"
version = "2023.5.0b6"
license = {text = "Apache-2.0"}
description = "Open-source home automation platform running on Python 3."
readme = "README.rst"
+21 -21
View File
@@ -386,7 +386,7 @@ asyncpysupla==0.0.5
asyncsleepiq==1.3.4
# homeassistant.components.aten_pe
atenpdu==0.3.2
# atenpdu==0.3.2
# homeassistant.components.aurora
auroranoaa==0.0.3
@@ -419,7 +419,7 @@ base36==0.1.1
batinfo==0.4.2
# homeassistant.components.eddystone_temperature
# beacontools[scan]==1.2.3
# beacontools[scan]==2.1.0
# homeassistant.components.scrape
beautifulsoup4==4.11.1
@@ -428,10 +428,10 @@ beautifulsoup4==4.11.1
# beewi_smartclim==0.0.10
# homeassistant.components.zha
bellows==0.35.1
bellows==0.35.2
# homeassistant.components.bmw_connected_drive
bimmer_connected==0.13.0
bimmer_connected==0.13.2
# homeassistant.components.bizkaibus
bizkaibus==0.1.1
@@ -440,7 +440,7 @@ bizkaibus==0.1.1
bleak-retry-connector==3.0.2
# homeassistant.components.bluetooth
bleak==0.20.1
bleak==0.20.2
# homeassistant.components.blebox
blebox_uniapi==2.1.4
@@ -665,7 +665,7 @@ enocean==0.50
enturclient==0.2.4
# homeassistant.components.environment_canada
env_canada==0.5.33
env_canada==0.5.34
# homeassistant.components.enphase_envoy
envoy_reader==0.20.1
@@ -911,7 +911,7 @@ hole==0.8.0
holidays==0.21.13
# homeassistant.components.frontend
home-assistant-frontend==20230427.0
home-assistant-frontend==20230501.0
# homeassistant.components.conversation
home-assistant-intents==2023.4.26
@@ -983,7 +983,7 @@ influxdb==5.3.1
inkbird-ble==0.5.6
# homeassistant.components.insteon
insteon-frontend-home-assistant==0.3.4
insteon-frontend-home-assistant==0.3.5
# homeassistant.components.intellifire
intellifire4py==2.2.2
@@ -1034,7 +1034,7 @@ krakenex==2.1.0
lacrosse-view==0.0.9
# homeassistant.components.eufy
lakeside==0.12
lakeside==0.13
# homeassistant.components.laundrify
laundrify_aio==1.1.2
@@ -1264,7 +1264,7 @@ ondilo==0.2.0
onkyo-eiscp==1.2.7
# homeassistant.components.onvif
onvif-zeep-async==1.3.0
onvif-zeep-async==1.3.1
# homeassistant.components.opengarage
open-garage==0.2.0
@@ -1374,7 +1374,7 @@ plexauth==0.0.6
plexwebsocket==0.0.13
# homeassistant.components.plugwise
plugwise==0.31.0
plugwise==0.31.1
# homeassistant.components.plum_lightpad
plumlightpad==0.0.11
@@ -1812,7 +1812,7 @@ pynetgear==0.10.9
pynetio==0.1.9.1
# homeassistant.components.nina
pynina==0.2.0
pynina==0.3.0
# homeassistant.components.nobo_hub
pynobo==1.6.0
@@ -2108,7 +2108,7 @@ python-qbittorrent==0.4.2
python-ripple-api==0.0.3
# homeassistant.components.roborock
python-roborock==0.6.5
python-roborock==0.8.3
# homeassistant.components.smarttub
python-smarttub==0.0.33
@@ -2158,7 +2158,7 @@ pytrafikverket==0.2.3
pyudev==0.23.2
# homeassistant.components.unifiprotect
pyunifiprotect==4.8.2
pyunifiprotect==4.8.3
# homeassistant.components.uptimerobot
pyuptimerobot==22.2.0
@@ -2176,7 +2176,7 @@ pyversasense==0.0.6
pyvesync==2.1.1
# homeassistant.components.vizio
pyvizio==0.1.60
pyvizio==0.1.61
# homeassistant.components.velux
pyvlx==0.2.20
@@ -2378,7 +2378,7 @@ snapcast==2.3.2
soco==0.29.1
# homeassistant.components.solaredge_local
solaredge-local==0.2.0
solaredge-local==0.2.3
# homeassistant.components.solaredge
solaredge==0.0.2
@@ -2390,7 +2390,7 @@ solax==0.3.0
somfy-mylink-synergy==1.0.6
# homeassistant.components.sonos
sonos-websocket==0.0.5
sonos-websocket==0.1.0
# homeassistant.components.marytts
speak2mary==1.4.0
@@ -2406,7 +2406,7 @@ spotipy==2.23.0
# homeassistant.components.recorder
# homeassistant.components.sql
sqlalchemy==2.0.11
sqlalchemy==2.0.12
# homeassistant.components.srp_energy
srpenergy==1.3.6
@@ -2594,7 +2594,7 @@ venstarcolortouch==0.19
vilfo-api-client==0.3.2
# homeassistant.components.voip
voip-utils==0.0.5
voip-utils==0.0.6
# homeassistant.components.volkszaehler
volkszaehler==0.4.0
@@ -2688,7 +2688,7 @@ yalesmartalarmclient==0.3.9
yalexs-ble==2.1.16
# homeassistant.components.august
yalexs==1.3.2
yalexs==1.3.3
# homeassistant.components.yeelight
yeelight==0.7.10
@@ -2718,7 +2718,7 @@ zeroconf==0.58.2
zeversolar==0.3.1
# homeassistant.components.zha
zha-quirks==0.0.97
zha-quirks==0.0.98
# homeassistant.components.zhong_hong
zhong_hong_hvac==1.0.9
+17 -17
View File
@@ -361,16 +361,16 @@ base36==0.1.1
beautifulsoup4==4.11.1
# homeassistant.components.zha
bellows==0.35.1
bellows==0.35.2
# homeassistant.components.bmw_connected_drive
bimmer_connected==0.13.0
bimmer_connected==0.13.2
# homeassistant.components.bluetooth
bleak-retry-connector==3.0.2
# homeassistant.components.bluetooth
bleak==0.20.1
bleak==0.20.2
# homeassistant.components.blebox
blebox_uniapi==2.1.4
@@ -524,7 +524,7 @@ energyzero==0.4.1
enocean==0.50
# homeassistant.components.environment_canada
env_canada==0.5.33
env_canada==0.5.34
# homeassistant.components.enphase_envoy
envoy_reader==0.20.1
@@ -700,7 +700,7 @@ hole==0.8.0
holidays==0.21.13
# homeassistant.components.frontend
home-assistant-frontend==20230427.0
home-assistant-frontend==20230501.0
# homeassistant.components.conversation
home-assistant-intents==2023.4.26
@@ -748,7 +748,7 @@ influxdb==5.3.1
inkbird-ble==0.5.6
# homeassistant.components.insteon
insteon-frontend-home-assistant==0.3.4
insteon-frontend-home-assistant==0.3.5
# homeassistant.components.intellifire
intellifire4py==2.2.2
@@ -945,7 +945,7 @@ omnilogic==0.4.5
ondilo==0.2.0
# homeassistant.components.onvif
onvif-zeep-async==1.3.0
onvif-zeep-async==1.3.1
# homeassistant.components.opengarage
open-garage==0.2.0
@@ -1016,7 +1016,7 @@ plexauth==0.0.6
plexwebsocket==0.0.13
# homeassistant.components.plugwise
plugwise==0.31.0
plugwise==0.31.1
# homeassistant.components.plum_lightpad
plumlightpad==0.0.11
@@ -1316,7 +1316,7 @@ pymysensors==0.24.0
pynetgear==0.10.9
# homeassistant.components.nina
pynina==0.2.0
pynina==0.3.0
# homeassistant.components.nobo_hub
pynobo==1.6.0
@@ -1516,7 +1516,7 @@ python-picnic-api==1.1.0
python-qbittorrent==0.4.2
# homeassistant.components.roborock
python-roborock==0.6.5
python-roborock==0.8.3
# homeassistant.components.smarttub
python-smarttub==0.0.33
@@ -1554,7 +1554,7 @@ pytrafikverket==0.2.3
pyudev==0.23.2
# homeassistant.components.unifiprotect
pyunifiprotect==4.8.2
pyunifiprotect==4.8.3
# homeassistant.components.uptimerobot
pyuptimerobot==22.2.0
@@ -1566,7 +1566,7 @@ pyvera==0.3.13
pyvesync==2.1.1
# homeassistant.components.vizio
pyvizio==0.1.60
pyvizio==0.1.61
# homeassistant.components.volumio
pyvolumio==0.1.5
@@ -1714,7 +1714,7 @@ solax==0.3.0
somfy-mylink-synergy==1.0.6
# homeassistant.components.sonos
sonos-websocket==0.0.5
sonos-websocket==0.1.0
# homeassistant.components.marytts
speak2mary==1.4.0
@@ -1730,7 +1730,7 @@ spotipy==2.23.0
# homeassistant.components.recorder
# homeassistant.components.sql
sqlalchemy==2.0.11
sqlalchemy==2.0.12
# homeassistant.components.srp_energy
srpenergy==1.3.6
@@ -1870,7 +1870,7 @@ venstarcolortouch==0.19
vilfo-api-client==0.3.2
# homeassistant.components.voip
voip-utils==0.0.5
voip-utils==0.0.6
# homeassistant.components.volvooncall
volvooncall==0.10.2
@@ -1943,7 +1943,7 @@ yalesmartalarmclient==0.3.9
yalexs-ble==2.1.16
# homeassistant.components.august
yalexs==1.3.2
yalexs==1.3.3
# homeassistant.components.yeelight
yeelight==0.7.10
@@ -1964,7 +1964,7 @@ zeroconf==0.58.2
zeversolar==0.3.1
# homeassistant.components.zha
zha-quirks==0.0.97
zha-quirks==0.0.98
# homeassistant.components.zha
zigpy-deconz==0.21.0
+12
View File
@@ -21,6 +21,7 @@ else:
COMMENT_REQUIREMENTS = (
"Adafruit_BBIO",
"atenpdu", # depends on pysnmp which is not maintained at this time
"avea", # depends on bluepy
"avion",
"azure-servicebus", # depends on uamqp, which requires OpenSSL 1.1
@@ -172,6 +173,17 @@ faust-cchardet>=2.1.18
# which break wheel builds so we need at least 11.0.1
# https://github.com/aaugustin/websockets/issues/1329
websockets>=11.0.1
# pyasn1 0.5.0 has breaking changes which cause pysnmplib to fail
# until they are resolved, we need to pin pyasn1 to 0.4.8 and
# pysnmplib to 5.0.21 to avoid the issue.
# https://github.com/pyasn1/pyasn1/pull/30#issuecomment-1517564335
# https://github.com/pysnmp/pysnmp/issues/51
pyasn1==0.4.8
pysnmplib==5.0.21
# pysnmp is no longer maintained and does not work with newer
# python
pysnmp==1000000000.0.0
"""
IGNORE_PRE_COMMIT_HOOK_ID = (
@@ -730,6 +730,52 @@ async def test_zeroconf_ip_change_via_secondary_identifier(
assert len(mock_async_setup.mock_calls) == 2
assert entry.data[CONF_ADDRESS] == "127.0.0.1"
assert unrelated_entry.data[CONF_ADDRESS] == "127.0.0.2"
assert set(entry.data[CONF_IDENTIFIERS]) == {"airplayid", "mrpid"}
async def test_zeroconf_updates_identifiers_for_ignored_entries(
hass: HomeAssistant, mock_scan
) -> None:
"""Test that an ignored config entry gets updated when the ip changes.
Instead of checking only the unique id, all the identifiers
in the config entry are checked
"""
entry = MockConfigEntry(
domain="apple_tv",
unique_id="aa:bb:cc:dd:ee:ff",
source=config_entries.SOURCE_IGNORE,
data={CONF_IDENTIFIERS: ["mrpid"], CONF_ADDRESS: "127.0.0.2"},
)
unrelated_entry = MockConfigEntry(
domain="apple_tv", unique_id="unrelated", data={CONF_ADDRESS: "127.0.0.2"}
)
unrelated_entry.add_to_hass(hass)
entry.add_to_hass(hass)
mock_scan.result = [
create_conf(
IPv4Address("127.0.0.1"), "Device", mrp_service(), airplay_service()
)
]
with patch(
"homeassistant.components.apple_tv.async_setup_entry", return_value=True
) as mock_async_setup:
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": config_entries.SOURCE_ZEROCONF},
data=DMAP_SERVICE,
)
await hass.async_block_till_done()
assert result["type"] == data_entry_flow.FlowResultType.ABORT
assert result["reason"] == "already_configured"
assert (
len(mock_async_setup.mock_calls) == 0
) # Should not be called because entry is ignored
assert entry.data[CONF_ADDRESS] == "127.0.0.1"
assert unrelated_entry.data[CONF_ADDRESS] == "127.0.0.2"
assert set(entry.data[CONF_IDENTIFIERS]) == {"airplayid", "mrpid"}
async def test_zeroconf_add_existing_aborts(hass: HomeAssistant, dmap_device) -> None:
@@ -139,6 +139,22 @@
}),
]),
}),
'climate': dict({
'account_timezone': dict({
'_dst_offset': '0:00:00',
'_dst_saved': '0:00:00',
'_hasdst': False,
'_std_offset': '0:00:00',
'_tznames': list([
'UTC',
'UTC',
]),
}),
'activity': 'STANDBY',
'activity_end_time': None,
'activity_end_time_no_tz': None,
'is_climate_on': False,
}),
'condition_based_services': dict({
'is_service_required': False,
'messages': list([
@@ -808,6 +824,32 @@
]),
'name': 'i4 eDrive40',
'timestamp': '2023-01-04T14:57:06+00:00',
'tires': dict({
'front_left': dict({
'current_pressure': 241,
'manufacturing_week': '2021-10-04T00:00:00',
'season': 2,
'target_pressure': 269,
}),
'front_right': dict({
'current_pressure': 255,
'manufacturing_week': '2019-06-10T00:00:00',
'season': 2,
'target_pressure': 269,
}),
'rear_left': dict({
'current_pressure': 324,
'manufacturing_week': '2019-03-18T00:00:00',
'season': 2,
'target_pressure': 303,
}),
'rear_right': dict({
'current_pressure': 331,
'manufacturing_week': '2019-03-18T00:00:00',
'season': 2,
'target_pressure': 303,
}),
}),
'vehicle_location': dict({
'account_region': 'row',
'heading': '**REDACTED**',
@@ -969,6 +1011,22 @@
'messages': list([
]),
}),
'climate': dict({
'account_timezone': dict({
'_dst_offset': '0:00:00',
'_dst_saved': '0:00:00',
'_hasdst': False,
'_std_offset': '0:00:00',
'_tznames': list([
'UTC',
'UTC',
]),
}),
'activity': 'UNKNOWN',
'activity_end_time': None,
'activity_end_time_no_tz': None,
'is_climate_on': False,
}),
'condition_based_services': dict({
'is_service_required': False,
'messages': list([
@@ -1466,6 +1524,7 @@
]),
'name': 'i3 (+ REX)',
'timestamp': '2022-07-10T09:25:53+00:00',
'tires': None,
'vehicle_location': dict({
'account_region': 'row',
'heading': None,
@@ -2456,6 +2515,22 @@
'messages': list([
]),
}),
'climate': dict({
'account_timezone': dict({
'_dst_offset': '0:00:00',
'_dst_saved': '0:00:00',
'_hasdst': False,
'_std_offset': '0:00:00',
'_tznames': list([
'UTC',
'UTC',
]),
}),
'activity': 'UNKNOWN',
'activity_end_time': None,
'activity_end_time_no_tz': None,
'is_climate_on': False,
}),
'condition_based_services': dict({
'is_service_required': False,
'messages': list([
@@ -2953,6 +3028,7 @@
]),
'name': 'i3 (+ REX)',
'timestamp': '2022-07-10T09:25:53+00:00',
'tires': None,
'vehicle_location': dict({
'account_region': 'row',
'heading': None,
@@ -0,0 +1,22 @@
# serializer version: 1
# name: test_entity_state_attrs
list([
StateSnapshot({
'attributes': ReadOnlyDict({
'attribution': 'Data provided by MyBMW',
'device_class': 'battery',
'friendly_name': 'i4 eDrive40 Target SoC',
'icon': 'mdi:battery-charging-medium',
'max': 100.0,
'min': 20.0,
'mode': <NumberMode.SLIDER: 'slider'>,
'step': 5.0,
}),
'context': <ANY>,
'entity_id': 'number.i4_edrive40_target_soc',
'last_changed': <ANY>,
'last_updated': <ANY>,
'state': '80',
}),
])
# ---
@@ -1,38 +1,6 @@
# serializer version: 1
# name: test_entity_state_attrs
list([
StateSnapshot({
'attributes': ReadOnlyDict({
'attribution': 'Data provided by MyBMW',
'friendly_name': 'i4 eDrive40 Target SoC',
'icon': 'mdi:battery-charging-medium',
'options': list([
'20',
'25',
'30',
'35',
'40',
'45',
'50',
'55',
'60',
'65',
'70',
'75',
'80',
'85',
'90',
'95',
'100',
]),
'unit_of_measurement': '%',
}),
'context': <ANY>,
'entity_id': 'select.i4_edrive40_target_soc',
'last_changed': <ANY>,
'last_updated': <ANY>,
'state': '80',
}),
StateSnapshot({
'attributes': ReadOnlyDict({
'attribution': 'Data provided by MyBMW',
@@ -0,0 +1,123 @@
"""Test BMW numbers."""
from unittest.mock import AsyncMock
from bimmer_connected.models import MyBMWAPIError, MyBMWRemoteServiceError
from bimmer_connected.vehicle.remote_services import RemoteServices
import pytest
import respx
from syrupy.assertion import SnapshotAssertion
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
from . import setup_mocked_integration
async def test_entity_state_attrs(
hass: HomeAssistant,
bmw_fixture: respx.Router,
snapshot: SnapshotAssertion,
) -> None:
"""Test number options and values.."""
# Setup component
assert await setup_mocked_integration(hass)
# Get all number entities
assert hass.states.async_all("number") == snapshot
@pytest.mark.parametrize(
("entity_id", "value"),
[
("number.i4_edrive40_target_soc", "80"),
],
)
async def test_update_triggers_success(
hass: HomeAssistant,
entity_id: str,
value: str,
bmw_fixture: respx.Router,
) -> None:
"""Test allowed values for number inputs."""
# Setup component
assert await setup_mocked_integration(hass)
# Test
await hass.services.async_call(
"number",
"set_value",
service_data={"value": value},
blocking=True,
target={"entity_id": entity_id},
)
assert RemoteServices.trigger_remote_service.call_count == 1
@pytest.mark.parametrize(
("entity_id", "value"),
[
("number.i4_edrive40_target_soc", "81"),
],
)
async def test_update_triggers_fail(
hass: HomeAssistant,
entity_id: str,
value: str,
bmw_fixture: respx.Router,
) -> None:
"""Test not allowed values for number inputs."""
# Setup component
assert await setup_mocked_integration(hass)
# Test
with pytest.raises(ValueError):
await hass.services.async_call(
"number",
"set_value",
service_data={"value": value},
blocking=True,
target={"entity_id": entity_id},
)
assert RemoteServices.trigger_remote_service.call_count == 0
@pytest.mark.parametrize(
("raised", "expected"),
[
(MyBMWRemoteServiceError, HomeAssistantError),
(MyBMWAPIError, HomeAssistantError),
(ValueError, ValueError),
],
)
async def test_update_triggers_exceptions(
hass: HomeAssistant,
raised: Exception,
expected: Exception,
bmw_fixture: respx.Router,
monkeypatch: pytest.MonkeyPatch,
) -> None:
"""Test not allowed values for number inputs."""
# Setup component
assert await setup_mocked_integration(hass)
# Setup exception
monkeypatch.setattr(
RemoteServices,
"trigger_remote_service",
AsyncMock(side_effect=raised),
)
# Test
with pytest.raises(expected):
await hass.services.async_call(
"number",
"set_value",
service_data={"value": "80"},
blocking=True,
target={"entity_id": "number.i4_edrive40_target_soc"},
)
assert RemoteServices.trigger_remote_service.call_count == 1
@@ -28,7 +28,6 @@ async def test_entity_state_attrs(
[
("select.i3_rex_charging_mode", "IMMEDIATE_CHARGING"),
("select.i4_edrive40_ac_charging_limit", "16"),
("select.i4_edrive40_target_soc", "80"),
("select.i4_edrive40_charging_mode", "DELAYED_CHARGING"),
],
)
@@ -58,7 +57,6 @@ async def test_update_triggers_success(
("entity_id", "value"),
[
("select.i4_edrive40_ac_charging_limit", "17"),
("select.i4_edrive40_target_soc", "81"),
],
)
async def test_update_triggers_fail(
+10 -4
View File
@@ -25,8 +25,14 @@ from homeassistant.components.fan import (
SERVICE_SET_DIRECTION,
SERVICE_SET_PERCENTAGE,
SERVICE_SET_PRESET_MODE,
FanEntityFeature,
)
from homeassistant.const import (
ATTR_ENTITY_ID,
ATTR_SUPPORTED_FEATURES,
SERVICE_TURN_OFF,
SERVICE_TURN_ON,
)
from homeassistant.const import ATTR_ENTITY_ID, SERVICE_TURN_OFF, SERVICE_TURN_ON
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import device_registry as dr, entity_registry as er
@@ -211,9 +217,9 @@ async def test_turn_on_fan_preset_mode(hass: HomeAssistant) -> None:
bond_device_id="test-device-id",
props={"max_speed": 6},
)
assert hass.states.get("fan.name_1").attributes[ATTR_PRESET_MODES] == [
PRESET_MODE_BREEZE
]
state = hass.states.get("fan.name_1")
assert state.attributes[ATTR_PRESET_MODES] == [PRESET_MODE_BREEZE]
assert state.attributes[ATTR_SUPPORTED_FEATURES] & FanEntityFeature.PRESET_MODE
with patch_bond_action() as mock_set_preset_mode, patch_bond_device_state():
await turn_fan_on(hass, "fan.name_1", preset_mode=PRESET_MODE_BREEZE)
@@ -4,6 +4,9 @@ from unittest.mock import patch
import pytest
from homeassistant.components import conversation
from homeassistant.components.homeassistant.exposed_entities import (
async_get_assistant_settings,
)
from homeassistant.const import ATTR_FRIENDLY_NAME
from homeassistant.core import DOMAIN as HASS_DOMAIN, Context, HomeAssistant
from homeassistant.helpers import (
@@ -137,3 +140,34 @@ async def test_conversation_agent(
return_value={"homeassistant": ["dwarvish", "elvish", "entish"]},
):
assert agent.supported_languages == ["dwarvish", "elvish", "entish"]
async def test_expose_flag_automatically_set(
hass: HomeAssistant,
entity_registry: er.EntityRegistry,
) -> None:
"""Test DefaultAgent sets the expose flag on all entities automatically."""
assert await async_setup_component(hass, "homeassistant", {})
light = entity_registry.async_get_or_create("light", "demo", "1234")
test = entity_registry.async_get_or_create("test", "demo", "1234")
assert async_get_assistant_settings(hass, conversation.DOMAIN) == {}
assert await async_setup_component(hass, "conversation", {})
await hass.async_block_till_done()
# After setting up conversation, the expose flag should now be set on all entities
assert async_get_assistant_settings(hass, conversation.DOMAIN) == {
light.entity_id: {"should_expose": True},
test.entity_id: {"should_expose": False},
}
# New entities will automatically have the expose flag set
new_light = entity_registry.async_get_or_create("light", "demo", "2345")
await hass.async_block_till_done()
assert async_get_assistant_settings(hass, conversation.DOMAIN) == {
light.entity_id: {"should_expose": True},
new_light.entity_id: {"should_expose": True},
test.entity_id: {"should_expose": False},
}
+6
View File
@@ -308,6 +308,12 @@ async def test_invalid_state(recorder_mock: Recorder, hass: HomeAssistant) -> No
assert await async_setup_component(hass, "sensor", config)
await hass.async_block_till_done()
hass.states.async_set("sensor.test_monitored", "unknown")
await hass.async_block_till_done()
state = hass.states.get("sensor.test")
assert state.state == STATE_UNKNOWN
hass.states.async_set("sensor.test_monitored", STATE_UNAVAILABLE)
await hass.async_block_till_done()
@@ -0,0 +1,6 @@
{
"enableAutoReply": true,
"responseSubject": "Vacation",
"responseBodyPlainText": "I am on vacation.",
"restrictToContacts": false
}

Some files were not shown because too many files have changed in this diff Show More