Compare commits

...

86 Commits

Author SHA1 Message Date
Franck Nijhof
b1fb77cb4d 2024.4.1 (#114934) 2024-04-05 16:18:02 +02:00
Joost Lekkerkerker
95606135a6 Fix ROVA validation (#114938)
* Fix ROVA validation

* Fix ROVA validation
2024-04-05 14:53:21 +02:00
Aidan Timson
47d9879c0c Pin systembridgemodels to 4.0.4 (#114842) 2024-04-05 14:53:17 +02:00
Franck Nijhof
e3c111b1dd Bump version to 2024.4.1 2024-04-05 12:34:07 +02:00
Joost Lekkerkerker
9937743863 Fix cast dashboard in media browser (#114924) 2024-04-05 12:33:49 +02:00
Joost Lekkerkerker
ed3daed869 Create right import issues in Downloader (#114922)
* Create right import issues in Downloader

* Create right import issues in Downloader

* Create right import issues in Downloader

* Create right import issues in Downloader

* Fix

* Fix

* Fix

* Fix

* Apply suggestions from code review

Co-authored-by: Martin Hjelmare <marhje52@gmail.com>

* Fix

---------

Co-authored-by: Martin Hjelmare <marhje52@gmail.com>
2024-04-05 12:33:46 +02:00
Åke Strandberg
5d5dc24b33 Show correct model string in myuplink (#114921) 2024-04-05 12:33:43 +02:00
J. Nick Koston
c39d6f0730 Reduce august polling frequency (#114904)
Co-authored-by: TheJulianJES <TheJulianJES@users.noreply.github.com>
2024-04-05 12:33:40 +02:00
J. Nick Koston
87ffd5ac56 Ensure all tables have the default table args in the db_schema (#114895) 2024-04-05 12:33:36 +02:00
Bram Kragten
71877fdeda Update frontend to 20240404.1 (#114890) 2024-04-05 12:33:33 +02:00
Robert Svensson
2434a22e4e Fix Axis reconfigure step not providing protocols as alternatives but as string (#114889) 2024-04-05 12:33:30 +02:00
Jeef
618fa08ab2 Bump weatherflow4py to 0.2.20 (#114888) 2024-04-05 12:33:27 +02:00
Robert Svensson
96003e3562 Fix Axis camera platform support HTTPS (#114886) 2024-04-05 12:33:24 +02:00
Bram Kragten
411e55d059 Update frontend to 20240404.0 (#114859) 2024-04-05 12:33:21 +02:00
Joost Lekkerkerker
58533f02af Fix Downloader YAML import (#114844) 2024-04-05 12:33:18 +02:00
Joost Lekkerkerker
aa14793479 Avoid blocking IO in downloader initialization (#114841)
* Avoid blocking IO in downloader initialization

* Avoid blocking IO in downloader initialization
2024-04-05 12:33:15 +02:00
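These downloader commits move the integration's filesystem checks off the event loop. A minimal sketch of the pattern, assuming the standard hass.async_add_executor_job helper and an illustrative "download_dir" key; the real change is visible in the downloader hunks further down:

import os

async def async_setup_entry(hass, entry) -> bool:
    download_path = entry.data["download_dir"]  # key name illustrative
    # os.path.isdir touches the filesystem, so run it in a worker thread
    # instead of blocking the event loop.
    if not await hass.async_add_executor_job(os.path.isdir, download_path):
        return False
    return True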
J. Nick Koston
0191d3e41b Refactor ConfigStore to avoid needing to pass config_dir (#114827)
Co-authored-by: Erik <erik@montnemery.com>
2024-04-05 12:33:12 +02:00
tronikos
319f76cdc8 Bump opower to 0.4.3 (#114826)
Co-authored-by: Joost Lekkerkerker <joostlek@outlook.com>
2024-04-05 12:33:09 +02:00
J. Nick Koston
530725bbfa Handle ambiguous script actions by using action map order (#114825) 2024-04-05 12:33:06 +02:00
Lex Li
d8ae7d6955 Fix type cast in snmp (#114795) 2024-04-05 12:33:03 +02:00
cdheiser
3d0bafbdc9 Fix Lutron light brightness values (#114794)
Fix brightness values in light.py

Bug fix to scale the brightness to 0-100, which is what Lutron expects.
2024-04-05 12:33:00 +02:00
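Home Assistant lights report brightness on a 0-255 scale while Lutron dimmers expect a 0-100 percentage; the fix routes the value through the to_lutron_level helper (visible in the light.py hunk further down). A minimal sketch of that conversion, with the exact rounding being an assumption:

def to_lutron_level(level: int) -> float:
    # Home Assistant brightness 0-255 -> Lutron level 0-100
    return float(level * 100) / 255

def to_hass_level(level: float) -> int:
    # Lutron level 0-100 -> Home Assistant brightness 0-255
    return int(level * 255 / 100)

to_lutron_level(128)  # ~50.2, i.e. roughly half brightness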
Aaron Bach
ef8e54877f Fix unhandled KeyError during Notion setup (#114787) 2024-04-05 12:32:57 +02:00
Manuel Dipolt
a39e1a6428 Update romy to 0.0.10 (#114785) 2024-04-05 12:32:53 +02:00
Marc Mueller
450be67406 Update romy to 0.0.9 (#114360) 2024-04-05 10:14:00 +02:00
Åke Strandberg
25289e0ca1 Bump myuplink dependency to 0.6.0 (#114767) 2024-04-05 10:06:39 +02:00
Álvaro Fernández Rojas
d983fa6da7 Update aioairzone-cloud to v0.4.7 (#114761) 2024-04-05 10:06:35 +02:00
Franck Nijhof
b61397656c 2024.4.0 (#114764) 2024-04-03 20:38:11 +02:00
Jan-Philipp Benecke
590546a9a5 Use setup_test_component_platform helper for sensor entity component tests instead of hass.components (#114316)
* Use `setup_test_component_platform` helper for sensor entity component tests instead of `hass.components`

* Missing file

* Fix import

* Remove invalid device class
2024-04-03 20:00:56 +02:00
IngoK1
9ba4d26abd Fix for Sonos URL encoding problem #102557 (#109518)
* Fix for URL encoding problem #102557

Fixes the problem "Cannot play media with spaces in folder names to Sonos #102557" by removing the encoding of the strings in the music library.

* Fix type casting problem

* Update media_browser.py to fix PR check findings

Added the required casting for all unquote statements to avoid further casting findings in the PR checks

* Update media_browser.py

Checked linting, let's give it another try

* Update media_browser.py

Updated ruff run

* Update media_browser.py - added version run through ruff

* Update media_browser.py - ruff changes

* Apply ruff formatting

* Update homeassistant/components/sonos/media_browser.py

Co-authored-by: jjlawren <jjlawren@users.noreply.github.com>

* Update homeassistant/components/sonos/media_browser.py

Co-authored-by: jjlawren <jjlawren@users.noreply.github.com>

* Update homeassistant/components/sonos/media_browser.py

Co-authored-by: jjlawren <jjlawren@users.noreply.github.com>

* Update homeassistant/components/sonos/media_browser.py

Co-authored-by: jjlawren <jjlawren@users.noreply.github.com>

---------

Co-authored-by: computeq-admin <51021172+computeq-admin@users.noreply.github.com>
Co-authored-by: Jason Lawrence <jjlawren@users.noreply.github.com>
2024-04-03 19:31:02 +02:00
Franck Nijhof
aa33da546d Bump version to 2024.4.0 2024-04-03 19:09:39 +02:00
Franck Nijhof
3845523a27 Bump version to 2024.4.0b9 2024-04-03 17:55:24 +02:00
Michael
6a7fad0228 Fix Synology DSM setup in case no Surveillance Station permission (#114757) 2024-04-03 17:55:12 +02:00
Bram Kragten
33f07ce035 Update frontend to 20240403.1 (#114756) 2024-04-03 17:55:09 +02:00
Michael Hansen
4302c5c273 Bump intents (#114755) 2024-04-03 17:55:05 +02:00
Robert Resch
b2df1b1c03 Allow passing area/device/entity IDs to floor_id and floor_name (#114748) 2024-04-03 17:55:01 +02:00
Franck Nijhof
0aa134459b Bump version to 2024.4.0b8 2024-04-03 15:35:53 +02:00
Bram Kragten
0ca3700c16 Update frontend to 20240403.0 (#114747) 2024-04-03 15:35:40 +02:00
Joost Lekkerkerker
35ff633d99 Avoid blocking IO in downloader config flow (#114741) 2024-04-03 15:35:36 +02:00
Joost Lekkerkerker
7a2f6ce430 Fix Downloader config flow (#114718) 2024-04-03 15:35:32 +02:00
David F. Mulcahey
7cb603a226 Import zha quirks in the executor (#114685) 2024-04-03 15:35:29 +02:00
Jonas Fors Lellky
43562289e4 Bump flexit_bacnet to 2.2.1 (#114641) 2024-04-03 15:35:26 +02:00
Lenn
79fa7caa41 Rename Motionblinds BLE integration to Motionblinds Bluetooth (#114584) 2024-04-03 15:35:20 +02:00
Franck Nijhof
8bdb27c88b Bump version to 2024.4.0b7 2024-04-03 00:14:07 +02:00
Bram Kragten
f676448f27 Update frontend to 20240402.2 (#114683) 2024-04-03 00:13:57 +02:00
J. Nick Koston
639c4a843b Avoid trying to load platform that are known to not exist in async_prepare_setup_platform (#114659) 2024-04-03 00:13:53 +02:00
G Johansson
02dee34338 Bump holidays to 0.46 (#114657) 2024-04-03 00:13:49 +02:00
Maciej Bieniek
4e0290ce0e Add missing state to the Tractive tracker state sensor (#114654)
Co-authored-by: Maciej Bieniek <478555+bieniu@users.noreply.github.com>
2024-04-03 00:13:45 +02:00
Robert Svensson
fa2f49693c Bump aiounifi to v74 (#114649) 2024-04-03 00:13:39 +02:00
Pete Sage
2ce784105d Fix Sonos play imported playlists (#113934) 2024-04-03 00:13:35 +02:00
Franck Nijhof
85fb4a27a3 Bump version to 2024.4.0b6 2024-04-02 17:35:01 +02:00
Bram Kragten
8cbedbe26b Update frontend to 20240402.1 (#114646) 2024-04-02 17:34:29 +02:00
Steven B
5bd52da13a Bump ring_doorbell integration to 0.8.9 (#114631) 2024-04-02 17:33:24 +02:00
dotvav
d53848aae4 Fix Overkiz Hitachi OVP air-to-air heat pump (#114611) 2024-04-02 17:23:51 +02:00
puddly
4e0d6f287e Reduce ZHA OTA logbook entries and extraneous updates (#114591) 2024-04-02 17:23:45 +02:00
Franck Nijhof
5af5f3694e Bump version to 2024.4.0b5 2024-04-02 12:28:20 +02:00
Bram Kragten
b539b25682 Update frontend to 20240402.0 (#114627) 2024-04-02 12:28:07 +02:00
Fexiven
ca31479d29 Fix Starlink integration startup issue (#114615) 2024-04-02 12:28:04 +02:00
Franck Nijhof
92dfec3c98 Add floor selector (#114614) 2024-04-02 12:28:00 +02:00
max2697
230c29edbe Bump opower to 0.4.2 (#114608) 2024-04-02 12:27:57 +02:00
Jack Boswell
559fe65471 Catch potential ValueError when getting or setting Starlink sleep values (#114607) 2024-04-02 12:27:54 +02:00
mkmer
384d10a51d Add diagnostic platform to Whirlpool (#114578)
* Add diagnostic platform and tests

* lowercase variable

* Correct doc string
2024-04-02 12:27:50 +02:00
Brett Adams
e5a620545c Fix battery heater in Tessie (#114568) 2024-04-02 12:27:47 +02:00
Maciej Bieniek
7b84e86f89 Improve Shelly RPC device update progress (#114566)
Co-authored-by: Shay Levy <levyshay1@gmail.com>
Co-authored-by: Maciej Bieniek <478555+bieniu@users.noreply.github.com>
2024-04-02 12:27:44 +02:00
Joost Lekkerkerker
18b6de567d Bump roombapy to 1.8.1 (#114478)
* Bump roombapy to 1.7.0

* Bump

* Bump

* Fix
2024-04-02 12:27:40 +02:00
Pete Sage
a6076a0d33 Display sonos album title with URL encoding (#113693)
* unescape the title

When extracting the title from the item_id, it needs to be unescaped.

* sort imports
2024-04-02 12:27:36 +02:00
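The Sonos music library hands back percent-encoded identifiers, so a title extracted from an item_id has to be unescaped before display. A small sketch with an illustrative item_id; the actual change applies urllib.parse.unquote in media_browser.py, as the Sonos hunks further down show:

import urllib.parse

item_id = "A:ALBUMARTIST/Daft%20Punk/Random%20Access%20Memories"  # illustrative value

title = urllib.parse.unquote(item_id.split("/")[-1])
print(title)  # Random Access Memories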
Paulus Schoutsen
7164993562 Bump version to 2024.4.0b4 2024-04-02 01:51:42 +00:00
mkmer
bc21836e7e Bump whirlpool-sixth-sense to 0.18.7 (#114606)
Bump sixth-sense to 0.18.7
2024-04-02 01:51:35 +00:00
J. Nick Koston
52612b10fd Avoid storing raw extracted traceback in system_log (#114603)
This is never actually used and takes up quite a bit of RAM.
2024-04-02 01:51:35 +00:00
J. Nick Koston
623d85ecaa Fix memory leak when importing a platform fails (#114602)
* Fix memory leak when importing a platform fails

re-raising ImportError would trigger a memory leak

* fixes, coverage

* Apply suggestions from code review
2024-04-02 01:51:33 +00:00
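The leak is the usual Python traceback-retention problem: an exception object keeps its __traceback__, which keeps every frame (and each frame's locals) alive, so caching or re-raising a stored ImportError can pin large objects in memory. A minimal sketch of the mechanism, not the actual homeassistant.loader code:

_cache: dict[str, ImportError] = {}

def import_platform(name: str):
    try:
        return __import__(name)
    except ImportError as err:
        # Storing the exception long-term keeps err.__traceback__ and, through it,
        # every frame on the stack at raise time plus those frames' locals.
        _cache[name] = err
        raise

# One way to keep the error but drop the frames:
#     _cache[name] = err.with_traceback(None)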
J. Nick Koston
43631d5944 Add missing platforms_exist guard to check_config (#114600)
* Add missing platforms_exist guard to check_config

related issue #112811

When the exception hits, the config ends up being saved in the traceback,
so the memory is never released.

This aligns the check_config code with homeassistant.config to avoid
the exception being thrown.

* patch

* merge branch
2024-04-02 01:51:33 +00:00
J. Nick Koston
112aab47fb Bump zeroconf to 0.132.0 (#114596)
changelog: https://github.com/python-zeroconf/python-zeroconf/compare/0.131.0...0.132.0
2024-04-02 01:51:32 +00:00
Martin Hjelmare
ea13f102e0 Fix reolink media source data access (#114593)
* Add test

* Fix reolink media source data access
2024-04-02 01:51:31 +00:00
jjlawren
bb33725e7f Bump plexapi to 4.15.11 (#114581) 2024-04-02 01:51:31 +00:00
Michael
bd6890ab83 Filter out ignored entries in ssdp step of AVM Fritz!SmartHome (#114574)
filter out ignored entries in ssdp step
2024-04-02 01:51:30 +00:00
Michael
25c611ffc4 Reduce usage of executor threads in AVM Fritz!Tools (#114570)
* run entity state update calls in one executor task

* remove not needed wrapping

* mark as "non-public" method

* add guard against changes on _entity_update_functions
2024-04-02 01:51:29 +00:00
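Condensed from the Fritz coordinator hunk further down: instead of scheduling one executor job per registered update callable, the synchronous calls are looped inside a single method that runs once in the executor. Roughly:

def _entity_states_update(self) -> dict:
    """Run every registered (blocking) entity update call in one worker thread."""
    entity_states = {}
    for key in list(self._entity_update_functions):
        if (update_fn := self._entity_update_functions.get(key)) is not None:
            entity_states[key] = update_fn(
                self.fritz_status, self.data["entity_states"].get(key)
            )
    return entity_states

# in _async_update_data:
#     entity_data["entity_states"] = await self.hass.async_add_executor_job(
#         self._entity_states_update
#     )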
Maikel Punie
fc24b61859 Bump velbusaio to 2024.4.0 (#114569)
Bump velbusaio to 2024.4.0
2024-04-02 01:51:28 +00:00
Joost Lekkerkerker
71588b5c22 Fix wrong icons (#114567)
* Fix wrong icons

* Fix wrong icons
2024-04-02 01:51:27 +00:00
Robert Svensson
14dfb6a255 Bump axis to v60 (#114544)
* Improve Axis MQTT support

* Bump axis to v60
2024-04-02 01:51:27 +00:00
G Johansson
ef97255d9c Fix server update from breaking setup in Speedtest.NET (#114524) 2024-04-02 01:51:26 +00:00
J. Nick Koston
e8afdd67d0 Fix workday doing blocking I/O in the event loop (#114492) 2024-04-02 01:51:25 +00:00
J. Nick Koston
008e4413b5 Fix late load of anyio doing blocking I/O in the event loop (#114491)
* Fix late load of anyio doing blocking I/O in the event loop

httpx loads anyio, which loads the asyncio backend in the event loop
as soon as httpx makes the first request

* tweak
2024-04-02 01:51:24 +00:00
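The fix (the bootstrap hunk at the top of the diff below) simply pre-imports anyio's asyncio backend during startup, outside the event loop, so httpx's first request no longer triggers a blocking import. Condensed:

import contextlib

with contextlib.suppress(ImportError):
    # Ensure anyio's backend is imported up front rather than lazily
    # inside the event loop on the first httpx request.
    from anyio._backends import _asyncio  # noqa: F401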
dotvav
c373d40e34 Fix Overkiz Hitachi OVP air-to-air heat pump (#114487)
Unpack command parameters instead of passing a list
2024-04-02 01:51:24 +00:00
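A one-line illustration of the change, with the list contents as placeholders: the parameters are now unpacked so each element becomes its own command argument, instead of the whole list being sent as a single parameter.

command_data = [main_operation, target_temperature, fan_mode, hvac_mode]  # placeholders

# before: await self.executor.async_execute_command(OverkizCommand.GLOBAL_CONTROL, command_data)
await self.executor.async_execute_command(OverkizCommand.GLOBAL_CONTROL, *command_data)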
J. Nick Koston
bdf51553ef Improve sonos test synchronization (#114468) 2024-04-02 01:51:23 +00:00
Michael Hansen
f2edc15687 Add initial support for floors to intents (#114456)
* Add initial support for floors to intents

* Fix climate intent

* More tests

* No return value

* Add requested changes

* Reuse event handler
2024-04-02 01:51:22 +00:00
J. Nick Koston
286a09d737 Mark executor jobs as background unless created from a tracked task (#114450)
* Mark executor jobs as background unless created from a tracked task

If the current task is not tracked, the executor job should not
be a tracked task either, to avoid delaying startup and shutdown.

Currently any executor job created in an untracked task or
background task would end up being tracked, delaying
startup/shutdown

* import exec has the same issue

* Avoid tracking import executor jobs

There is no reason to track these jobs, as they are always awaited,
and we do not want to support fire-and-forget import executor jobs

* fix xiaomi_miio

* lots of fire time changed without background await

* revert changes moved to other PR

* more

* more

* more

* m

* m

* p

* fix fire and forget tests

* scrape

* sonos

* system

* more

* capture callback before block

* coverage

* more

* more races

* more races

* more

* missed some

* more fixes

* missed some more

* fix

* remove unneeded

* one more race

* two
2024-04-02 01:51:21 +00:00
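For context on the tracked/background distinction the commit message leans on (a rough illustration, not the patch itself): tracked tasks are awaited during startup and shutdown, background tasks are not, and this change makes executor jobs inherit that property from the task that created them instead of always being tracked.

# Tracked: Home Assistant waits for this before finishing startup/shutdown.
hass.async_create_task(one_off_setup_work())

# Background: never delays startup or shutdown.
hass.async_create_background_task(poll_forever(), "my-poller")

# Executor jobs now follow the context they were created in, so a job
# spawned from a background task no longer holds up shutdown.
result = await hass.async_add_executor_job(blocking_call)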
Shay Levy
e8ee2fd25c Cleanup Shelly RGBW light entities (#114410) 2024-04-02 01:51:21 +00:00
174 changed files with 2348 additions and 862 deletions

View File

@@ -93,6 +93,11 @@ from .util.async_ import create_eager_task
from .util.logging import async_activate_log_queue_handler
from .util.package import async_get_user_site, is_virtual_env
with contextlib.suppress(ImportError):
# Ensure anyio backend is imported to avoid it being imported in the event loop
from anyio._backends import _asyncio # noqa: F401
if TYPE_CHECKING:
from .runner import RuntimeConfig

View File

@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/airzone_cloud",
"iot_class": "cloud_push",
"loggers": ["aioairzone_cloud"],
"requirements": ["aioairzone-cloud==0.4.6"]
"requirements": ["aioairzone-cloud==0.4.7"]
}

View File

@@ -5,6 +5,7 @@ from __future__ import annotations
from datetime import datetime
from functools import partial
import logging
from time import monotonic
from aiohttp import ClientError
from yalexs.activity import Activity, ActivityType
@@ -26,9 +27,11 @@ _LOGGER = logging.getLogger(__name__)
ACTIVITY_STREAM_FETCH_LIMIT = 10
ACTIVITY_CATCH_UP_FETCH_LIMIT = 2500
INITIAL_LOCK_RESYNC_TIME = 60
# If there is a storm of activity (ie lock, unlock, door open, door close, etc)
# we want to debounce the updates so we don't hammer the activity api too much.
ACTIVITY_DEBOUNCE_COOLDOWN = 3
ACTIVITY_DEBOUNCE_COOLDOWN = 4
@callback
@@ -62,6 +65,7 @@ class ActivityStream(AugustSubscriberMixin):
self.pubnub = pubnub
self._update_debounce: dict[str, Debouncer] = {}
self._update_debounce_jobs: dict[str, HassJob] = {}
self._start_time: float | None = None
@callback
def _async_update_house_id_later(self, debouncer: Debouncer, _: datetime) -> None:
@@ -70,6 +74,7 @@ class ActivityStream(AugustSubscriberMixin):
async def async_setup(self) -> None:
"""Token refresh check and catch up the activity stream."""
self._start_time = monotonic()
update_debounce = self._update_debounce
update_debounce_jobs = self._update_debounce_jobs
for house_id in self._house_ids:
@@ -140,11 +145,25 @@ class ActivityStream(AugustSubscriberMixin):
debouncer = self._update_debounce[house_id]
debouncer.async_schedule_call()
# Schedule two updates past the debounce time
# to ensure we catch the case where the activity
# api does not update right away and we need to poll
# it again. Sometimes the lock operator or a doorbell
# will not show up in the activity stream right away.
# Only do additional polls if we are past
# the initial lock resync time to avoid a storm
# of activity at setup.
if (
not self._start_time
or monotonic() - self._start_time < INITIAL_LOCK_RESYNC_TIME
):
_LOGGER.debug(
"Skipping additional updates due to ongoing initial lock resync time"
)
return
_LOGGER.debug("Scheduling additional updates for house id %s", house_id)
job = self._update_debounce_jobs[house_id]
for step in (1, 2):
future_updates.append(

View File

@@ -40,7 +40,7 @@ ATTR_OPERATION_TAG = "tag"
# Limit battery, online, and hardware updates to hourly
# in order to reduce the number of api requests and
# avoid hitting rate limits
MIN_TIME_BETWEEN_DETAIL_UPDATES = timedelta(hours=1)
MIN_TIME_BETWEEN_DETAIL_UPDATES = timedelta(hours=24)
# Activity needs to be checked more frequently as the
# doorbell motion and rings are included here

View File

@@ -49,9 +49,17 @@ class AugustSubscriberMixin:
"""Call the refresh method."""
self._hass.async_create_task(self._async_refresh(now), eager_start=True)
@callback
def _async_cancel_update_interval(self, _: Event | None = None) -> None:
"""Cancel the scheduled update."""
if self._unsub_interval:
self._unsub_interval()
self._unsub_interval = None
@callback
def _async_setup_listeners(self) -> None:
"""Create interval and stop listeners."""
self._async_cancel_update_interval()
self._unsub_interval = async_track_time_interval(
self._hass,
self._async_scheduled_refresh,
@@ -59,17 +67,12 @@ class AugustSubscriberMixin:
name="august refresh",
)
@callback
def _async_cancel_update_interval(_: Event) -> None:
self._stop_interval = None
if self._unsub_interval:
self._unsub_interval()
self._stop_interval = self._hass.bus.async_listen(
EVENT_HOMEASSISTANT_STOP,
_async_cancel_update_interval,
run_immediately=True,
)
if not self._stop_interval:
self._stop_interval = self._hass.bus.async_listen(
EVENT_HOMEASSISTANT_STOP,
self._async_cancel_update_interval,
run_immediately=True,
)
@callback
def async_unsubscribe_device_id(
@@ -82,13 +85,7 @@ class AugustSubscriberMixin:
if self._subscriptions:
return
if self._unsub_interval:
self._unsub_interval()
self._unsub_interval = None
if self._stop_interval:
self._stop_interval()
self._stop_interval = None
self._async_cancel_update_interval()
@callback
def async_signal_device_id_update(self, device_id: str) -> None:

View File

@@ -56,6 +56,7 @@ class AxisCamera(AxisEntity, MjpegCamera):
mjpeg_url=self.mjpeg_source,
still_image_url=self.image_source,
authentication=HTTP_DIGEST_AUTHENTICATION,
verify_ssl=False,
unique_id=f"{hub.unique_id}-camera",
)
@@ -74,16 +75,18 @@ class AxisCamera(AxisEntity, MjpegCamera):
Additionally used when device change IP address.
"""
proto = self.hub.config.protocol
host = self.hub.config.host
port = self.hub.config.port
image_options = self.generate_options(skip_stream_profile=True)
self._still_image_url = (
f"http://{self.hub.config.host}:{self.hub.config.port}/axis-cgi"
f"/jpg/image.cgi{image_options}"
f"{proto}://{host}:{port}/axis-cgi/jpg/image.cgi{image_options}"
)
mjpeg_options = self.generate_options()
self._mjpeg_url = (
f"http://{self.hub.config.host}:{self.hub.config.port}/axis-cgi"
f"/mjpg/video.cgi{mjpeg_options}"
f"{proto}://{host}:{port}/axis-cgi/mjpg/video.cgi{mjpeg_options}"
)
stream_options = self.generate_options(add_video_codec_h264=True)
@@ -95,10 +98,7 @@ class AxisCamera(AxisEntity, MjpegCamera):
self.hub.additional_diagnostics["camera_sources"] = {
"Image": self._still_image_url,
"MJPEG": self._mjpeg_url,
"Stream": (
f"rtsp://user:pass@{self.hub.config.host}/axis-media"
f"/media.amp{stream_options}"
),
"Stream": (f"rtsp://user:pass@{host}/axis-media/media.amp{stream_options}"),
}
@property

View File

@@ -168,16 +168,13 @@ class AxisFlowHandler(ConfigFlow, domain=AXIS_DOMAIN):
self, entry_data: Mapping[str, Any], keep_password: bool
) -> ConfigFlowResult:
"""Re-run configuration step."""
protocol = entry_data.get(CONF_PROTOCOL, "http")
password = entry_data[CONF_PASSWORD] if keep_password else ""
self.discovery_schema = {
vol.Required(
CONF_PROTOCOL, default=entry_data.get(CONF_PROTOCOL, "http")
): str,
vol.Required(CONF_PROTOCOL, default=protocol): vol.In(PROTOCOL_CHOICES),
vol.Required(CONF_HOST, default=entry_data[CONF_HOST]): str,
vol.Required(CONF_USERNAME, default=entry_data[CONF_USERNAME]): str,
vol.Required(
CONF_PASSWORD,
default=entry_data[CONF_PASSWORD] if keep_password else "",
): str,
vol.Required(CONF_PASSWORD, default=password): str,
vol.Required(CONF_PORT, default=entry_data[CONF_PORT]): int,
}

View File

@@ -12,6 +12,7 @@ from homeassistant.const import (
CONF_NAME,
CONF_PASSWORD,
CONF_PORT,
CONF_PROTOCOL,
CONF_TRIGGER_TIME,
CONF_USERNAME,
)
@@ -31,6 +32,7 @@ class AxisConfig:
entry: ConfigEntry
protocol: str
host: str
port: int
username: str
@@ -54,6 +56,7 @@ class AxisConfig:
options = config_entry.options
return cls(
entry=config_entry,
protocol=config.get(CONF_PROTOCOL, "http"),
host=config[CONF_HOST],
username=config[CONF_USERNAME],
password=config[CONF_PASSWORD],

View File

@@ -116,7 +116,7 @@ class AxisHub:
if status.status.state == ClientState.ACTIVE:
self.config.entry.async_on_unload(
await mqtt.async_subscribe(
hass, f"{self.api.vapix.serial_number}/#", self.mqtt_message
hass, f"{status.config.device_topic_prefix}/#", self.mqtt_message
)
)
@@ -124,7 +124,8 @@ class AxisHub:
def mqtt_message(self, message: ReceiveMessage) -> None:
"""Receive Axis MQTT message."""
self.disconnect_from_stream()
if message.topic.endswith("event/connection"):
return
event = mqtt_json_to_event(message.payload)
self.api.event.handler(event)

View File

@@ -26,7 +26,7 @@
"iot_class": "local_push",
"loggers": ["axis"],
"quality_scale": "platinum",
"requirements": ["axis==59"],
"requirements": ["axis==60"],
"ssdp": [
{
"manufacturer": "AXIS"

View File

@@ -58,6 +58,7 @@ class GetTemperatureIntent(intent.IntentHandler):
raise intent.NoStatesMatchedError(
name=entity_text or entity_name,
area=area_name or area_id,
floor=None,
domains={DOMAIN},
device_classes=None,
)
@@ -75,6 +76,7 @@ class GetTemperatureIntent(intent.IntentHandler):
raise intent.NoStatesMatchedError(
name=entity_name,
area=None,
floor=None,
domains={DOMAIN},
device_classes=None,
)

View File

@@ -34,6 +34,7 @@ from homeassistant.helpers import (
area_registry as ar,
device_registry as dr,
entity_registry as er,
floor_registry as fr,
intent,
start,
template,
@@ -163,7 +164,12 @@ class DefaultAgent(AbstractConversationAgent):
self.hass.bus.async_listen(
ar.EVENT_AREA_REGISTRY_UPDATED,
self._async_handle_area_registry_changed,
self._async_handle_area_floor_registry_changed,
run_immediately=True,
)
self.hass.bus.async_listen(
fr.EVENT_FLOOR_REGISTRY_UPDATED,
self._async_handle_area_floor_registry_changed,
run_immediately=True,
)
self.hass.bus.async_listen(
@@ -696,10 +702,13 @@ class DefaultAgent(AbstractConversationAgent):
return lang_intents
@core.callback
def _async_handle_area_registry_changed(
self, event: core.Event[ar.EventAreaRegistryUpdatedData]
def _async_handle_area_floor_registry_changed(
self,
event: core.Event[
ar.EventAreaRegistryUpdatedData | fr.EventFloorRegistryUpdatedData
],
) -> None:
"""Clear area area cache when the area registry has changed."""
"""Clear area/floor list cache when the area registry has changed."""
self._slot_lists = None
@core.callback
@@ -773,6 +782,8 @@ class DefaultAgent(AbstractConversationAgent):
# Default name
entity_names.append((state.name, state.name, context))
_LOGGER.debug("Exposed entities: %s", entity_names)
# Expose all areas.
#
# We pass in area id here with the expectation that no two areas will
@@ -788,11 +799,25 @@ class DefaultAgent(AbstractConversationAgent):
area_names.append((alias, area.id))
_LOGGER.debug("Exposed entities: %s", entity_names)
# Expose all floors.
#
# We pass in floor id here with the expectation that no two floors will
# share the same name or alias.
floors = fr.async_get(self.hass)
floor_names = []
for floor in floors.async_list_floors():
floor_names.append((floor.name, floor.floor_id))
if floor.aliases:
for alias in floor.aliases:
if not alias.strip():
continue
floor_names.append((alias, floor.floor_id))
self._slot_lists = {
"area": TextSlotList.from_tuples(area_names, allow_template=False),
"name": TextSlotList.from_tuples(entity_names, allow_template=False),
"floor": TextSlotList.from_tuples(floor_names, allow_template=False),
}
return self._slot_lists
@@ -953,6 +978,10 @@ def _get_unmatched_response(result: RecognizeResult) -> tuple[ErrorKey, dict[str
# area only
return ErrorKey.NO_AREA, {"area": unmatched_area}
if unmatched_floor := unmatched_text.get("floor"):
# floor only
return ErrorKey.NO_FLOOR, {"floor": unmatched_floor}
# Area may still have matched
matched_area: str | None = None
if matched_area_entity := result.entities.get("area"):
@@ -1000,6 +1029,13 @@ def _get_no_states_matched_response(
"area": no_states_error.area,
}
if no_states_error.floor:
# domain in floor
return ErrorKey.NO_DOMAIN_IN_FLOOR, {
"domain": domain,
"floor": no_states_error.floor,
}
# domain only
return ErrorKey.NO_DOMAIN, {"domain": domain}

View File

@@ -7,5 +7,5 @@
"integration_type": "system",
"iot_class": "local_push",
"quality_scale": "internal",
"requirements": ["hassil==1.6.1", "home-assistant-intents==2024.3.27"]
"requirements": ["hassil==1.6.1", "home-assistant-intents==2024.4.3"]
}

View File

@@ -11,7 +11,11 @@ import requests
import voluptuous as vol
from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry
from homeassistant.core import HomeAssistant, ServiceCall
from homeassistant.core import (
DOMAIN as HOMEASSISTANT_DOMAIN,
HomeAssistant,
ServiceCall,
)
from homeassistant.data_entry_flow import FlowResultType
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue
@@ -43,6 +47,13 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
if DOMAIN not in config:
return True
hass.async_create_task(_async_import_config(hass, config))
return True
async def _async_import_config(hass: HomeAssistant, config: ConfigType) -> None:
"""Import the Downloader component from the YAML file."""
import_result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_IMPORT},
@@ -51,28 +62,40 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
},
)
translation_key = "deprecated_yaml"
if (
import_result["type"] == FlowResultType.ABORT
and import_result["reason"] == "import_failed"
and import_result["reason"] != "single_instance_allowed"
):
translation_key = "import_failed"
async_create_issue(
hass,
DOMAIN,
f"deprecated_yaml_{DOMAIN}",
breaks_in_ha_version="2024.9.0",
is_fixable=False,
issue_domain=DOMAIN,
severity=IssueSeverity.WARNING,
translation_key=translation_key,
translation_placeholders={
"domain": DOMAIN,
"integration_title": "Downloader",
},
)
return True
async_create_issue(
hass,
DOMAIN,
f"deprecated_yaml_{DOMAIN}",
breaks_in_ha_version="2024.10.0",
is_fixable=False,
issue_domain=DOMAIN,
severity=IssueSeverity.WARNING,
translation_key="directory_does_not_exist",
translation_placeholders={
"domain": DOMAIN,
"integration_title": "Downloader",
"url": "/config/integrations/dashboard/add?domain=downloader",
},
)
else:
async_create_issue(
hass,
HOMEASSISTANT_DOMAIN,
f"deprecated_yaml_{DOMAIN}",
breaks_in_ha_version="2024.10.0",
is_fixable=False,
issue_domain=DOMAIN,
severity=IssueSeverity.WARNING,
translation_key="deprecated_yaml",
translation_placeholders={
"domain": DOMAIN,
"integration_title": "Downloader",
},
)
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
@@ -83,7 +106,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
if not os.path.isabs(download_path):
download_path = hass.config.path(download_path)
if not os.path.isdir(download_path):
if not await hass.async_add_executor_job(os.path.isdir, download_path):
_LOGGER.error(
"Download path %s does not exist. File Downloader not active", download_path
)

View File

@@ -46,19 +46,24 @@ class DownloaderConfigFlow(ConfigFlow, domain=DOMAIN):
errors=errors,
)
async def async_step_import(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
async def async_step_import(self, user_input: dict[str, Any]) -> ConfigFlowResult:
"""Handle a flow initiated by configuration file."""
if self._async_current_entries():
return self.async_abort(reason="single_instance_allowed")
return await self.async_step_user(user_input)
try:
await self._validate_input(user_input)
except DirectoryDoesNotExist:
return self.async_abort(reason="directory_does_not_exist")
return self.async_create_entry(title=DEFAULT_NAME, data=user_input)
async def _validate_input(self, user_input: dict[str, Any]) -> None:
"""Validate the user input if the directory exists."""
if not os.path.isabs(user_input[CONF_DOWNLOAD_DIR]):
download_path = self.hass.config.path(user_input[CONF_DOWNLOAD_DIR])
download_path = user_input[CONF_DOWNLOAD_DIR]
if not os.path.isabs(download_path):
download_path = self.hass.config.path(download_path)
if not os.path.isdir(download_path):
if not await self.hass.async_add_executor_job(os.path.isdir, download_path):
_LOGGER.error(
"Download path %s does not exist. File Downloader not active",
download_path,

View File

@@ -37,13 +37,9 @@
}
},
"issues": {
"deprecated_yaml": {
"title": "The {integration_title} YAML configuration is being removed",
"description": "Configuring {integration_title} using YAML is being removed.\n\nYour configuration is already imported.\n\nRemove the `{domain}` configuration from your configuration.yaml file and restart Home Assistant to fix this issue."
},
"import_failed": {
"directory_does_not_exist": {
"title": "The {integration_title} failed to import",
"description": "The {integration_title} integration failed to import.\n\nPlease check the logs for more details."
"description": "The {integration_title} integration failed to import because the configured directory does not exist.\n\nEnsure the directory exists and restart Home Assistant to try again or remove the {integration_title} configuration from your configuration.yaml file and continue to [set up the integration]({url}) manually."
}
}
}

View File

@@ -1,7 +1,7 @@
{
"services": {
"restart": "mdi:restart",
"start": "mdi:start",
"start": "mdi:play",
"stop": "mdi:stop"
}
}

View File

@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/flexit_bacnet",
"integration_type": "device",
"iot_class": "local_polling",
"requirements": ["flexit_bacnet==2.1.0"]
"requirements": ["flexit_bacnet==2.2.1"]
}

View File

@@ -311,6 +311,17 @@ class FritzBoxTools(
)
return unregister_entity_updates
def _entity_states_update(self) -> dict:
"""Run registered entity update calls."""
entity_states = {}
for key in list(self._entity_update_functions):
if (update_fn := self._entity_update_functions.get(key)) is not None:
_LOGGER.debug("update entity %s", key)
entity_states[key] = update_fn(
self.fritz_status, self.data["entity_states"].get(key)
)
return entity_states
async def _async_update_data(self) -> UpdateCoordinatorDataType:
"""Update FritzboxTools data."""
entity_data: UpdateCoordinatorDataType = {
@@ -319,15 +330,9 @@ class FritzBoxTools(
}
try:
await self.async_scan_devices()
for key in list(self._entity_update_functions):
_LOGGER.debug("update entity %s", key)
entity_data["entity_states"][
key
] = await self.hass.async_add_executor_job(
self._entity_update_functions[key],
self.fritz_status,
self.data["entity_states"].get(key),
)
entity_data["entity_states"] = await self.hass.async_add_executor_job(
self._entity_states_update
)
if self.has_call_deflections:
entity_data[
"call_deflections"

View File

@@ -141,7 +141,7 @@ class FritzboxConfigFlow(ConfigFlow, domain=DOMAIN):
return self.async_abort(reason="already_in_progress")
# update old and user-configured config entries
for entry in self._async_current_entries():
for entry in self._async_current_entries(include_ignore=False):
if entry.data[CONF_HOST] == host:
if uuid and not entry.unique_id:
self.hass.config_entries.async_update_entry(entry, unique_id=uuid)

View File

@@ -20,5 +20,5 @@
"documentation": "https://www.home-assistant.io/integrations/frontend",
"integration_type": "system",
"quality_scale": "internal",
"requirements": ["home-assistant-frontend==20240329.1"]
"requirements": ["home-assistant-frontend==20240404.1"]
}

View File

@@ -5,5 +5,5 @@
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/holiday",
"iot_class": "local_polling",
"requirements": ["holidays==0.45", "babel==2.13.1"]
"requirements": ["holidays==0.46", "babel==2.13.1"]
}

View File

@@ -1,6 +1,6 @@
{
"services": {
"select_next": "mdi:skip",
"select_next": "mdi:skip-next",
"select_option": "mdi:check",
"select_previous": "mdi:skip-previous",
"select_first": "mdi:skip-backward",

View File

@@ -179,7 +179,7 @@ async def _get_dashboard_info(hass, url_path):
"views": views,
}
if config is None:
if config is None or "views" not in config:
return data
for idx, view in enumerate(config["views"]):

View File

@@ -141,7 +141,7 @@ class LutronLight(LutronDevice, LightEntity):
else:
brightness = self._prev_brightness
self._prev_brightness = brightness
args = {"new_level": brightness}
args = {"new_level": to_lutron_level(brightness)}
if ATTR_TRANSITION in kwargs:
args["fade_time_seconds"] = kwargs[ATTR_TRANSITION]
self._lutron_device.set_level(**args)

View File

@@ -52,7 +52,7 @@
"unjoin": "mdi:ungroup",
"volume_down": "mdi:volume-minus",
"volume_mute": "mdi:volume-mute",
"volume_set": "mdi:volume",
"volume_set": "mdi:volume-medium",
"volume_up": "mdi:volume-plus"
}
}

View File

@@ -1,4 +1,4 @@
"""Motionblinds BLE integration."""
"""Motionblinds Bluetooth integration."""
from __future__ import annotations
@@ -34,9 +34,9 @@ CONFIG_SCHEMA = cv.empty_config_schema(DOMAIN)
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
"""Set up Motionblinds BLE integration."""
"""Set up Motionblinds Bluetooth integration."""
_LOGGER.debug("Setting up Motionblinds BLE integration")
_LOGGER.debug("Setting up Motionblinds Bluetooth integration")
# The correct time is needed for encryption
_LOGGER.debug("Setting timezone for encryption: %s", hass.config.time_zone)
@@ -46,7 +46,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Set up Motionblinds BLE device from a config entry."""
"""Set up Motionblinds Bluetooth device from a config entry."""
_LOGGER.debug("(%s) Setting up device", entry.data[CONF_MAC_CODE])
@@ -94,7 +94,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Unload Motionblinds BLE device from a config entry."""
"""Unload Motionblinds Bluetooth device from a config entry."""
if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS):
hass.data[DOMAIN].pop(entry.entry_id)

View File

@@ -1,4 +1,4 @@
"""Button entities for the Motionblinds BLE integration."""
"""Button entities for the Motionblinds Bluetooth integration."""
from __future__ import annotations

View File

@@ -1,4 +1,4 @@
"""Config flow for Motionblinds BLE integration."""
"""Config flow for Motionblinds Bluetooth integration."""
from __future__ import annotations
@@ -38,7 +38,7 @@ CONFIG_SCHEMA = vol.Schema({vol.Required(CONF_MAC_CODE): str})
class FlowHandler(ConfigFlow, domain=DOMAIN):
"""Handle a config flow for Motionblinds BLE."""
"""Handle a config flow for Motionblinds Bluetooth."""
def __init__(self) -> None:
"""Initialize a ConfigFlow."""

View File

@@ -1,4 +1,4 @@
"""Constants for the Motionblinds BLE integration."""
"""Constants for the Motionblinds Bluetooth integration."""
ATTR_CONNECT = "connect"
ATTR_DISCONNECT = "disconnect"

View File

@@ -1,4 +1,4 @@
"""Cover entities for the Motionblinds BLE integration."""
"""Cover entities for the Motionblinds Bluetooth integration."""
from __future__ import annotations

View File

@@ -1,4 +1,4 @@
"""Base entities for the Motionblinds BLE integration."""
"""Base entities for the Motionblinds Bluetooth integration."""
import logging
@@ -16,7 +16,7 @@ _LOGGER = logging.getLogger(__name__)
class MotionblindsBLEEntity(Entity):
"""Base class for Motionblinds BLE entities."""
"""Base class for Motionblinds Bluetooth entities."""
_attr_has_entity_name = True
_attr_should_poll = False

View File

@@ -1,6 +1,6 @@
{
"domain": "motionblinds_ble",
"name": "Motionblinds BLE",
"name": "Motionblinds Bluetooth",
"bluetooth": [
{
"local_name": "MOTION_*",

View File

@@ -1,4 +1,4 @@
"""Select entities for the Motionblinds BLE integration."""
"""Select entities for the Motionblinds Bluetooth integration."""
from __future__ import annotations

View File

@@ -5,7 +5,7 @@ from __future__ import annotations
from http import HTTPStatus
from aiohttp import ClientError, ClientResponseError
from myuplink import MyUplinkAPI, get_manufacturer, get_system_name
from myuplink import MyUplinkAPI, get_manufacturer, get_model, get_system_name
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import Platform
@@ -92,7 +92,7 @@ def create_devices(
identifiers={(DOMAIN, device_id)},
name=get_system_name(system),
manufacturer=get_manufacturer(device),
model=device.productName,
model=get_model(device),
sw_version=device.firmwareCurrent,
serial_number=device.product_serial_number,
)

View File

@@ -6,5 +6,5 @@
"dependencies": ["application_credentials"],
"documentation": "https://www.home-assistant.io/integrations/myuplink",
"iot_class": "cloud_polling",
"requirements": ["myuplink==0.5.0"]
"requirements": ["myuplink==0.6.0"]
}

View File

@@ -108,7 +108,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
(CONF_REFRESH_TOKEN, client.refresh_token),
(CONF_USER_UUID, client.user_uuid),
):
if entry.data[key] == value:
if entry.data.get(key) == value:
continue
entry_updates["data"][key] = value

View File

@@ -7,5 +7,5 @@
"documentation": "https://www.home-assistant.io/integrations/opower",
"iot_class": "cloud_polling",
"loggers": ["opower"],
"requirements": ["opower==0.4.1"]
"requirements": ["opower==0.4.3"]
}

View File

@@ -298,6 +298,11 @@ class HitachiAirToAirHeatPumpOVP(OverkizEntity, ClimateEntity):
OverkizState.OVP_FAN_SPEED,
OverkizCommandParam.AUTO,
)
# Sanitize fan mode: Overkiz is sometimes providing a state that
# cannot be used as a command. Convert it to HA space and back to Overkiz
if fan_mode not in FAN_MODES_TO_OVERKIZ.values():
fan_mode = FAN_MODES_TO_OVERKIZ[OVERKIZ_TO_FAN_MODES[fan_mode]]
hvac_mode = self._control_backfill(
hvac_mode,
OverkizState.OVP_MODE_CHANGE,
@@ -357,5 +362,5 @@ class HitachiAirToAirHeatPumpOVP(OverkizEntity, ClimateEntity):
]
await self.executor.async_execute_command(
OverkizCommand.GLOBAL_CONTROL, command_data
OverkizCommand.GLOBAL_CONTROL, *command_data
)

View File

@@ -8,7 +8,7 @@
"iot_class": "local_push",
"loggers": ["plexapi", "plexwebsocket"],
"requirements": [
"PlexAPI==4.15.10",
"PlexAPI==4.15.11",
"plexauth==0.0.6",
"plexwebsocket==0.0.14"
],

View File

@@ -715,6 +715,7 @@ class Statistics(Base, StatisticsBase):
"start_ts",
unique=True,
),
_DEFAULT_TABLE_ARGS,
)
__tablename__ = TABLE_STATISTICS
@@ -732,6 +733,7 @@ class StatisticsShortTerm(Base, StatisticsBase):
"start_ts",
unique=True,
),
_DEFAULT_TABLE_ARGS,
)
__tablename__ = TABLE_STATISTICS_SHORT_TERM
@@ -760,7 +762,10 @@ class StatisticsMeta(Base):
class RecorderRuns(Base):
"""Representation of recorder run."""
__table_args__ = (Index("ix_recorder_runs_start_end", "start", "end"),)
__table_args__ = (
Index("ix_recorder_runs_start_end", "start", "end"),
_DEFAULT_TABLE_ARGS,
)
__tablename__ = TABLE_RECORDER_RUNS
run_id: Mapped[int] = mapped_column(Integer, Identity(), primary_key=True)
start: Mapped[datetime] = mapped_column(DATETIME_TYPE, default=dt_util.utcnow)
@@ -789,6 +794,7 @@ class MigrationChanges(Base):
"""Representation of migration changes."""
__tablename__ = TABLE_MIGRATION_CHANGES
__table_args__ = (_DEFAULT_TABLE_ARGS,)
migration_id: Mapped[str] = mapped_column(String(255), primary_key=True)
version: Mapped[int] = mapped_column(SmallInteger)
@@ -798,6 +804,8 @@ class SchemaChanges(Base):
"""Representation of schema version changes."""
__tablename__ = TABLE_SCHEMA_CHANGES
__table_args__ = (_DEFAULT_TABLE_ARGS,)
change_id: Mapped[int] = mapped_column(Integer, Identity(), primary_key=True)
schema_version: Mapped[int | None] = mapped_column(Integer)
changed: Mapped[datetime] = mapped_column(DATETIME_TYPE, default=dt_util.utcnow)
@@ -816,6 +824,8 @@ class StatisticsRuns(Base):
"""Representation of statistics run."""
__tablename__ = TABLE_STATISTICS_RUNS
__table_args__ = (_DEFAULT_TABLE_ARGS,)
run_id: Mapped[int] = mapped_column(Integer, Identity(), primary_key=True)
start: Mapped[datetime] = mapped_column(DATETIME_TYPE, index=True)

View File

@@ -46,7 +46,6 @@ class ReolinkVODMediaSource(MediaSource):
"""Initialize ReolinkVODMediaSource."""
super().__init__(DOMAIN)
self.hass = hass
self.data: dict[str, ReolinkData] = hass.data[DOMAIN]
async def async_resolve_media(self, item: MediaSourceItem) -> PlayMedia:
"""Resolve media to a url."""
@@ -57,7 +56,8 @@ class ReolinkVODMediaSource(MediaSource):
_, config_entry_id, channel_str, stream_res, filename = identifier
channel = int(channel_str)
host = self.data[config_entry_id].host
data: dict[str, ReolinkData] = self.hass.data[DOMAIN]
host = data[config_entry_id].host
vod_type = VodRequestType.RTMP
if host.api.is_nvr:
@@ -130,7 +130,8 @@ class ReolinkVODMediaSource(MediaSource):
if config_entry.state != ConfigEntryState.LOADED:
continue
channels: list[str] = []
host = self.data[config_entry.entry_id].host
data: dict[str, ReolinkData] = self.hass.data[DOMAIN]
host = data[config_entry.entry_id].host
entities = er.async_entries_for_config_entry(
entity_reg, config_entry.entry_id
)
@@ -187,7 +188,8 @@ class ReolinkVODMediaSource(MediaSource):
self, config_entry_id: str, channel: int
) -> BrowseMediaSource:
"""Allow the user to select the high or low playback resolution, (low loads faster)."""
host = self.data[config_entry_id].host
data: dict[str, ReolinkData] = self.hass.data[DOMAIN]
host = data[config_entry_id].host
main_enc = await host.api.get_encoding(channel, "main")
if main_enc == "h265":
@@ -236,7 +238,8 @@ class ReolinkVODMediaSource(MediaSource):
self, config_entry_id: str, channel: int, stream: str
) -> BrowseMediaSource:
"""Return all days on which recordings are available for a reolink camera."""
host = self.data[config_entry_id].host
data: dict[str, ReolinkData] = self.hass.data[DOMAIN]
host = data[config_entry_id].host
# We want today of the camera, not necessarily today of the server
now = host.api.time() or await host.api.async_get_time()
@@ -288,7 +291,8 @@ class ReolinkVODMediaSource(MediaSource):
day: int,
) -> BrowseMediaSource:
"""Return all recording files on a specific day of a Reolink camera."""
host = self.data[config_entry_id].host
data: dict[str, ReolinkData] = self.hass.data[DOMAIN]
host = data[config_entry_id].host
start = dt.datetime(year, month, day, hour=0, minute=0, second=0)
end = dt.datetime(year, month, day, hour=23, minute=59, second=59)

View File

@@ -13,5 +13,5 @@
"documentation": "https://www.home-assistant.io/integrations/ring",
"iot_class": "cloud_polling",
"loggers": ["ring_doorbell"],
"requirements": ["ring-doorbell[listen]==0.8.8"]
"requirements": ["ring-doorbell[listen]==0.8.9"]
}

View File

@@ -5,6 +5,6 @@
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/romy",
"iot_class": "local_polling",
"requirements": ["romy==0.0.7"],
"requirements": ["romy==0.0.10"],
"zeroconf": ["_aicu-http._tcp.local."]
}

View File

@@ -24,7 +24,7 @@
"documentation": "https://www.home-assistant.io/integrations/roomba",
"iot_class": "local_push",
"loggers": ["paho_mqtt", "roombapy"],
"requirements": ["roombapy==1.6.13"],
"requirements": ["roombapy==1.8.1"],
"zeroconf": [
{
"type": "_amzn-alexa._tcp.local.",

View File

@@ -54,7 +54,7 @@ PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
vol.Optional(CONF_HOUSE_NUMBER_SUFFIX, default=""): cv.string,
vol.Optional(CONF_NAME, default="Rova"): cv.string,
vol.Optional(CONF_MONITORED_CONDITIONS, default=["bio"]): vol.All(
cv.ensure_list, [vol.In(SENSOR_TYPES)]
cv.ensure_list, [vol.In(["bio", "paper", "plastic", "residual"])]
),
}
)

View File

@@ -234,3 +234,5 @@ DEVICES_WITHOUT_FIRMWARE_CHANGELOG = (
)
CONF_GEN = "gen"
SHELLY_PLUS_RGBW_CHANNELS = 4

View File

@@ -14,6 +14,7 @@ from homeassistant.components.light import (
ATTR_RGB_COLOR,
ATTR_RGBW_COLOR,
ATTR_TRANSITION,
DOMAIN as LIGHT_DOMAIN,
ColorMode,
LightEntity,
LightEntityFeature,
@@ -34,12 +35,14 @@ from .const import (
RGBW_MODELS,
RPC_MIN_TRANSITION_TIME_SEC,
SHBLB_1_RGB_EFFECTS,
SHELLY_PLUS_RGBW_CHANNELS,
STANDARD_RGB_EFFECTS,
)
from .coordinator import ShellyBlockCoordinator, ShellyRpcCoordinator, get_entry_data
from .entity import ShellyBlockEntity, ShellyRpcEntity
from .utils import (
async_remove_shelly_entity,
async_remove_shelly_rpc_entities,
brightness_to_percentage,
get_device_entry_gen,
get_rpc_key_ids,
@@ -118,14 +121,28 @@ def async_setup_rpc_entry(
return
if light_key_ids := get_rpc_key_ids(coordinator.device.status, "light"):
# Light mode remove RGB & RGBW entities, add light entities
async_remove_shelly_rpc_entities(
hass, LIGHT_DOMAIN, coordinator.mac, ["rgb:0", "rgbw:0"]
)
async_add_entities(RpcShellyLight(coordinator, id_) for id_ in light_key_ids)
return
light_keys = [f"light:{i}" for i in range(SHELLY_PLUS_RGBW_CHANNELS)]
if rgb_key_ids := get_rpc_key_ids(coordinator.device.status, "rgb"):
# RGB mode remove light & RGBW entities, add RGB entity
async_remove_shelly_rpc_entities(
hass, LIGHT_DOMAIN, coordinator.mac, [*light_keys, "rgbw:0"]
)
async_add_entities(RpcShellyRgbLight(coordinator, id_) for id_ in rgb_key_ids)
return
if rgbw_key_ids := get_rpc_key_ids(coordinator.device.status, "rgbw"):
# RGBW mode remove light & RGB entities, add RGBW entity
async_remove_shelly_rpc_entities(
hass, LIGHT_DOMAIN, coordinator.mac, [*light_keys, "rgb:0"]
)
async_add_entities(RpcShellyRgbwLight(coordinator, id_) for id_ in rgbw_key_ids)

View File

@@ -222,7 +222,7 @@ class RpcUpdateEntity(ShellyRpcAttributeEntity, UpdateEntity):
) -> None:
"""Initialize update entity."""
super().__init__(coordinator, key, attribute, description)
self._ota_in_progress: bool = False
self._ota_in_progress: bool | int = False
self._attr_release_url = get_release_url(
coordinator.device.gen, coordinator.model, description.beta
)
@@ -237,14 +237,13 @@ class RpcUpdateEntity(ShellyRpcAttributeEntity, UpdateEntity):
@callback
def _ota_progress_callback(self, event: dict[str, Any]) -> None:
"""Handle device OTA progress."""
if self._ota_in_progress:
if self.in_progress is not False:
event_type = event["event"]
if event_type == OTA_BEGIN:
self._attr_in_progress = 0
self._ota_in_progress = 0
elif event_type == OTA_PROGRESS:
self._attr_in_progress = event["progress_percent"]
self._ota_in_progress = event["progress_percent"]
elif event_type in (OTA_ERROR, OTA_SUCCESS):
self._attr_in_progress = False
self._ota_in_progress = False
self.async_write_ha_state()
@@ -262,6 +261,11 @@ class RpcUpdateEntity(ShellyRpcAttributeEntity, UpdateEntity):
return self.installed_version
@property
def in_progress(self) -> bool | int:
"""Update installation in progress."""
return self._ota_in_progress
async def async_install(
self, version: str | None, backup: bool, **kwargs: Any
) -> None:
@@ -292,7 +296,7 @@ class RpcUpdateEntity(ShellyRpcAttributeEntity, UpdateEntity):
await self.coordinator.async_shutdown_device_and_start_reauth()
else:
self._ota_in_progress = True
LOGGER.debug("OTA update call successful")
LOGGER.info("OTA update call for %s successful", self.coordinator.name)
class RpcSleepingUpdateEntity(

View File

@@ -488,3 +488,15 @@ async def async_shutdown_device(device: BlockDevice | RpcDevice) -> None:
await device.shutdown()
if isinstance(device, BlockDevice):
device.shutdown()
@callback
def async_remove_shelly_rpc_entities(
hass: HomeAssistant, domain: str, mac: str, keys: list[str]
) -> None:
"""Remove RPC based Shelly entity."""
entity_reg = er_async_get(hass)
for key in keys:
if entity_id := entity_reg.async_get_entity_id(domain, DOMAIN, f"{mac}-{key}"):
LOGGER.debug("Removing entity: %s", entity_id)
entity_reg.async_remove(entity_id)

View File

@@ -270,7 +270,7 @@ class SnmpData:
"SNMP OID %s received type=%s and data %s",
self._baseoid,
type(value),
bytes(value),
value,
)
if isinstance(value, NoSuchObject):
_LOGGER.error(

View File

@@ -7,6 +7,7 @@ from contextlib import suppress
from functools import partial
import logging
from typing import cast
import urllib.parse
from soco.data_structures import DidlObject
from soco.ms_data_structures import MusicServiceItem
@@ -60,12 +61,14 @@ def get_thumbnail_url_full(
media_content_id,
media_content_type,
)
return getattr(item, "album_art_uri", None)
return urllib.parse.unquote(getattr(item, "album_art_uri", ""))
return get_browse_image_url(
media_content_type,
media_content_id,
media_image_id,
return urllib.parse.unquote(
get_browse_image_url(
media_content_type,
media_content_id,
media_image_id,
)
)
@@ -166,6 +169,7 @@ def build_item_response(
payload["idstring"] = "A:ALBUMARTIST/" + "/".join(
payload["idstring"].split("/")[2:]
)
payload["idstring"] = urllib.parse.unquote(payload["idstring"])
try:
search_type = MEDIA_TYPES_TO_SONOS[payload["search_type"]]
@@ -201,7 +205,7 @@ def build_item_response(
if not title:
try:
title = payload["idstring"].split("/")[1]
title = urllib.parse.unquote(payload["idstring"].split("/")[1])
except IndexError:
title = LIBRARY_TITLES_MAPPING[payload["idstring"]]
@@ -493,10 +497,24 @@ def get_media(
"""Fetch media/album."""
search_type = MEDIA_TYPES_TO_SONOS.get(search_type, search_type)
if search_type == "playlists":
# Format is S:TITLE or S:ITEM_ID
splits = item_id.split(":")
title = splits[1] if len(splits) > 1 else None
playlist = next(
(
p
for p in media_library.get_playlists()
if (item_id == p.item_id or title == p.title)
),
None,
)
return playlist
if not item_id.startswith("A:ALBUM") and search_type == SONOS_ALBUM:
item_id = "A:ALBUMARTIST/" + "/".join(item_id.split("/")[2:])
search_term = item_id.split("/")[-1]
search_term = urllib.parse.unquote(item_id.split("/")[-1])
matches = media_library.get_music_library_information(
search_type, search_term=search_term, full_album_art_uri=True
)

View File

@@ -626,13 +626,13 @@ class SonosMediaPlayerEntity(SonosEntity, MediaPlayerEntity):
soco.play_uri(media_id, force_radio=is_radio)
elif media_type == MediaType.PLAYLIST:
if media_id.startswith("S:"):
item = media_browser.get_media(self.media.library, media_id, media_type)
soco.play_uri(item.get_uri())
return
try:
playlist = media_browser.get_media(
self.media.library, media_id, media_type
)
else:
playlists = soco.get_sonos_playlists(complete_result=True)
playlist = next(p for p in playlists if p.title == media_id)
except StopIteration:
playlist = next((p for p in playlists if p.title == media_id), None)
if not playlist:
_LOGGER.error('Could not find a Sonos playlist named "%s"', media_id)
else:
soco.clear_queue()

View File

@@ -25,10 +25,11 @@ async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> b
partial(speedtest.Speedtest, secure=True)
)
coordinator = SpeedTestDataCoordinator(hass, config_entry, api)
await hass.async_add_executor_job(coordinator.update_servers)
except speedtest.SpeedtestException as err:
raise ConfigEntryNotReady from err
hass.data[DOMAIN] = coordinator
async def _async_finish_startup(hass: HomeAssistant) -> None:
"""Run this only when HA has finished its startup."""
await coordinator.async_config_entry_first_refresh()
@@ -36,8 +37,6 @@ async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> b
# Don't start a speedtest during startup
async_at_started(hass, _async_finish_startup)
hass.data[DOMAIN] = coordinator
await hass.config_entries.async_forward_entry_setups(config_entry, PLATFORMS)
config_entry.async_on_unload(config_entry.add_update_listener(update_listener))

View File

@@ -58,14 +58,14 @@ class StarlinkUpdateCoordinator(DataUpdateCoordinator[StarlinkData]):
async def _async_update_data(self) -> StarlinkData:
async with asyncio.timeout(4):
try:
status, location, sleep = await asyncio.gather(
self.hass.async_add_executor_job(status_data, self.channel_context),
self.hass.async_add_executor_job(
location_data, self.channel_context
),
self.hass.async_add_executor_job(
get_sleep_config, self.channel_context
),
status = await self.hass.async_add_executor_job(
status_data, self.channel_context
)
location = await self.hass.async_add_executor_job(
location_data, self.channel_context
)
sleep = await self.hass.async_add_executor_job(
get_sleep_config, self.channel_context
)
return StarlinkData(location, sleep, *status)
except GrpcError as exc:

View File

@@ -10,6 +10,7 @@ import math
from homeassistant.components.time import TimeEntity, TimeEntityDescription
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from .const import DOMAIN
@@ -62,14 +63,22 @@ class StarlinkTimeEntity(StarlinkEntity, TimeEntity):
def _utc_minutes_to_time(utc_minutes: int, timezone: tzinfo) -> time:
hour = math.floor(utc_minutes / 60)
minute = utc_minutes % 60
utc = datetime.now(UTC).replace(hour=hour, minute=minute, second=0, microsecond=0)
try:
utc = datetime.now(UTC).replace(
hour=hour, minute=minute, second=0, microsecond=0
)
except ValueError as exc:
raise HomeAssistantError from exc
return utc.astimezone(timezone).time()
def _time_to_utc_minutes(t: time, timezone: tzinfo) -> int:
zoned_time = datetime.now(timezone).replace(
hour=t.hour, minute=t.minute, second=0, microsecond=0
)
try:
zoned_time = datetime.now(timezone).replace(
hour=t.hour, minute=t.minute, second=0, microsecond=0
)
except ValueError as exc:
raise HomeAssistantError from exc
utc_time = zoned_time.astimezone(UTC).time()
return (utc_time.hour * 60) + utc_time.minute

View File

@@ -105,6 +105,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
if (
SynoSurveillanceStation.INFO_API_KEY in available_apis
and SynoSurveillanceStation.HOME_MODE_API_KEY in available_apis
and api.surveillance_station is not None
):
coordinator_switches = SynologyDSMSwitchUpdateCoordinator(hass, entry, api)
await coordinator_switches.async_config_entry_first_refresh()

View File

@@ -75,7 +75,7 @@
}
},
"services": {
"reboot": "mdi:reboot",
"reboot": "mdi:restart",
"shutdown": "mdi:power"
}
}

View File

@@ -10,6 +10,6 @@
"iot_class": "local_push",
"loggers": ["systembridgeconnector"],
"quality_scale": "silver",
"requirements": ["systembridgeconnector==4.0.3"],
"requirements": ["systembridgeconnector==4.0.3", "systembridgemodels==4.0.4"],
"zeroconf": ["_system-bridge._tcp.local."]
}

View File

@@ -166,7 +166,6 @@ class LogEntry:
"level",
"message",
"exception",
"extracted_tb",
"root_cause",
"source",
"count",
@@ -200,7 +199,6 @@ class LogEntry:
else:
self.source = (record.pathname, record.lineno)
self.count = 1
self.extracted_tb = extracted_tb
self.key = (self.name, self.source, self.root_cause)
def to_dict(self) -> dict[str, Any]:

View File

@@ -34,7 +34,7 @@ DESCRIPTIONS: tuple[TessieBinarySensorEntityDescription, ...] = (
is_on=lambda x: x == TessieState.ONLINE,
),
TessieBinarySensorEntityDescription(
key="charge_state_battery_heater_on",
key="climate_state_battery_heater",
device_class=BinarySensorDeviceClass.HEAT,
entity_category=EntityCategory.DIAGNOSTIC,
),

View File

@@ -252,7 +252,7 @@
"state": {
"name": "Status"
},
"charge_state_battery_heater_on": {
"climate_state_battery_heater": {
"name": "Battery heater"
},
"charge_state_charge_enable_request": {

View File

@@ -1,6 +1,6 @@
{
"services": {
"start": "mdi:start",
"start": "mdi:play",
"pause": "mdi:pause",
"cancel": "mdi:cancel",
"finish": "mdi:check",

View File

@@ -107,6 +107,7 @@ SENSOR_TYPES: tuple[TractiveSensorEntityDescription, ...] = (
entity_category=EntityCategory.DIAGNOSTIC,
device_class=SensorDeviceClass.ENUM,
options=[
"inaccurate_position",
"not_reporting",
"operational",
"system_shutdown_user",

View File

@@ -70,6 +70,7 @@
"tracker_state": {
"name": "Tracker state",
"state": {
"inaccurate_position": "Inaccurate position",
"not_reporting": "Not reporting",
"operational": "Operational",
"system_shutdown_user": "System shutdown user",

View File

@@ -8,7 +8,7 @@
"iot_class": "local_push",
"loggers": ["aiounifi"],
"quality_scale": "platinum",
"requirements": ["aiounifi==73"],
"requirements": ["aiounifi==74"],
"ssdp": [
{
"manufacturer": "Ubiquiti Networks",

View File

@@ -13,7 +13,7 @@
"velbus-packet",
"velbus-protocol"
],
"requirements": ["velbus-aio==2023.12.0"],
"requirements": ["velbus-aio==2024.4.0"],
"usb": [
{
"vid": "10CF",

View File

@@ -5,5 +5,5 @@
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/weatherflow_cloud",
"iot_class": "cloud_polling",
"requirements": ["weatherflow4py==0.2.17"]
"requirements": ["weatherflow4py==0.2.20"]
}

View File

@@ -0,0 +1,49 @@
"""Diagnostics support for Whirlpool."""
from __future__ import annotations
from typing import Any
from homeassistant.components.diagnostics import async_redact_data
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from . import WhirlpoolData
from .const import DOMAIN
TO_REDACT = {
"SERIAL_NUMBER",
"macaddress",
"username",
"password",
"token",
"unique_id",
"SAID",
}
async def async_get_config_entry_diagnostics(
hass: HomeAssistant,
config_entry: ConfigEntry,
) -> dict[str, Any]:
"""Return diagnostics for a config entry."""
whirlpool: WhirlpoolData = hass.data[DOMAIN][config_entry.entry_id]
diagnostics_data = {
"Washer_dryers": {
wd["NAME"]: dict(wd.items())
for wd in whirlpool.appliances_manager.washer_dryers
},
"aircons": {
ac["NAME"]: dict(ac.items()) for ac in whirlpool.appliances_manager.aircons
},
"ovens": {
oven["NAME"]: dict(oven.items())
for oven in whirlpool.appliances_manager.ovens
},
}
return {
"config_entry": async_redact_data(config_entry.as_dict(), TO_REDACT),
"appliances": async_redact_data(diagnostics_data, TO_REDACT),
}
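The new diagnostics module routes everything through async_redact_data so serial numbers and credentials never land in a diagnostics download. A small sketch of the redaction on a made-up dict (keys chosen to mirror TO_REDACT above):

from homeassistant.components.diagnostics import async_redact_data

sample = {"SAID": "ABC123", "NAME": "Washer", "username": "user@example.com"}
async_redact_data(sample, {"SAID", "username"})
# -> {"SAID": "**REDACTED**", "NAME": "Washer", "username": "**REDACTED**"}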

View File

@@ -7,5 +7,5 @@
"integration_type": "hub",
"iot_class": "cloud_push",
"loggers": ["whirlpool"],
"requirements": ["whirlpool-sixth-sense==0.18.6"]
"requirements": ["whirlpool-sixth-sense==0.18.7"]
}

View File

@@ -2,6 +2,8 @@
from __future__ import annotations
from functools import partial
from holidays import HolidayBase, country_holidays
from homeassistant.config_entries import ConfigEntry
@@ -13,7 +15,7 @@ from homeassistant.helpers.issue_registry import IssueSeverity, async_create_iss
from .const import CONF_PROVINCE, DOMAIN, PLATFORMS
def _validate_country_and_province(
async def _async_validate_country_and_province(
hass: HomeAssistant, entry: ConfigEntry, country: str | None, province: str | None
) -> None:
"""Validate country and province."""
@@ -21,7 +23,7 @@ def _validate_country_and_province(
if not country:
return
try:
country_holidays(country)
await hass.async_add_executor_job(country_holidays, country)
except NotImplementedError as ex:
async_create_issue(
hass,
@@ -39,7 +41,9 @@ def _validate_country_and_province(
if not province:
return
try:
country_holidays(country, subdiv=province)
await hass.async_add_executor_job(
partial(country_holidays, country, subdiv=province)
)
except NotImplementedError as ex:
async_create_issue(
hass,
@@ -66,10 +70,12 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
country: str | None = entry.options.get(CONF_COUNTRY)
province: str | None = entry.options.get(CONF_PROVINCE)
_validate_country_and_province(hass, entry, country, province)
await _async_validate_country_and_province(hass, entry, country, province)
if country and CONF_LANGUAGE not in entry.options:
cls: HolidayBase = country_holidays(country, subdiv=province)
cls: HolidayBase = await hass.async_add_executor_job(
partial(country_holidays, country, subdiv=province)
)
default_language = cls.default_language
new_options = entry.options.copy()
new_options[CONF_LANGUAGE] = default_language
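The Workday change moves the blocking country_holidays lookups into the executor. Because hass.async_add_executor_job only forwards positional arguments, the subdiv keyword has to be pre-bound with functools.partial; a reduced sketch of the pattern (the wrapper function itself is illustrative):

from functools import partial

from holidays import country_holidays

async def _load_holidays(hass, country: str, province: str | None):
    # Positional arguments can be passed straight through to the executor helper.
    await hass.async_add_executor_job(country_holidays, country)
    # Keyword arguments must be bound first; the helper accepts no **kwargs.
    return await hass.async_add_executor_job(
        partial(country_holidays, country, subdiv=province)
    )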

View File

@@ -7,5 +7,5 @@
"iot_class": "local_polling",
"loggers": ["holidays"],
"quality_scale": "internal",
"requirements": ["holidays==0.45"]
"requirements": ["holidays==0.46"]
}

View File

@@ -17,7 +17,7 @@
"switch_set_wifi_led_off": "mdi:wifi-off",
"switch_set_power_price": "mdi:currency-usd",
"switch_set_power_mode": "mdi:power",
"vacuum_remote_control_start": "mdi:start",
"vacuum_remote_control_start": "mdi:play",
"vacuum_remote_control_stop": "mdi:stop",
"vacuum_remote_control_move": "mdi:remote",
"vacuum_remote_control_move_step": "mdi:remote",

View File

@@ -8,5 +8,5 @@
"iot_class": "local_push",
"loggers": ["zeroconf"],
"quality_scale": "internal",
"requirements": ["zeroconf==0.131.0"]
"requirements": ["zeroconf==0.132.0"]
}

View File

@@ -124,8 +124,8 @@ async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> b
zha_data = get_zha_data(hass)
if zha_data.yaml_config.get(CONF_ENABLE_QUIRKS, True):
setup_quirks(
custom_quirks_path=zha_data.yaml_config.get(CONF_CUSTOM_QUIRKS_PATH)
await hass.async_add_import_executor_job(
setup_quirks, zha_data.yaml_config.get(CONF_CUSTOM_QUIRKS_PATH)
)
# Load and cache device trigger information early

View File

@@ -553,6 +553,13 @@ class OtaClientClusterHandler(ClientClusterHandler):
Ota.AttributeDefs.current_file_version.name: True,
}
@callback
def attribute_updated(self, attrid: int, value: Any, timestamp: Any) -> None:
"""Handle an attribute updated on this cluster."""
# We intentionally avoid the `ClientClusterHandler` attribute update handler:
# it emits a logbook event on every update, which pollutes the logbook
ClusterHandler.attribute_updated(self, attrid, value, timestamp)
@property
def current_file_version(self) -> int | None:
"""Return cached value of current_file_version attribute."""

View File

@@ -130,14 +130,9 @@ class ZHAFirmwareUpdateEntity(
def _get_cluster_version(self) -> str | None:
"""Synchronize current file version with the cluster."""
device = self._ota_cluster_handler._endpoint.device # pylint: disable=protected-access
if self._ota_cluster_handler.current_file_version is not None:
return f"0x{self._ota_cluster_handler.current_file_version:08x}"
if device.sw_version is not None:
return device.sw_version
return None
@callback

View File

@@ -18,7 +18,7 @@ from .util.signal_type import SignalType
APPLICATION_NAME: Final = "HomeAssistant"
MAJOR_VERSION: Final = 2024
MINOR_VERSION: Final = 4
PATCH_VERSION: Final = "0b3"
PATCH_VERSION: Final = "1"
__short_version__: Final = f"{MAJOR_VERSION}.{MINOR_VERSION}"
__version__: Final = f"{__short_version__}.{PATCH_VERSION}"
REQUIRED_PYTHON_VER: Final[tuple[int, int, int]] = (3, 12, 0)

View File

@@ -401,6 +401,7 @@ class HomeAssistant:
self.services = ServiceRegistry(self)
self.states = StateMachine(self.bus, self.loop)
self.config = Config(self, config_dir)
self.config.async_initialize()
self.components = loader.Components(self)
self.helpers = loader.Helpers(self)
self.state: CoreState = CoreState.not_running
@@ -774,8 +775,11 @@ class HomeAssistant:
) -> asyncio.Future[_T]:
"""Add an executor job from within the event loop."""
task = self.loop.run_in_executor(None, target, *args)
self._tasks.add(task)
task.add_done_callback(self._tasks.remove)
tracked = asyncio.current_task() in self._tasks
task_bucket = self._tasks if tracked else self._background_tasks
task_bucket.add(task)
task.add_done_callback(task_bucket.remove)
return task
@@ -2586,12 +2590,12 @@ class ServiceRegistry:
class Config:
"""Configuration settings for Home Assistant."""
_store: Config._ConfigStore
def __init__(self, hass: HomeAssistant, config_dir: str) -> None:
"""Initialize a new config object."""
self.hass = hass
self._store = self._ConfigStore(self.hass, config_dir)
self.latitude: float = 0
self.longitude: float = 0
@@ -2642,6 +2646,13 @@ class Config:
# If Home Assistant is running in safe mode
self.safe_mode: bool = False
def async_initialize(self) -> None:
"""Finish initializing a config object.
This must be called before the config object is used.
"""
self._store = self._ConfigStore(self.hass)
def distance(self, lat: float, lon: float) -> float | None:
"""Calculate distance from Home Assistant.
@@ -2847,7 +2858,6 @@ class Config:
"country": self.country,
"language": self.language,
}
await self._store.async_save(data)
# Circular dependency prevents us from generating the class at top level
@@ -2857,7 +2867,7 @@ class Config:
class _ConfigStore(Store[dict[str, Any]]):
"""Class to help storing Config data."""
def __init__(self, hass: HomeAssistant, config_dir: str) -> None:
def __init__(self, hass: HomeAssistant) -> None:
"""Initialize storage class."""
super().__init__(
hass,
@@ -2866,7 +2876,6 @@ class Config:
private=True,
atomic_writes=True,
minor_version=CORE_STORAGE_MINOR_VERSION,
config_dir=config_dir,
)
self._original_unit_system: str | None = None # from old store 1.1
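Two separate changes are folded into the HomeAssistant/Config diff above: Config now finishes setting up its store via async_initialize() (so config_dir no longer needs to be threaded through), and async_add_executor_job files the resulting future into the tracked or background bucket depending on whether the calling task is itself tracked. A stripped-down sketch of that second idea (class and attribute names are invented):

import asyncio

class TaskBuckets:
    def __init__(self) -> None:
        self.tracked: set[asyncio.Future] = set()      # awaited before shutdown
        self.background: set[asyncio.Future] = set()   # cancelled at shutdown

    def create_tracked_task(self, coro) -> asyncio.Task:
        task = asyncio.get_running_loop().create_task(coro)
        self.tracked.add(task)
        task.add_done_callback(self.tracked.discard)
        return task

    def add_executor_job(self, func, *args) -> asyncio.Future:
        future = asyncio.get_running_loop().run_in_executor(None, func, *args)
        # Inherit the caller's tracking: a job scheduled from a tracked task is
        # tracked too, everything else lands in the background bucket.
        bucket = self.tracked if asyncio.current_task() in self.tracked else self.background
        bucket.add(future)
        future.add_done_callback(bucket.remove)
        return future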

View File

@@ -3741,7 +3741,7 @@
"integration_type": "device",
"config_flow": true,
"iot_class": "assumed_state",
"name": "Motionblinds BLE"
"name": "Motionblinds Bluetooth"
}
}
},

View File

@@ -198,15 +198,16 @@ async def async_check_ha_config_file( # noqa: C901
# Check if the integration has a custom config validator
config_validator = None
try:
config_validator = await integration.async_get_platform("config")
except ImportError as err:
# Filter out import error of the config platform.
# If the config platform contains bad imports, make sure
# that still fails.
if err.name != f"{integration.pkg_path}.config":
result.add_error(f"Error importing config platform {domain}: {err}")
continue
if integration.platforms_exists(("config",)):
try:
config_validator = await integration.async_get_platform("config")
except ImportError as err:
# Filter out import error of the config platform.
# If the config platform contains bad imports, make sure
# that still fails.
if err.name != f"{integration.pkg_path}.config":
result.add_error(f"Error importing config platform {domain}: {err}")
continue
if config_validator is not None and hasattr(
config_validator, "async_validate_config"

View File

@@ -1855,6 +1855,12 @@ def determine_script_action(action: dict[str, Any]) -> str:
"""Determine action type."""
if not (actions := ACTIONS_SET.intersection(action)):
raise ValueError("Unable to determine action")
if len(actions) > 1:
# Ambiguous action, select the first one in the
# order of the ACTIONS_MAP
for action_key, _script_action in ACTIONS_MAP.items():
if action_key in actions:
return _script_action
return ACTIONS_MAP[actions.pop()]
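The added branch makes determine_script_action deterministic when an action dictionary matches more than one known key: rather than popping from an unordered set, the first match in the insertion-ordered ACTIONS_MAP wins. A self-contained sketch with an invented subset of keys:

ACTIONS_MAP = {"condition": "condition", "device_id": "device", "service": "call_service"}
ACTIONS_SET = set(ACTIONS_MAP)

def determine_action(action: dict) -> str:
    matches = ACTIONS_SET.intersection(action)
    if not matches:
        raise ValueError("Unable to determine action")
    if len(matches) > 1:
        # Ambiguous: the first matching key in ACTIONS_MAP order decides.
        return next(ACTIONS_MAP[key] for key in ACTIONS_MAP if key in matches)
    return ACTIONS_MAP[matches.pop()]

# A device condition carries both "condition" and "device_id"; with map ordering it
# always resolves to "condition" instead of depending on set iteration order.
determine_action({"condition": "device", "device_id": "abc123", "domain": "light"})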

View File

@@ -24,7 +24,13 @@ from homeassistant.core import Context, HomeAssistant, State, callback
from homeassistant.exceptions import HomeAssistantError
from homeassistant.loader import bind_hass
from . import area_registry, config_validation as cv, device_registry, entity_registry
from . import (
area_registry,
config_validation as cv,
device_registry,
entity_registry,
floor_registry,
)
_LOGGER = logging.getLogger(__name__)
_SlotsType = dict[str, Any]
@@ -144,16 +150,18 @@ class NoStatesMatchedError(IntentError):
def __init__(
self,
name: str | None,
area: str | None,
domains: set[str] | None,
device_classes: set[str] | None,
name: str | None = None,
area: str | None = None,
floor: str | None = None,
domains: set[str] | None = None,
device_classes: set[str] | None = None,
) -> None:
"""Initialize error."""
super().__init__()
self.name = name
self.area = area
self.floor = floor
self.domains = domains
self.device_classes = device_classes
@@ -220,12 +228,35 @@ def _find_area(
return None
def _filter_by_area(
def _find_floor(
id_or_name: str, floors: floor_registry.FloorRegistry
) -> floor_registry.FloorEntry | None:
"""Find an floor by id or name, checking aliases too."""
floor = floors.async_get_floor(id_or_name) or floors.async_get_floor_by_name(
id_or_name
)
if floor is not None:
return floor
# Check floor aliases
for maybe_floor in floors.floors.values():
if not maybe_floor.aliases:
continue
for floor_alias in maybe_floor.aliases:
if id_or_name == floor_alias.casefold():
return maybe_floor
return None
def _filter_by_areas(
states_and_entities: list[tuple[State, entity_registry.RegistryEntry | None]],
area: area_registry.AreaEntry,
areas: Iterable[area_registry.AreaEntry],
devices: device_registry.DeviceRegistry,
) -> Iterable[tuple[State, entity_registry.RegistryEntry | None]]:
"""Filter state/entity pairs by an area."""
filter_area_ids: set[str | None] = {a.id for a in areas}
entity_area_ids: dict[str, str | None] = {}
for _state, entity in states_and_entities:
if entity is None:
@@ -241,7 +272,7 @@ def _filter_by_area(
entity_area_ids[entity.id] = device.area_id
for state, entity in states_and_entities:
if (entity is not None) and (entity_area_ids.get(entity.id) == area.id):
if (entity is not None) and (entity_area_ids.get(entity.id) in filter_area_ids):
yield (state, entity)
@@ -252,11 +283,14 @@ def async_match_states(
name: str | None = None,
area_name: str | None = None,
area: area_registry.AreaEntry | None = None,
floor_name: str | None = None,
floor: floor_registry.FloorEntry | None = None,
domains: Collection[str] | None = None,
device_classes: Collection[str] | None = None,
states: Iterable[State] | None = None,
entities: entity_registry.EntityRegistry | None = None,
areas: area_registry.AreaRegistry | None = None,
floors: floor_registry.FloorRegistry | None = None,
devices: device_registry.DeviceRegistry | None = None,
assistant: str | None = None,
) -> Iterable[State]:
@@ -268,6 +302,15 @@ def async_match_states(
if entities is None:
entities = entity_registry.async_get(hass)
if devices is None:
devices = device_registry.async_get(hass)
if areas is None:
areas = area_registry.async_get(hass)
if floors is None:
floors = floor_registry.async_get(hass)
# Gather entities
states_and_entities: list[tuple[State, entity_registry.RegistryEntry | None]] = []
for state in states:
@@ -294,20 +337,35 @@ def async_match_states(
if _is_device_class(state, entity, device_classes)
]
filter_areas: list[area_registry.AreaEntry] = []
if (floor is None) and (floor_name is not None):
# Look up floor by name
floor = _find_floor(floor_name, floors)
if floor is None:
_LOGGER.warning("Floor not found: %s", floor_name)
return
if floor is not None:
filter_areas = [
a for a in areas.async_list_areas() if a.floor_id == floor.floor_id
]
if (area is None) and (area_name is not None):
# Look up area by name
if areas is None:
areas = area_registry.async_get(hass)
area = _find_area(area_name, areas)
assert area is not None, f"No area named {area_name}"
if area is None:
_LOGGER.warning("Area not found: %s", area_name)
return
if area is not None:
# Filter by states/entities by area
if devices is None:
devices = device_registry.async_get(hass)
filter_areas = [area]
states_and_entities = list(_filter_by_area(states_and_entities, area, devices))
if filter_areas:
# Filter by states/entities by area
states_and_entities = list(
_filter_by_areas(states_and_entities, filter_areas, devices)
)
if assistant is not None:
# Filter by exposure
@@ -318,9 +376,6 @@ def async_match_states(
]
if name is not None:
if devices is None:
devices = device_registry.async_get(hass)
# Filter by name
name = name.casefold()
@@ -389,7 +444,7 @@ class DynamicServiceIntentHandler(IntentHandler):
"""
slot_schema = {
vol.Any("name", "area"): cv.string,
vol.Any("name", "area", "floor"): cv.string,
vol.Optional("domain"): vol.All(cv.ensure_list, [cv.string]),
vol.Optional("device_class"): vol.All(cv.ensure_list, [cv.string]),
}
@@ -453,7 +508,7 @@ class DynamicServiceIntentHandler(IntentHandler):
# Don't match on name if targeting all entities
entity_name = None
# Look up area first to fail early
# Look up area to fail early
area_slot = slots.get("area", {})
area_id = area_slot.get("value")
area_name = area_slot.get("text")
@@ -464,6 +519,17 @@ class DynamicServiceIntentHandler(IntentHandler):
if area is None:
raise IntentHandleError(f"No area named {area_name}")
# Look up floor to fail early
floor_slot = slots.get("floor", {})
floor_id = floor_slot.get("value")
floor_name = floor_slot.get("text")
floor: floor_registry.FloorEntry | None = None
if floor_id is not None:
floors = floor_registry.async_get(hass)
floor = floors.async_get_floor(floor_id)
if floor is None:
raise IntentHandleError(f"No floor named {floor_name}")
# Optional domain/device class filters.
# Convert to sets for speed.
domains: set[str] | None = None
@@ -480,6 +546,7 @@ class DynamicServiceIntentHandler(IntentHandler):
hass,
name=entity_name,
area=area,
floor=floor,
domains=domains,
device_classes=device_classes,
assistant=intent_obj.assistant,
@@ -491,6 +558,7 @@ class DynamicServiceIntentHandler(IntentHandler):
raise NoStatesMatchedError(
name=entity_text or entity_name,
area=area_name or area_id,
floor=floor_name or floor_id,
domains=domains,
device_classes=device_classes,
)
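With these changes async_match_states can target an entire floor: the floor is resolved by id, name, or alias, expanded into the set of areas whose floor_id matches, and the candidate state/entity pairs are filtered against that area set. Usage sketch, assuming a running hass instance and invented floor/domain values:

from homeassistant.helpers import intent

upstairs_lights = list(
    intent.async_match_states(hass, floor_name="Upstairs", domains={"light"})
)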

View File

@@ -844,6 +844,48 @@ class EntitySelector(Selector[EntitySelectorConfig]):
return cast(list, vol.Schema([validate])(data)) # Output is a list
class FloorSelectorConfig(TypedDict, total=False):
"""Class to represent an floor selector config."""
entity: EntityFilterSelectorConfig | list[EntityFilterSelectorConfig]
device: DeviceFilterSelectorConfig | list[DeviceFilterSelectorConfig]
multiple: bool
@SELECTORS.register("floor")
class FloorSelector(Selector[FloorSelectorConfig]):
"""Selector of a single or list of floors."""
selector_type = "floor"
CONFIG_SCHEMA = vol.Schema(
{
vol.Optional("entity"): vol.All(
cv.ensure_list,
[ENTITY_FILTER_SELECTOR_CONFIG_SCHEMA],
),
vol.Optional("device"): vol.All(
cv.ensure_list,
[DEVICE_FILTER_SELECTOR_CONFIG_SCHEMA],
),
vol.Optional("multiple", default=False): cv.boolean,
}
)
def __init__(self, config: FloorSelectorConfig | None = None) -> None:
"""Instantiate a selector."""
super().__init__(config)
def __call__(self, data: Any) -> str | list[str]:
"""Validate the passed selection."""
if not self.config["multiple"]:
floor_id: str = vol.Schema(str)(data)
return floor_id
if not isinstance(data, list):
raise vol.Invalid("Value should be a list")
return [vol.Schema(str)(val) for val in data]
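Like the area selector it mirrors, the floor selector validates either a single floor id or, with multiple set, a list of them. A quick sketch of both modes (floor ids are invented):

from homeassistant.helpers.selector import FloorSelector

FloorSelector({"multiple": False})("ground_floor")               # -> "ground_floor"
FloorSelector({"multiple": True})(["ground_floor", "upstairs"])  # -> ["ground_floor", "upstairs"]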
class IconSelectorConfig(TypedDict, total=False):
"""Class to represent an icon selector config."""

View File

@@ -95,9 +95,7 @@ async def async_migrator(
return config
def get_internal_store_manager(
hass: HomeAssistant, config_dir: str | None = None
) -> _StoreManager:
def get_internal_store_manager(hass: HomeAssistant) -> _StoreManager:
"""Get the store manager.
This function is not part of the API and should only be
@@ -105,7 +103,7 @@ def get_internal_store_manager(
guaranteed to be stable.
"""
if STORAGE_MANAGER not in hass.data:
manager = _StoreManager(hass, config_dir or hass.config.config_dir)
manager = _StoreManager(hass)
hass.data[STORAGE_MANAGER] = manager
return hass.data[STORAGE_MANAGER]
@@ -116,13 +114,13 @@ class _StoreManager:
The store manager is used to cache and manage storage files.
"""
def __init__(self, hass: HomeAssistant, config_dir: str) -> None:
def __init__(self, hass: HomeAssistant) -> None:
"""Initialize storage manager class."""
self._hass = hass
self._invalidated: set[str] = set()
self._files: set[str] | None = None
self._data_preload: dict[str, json_util.JsonValueType] = {}
self._storage_path: Path = Path(config_dir).joinpath(STORAGE_DIR)
self._storage_path: Path = Path(hass.config.config_dir).joinpath(STORAGE_DIR)
self._cancel_cleanup: asyncio.TimerHandle | None = None
async def async_initialize(self) -> None:
@@ -251,7 +249,6 @@ class Store(Generic[_T]):
encoder: type[JSONEncoder] | None = None,
minor_version: int = 1,
read_only: bool = False,
config_dir: str | None = None,
) -> None:
"""Initialize storage class."""
self.version = version
@@ -268,7 +265,7 @@ class Store(Generic[_T]):
self._atomic_writes = atomic_writes
self._read_only = read_only
self._next_write_time = 0.0
self._manager = get_internal_store_manager(hass, config_dir)
self._manager = get_internal_store_manager(hass)
@cached_property
def path(self):

View File

@@ -1408,6 +1408,12 @@ def floor_id(hass: HomeAssistant, lookup_value: Any) -> str | None:
floor_registry = fr.async_get(hass)
if floor := floor_registry.async_get_floor_by_name(str(lookup_value)):
return floor.floor_id
if aid := area_id(hass, lookup_value):
area_reg = area_registry.async_get(hass)
if area := area_reg.async_get_area(aid):
return area.floor_id
return None
@@ -1416,6 +1422,16 @@ def floor_name(hass: HomeAssistant, lookup_value: str) -> str | None:
floor_registry = fr.async_get(hass)
if floor := floor_registry.async_get_floor(lookup_value):
return floor.name
if aid := area_id(hass, lookup_value):
area_reg = area_registry.async_get(hass)
if (
(area := area_reg.async_get_area(aid))
and area.floor_id
and (floor := floor_registry.async_get_floor(area.floor_id))
):
return floor.name
return None
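The template helpers floor_id and floor_name now fall back through the area registry, so anything area_id can resolve (an area name, a device, or an entity) also resolves to its floor. A usage sketch, assuming a running hass instance with a 'Kitchen' area assigned to a floor:

from homeassistant.helpers.template import Template

Template("{{ floor_name('Kitchen') }}", hass).async_render()              # area name -> floor name
Template("{{ floor_id('light.kitchen_ceiling') }}", hass).async_render()  # entity -> area -> floor id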

View File

@@ -750,9 +750,7 @@ class Integration:
self._import_futures: dict[str, asyncio.Future[ModuleType]] = {}
cache: dict[str, ModuleType | ComponentProtocol] = hass.data[DATA_COMPONENTS]
self._cache = cache
missing_platforms_cache: dict[str, ImportError] = hass.data[
DATA_MISSING_PLATFORMS
]
missing_platforms_cache: dict[str, bool] = hass.data[DATA_MISSING_PLATFORMS]
self._missing_platforms_cache = missing_platforms_cache
self._top_level_files = top_level_files or set()
_LOGGER.info("Loaded %s from %s", self.domain, pkg_path)
@@ -1085,8 +1083,7 @@ class Integration:
import_futures: list[tuple[str, asyncio.Future[ModuleType]]] = []
for platform_name in platform_names:
full_name = f"{domain}.{platform_name}"
if platform := self._get_platform_cached_or_raise(full_name):
if platform := self._get_platform_cached_or_raise(platform_name):
platforms[platform_name] = platform
continue
@@ -1095,6 +1092,7 @@ class Integration:
in_progress_imports[platform_name] = future
continue
full_name = f"{domain}.{platform_name}"
if (
self.import_executor
and full_name not in self.hass.config.components
@@ -1166,14 +1164,18 @@ class Integration:
return platforms
def _get_platform_cached_or_raise(self, full_name: str) -> ModuleType | None:
def _get_platform_cached_or_raise(self, platform_name: str) -> ModuleType | None:
"""Return a platform for an integration from cache."""
full_name = f"{self.domain}.{platform_name}"
if full_name in self._cache:
# the cache is either a ModuleType or a ComponentProtocol
# but we only care about the ModuleType here
return self._cache[full_name] # type: ignore[return-value]
if full_name in self._missing_platforms_cache:
raise self._missing_platforms_cache[full_name]
raise ModuleNotFoundError(
f"Platform {full_name} not found",
name=f"{self.pkg_path}.{platform_name}",
)
return None
def platforms_are_loaded(self, platform_names: Iterable[str]) -> bool:
@@ -1189,9 +1191,7 @@ class Integration:
def get_platform(self, platform_name: str) -> ModuleType:
"""Return a platform for an integration."""
if platform := self._get_platform_cached_or_raise(
f"{self.domain}.{platform_name}"
):
if platform := self._get_platform_cached_or_raise(platform_name):
return platform
return self._load_platform(platform_name)
@@ -1212,10 +1212,7 @@ class Integration:
):
existing_platforms.append(platform_name)
continue
missing_platforms[full_name] = ModuleNotFoundError(
f"Platform {full_name} not found",
name=f"{self.pkg_path}.{platform_name}",
)
missing_platforms[full_name] = True
return existing_platforms
@@ -1233,11 +1230,13 @@ class Integration:
cache: dict[str, ModuleType] = self.hass.data[DATA_COMPONENTS]
try:
cache[full_name] = self._import_platform(platform_name)
except ImportError as ex:
except ModuleNotFoundError:
if self.domain in cache:
# If the domain is loaded, cache that the platform
# does not exist so we do not try to load it again
self._missing_platforms_cache[full_name] = ex
self._missing_platforms_cache[full_name] = True
raise
except ImportError:
raise
except RuntimeError as err:
# _DeadlockError inherits from RuntimeError

View File

@@ -30,8 +30,8 @@ habluetooth==2.4.2
hass-nabucasa==0.79.0
hassil==1.6.1
home-assistant-bluetooth==1.12.0
home-assistant-frontend==20240329.1
home-assistant-intents==2024.3.27
home-assistant-frontend==20240404.1
home-assistant-intents==2024.4.3
httpx==0.27.0
ifaddr==0.2.0
Jinja2==3.1.3
@@ -60,7 +60,7 @@ voluptuous-serialize==2.6.0
voluptuous==0.13.1
webrtc-noise-gain==1.2.3
yarl==1.9.4
zeroconf==0.131.0
zeroconf==0.132.0
# Constrain pycryptodome to avoid vulnerability
# see https://github.com/home-assistant/core/pull/16238

View File

@@ -504,6 +504,12 @@ async def async_prepare_setup_platform(
log_error(f"Unable to import the component ({exc}).")
return None
if not integration.platforms_exists((domain,)):
log_error(
f"Platform not found (No module named '{integration.pkg_path}.{domain}')"
)
return None
try:
platform = await integration.async_get_platform(domain)
except ImportError as exc:

View File

@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
[project]
name = "homeassistant"
version = "2024.4.0b3"
version = "2024.4.1"
license = {text = "Apache-2.0"}
description = "Open-source home automation platform running on Python 3."
readme = "README.rst"
@@ -513,8 +513,6 @@ filterwarnings = [
"ignore:invalid escape sequence:SyntaxWarning:.*stringcase",
# https://github.com/pyudev/pyudev/pull/466 - >=0.24.0
"ignore:invalid escape sequence:SyntaxWarning:.*pyudev.monitor",
# https://github.com/xeniter/romy/pull/1 - >=0.0.8
"ignore:with timeout\\(\\) is deprecated, use async with timeout\\(\\) instead:DeprecationWarning:romy.utils",
# https://github.com/grahamwetzler/smart-meter-texas/pull/143 - >0.5.3
"ignore:ssl.OP_NO_SSL\\*/ssl.OP_NO_TLS\\* options are deprecated:DeprecationWarning:smart_meter_texas",
# https://github.com/mvantellingen/python-zeep/pull/1364 - >4.2.1

View File

@@ -45,7 +45,7 @@ Mastodon.py==1.8.1
Pillow==10.2.0
# homeassistant.components.plex
PlexAPI==4.15.10
PlexAPI==4.15.11
# homeassistant.components.progettihwsw
ProgettiHWSW==0.1.3
@@ -185,7 +185,7 @@ aio-georss-gdacs==0.9
aioairq==0.3.2
# homeassistant.components.airzone_cloud
aioairzone-cloud==0.4.6
aioairzone-cloud==0.4.7
# homeassistant.components.airzone
aioairzone==0.7.6
@@ -392,7 +392,7 @@ aiotankerkoenig==0.4.1
aiotractive==0.5.6
# homeassistant.components.unifi
aiounifi==73
aiounifi==74
# homeassistant.components.vlc_telnet
aiovlc==0.1.0
@@ -514,7 +514,7 @@ aurorapy==0.2.7
# avion==0.10
# homeassistant.components.axis
axis==59
axis==60
# homeassistant.components.azure_event_hub
azure-eventhub==5.11.1
@@ -867,7 +867,7 @@ fixerio==1.0.0a0
fjaraskupan==2.3.0
# homeassistant.components.flexit_bacnet
flexit_bacnet==2.1.0
flexit_bacnet==2.2.1
# homeassistant.components.flipr
flipr-api==1.5.1
@@ -1074,13 +1074,13 @@ hole==0.8.0
# homeassistant.components.holiday
# homeassistant.components.workday
holidays==0.45
holidays==0.46
# homeassistant.components.frontend
home-assistant-frontend==20240329.1
home-assistant-frontend==20240404.1
# homeassistant.components.conversation
home-assistant-intents==2024.3.27
home-assistant-intents==2024.4.3
# homeassistant.components.home_connect
homeconnect==0.7.2
@@ -1349,7 +1349,7 @@ mutesync==0.0.1
mypermobil==0.1.8
# homeassistant.components.myuplink
myuplink==0.5.0
myuplink==0.6.0
# homeassistant.components.nad
nad-receiver==0.3.0
@@ -1482,7 +1482,7 @@ openwrt-luci-rpc==1.1.17
openwrt-ubus-rpc==0.0.2
# homeassistant.components.opower
opower==0.4.1
opower==0.4.3
# homeassistant.components.oralb
oralb-ble==0.17.6
@@ -2444,7 +2444,7 @@ rfk101py==0.0.1
rflink==0.0.66
# homeassistant.components.ring
ring-doorbell[listen]==0.8.8
ring-doorbell[listen]==0.8.9
# homeassistant.components.fleetgo
ritassist==0.9.2
@@ -2459,10 +2459,10 @@ rocketchat-API==0.6.1
rokuecp==0.19.2
# homeassistant.components.romy
romy==0.0.7
romy==0.0.10
# homeassistant.components.roomba
roombapy==1.6.13
roombapy==1.8.1
# homeassistant.components.roon
roonapi==0.1.6
@@ -2654,6 +2654,9 @@ synology-srm==0.2.0
# homeassistant.components.system_bridge
systembridgeconnector==4.0.3
# homeassistant.components.system_bridge
systembridgemodels==4.0.4
# homeassistant.components.tailscale
tailscale==0.6.0
@@ -2795,7 +2798,7 @@ vallox-websocket-api==5.1.1
vehicle==2.2.1
# homeassistant.components.velbus
velbus-aio==2023.12.0
velbus-aio==2024.4.0
# homeassistant.components.venstar
venstarcolortouch==0.19
@@ -2838,7 +2841,7 @@ watchdog==2.3.1
waterfurnace==1.1.0
# homeassistant.components.weatherflow_cloud
weatherflow4py==0.2.17
weatherflow4py==0.2.20
# homeassistant.components.webmin
webmin-xmlrpc==0.0.2
@@ -2847,7 +2850,7 @@ webmin-xmlrpc==0.0.2
webrtc-noise-gain==1.2.3
# homeassistant.components.whirlpool
whirlpool-sixth-sense==0.18.6
whirlpool-sixth-sense==0.18.7
# homeassistant.components.whois
whois==0.9.27
@@ -2925,7 +2928,7 @@ zamg==0.3.6
zengge==0.2
# homeassistant.components.zeroconf
zeroconf==0.131.0
zeroconf==0.132.0
# homeassistant.components.zeversolar
zeversolar==0.3.1

View File

@@ -39,7 +39,7 @@ HATasmota==0.8.0
Pillow==10.2.0
# homeassistant.components.plex
PlexAPI==4.15.10
PlexAPI==4.15.11
# homeassistant.components.progettihwsw
ProgettiHWSW==0.1.3
@@ -164,7 +164,7 @@ aio-georss-gdacs==0.9
aioairq==0.3.2
# homeassistant.components.airzone_cloud
aioairzone-cloud==0.4.6
aioairzone-cloud==0.4.7
# homeassistant.components.airzone
aioairzone==0.7.6
@@ -365,7 +365,7 @@ aiotankerkoenig==0.4.1
aiotractive==0.5.6
# homeassistant.components.unifi
aiounifi==73
aiounifi==74
# homeassistant.components.vlc_telnet
aiovlc==0.1.0
@@ -454,7 +454,7 @@ auroranoaa==0.0.3
aurorapy==0.2.7
# homeassistant.components.axis
axis==59
axis==60
# homeassistant.components.azure_event_hub
azure-eventhub==5.11.1
@@ -705,7 +705,7 @@ fivem-api==0.1.2
fjaraskupan==2.3.0
# homeassistant.components.flexit_bacnet
flexit_bacnet==2.1.0
flexit_bacnet==2.2.1
# homeassistant.components.flipr
flipr-api==1.5.1
@@ -873,13 +873,13 @@ hole==0.8.0
# homeassistant.components.holiday
# homeassistant.components.workday
holidays==0.45
holidays==0.46
# homeassistant.components.frontend
home-assistant-frontend==20240329.1
home-assistant-frontend==20240404.1
# homeassistant.components.conversation
home-assistant-intents==2024.3.27
home-assistant-intents==2024.4.3
# homeassistant.components.home_connect
homeconnect==0.7.2
@@ -1088,7 +1088,7 @@ mutesync==0.0.1
mypermobil==0.1.8
# homeassistant.components.myuplink
myuplink==0.5.0
myuplink==0.6.0
# homeassistant.components.keenetic_ndms2
ndms2-client==0.1.2
@@ -1176,7 +1176,7 @@ openerz-api==0.3.0
openhomedevice==2.2.0
# homeassistant.components.opower
opower==0.4.1
opower==0.4.3
# homeassistant.components.oralb
oralb-ble==0.17.6
@@ -1887,16 +1887,16 @@ reolink-aio==0.8.9
rflink==0.0.66
# homeassistant.components.ring
ring-doorbell[listen]==0.8.8
ring-doorbell[listen]==0.8.9
# homeassistant.components.roku
rokuecp==0.19.2
# homeassistant.components.romy
romy==0.0.7
romy==0.0.10
# homeassistant.components.roomba
roombapy==1.6.13
roombapy==1.8.1
# homeassistant.components.roon
roonapi==0.1.6
@@ -2049,6 +2049,9 @@ switchbot-api==2.0.0
# homeassistant.components.system_bridge
systembridgeconnector==4.0.3
# homeassistant.components.system_bridge
systembridgemodels==4.0.4
# homeassistant.components.tailscale
tailscale==0.6.0
@@ -2151,7 +2154,7 @@ vallox-websocket-api==5.1.1
vehicle==2.2.1
# homeassistant.components.velbus
velbus-aio==2023.12.0
velbus-aio==2024.4.0
# homeassistant.components.venstar
venstarcolortouch==0.19
@@ -2185,7 +2188,7 @@ wallbox==0.6.0
watchdog==2.3.1
# homeassistant.components.weatherflow_cloud
weatherflow4py==0.2.17
weatherflow4py==0.2.20
# homeassistant.components.webmin
webmin-xmlrpc==0.0.2
@@ -2194,7 +2197,7 @@ webmin-xmlrpc==0.0.2
webrtc-noise-gain==1.2.3
# homeassistant.components.whirlpool
whirlpool-sixth-sense==0.18.6
whirlpool-sixth-sense==0.18.7
# homeassistant.components.whois
whois==0.9.27
@@ -2260,7 +2263,7 @@ yt-dlp==2024.03.10
zamg==0.3.6
# homeassistant.components.zeroconf
zeroconf==0.131.0
zeroconf==0.132.0
# homeassistant.components.zeversolar
zeversolar==0.3.1

View File

@@ -1461,7 +1461,10 @@ def mock_integration(
def mock_platform(
hass: HomeAssistant, platform_path: str, module: Mock | MockPlatform | None = None
hass: HomeAssistant,
platform_path: str,
module: Mock | MockPlatform | None = None,
built_in=True,
) -> None:
"""Mock a platform.
@@ -1472,7 +1475,7 @@ def mock_platform(
module_cache = hass.data[loader.DATA_COMPONENTS]
if domain not in integration_cache:
mock_integration(hass, MockModule(domain))
mock_integration(hass, MockModule(domain), built_in=built_in)
integration_cache[domain]._top_level_files.add(f"{platform_name}.py")
_LOGGER.info("Adding mock integration platform: %s", platform_path)
@@ -1665,6 +1668,7 @@ def setup_test_component_platform(
domain: str,
entities: Sequence[Entity],
from_config_entry: bool = False,
built_in: bool = True,
) -> MockPlatform:
"""Mock a test component platform for tests."""
@@ -1695,9 +1699,5 @@ def setup_test_component_platform(
platform.async_setup_entry = _async_setup_entry
platform.async_setup_platform = None
mock_platform(
hass,
f"test.{domain}",
platform,
)
mock_platform(hass, f"test.{domain}", platform, built_in=built_in)
return platform
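The test helpers gain a built_in flag so a mocked platform can be registered as a custom integration instead of a built-in one, letting tests exercise the loader's custom-component paths. Usage sketch (the entities list is assumed to exist in the test):

from tests.common import setup_test_component_platform

setup_test_component_platform(hass, "sensor", entities, built_in=False)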

View File

@@ -4,9 +4,11 @@ import datetime
from unittest.mock import Mock
from aiohttp import ClientResponseError
from freezegun.api import FrozenDateTimeFactory
import pytest
from yalexs.pubnub_async import AugustPubNub
from homeassistant.components.august.activity import INITIAL_LOCK_RESYNC_TIME
from homeassistant.components.lock import (
DOMAIN as LOCK_DOMAIN,
STATE_JAMMED,
@@ -155,7 +157,9 @@ async def test_one_lock_operation(
async def test_one_lock_operation_pubnub_connected(
hass: HomeAssistant, entity_registry: er.EntityRegistry
hass: HomeAssistant,
entity_registry: er.EntityRegistry,
freezer: FrozenDateTimeFactory,
) -> None:
"""Test lock and unlock operations are async when pubnub is connected."""
lock_one = await _mock_doorsense_enabled_august_lock_detail(hass)
@@ -230,6 +234,23 @@ async def test_one_lock_operation_pubnub_connected(
== STATE_UNKNOWN
)
freezer.tick(INITIAL_LOCK_RESYNC_TIME)
pubnub.message(
pubnub,
Mock(
channel=lock_one.pubsub_channel,
timetoken=(dt_util.utcnow().timestamp() + 2) * 10000000,
message={
"status": "kAugLockState_Unlocked",
},
),
)
await hass.async_block_till_done()
lock_online_with_doorsense_name = hass.states.get("lock.online_with_doorsense_name")
assert lock_online_with_doorsense_name.state == STATE_UNLOCKED
async def test_lock_jammed(hass: HomeAssistant) -> None:
"""Test lock gets jammed on unlock."""

View File

@@ -201,7 +201,7 @@ async def test_sensor_dark(hass: HomeAssistant, freezer: FrozenDateTimeFactory)
):
freezer.tick(SCAN_INTERVAL * 2)
async_fire_time_changed(hass)
await hass.async_block_till_done()
await hass.async_block_till_done(wait_background_tasks=True)
power = hass.states.get("sensor.mydevicename_total_energy")
assert power.state == "unknown"
# sun rose again
@@ -218,7 +218,7 @@ async def test_sensor_dark(hass: HomeAssistant, freezer: FrozenDateTimeFactory)
):
freezer.tick(SCAN_INTERVAL * 4)
async_fire_time_changed(hass)
await hass.async_block_till_done()
await hass.async_block_till_done(wait_background_tasks=True)
power = hass.states.get("sensor.mydevicename_power_output")
assert power is not None
assert power.state == "45.7"
@@ -237,7 +237,7 @@ async def test_sensor_dark(hass: HomeAssistant, freezer: FrozenDateTimeFactory)
):
freezer.tick(SCAN_INTERVAL * 6)
async_fire_time_changed(hass)
await hass.async_block_till_done()
await hass.async_block_till_done(wait_background_tasks=True)
power = hass.states.get("sensor.mydevicename_power_output")
assert power.state == "unknown" # should this be 'available'?
@@ -277,7 +277,7 @@ async def test_sensor_unknown_error(
):
freezer.tick(SCAN_INTERVAL * 2)
async_fire_time_changed(hass)
await hass.async_block_till_done()
await hass.async_block_till_done(wait_background_tasks=True)
assert (
"Exception: AuroraError('another error') occurred, 2 retries remaining"
in caplog.text

View File

@@ -74,6 +74,7 @@ MQTT_CLIENT_RESPONSE = {
"status": {"state": "active", "connectionStatus": "Connected"},
"config": {
"server": {"protocol": "tcp", "host": "192.168.0.90", "port": 1883},
"deviceTopicPrefix": f"axis/{MAC}",
},
},
}

View File

@@ -91,9 +91,9 @@ async def test_device_support_mqtt(
hass: HomeAssistant, mqtt_mock: MqttMockHAClient, setup_config_entry
) -> None:
"""Successful setup."""
mqtt_mock.async_subscribe.assert_called_with(f"{MAC}/#", mock.ANY, 0, "utf-8")
mqtt_mock.async_subscribe.assert_called_with(f"axis/{MAC}/#", mock.ANY, 0, "utf-8")
topic = f"{MAC}/event/tns:onvif/Device/tns:axis/Sensor/PIR/$source/sensor/0"
topic = f"axis/{MAC}/event/tns:onvif/Device/tns:axis/Sensor/PIR/$source/sensor/0"
message = (
b'{"timestamp": 1590258472044, "topic": "onvif:Device/axis:Sensor/PIR",'
b' "message": {"source": {"sensor": "0"}, "key": {}, "data": {"state": "1"}}}'

View File

@@ -278,7 +278,7 @@ async def test_known_hosts(hass: HomeAssistant, castbrowser_mock) -> None:
result["flow_id"], {"known_hosts": "192.168.0.1, 192.168.0.2"}
)
assert result["type"] == "create_entry"
await hass.async_block_till_done()
await hass.async_block_till_done(wait_background_tasks=True)
config_entry = hass.config_entries.async_entries("cast")[0]
assert castbrowser_mock.return_value.start_discovery.call_count == 1
@@ -291,7 +291,7 @@ async def test_known_hosts(hass: HomeAssistant, castbrowser_mock) -> None:
user_input={"known_hosts": "192.168.0.11, 192.168.0.12"},
)
await hass.async_block_till_done()
await hass.async_block_till_done(wait_background_tasks=True)
castbrowser_mock.return_value.start_discovery.assert_not_called()
castbrowser_mock.assert_not_called()

View File

@@ -137,8 +137,8 @@ async def async_setup_cast_internal_discovery(hass, config=None):
return_value=browser,
) as cast_browser:
add_entities = await async_setup_cast(hass, config)
await hass.async_block_till_done()
await hass.async_block_till_done()
await hass.async_block_till_done(wait_background_tasks=True)
await hass.async_block_till_done(wait_background_tasks=True)
assert browser.start_discovery.call_count == 1
@@ -209,8 +209,8 @@ async def async_setup_media_player_cast(hass: HomeAssistant, info: ChromecastInf
entry = MockConfigEntry(data=data, domain="cast")
entry.add_to_hass(hass)
assert await hass.config_entries.async_setup(entry.entry_id)
await hass.async_block_till_done()
await hass.async_block_till_done()
await hass.async_block_till_done(wait_background_tasks=True)
await hass.async_block_till_done(wait_background_tasks=True)
discovery_callback = cast_browser.call_args[0][0].add_cast

View File

@@ -10,6 +10,7 @@ from homeassistant.const import STATE_OFF, STATE_ON
if TYPE_CHECKING:
from tests.components.light.common import MockLight
from tests.components.sensor.common import MockSensor
@pytest.fixture(scope="session", autouse=True)
@@ -118,3 +119,11 @@ def mock_light_entities() -> list["MockLight"]:
MockLight("Ceiling", STATE_OFF),
MockLight(None, STATE_OFF),
]
@pytest.fixture
def mock_sensor_entities() -> dict[str, "MockSensor"]:
"""Return mocked sensor entities."""
from tests.components.sensor.common import get_mock_sensor_entities
return get_mock_sensor_entities()

Some files were not shown because too many files have changed in this diff Show More