From b2e7808114eecb40c72556ddea222216741d9143 Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Wed, 27 Mar 2024 18:27:38 +0100 Subject: [PATCH 001/426] Bump version to 2024.4.0b0 --- homeassistant/const.py | 2 +- pyproject.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/homeassistant/const.py b/homeassistant/const.py index ee15cfd72c3..d458a66b865 100644 --- a/homeassistant/const.py +++ b/homeassistant/const.py @@ -18,7 +18,7 @@ from .util.signal_type import SignalType APPLICATION_NAME: Final = "HomeAssistant" MAJOR_VERSION: Final = 2024 MINOR_VERSION: Final = 4 -PATCH_VERSION: Final = "0.dev0" +PATCH_VERSION: Final = "0b0" __short_version__: Final = f"{MAJOR_VERSION}.{MINOR_VERSION}" __version__: Final = f"{__short_version__}.{PATCH_VERSION}" REQUIRED_PYTHON_VER: Final[tuple[int, int, int]] = (3, 12, 0) diff --git a/pyproject.toml b/pyproject.toml index 965827f41ea..c84405c2764 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta" [project] name = "homeassistant" -version = "2024.4.0.dev0" +version = "2024.4.0b0" license = {text = "Apache-2.0"} description = "Open-source home automation platform running on Python 3." 
readme = "README.rst" From 9319528e0e31ebf6cde2b4db8f73048e2a7955e0 Mon Sep 17 00:00:00 2001 From: Martin Hjelmare Date: Thu, 28 Mar 2024 15:44:50 +0100 Subject: [PATCH 002/426] Use fallback voice for selected language in cloud (#114246) Co-authored-by: Erik Montnemery --- homeassistant/components/cloud/tts.py | 24 +++++- tests/components/cloud/test_tts.py | 109 ++++++++++++++++++++++---- 2 files changed, 115 insertions(+), 18 deletions(-) diff --git a/homeassistant/components/cloud/tts.py b/homeassistant/components/cloud/tts.py index 7922fc80201..42e4b94a189 100644 --- a/homeassistant/components/cloud/tts.py +++ b/homeassistant/components/cloud/tts.py @@ -140,7 +140,6 @@ class CloudTTSEntity(TextToSpeechEntity): """Return a dict include default options.""" return { ATTR_AUDIO_OUTPUT: AudioOutput.MP3, - ATTR_VOICE: self._voice, } @property @@ -178,7 +177,18 @@ class CloudTTSEntity(TextToSpeechEntity): gender: Gender | str | None = options.get(ATTR_GENDER) gender = handle_deprecated_gender(self.hass, gender) original_voice: str | None = options.get(ATTR_VOICE) + if original_voice is None and language == self._language: + original_voice = self._voice voice = handle_deprecated_voice(self.hass, original_voice) + if voice not in TTS_VOICES[language]: + default_voice = TTS_VOICES[language][0] + _LOGGER.debug( + "Unsupported voice %s detected, falling back to default %s for %s", + voice, + default_voice, + language, + ) + voice = default_voice # Process TTS try: data = await self.cloud.voice.process_tts( @@ -237,7 +247,6 @@ class CloudProvider(Provider): """Return a dict include default options.""" return { ATTR_AUDIO_OUTPUT: AudioOutput.MP3, - ATTR_VOICE: self._voice, } async def async_get_tts_audio( @@ -248,7 +257,18 @@ class CloudProvider(Provider): gender: Gender | str | None = options.get(ATTR_GENDER) gender = handle_deprecated_gender(self.hass, gender) original_voice: str | None = options.get(ATTR_VOICE) + if original_voice is None and language == self._language: + 
original_voice = self._voice voice = handle_deprecated_voice(self.hass, original_voice) + if voice not in TTS_VOICES[language]: + default_voice = TTS_VOICES[language][0] + _LOGGER.debug( + "Unsupported voice %s detected, falling back to default %s for %s", + voice, + default_voice, + language, + ) + voice = default_voice # Process TTS try: data = await self.cloud.voice.process_tts( diff --git a/tests/components/cloud/test_tts.py b/tests/components/cloud/test_tts.py index 3fd9ec5e4a4..06dbcf174a7 100644 --- a/tests/components/cloud/test_tts.py +++ b/tests/components/cloud/test_tts.py @@ -12,10 +12,20 @@ import voluptuous as vol from homeassistant.components.assist_pipeline.pipeline import STORAGE_KEY from homeassistant.components.cloud import DOMAIN, const, tts -from homeassistant.components.tts import DOMAIN as TTS_DOMAIN +from homeassistant.components.media_player import ( + ATTR_MEDIA_CONTENT_ID, + DOMAIN as DOMAIN_MP, + SERVICE_PLAY_MEDIA, +) +from homeassistant.components.tts import ( + ATTR_LANGUAGE, + ATTR_MEDIA_PLAYER_ENTITY_ID, + ATTR_MESSAGE, + DOMAIN as TTS_DOMAIN, +) from homeassistant.components.tts.helper import get_engine_instance from homeassistant.config import async_process_ha_core_config -from homeassistant.const import STATE_UNAVAILABLE, STATE_UNKNOWN +from homeassistant.const import ATTR_ENTITY_ID, STATE_UNAVAILABLE, STATE_UNKNOWN from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_registry import EntityRegistry from homeassistant.helpers.issue_registry import IssueRegistry, IssueSeverity @@ -23,6 +33,8 @@ from homeassistant.setup import async_setup_component from . import PIPELINE_DATA +from tests.common import async_mock_service +from tests.components.tts.common import get_media_source_url from tests.typing import ClientSessionGenerator @@ -120,13 +132,13 @@ async def test_prefs_default_voice( assert engine is not None # The platform config provider will be overridden by the discovery info provider. 
assert engine.default_language == "en-US" - assert engine.default_options == {"audio_output": "mp3", "voice": "JennyNeural"} + assert engine.default_options == {"audio_output": "mp3"} await set_cloud_prefs({"tts_default_voice": ("nl-NL", "MaartenNeural")}) await hass.async_block_till_done() assert engine.default_language == "nl-NL" - assert engine.default_options == {"audio_output": "mp3", "voice": "MaartenNeural"} + assert engine.default_options == {"audio_output": "mp3"} async def test_deprecated_platform_config( @@ -228,11 +240,11 @@ async def test_get_tts_audio( "url": ( "http://example.local:8123/api/tts_proxy/" "42f18378fd4393d18c8dd11d03fa9563c1e54491" - f"_en-us_5c97d21c48_{expected_url_suffix}.mp3" + f"_en-us_6e8b81ac47_{expected_url_suffix}.mp3" ), "path": ( "/api/tts_proxy/42f18378fd4393d18c8dd11d03fa9563c1e54491" - f"_en-us_5c97d21c48_{expected_url_suffix}.mp3" + f"_en-us_6e8b81ac47_{expected_url_suffix}.mp3" ), } await hass.async_block_till_done() @@ -242,6 +254,7 @@ async def test_get_tts_audio( assert mock_process_tts.call_args.kwargs["text"] == "There is someone at the door." assert mock_process_tts.call_args.kwargs["language"] == "en-US" assert mock_process_tts.call_args.kwargs["gender"] is None + assert mock_process_tts.call_args.kwargs["voice"] == "JennyNeural" assert mock_process_tts.call_args.kwargs["output"] == "mp3" @@ -280,11 +293,11 @@ async def test_get_tts_audio_logged_out( "url": ( "http://example.local:8123/api/tts_proxy/" "42f18378fd4393d18c8dd11d03fa9563c1e54491" - f"_en-us_5c97d21c48_{expected_url_suffix}.mp3" + f"_en-us_6e8b81ac47_{expected_url_suffix}.mp3" ), "path": ( "/api/tts_proxy/42f18378fd4393d18c8dd11d03fa9563c1e54491" - f"_en-us_5c97d21c48_{expected_url_suffix}.mp3" + f"_en-us_6e8b81ac47_{expected_url_suffix}.mp3" ), } await hass.async_block_till_done() @@ -294,6 +307,7 @@ async def test_get_tts_audio_logged_out( assert mock_process_tts.call_args.kwargs["text"] == "There is someone at the door." 
assert mock_process_tts.call_args.kwargs["language"] == "en-US" assert mock_process_tts.call_args.kwargs["gender"] is None + assert mock_process_tts.call_args.kwargs["voice"] == "JennyNeural" assert mock_process_tts.call_args.kwargs["output"] == "mp3" @@ -344,11 +358,11 @@ async def test_tts_entity( "url": ( "http://example.local:8123/api/tts_proxy/" "42f18378fd4393d18c8dd11d03fa9563c1e54491" - f"_en-us_5c97d21c48_{entity_id}.mp3" + f"_en-us_6e8b81ac47_{entity_id}.mp3" ), "path": ( "/api/tts_proxy/42f18378fd4393d18c8dd11d03fa9563c1e54491" - f"_en-us_5c97d21c48_{entity_id}.mp3" + f"_en-us_6e8b81ac47_{entity_id}.mp3" ), } await hass.async_block_till_done() @@ -358,6 +372,7 @@ async def test_tts_entity( assert mock_process_tts.call_args.kwargs["text"] == "There is someone at the door." assert mock_process_tts.call_args.kwargs["language"] == "en-US" assert mock_process_tts.call_args.kwargs["gender"] is None + assert mock_process_tts.call_args.kwargs["voice"] == "JennyNeural" assert mock_process_tts.call_args.kwargs["output"] == "mp3" state = hass.states.get(entity_id) @@ -632,11 +647,11 @@ async def test_deprecated_gender( "url": ( "http://example.local:8123/api/tts_proxy/" "42f18378fd4393d18c8dd11d03fa9563c1e54491" - f"_{language.lower()}_5c97d21c48_{expected_url_suffix}.mp3" + f"_{language.lower()}_6e8b81ac47_{expected_url_suffix}.mp3" ), "path": ( "/api/tts_proxy/42f18378fd4393d18c8dd11d03fa9563c1e54491" - f"_{language.lower()}_5c97d21c48_{expected_url_suffix}.mp3" + f"_{language.lower()}_6e8b81ac47_{expected_url_suffix}.mp3" ), } await hass.async_block_till_done() @@ -645,7 +660,7 @@ async def test_deprecated_gender( assert mock_process_tts.call_args is not None assert mock_process_tts.call_args.kwargs["text"] == "There is someone at the door." 
assert mock_process_tts.call_args.kwargs["language"] == language - assert mock_process_tts.call_args.kwargs["voice"] == "JennyNeural" + assert mock_process_tts.call_args.kwargs["voice"] == "XiaoxiaoNeural" assert mock_process_tts.call_args.kwargs["output"] == "mp3" issue = issue_registry.async_get_issue("cloud", "deprecated_gender") assert issue is None @@ -662,11 +677,11 @@ async def test_deprecated_gender( "url": ( "http://example.local:8123/api/tts_proxy/" "42f18378fd4393d18c8dd11d03fa9563c1e54491" - f"_{language.lower()}_5dded72256_{expected_url_suffix}.mp3" + f"_{language.lower()}_dd0e95eb04_{expected_url_suffix}.mp3" ), "path": ( "/api/tts_proxy/42f18378fd4393d18c8dd11d03fa9563c1e54491" - f"_{language.lower()}_5dded72256_{expected_url_suffix}.mp3" + f"_{language.lower()}_dd0e95eb04_{expected_url_suffix}.mp3" ), } await hass.async_block_till_done() @@ -678,7 +693,7 @@ async def test_deprecated_gender( assert mock_process_tts.call_args.kwargs["text"] == "There is someone at the door." 
assert mock_process_tts.call_args.kwargs["language"] == language assert mock_process_tts.call_args.kwargs["gender"] == gender_option - assert mock_process_tts.call_args.kwargs["voice"] == "JennyNeural" + assert mock_process_tts.call_args.kwargs["voice"] == "XiaoxiaoNeural" assert mock_process_tts.call_args.kwargs["output"] == "mp3" issue = issue_registry.async_get_issue("cloud", issue_id) assert issue is not None @@ -733,3 +748,65 @@ async def test_deprecated_gender( } assert not issue_registry.async_get_issue(DOMAIN, issue_id) + + +@pytest.mark.parametrize( + ("service", "service_data"), + [ + ( + "speak", + { + ATTR_ENTITY_ID: "tts.home_assistant_cloud", + ATTR_LANGUAGE: "id-ID", + ATTR_MEDIA_PLAYER_ENTITY_ID: "media_player.something", + ATTR_MESSAGE: "There is someone at the door.", + }, + ), + ( + "cloud_say", + { + ATTR_ENTITY_ID: "media_player.something", + ATTR_LANGUAGE: "id-ID", + ATTR_MESSAGE: "There is someone at the door.", + }, + ), + ], +) +async def test_tts_services( + hass: HomeAssistant, + cloud: MagicMock, + hass_client: ClientSessionGenerator, + service: str, + service_data: dict[str, Any], +) -> None: + """Test tts services.""" + calls = async_mock_service(hass, DOMAIN_MP, SERVICE_PLAY_MEDIA) + mock_process_tts = AsyncMock(return_value=b"") + cloud.voice.process_tts = mock_process_tts + + assert await async_setup_component(hass, DOMAIN, {DOMAIN: {}}) + await hass.async_block_till_done() + await cloud.login("test-user", "test-pass") + client = await hass_client() + + await hass.services.async_call( + domain=TTS_DOMAIN, + service=service, + service_data=service_data, + blocking=True, + ) + + assert len(calls) == 1 + + url = await get_media_source_url(hass, calls[0].data[ATTR_MEDIA_CONTENT_ID]) + await hass.async_block_till_done() + response = await client.get(url) + assert response.status == HTTPStatus.OK + await hass.async_block_till_done() + + assert mock_process_tts.call_count == 1 + assert mock_process_tts.call_args is not None + assert 
mock_process_tts.call_args.kwargs["text"] == "There is someone at the door." + assert mock_process_tts.call_args.kwargs["language"] == service_data[ATTR_LANGUAGE] + assert mock_process_tts.call_args.kwargs["voice"] == "GadisNeural" + assert mock_process_tts.call_args.kwargs["output"] == "mp3" From e2710184cb11cf58a461120039550ada1b41ea43 Mon Sep 17 00:00:00 2001 From: TheJulianJES Date: Wed, 27 Mar 2024 18:53:18 +0100 Subject: [PATCH 003/426] Bump zha-quirks to 0.0.113 (#114311) --- homeassistant/components/zha/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/zha/manifest.json b/homeassistant/components/zha/manifest.json index e85966e870f..e9d75584064 100644 --- a/homeassistant/components/zha/manifest.json +++ b/homeassistant/components/zha/manifest.json @@ -24,7 +24,7 @@ "bellows==0.38.1", "pyserial==3.5", "pyserial-asyncio==0.6", - "zha-quirks==0.0.112", + "zha-quirks==0.0.113", "zigpy-deconz==0.23.1", "zigpy==0.63.5", "zigpy-xbee==0.20.1", diff --git a/requirements_all.txt b/requirements_all.txt index 1aedcb73671..42e92c3de6f 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2931,7 +2931,7 @@ zeroconf==0.131.0 zeversolar==0.3.1 # homeassistant.components.zha -zha-quirks==0.0.112 +zha-quirks==0.0.113 # homeassistant.components.zhong_hong zhong-hong-hvac==1.0.12 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 9795ef99481..3548eb7fadc 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2266,7 +2266,7 @@ zeroconf==0.131.0 zeversolar==0.3.1 # homeassistant.components.zha -zha-quirks==0.0.112 +zha-quirks==0.0.113 # homeassistant.components.zha zigpy-deconz==0.23.1 From a18184a4c08621e111ac154d6f622d4215a2d546 Mon Sep 17 00:00:00 2001 From: Maikel Punie Date: Thu, 28 Mar 2024 00:43:34 +0100 Subject: [PATCH 004/426] Bump pyduotecno to 2024.3.2 (#114320) --- 
homeassistant/components/duotecno/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/duotecno/manifest.json b/homeassistant/components/duotecno/manifest.json index 7b33784a612..0c8eab8f0a0 100644 --- a/homeassistant/components/duotecno/manifest.json +++ b/homeassistant/components/duotecno/manifest.json @@ -7,5 +7,5 @@ "iot_class": "local_push", "loggers": ["pyduotecno", "pyduotecno-node", "pyduotecno-unit"], "quality_scale": "silver", - "requirements": ["pyDuotecno==2024.1.2"] + "requirements": ["pyDuotecno==2024.3.2"] } diff --git a/requirements_all.txt b/requirements_all.txt index 42e92c3de6f..ace129c69f2 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1648,7 +1648,7 @@ pyCEC==0.5.2 pyControl4==1.1.0 # homeassistant.components.duotecno -pyDuotecno==2024.1.2 +pyDuotecno==2024.3.2 # homeassistant.components.electrasmart pyElectra==1.2.0 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 3548eb7fadc..b01a7ca8ba4 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1298,7 +1298,7 @@ pyCEC==0.5.2 pyControl4==1.1.0 # homeassistant.components.duotecno -pyDuotecno==2024.1.2 +pyDuotecno==2024.3.2 # homeassistant.components.electrasmart pyElectra==1.2.0 From 00993a6be3dd454828c944f5e9db8e03df5209da Mon Sep 17 00:00:00 2001 From: Marcel van der Veldt Date: Wed, 27 Mar 2024 22:35:08 +0100 Subject: [PATCH 005/426] Fix Matter airconditioner discovery of climate platform (#114326) * Discover Thermostat platform for Room Airconditioner device * add test * Adjust docstring Co-authored-by: TheJulianJES --------- Co-authored-by: Martin Hjelmare Co-authored-by: TheJulianJES --- homeassistant/components/matter/climate.py | 2 +- homeassistant/components/matter/switch.py | 1 + .../fixtures/nodes/room-airconditioner.json | 256 ++++++++++++++++++ tests/components/matter/test_climate.py | 25 ++ 4 files changed, 
283 insertions(+), 1 deletion(-) create mode 100644 tests/components/matter/fixtures/nodes/room-airconditioner.json diff --git a/homeassistant/components/matter/climate.py b/homeassistant/components/matter/climate.py index 5ae1f7ca486..1b949d3ebfb 100644 --- a/homeassistant/components/matter/climate.py +++ b/homeassistant/components/matter/climate.py @@ -313,6 +313,6 @@ DISCOVERY_SCHEMAS = [ clusters.Thermostat.Attributes.UnoccupiedCoolingSetpoint, clusters.Thermostat.Attributes.UnoccupiedHeatingSetpoint, ), - device_type=(device_types.Thermostat,), + device_type=(device_types.Thermostat, device_types.RoomAirConditioner), ), ] diff --git a/homeassistant/components/matter/switch.py b/homeassistant/components/matter/switch.py index 91a28bdab8c..9bc858d40c0 100644 --- a/homeassistant/components/matter/switch.py +++ b/homeassistant/components/matter/switch.py @@ -86,6 +86,7 @@ DISCOVERY_SCHEMAS = [ device_types.ColorDimmerSwitch, device_types.DimmerSwitch, device_types.Thermostat, + device_types.RoomAirConditioner, ), ), ] diff --git a/tests/components/matter/fixtures/nodes/room-airconditioner.json b/tests/components/matter/fixtures/nodes/room-airconditioner.json new file mode 100644 index 00000000000..11c29b0d8f4 --- /dev/null +++ b/tests/components/matter/fixtures/nodes/room-airconditioner.json @@ -0,0 +1,256 @@ +{ + "node_id": 36, + "date_commissioned": "2024-03-27T17:31:23.745932", + "last_interview": "2024-03-27T17:31:23.745939", + "interview_version": 6, + "available": true, + "is_bridge": false, + "attributes": { + "0/29/0": [ + { + "0": 22, + "1": 1 + } + ], + "0/29/1": [29, 31, 40, 48, 49, 51, 60, 62, 63], + "0/29/2": [], + "0/29/3": [1, 2], + "0/29/65532": 0, + "0/29/65533": 2, + "0/29/65528": [], + "0/29/65529": [], + "0/29/65531": [0, 1, 2, 3, 65528, 65529, 65531, 65532, 65533], + "0/31/0": [ + { + "254": 5 + }, + { + "1": 5, + "2": 2, + "3": [112233], + "4": null, + "254": 6 + } + ], + "0/31/1": [], + "0/31/2": 4, + "0/31/3": 3, + "0/31/4": 4, + 
"0/31/65532": 0, + "0/31/65533": 1, + "0/31/65528": [], + "0/31/65529": [], + "0/31/65531": [0, 1, 2, 3, 4, 65528, 65529, 65531, 65532, 65533], + "0/40/0": 17, + "0/40/1": "TEST_VENDOR", + "0/40/2": 65521, + "0/40/3": "Room AirConditioner", + "0/40/4": 32774, + "0/40/5": "", + "0/40/6": "**REDACTED**", + "0/40/7": 0, + "0/40/8": "TEST_VERSION", + "0/40/9": 1, + "0/40/10": "1.0", + "0/40/11": "20200101", + "0/40/12": "", + "0/40/13": "", + "0/40/14": "", + "0/40/15": "TEST_SN", + "0/40/16": false, + "0/40/18": "E47F334E22A56610", + "0/40/19": { + "0": 3, + "1": 3 + }, + "0/40/65532": 0, + "0/40/65533": 1, + "0/40/65528": [], + "0/40/65529": [], + "0/40/65531": [ + 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 18, 19, 65528, + 65529, 65531, 65532, 65533 + ], + "0/48/0": 0, + "0/48/1": { + "0": 60, + "1": 900 + }, + "0/48/2": 0, + "0/48/3": 0, + "0/48/4": true, + "0/48/65532": 0, + "0/48/65533": 1, + "0/48/65528": [1, 3, 5], + "0/48/65529": [0, 2, 4], + "0/48/65531": [0, 1, 2, 3, 4, 65528, 65529, 65531, 65532, 65533], + "0/49/0": 0, + "0/49/1": null, + "0/49/2": 0, + "0/49/3": 0, + "0/49/4": false, + "0/49/5": 0, + "0/49/6": "", + "0/49/7": 0, + "0/49/65532": 2, + "0/49/65533": 1, + "0/49/65528": [1, 5, 7], + "0/49/65529": [0, 2, 3, 4, 6, 8], + "0/49/65531": [0, 1, 2, 3, 4, 5, 6, 7, 65528, 65529, 65531, 65532, 65533], + "0/51/0": [], + "0/51/1": 0, + "0/51/8": false, + "0/51/65532": 0, + "0/51/65533": 1, + "0/51/65528": [], + "0/51/65529": [], + "0/51/65531": [0, 1, 8, 65528, 65529, 65531, 65532, 65533], + "0/60/0": 0, + "0/60/1": null, + "0/60/2": null, + "0/60/65532": 0, + "0/60/65533": 1, + "0/60/65528": [], + "0/60/65529": [0, 1, 2], + "0/60/65531": [0, 1, 2, 65528, 65529, 65531, 65532, 65533], + "0/62/0": [ + { + "254": 5 + }, + { + "1": 
"FTABAQEkAgE3AyQTAhgmBIAigScmBYAlTTo3BiQVAiQRJBgkBwEkCAEwCUEE7pKHHHlljFuw2MAQJFOAzVR5tPPIXOjxHrLr7el8KqThQ6CuCFwdmNztUaIQgBcPZm6QRoEn6OGoFoAG8vB0KTcKNQEoARgkAgE2AwQCBAEYMAQUEvPPXEC80Bhik9ZDF3HK0Jo0RG0wBRQ2kjqIaJL5W4CHyhTHPUFcjBrNmxgwC0BJN+cSZw9fkFlIZGzsfS4WYFxzouEZ6LXLjqJXqwhi6uoQqoEhHPITp6sQ8u1ZF7OuQ35q0tZBwt84ZvAo+i59GA==", + "2": "FTABAQEkAgE3AyQUARgmBIAigScmBYAlTTo3BiQTAhgkBwEkCAEwCUEEB0u1A8srBwhdMy9S5+W8C38qv6l9JxhOaVO1E8f3FHDpv6eTSEDWXvUKEOxZcce5cGUF/9tdW2z5M+pwjt2B9jcKNQEpARgkAmAwBBQ2kjqIaJL5W4CHyhTHPUFcjBrNmzAFFJOvH2V2J30vUkl3ZbhqhwBP2wVXGDALQJHZ9heIDcBg2DGc2b18rirq/5aZ2rsyP9BAE1zeTqSYj/pqKyeMS+hCx69jOqh/eAeDpeAzvL7JmKVLB0JLV1sY", + "254": 6 + } + ], + "0/62/1": [ + { + "1": "BER19ZLOakFRLvKKC9VsWzN+xv5V5yHHBFdX7ip/cNhnzVfnaNLLHKGU/DtcNZtU/YH+8kUcWKYvknk1TCcrG4k=", + "2": 24582, + "3": 9865010379846957597, + "4": 3118002441518404838, + "5": "", + "254": 5 + }, + { + "1": "BJUrvCXfXiwdfapIXt1qCtJoem+s2gZJ2KBDQZcPVP1cAYECu6Fjjz2MhMy6OW8ASGmWuke+YavIzIZWYEd6BJU=", + "2": 4939, + "3": 2, + "4": 36, + "5": "", + "254": 6 + } + ], + "0/62/2": 5, + "0/62/3": 2, + "0/62/4": [ + "FTABAQEkAgE3AycU3rGzlMtTrxYYJgQAus0sJgUAwGVSNwYnFN6xs5TLU68WGCQHASQIATAJQQREdfWSzmpBUS7yigvVbFszfsb+VechxwRXV+4qf3DYZ81X52jSyxyhlPw7XDWbVP2B/vJFHFimL5J5NUwnKxuJNwo1ASkBGCQCYDAEFMurIH6818tAIcTnwEZO5c+1WAH8MAUUy6sgfrzXy0AhxOfARk7lz7VYAfwYMAtAM2db17wMsM+JMtR4c2Iaz8nHLI4mVbsPGILOBujrzguB2C7p8Q9x8Cw0NgJP7hDV52F9j7IfHjO37aXZA4LqqBg=", + "FTABAQEkAgE3AyQUARgmBIAigScmBYAlTTo3BiQUARgkBwEkCAEwCUEElSu8Jd9eLB19qkhe3WoK0mh6b6zaBknYoENBlw9U/VwBgQK7oWOPPYyEzLo5bwBIaZa6R75hq8jMhlZgR3oElTcKNQEpARgkAmAwBBSTrx9ldid9L1JJd2W4aocAT9sFVzAFFJOvH2V2J30vUkl3ZbhqhwBP2wVXGDALQPMYkhQcsrqT5v1vgN1LXJr9skDJ6nnuG0QWfs8SVODLGjU73iO1aQVq+Ir5et9RTD/4VrfnI63DW9RA0N+qgCkY" + ], + "0/62/5": 6, + "0/62/65532": 0, + "0/62/65533": 1, + "0/62/65528": [1, 3, 5, 8], + "0/62/65529": [0, 2, 4, 6, 7, 9, 10, 11], + "0/62/65531": [0, 1, 2, 3, 4, 5, 65528, 65529, 65531, 65532, 65533], + "0/63/0": [], + "0/63/1": [], + "0/63/2": 4, + 
"0/63/3": 3, + "0/63/65532": 0, + "0/63/65533": 2, + "0/63/65528": [2, 5], + "0/63/65529": [0, 1, 3, 4], + "0/63/65531": [0, 1, 2, 3, 65528, 65529, 65531, 65532, 65533], + "1/3/0": 0, + "1/3/1": 0, + "1/3/65532": 0, + "1/3/65533": 4, + "1/3/65528": [], + "1/3/65529": [0, 64], + "1/3/65531": [0, 1, 65528, 65529, 65531, 65532, 65533], + "1/6/0": false, + "1/6/65532": 0, + "1/6/65533": 5, + "1/6/65528": [], + "1/6/65529": [0, 1, 2], + "1/6/65531": [0, 65528, 65529, 65531, 65532, 65533], + "1/29/0": [ + { + "0": 114, + "1": 1 + } + ], + "1/29/1": [3, 6, 29, 513, 514], + "1/29/2": [], + "1/29/3": [2], + "1/29/65532": 0, + "1/29/65533": 2, + "1/29/65528": [], + "1/29/65529": [], + "1/29/65531": [0, 1, 2, 3, 65528, 65529, 65531, 65532, 65533], + "1/513/0": 2000, + "1/513/3": 1600, + "1/513/4": 3200, + "1/513/5": 1600, + "1/513/6": 3200, + "1/513/17": 2600, + "1/513/18": 2000, + "1/513/25": 0, + "1/513/27": 4, + "1/513/28": 1, + "1/513/65532": 35, + "1/513/65533": 6, + "1/513/65528": [], + "1/513/65529": [0], + "1/513/65531": [ + 0, 3, 4, 5, 6, 17, 18, 25, 27, 28, 65528, 65529, 65531, 65532, 65533 + ], + "1/514/0": 0, + "1/514/1": 2, + "1/514/2": 0, + "1/514/3": 0, + "1/514/4": 3, + "1/514/5": 0, + "1/514/6": 0, + "1/514/9": 1, + "1/514/10": 0, + "1/514/65532": 11, + "1/514/65533": 4, + "1/514/65528": [], + "1/514/65529": [], + "1/514/65531": [ + 0, 1, 2, 3, 4, 5, 6, 9, 10, 65528, 65529, 65531, 65532, 65533 + ], + "2/3/0": 0, + "2/3/1": 0, + "2/3/65532": 0, + "2/3/65533": 4, + "2/3/65528": [], + "2/3/65529": [0, 64], + "2/3/65531": [0, 1, 65528, 65529, 65531, 65532, 65533], + "2/29/0": [ + { + "0": 770, + "1": 1 + } + ], + "2/29/1": [3, 29, 1026], + "2/29/2": [], + "2/29/3": [], + "2/29/65532": 0, + "2/29/65533": 2, + "2/29/65528": [], + "2/29/65529": [], + "2/29/65531": [0, 1, 2, 3, 65528, 65529, 65531, 65532, 65533], + "2/1026/0": 0, + "2/1026/1": -500, + "2/1026/2": 6000, + "2/1026/65532": 0, + "2/1026/65533": 1, + "2/1026/65528": [], + "2/1026/65529": [], + 
"2/1026/65531": [0, 1, 2, 65528, 65529, 65531, 65532, 65533] + }, + "attribute_subscriptions": [] +} diff --git a/tests/components/matter/test_climate.py b/tests/components/matter/test_climate.py index 80e2d1b72da..de4626ef3d1 100644 --- a/tests/components/matter/test_climate.py +++ b/tests/components/matter/test_climate.py @@ -25,6 +25,16 @@ async def thermostat_fixture( return await setup_integration_with_node_fixture(hass, "thermostat", matter_client) +@pytest.fixture(name="room_airconditioner") +async def room_airconditioner( + hass: HomeAssistant, matter_client: MagicMock +) -> MatterNode: + """Fixture for a room air conditioner node.""" + return await setup_integration_with_node_fixture( + hass, "room-airconditioner", matter_client + ) + + # This tests needs to be adjusted to remove lingering tasks @pytest.mark.parametrize("expected_lingering_tasks", [True]) async def test_thermostat( @@ -387,3 +397,18 @@ async def test_thermostat( clusters.Thermostat.Enums.SetpointAdjustMode.kCool, -40 ), ) + + +# This tests needs to be adjusted to remove lingering tasks +@pytest.mark.parametrize("expected_lingering_tasks", [True]) +async def test_room_airconditioner( + hass: HomeAssistant, + matter_client: MagicMock, + room_airconditioner: MatterNode, +) -> None: + """Test if a climate entity is created for a Room Airconditioner device.""" + state = hass.states.get("climate.room_airconditioner") + assert state + assert state.attributes["current_temperature"] == 20 + assert state.attributes["min_temp"] == 16 + assert state.attributes["max_temp"] == 32 From f8edab0c12628b2724fd6d540e67efc0cfbc06a7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=98yvind=20Matheson=20Wergeland?= Date: Thu, 28 Mar 2024 13:31:55 +0100 Subject: [PATCH 006/426] =?UTF-8?q?Avoid=20changing=20local=20time=20on=20?= =?UTF-8?q?Nob=C3=B8=20Ecohub=20(#114332)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit nobo_hub: Pass timezone to avoid changing local time on 
Nobø Ecohub in handshake --- homeassistant/components/nobo_hub/__init__.py | 9 ++++++++- homeassistant/components/nobo_hub/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 4 files changed, 11 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/nobo_hub/__init__.py b/homeassistant/components/nobo_hub/__init__.py index 15a4b48c315..f9d2ce2e3da 100644 --- a/homeassistant/components/nobo_hub/__init__.py +++ b/homeassistant/components/nobo_hub/__init__.py @@ -7,6 +7,7 @@ from pynobo import nobo from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_IP_ADDRESS, EVENT_HOMEASSISTANT_STOP, Platform from homeassistant.core import HomeAssistant +import homeassistant.util.dt as dt_util from .const import CONF_AUTO_DISCOVERED, CONF_SERIAL, DOMAIN @@ -19,7 +20,13 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: serial = entry.data[CONF_SERIAL] discover = entry.data[CONF_AUTO_DISCOVERED] ip_address = None if discover else entry.data[CONF_IP_ADDRESS] - hub = nobo(serial=serial, ip=ip_address, discover=discover, synchronous=False) + hub = nobo( + serial=serial, + ip=ip_address, + discover=discover, + synchronous=False, + timezone=dt_util.DEFAULT_TIME_ZONE, + ) await hub.connect() hass.data.setdefault(DOMAIN, {}) diff --git a/homeassistant/components/nobo_hub/manifest.json b/homeassistant/components/nobo_hub/manifest.json index 9ddbed7dadc..4741eb39e29 100644 --- a/homeassistant/components/nobo_hub/manifest.json +++ b/homeassistant/components/nobo_hub/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/nobo_hub", "integration_type": "hub", "iot_class": "local_push", - "requirements": ["pynobo==1.6.0"] + "requirements": ["pynobo==1.8.0"] } diff --git a/requirements_all.txt b/requirements_all.txt index ace129c69f2..a93913d7272 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1991,7 +1991,7 @@ pynetgear==0.10.10 
pynetio==0.1.9.1 # homeassistant.components.nobo_hub -pynobo==1.6.0 +pynobo==1.8.0 # homeassistant.components.nuki pynuki==1.6.3 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index b01a7ca8ba4..106b8debcdf 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1545,7 +1545,7 @@ pymysensors==0.24.0 pynetgear==0.10.10 # homeassistant.components.nobo_hub -pynobo==1.6.0 +pynobo==1.8.0 # homeassistant.components.nuki pynuki==1.6.3 From a91c03b16472db1d716b922c326fc194310aaa1d Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Thu, 28 Mar 2024 00:44:01 +0100 Subject: [PATCH 007/426] Don't access States.last_reported_ts before it's added (#114333) --- homeassistant/components/recorder/const.py | 1 + homeassistant/components/recorder/core.py | 3 +- .../components/recorder/history/modern.py | 27 +- tests/components/recorder/db_schema_42.py | 838 +++++++++++ .../recorder/test_history_db_schema_42.py | 1278 +++++++++++++++++ 5 files changed, 2138 insertions(+), 9 deletions(-) create mode 100644 tests/components/recorder/db_schema_42.py create mode 100644 tests/components/recorder/test_history_db_schema_42.py diff --git a/homeassistant/components/recorder/const.py b/homeassistant/components/recorder/const.py index 66d46c0c20e..1869bb32239 100644 --- a/homeassistant/components/recorder/const.py +++ b/homeassistant/components/recorder/const.py @@ -53,6 +53,7 @@ STATISTICS_ROWS_SCHEMA_VERSION = 23 CONTEXT_ID_AS_BINARY_SCHEMA_VERSION = 36 EVENT_TYPE_IDS_SCHEMA_VERSION = 37 STATES_META_SCHEMA_VERSION = 38 +LAST_REPORTED_SCHEMA_VERSION = 43 LEGACY_STATES_EVENT_ID_INDEX_SCHEMA_VERSION = 28 diff --git a/homeassistant/components/recorder/core.py b/homeassistant/components/recorder/core.py index 7de9cf46311..0e404ce4da0 100644 --- a/homeassistant/components/recorder/core.py +++ b/homeassistant/components/recorder/core.py @@ -47,6 +47,7 @@ from .const import ( DOMAIN, ESTIMATED_QUEUE_ITEM_SIZE, KEEPALIVE_TIME, + LAST_REPORTED_SCHEMA_VERSION, 
LEGACY_STATES_EVENT_ID_INDEX_SCHEMA_VERSION, MARIADB_PYMYSQL_URL_PREFIX, MARIADB_URL_PREFIX, @@ -1203,7 +1204,7 @@ class Recorder(threading.Thread): if ( pending_last_reported := self.states_manager.get_pending_last_reported_timestamp() - ): + ) and self.schema_version >= LAST_REPORTED_SCHEMA_VERSION: with session.no_autoflush: session.execute( update(States), diff --git a/homeassistant/components/recorder/history/modern.py b/homeassistant/components/recorder/history/modern.py index a909f799ea9..5fd4f415e02 100644 --- a/homeassistant/components/recorder/history/modern.py +++ b/homeassistant/components/recorder/history/modern.py @@ -27,6 +27,7 @@ from homeassistant.core import HomeAssistant, State, split_entity_id import homeassistant.util.dt as dt_util from ... import recorder +from ..const import LAST_REPORTED_SCHEMA_VERSION from ..db_schema import SHARED_ATTR_OR_LEGACY_ATTRIBUTES, StateAttributes, States from ..filters import Filters from ..models import ( @@ -327,9 +328,10 @@ def _state_changed_during_period_stmt( limit: int | None, include_start_time_state: bool, run_start_ts: float | None, + include_last_reported: bool, ) -> Select | CompoundSelect: stmt = ( - _stmt_and_join_attributes(no_attributes, False, True) + _stmt_and_join_attributes(no_attributes, False, include_last_reported) .filter( ( (States.last_changed_ts == States.last_updated_ts) @@ -361,22 +363,22 @@ def _state_changed_during_period_stmt( single_metadata_id, no_attributes, False, - True, + include_last_reported, ).subquery(), no_attributes, False, - True, + include_last_reported, ), _select_from_subquery( stmt.subquery(), no_attributes, False, - True, + include_last_reported, ), ).subquery(), no_attributes, False, - True, + include_last_reported, ) @@ -391,6 +393,9 @@ def state_changes_during_period( include_start_time_state: bool = True, ) -> MutableMapping[str, list[State]]: """Return states changes during UTC period start_time - end_time.""" + has_last_reported = ( + 
recorder.get_instance(hass).schema_version >= LAST_REPORTED_SCHEMA_VERSION + ) if not entity_id: raise ValueError("entity_id must be provided") entity_ids = [entity_id.lower()] @@ -423,12 +428,14 @@ def state_changes_during_period( limit, include_start_time_state, run_start_ts, + has_last_reported, ), track_on=[ bool(end_time_ts), no_attributes, bool(limit), include_start_time_state, + has_last_reported, ], ) return cast( @@ -475,10 +482,10 @@ def _get_last_state_changes_single_stmt(metadata_id: int) -> Select: def _get_last_state_changes_multiple_stmt( - number_of_states: int, metadata_id: int + number_of_states: int, metadata_id: int, include_last_reported: bool ) -> Select: return ( - _stmt_and_join_attributes(False, False, True) + _stmt_and_join_attributes(False, False, include_last_reported) .where( States.state_id == ( @@ -500,6 +507,9 @@ def get_last_state_changes( hass: HomeAssistant, number_of_states: int, entity_id: str ) -> MutableMapping[str, list[State]]: """Return the last number_of_states.""" + has_last_reported = ( + recorder.get_instance(hass).schema_version >= LAST_REPORTED_SCHEMA_VERSION + ) entity_id_lower = entity_id.lower() entity_ids = [entity_id_lower] @@ -524,8 +534,9 @@ def get_last_state_changes( else: stmt = lambda_stmt( lambda: _get_last_state_changes_multiple_stmt( - number_of_states, metadata_id + number_of_states, metadata_id, has_last_reported ), + track_on=[has_last_reported], ) states = list(execute_stmt_lambda_element(session, stmt, orm_rows=False)) return cast( diff --git a/tests/components/recorder/db_schema_42.py b/tests/components/recorder/db_schema_42.py new file mode 100644 index 00000000000..b8e49aef592 --- /dev/null +++ b/tests/components/recorder/db_schema_42.py @@ -0,0 +1,838 @@ +"""Models for SQLAlchemy. + +This file contains the model definitions for schema version 42. +It is used to test the schema migration logic. 
+""" + +from __future__ import annotations + +from collections.abc import Callable +from datetime import datetime, timedelta +import logging +import time +from typing import Any, Self, cast + +import ciso8601 +from fnv_hash_fast import fnv1a_32 +from sqlalchemy import ( + CHAR, + JSON, + BigInteger, + Boolean, + ColumnElement, + DateTime, + Float, + ForeignKey, + Identity, + Index, + Integer, + LargeBinary, + SmallInteger, + String, + Text, + case, + type_coerce, +) +from sqlalchemy.dialects import mysql, oracle, postgresql, sqlite +from sqlalchemy.engine.interfaces import Dialect +from sqlalchemy.ext.compiler import compiles +from sqlalchemy.orm import DeclarativeBase, Mapped, aliased, mapped_column, relationship +from sqlalchemy.types import TypeDecorator + +from homeassistant.components.recorder.const import ( + ALL_DOMAIN_EXCLUDE_ATTRS, + SupportedDialect, +) +from homeassistant.components.recorder.models import ( + StatisticData, + StatisticDataTimestamp, + StatisticMetaData, + bytes_to_ulid_or_none, + bytes_to_uuid_hex_or_none, + datetime_to_timestamp_or_none, + process_timestamp, + ulid_to_bytes_or_none, + uuid_hex_to_bytes_or_none, +) +from homeassistant.const import ( + MAX_LENGTH_EVENT_EVENT_TYPE, + MAX_LENGTH_STATE_ENTITY_ID, + MAX_LENGTH_STATE_STATE, +) +from homeassistant.core import Context, Event, EventOrigin, State +from homeassistant.helpers.json import JSON_DUMP, json_bytes, json_bytes_strip_null +import homeassistant.util.dt as dt_util +from homeassistant.util.json import ( + JSON_DECODE_EXCEPTIONS, + json_loads, + json_loads_object, +) + + +# SQLAlchemy Schema +class Base(DeclarativeBase): + """Base class for tables.""" + + +SCHEMA_VERSION = 42 + +_LOGGER = logging.getLogger(__name__) + +TABLE_EVENTS = "events" +TABLE_EVENT_DATA = "event_data" +TABLE_EVENT_TYPES = "event_types" +TABLE_STATES = "states" +TABLE_STATE_ATTRIBUTES = "state_attributes" +TABLE_STATES_META = "states_meta" +TABLE_RECORDER_RUNS = "recorder_runs" +TABLE_SCHEMA_CHANGES = 
"schema_changes" +TABLE_STATISTICS = "statistics" +TABLE_STATISTICS_META = "statistics_meta" +TABLE_STATISTICS_RUNS = "statistics_runs" +TABLE_STATISTICS_SHORT_TERM = "statistics_short_term" + +STATISTICS_TABLES = ("statistics", "statistics_short_term") + +MAX_STATE_ATTRS_BYTES = 16384 +MAX_EVENT_DATA_BYTES = 32768 + +PSQL_DIALECT = SupportedDialect.POSTGRESQL + +ALL_TABLES = [ + TABLE_STATES, + TABLE_STATE_ATTRIBUTES, + TABLE_EVENTS, + TABLE_EVENT_DATA, + TABLE_EVENT_TYPES, + TABLE_RECORDER_RUNS, + TABLE_SCHEMA_CHANGES, + TABLE_STATES_META, + TABLE_STATISTICS, + TABLE_STATISTICS_META, + TABLE_STATISTICS_RUNS, + TABLE_STATISTICS_SHORT_TERM, +] + +TABLES_TO_CHECK = [ + TABLE_STATES, + TABLE_EVENTS, + TABLE_RECORDER_RUNS, + TABLE_SCHEMA_CHANGES, +] + +LAST_UPDATED_INDEX_TS = "ix_states_last_updated_ts" +METADATA_ID_LAST_UPDATED_INDEX_TS = "ix_states_metadata_id_last_updated_ts" +EVENTS_CONTEXT_ID_BIN_INDEX = "ix_events_context_id_bin" +STATES_CONTEXT_ID_BIN_INDEX = "ix_states_context_id_bin" +LEGACY_STATES_EVENT_ID_INDEX = "ix_states_event_id" +LEGACY_STATES_ENTITY_ID_LAST_UPDATED_INDEX = "ix_states_entity_id_last_updated_ts" +CONTEXT_ID_BIN_MAX_LENGTH = 16 + +MYSQL_COLLATE = "utf8mb4_unicode_ci" +MYSQL_DEFAULT_CHARSET = "utf8mb4" +MYSQL_ENGINE = "InnoDB" + +_DEFAULT_TABLE_ARGS = { + "mysql_default_charset": MYSQL_DEFAULT_CHARSET, + "mysql_collate": MYSQL_COLLATE, + "mysql_engine": MYSQL_ENGINE, + "mariadb_default_charset": MYSQL_DEFAULT_CHARSET, + "mariadb_collate": MYSQL_COLLATE, + "mariadb_engine": MYSQL_ENGINE, +} + + +class UnusedDateTime(DateTime): + """An unused column type that behaves like a datetime.""" + + +class Unused(CHAR): + """An unused column type that behaves like a string.""" + + +@compiles(UnusedDateTime, "mysql", "mariadb", "sqlite") # type: ignore[misc,no-untyped-call] +@compiles(Unused, "mysql", "mariadb", "sqlite") # type: ignore[misc,no-untyped-call] +def compile_char_zero(type_: TypeDecorator, compiler: Any, **kw: Any) -> str: + """Compile 
UnusedDateTime and Unused as CHAR(0) on mysql, mariadb, and sqlite.""" + return "CHAR(0)" # Uses 1 byte on MySQL (no change on sqlite) + + +@compiles(Unused, "postgresql") # type: ignore[misc,no-untyped-call] +def compile_char_one(type_: TypeDecorator, compiler: Any, **kw: Any) -> str: + """Compile Unused as CHAR(1) on postgresql.""" + return "CHAR(1)" # Uses 1 byte + + +class FAST_PYSQLITE_DATETIME(sqlite.DATETIME): + """Use ciso8601 to parse datetimes instead of sqlalchemy built-in regex.""" + + def result_processor(self, dialect, coltype): # type: ignore[no-untyped-def] + """Offload the datetime parsing to ciso8601.""" + return lambda value: None if value is None else ciso8601.parse_datetime(value) + + +class NativeLargeBinary(LargeBinary): + """A faster version of LargeBinary for engines that support python bytes natively.""" + + def result_processor(self, dialect, coltype): # type: ignore[no-untyped-def] + """No conversion needed for engines that support native bytes.""" + return None + + +# For MariaDB and MySQL we can use an unsigned integer type since it will fit 2**32 +# for sqlite and postgresql we use a bigint +UINT_32_TYPE = BigInteger().with_variant( + mysql.INTEGER(unsigned=True), # type: ignore[no-untyped-call] + "mysql", + "mariadb", +) +JSON_VARIANT_CAST = Text().with_variant( + postgresql.JSON(none_as_null=True), # type: ignore[no-untyped-call] + "postgresql", +) +JSONB_VARIANT_CAST = Text().with_variant( + postgresql.JSONB(none_as_null=True), # type: ignore[no-untyped-call] + "postgresql", +) +DATETIME_TYPE = ( + DateTime(timezone=True) + .with_variant(mysql.DATETIME(timezone=True, fsp=6), "mysql", "mariadb") # type: ignore[no-untyped-call] + .with_variant(FAST_PYSQLITE_DATETIME(), "sqlite") # type: ignore[no-untyped-call] +) +DOUBLE_TYPE = ( + Float() + .with_variant(mysql.DOUBLE(asdecimal=False), "mysql", "mariadb") # type: ignore[no-untyped-call] + .with_variant(oracle.DOUBLE_PRECISION(), "oracle") + 
.with_variant(postgresql.DOUBLE_PRECISION(), "postgresql") +) +UNUSED_LEGACY_COLUMN = Unused(0) +UNUSED_LEGACY_DATETIME_COLUMN = UnusedDateTime(timezone=True) +UNUSED_LEGACY_INTEGER_COLUMN = SmallInteger() +DOUBLE_PRECISION_TYPE_SQL = "DOUBLE PRECISION" +CONTEXT_BINARY_TYPE = LargeBinary(CONTEXT_ID_BIN_MAX_LENGTH).with_variant( + NativeLargeBinary(CONTEXT_ID_BIN_MAX_LENGTH), "mysql", "mariadb", "sqlite" +) + +TIMESTAMP_TYPE = DOUBLE_TYPE + + +class JSONLiteral(JSON): + """Teach SA how to literalize json.""" + + def literal_processor(self, dialect: Dialect) -> Callable[[Any], str]: + """Processor to convert a value to JSON.""" + + def process(value: Any) -> str: + """Dump json.""" + return JSON_DUMP(value) + + return process + + +EVENT_ORIGIN_ORDER = [EventOrigin.local, EventOrigin.remote] +EVENT_ORIGIN_TO_IDX = {origin: idx for idx, origin in enumerate(EVENT_ORIGIN_ORDER)} + + +class Events(Base): + """Event history data.""" + + __table_args__ = ( + # Used for fetching events at a specific time + # see logbook + Index( + "ix_events_event_type_id_time_fired_ts", "event_type_id", "time_fired_ts" + ), + Index( + EVENTS_CONTEXT_ID_BIN_INDEX, + "context_id_bin", + mysql_length=CONTEXT_ID_BIN_MAX_LENGTH, + mariadb_length=CONTEXT_ID_BIN_MAX_LENGTH, + ), + _DEFAULT_TABLE_ARGS, + ) + __tablename__ = TABLE_EVENTS + event_id: Mapped[int] = mapped_column(Integer, Identity(), primary_key=True) + event_type: Mapped[str | None] = mapped_column(UNUSED_LEGACY_COLUMN) + event_data: Mapped[str | None] = mapped_column(UNUSED_LEGACY_COLUMN) + origin: Mapped[str | None] = mapped_column(UNUSED_LEGACY_COLUMN) + origin_idx: Mapped[int | None] = mapped_column(SmallInteger) + time_fired: Mapped[datetime | None] = mapped_column(UNUSED_LEGACY_DATETIME_COLUMN) + time_fired_ts: Mapped[float | None] = mapped_column(TIMESTAMP_TYPE, index=True) + context_id: Mapped[str | None] = mapped_column(UNUSED_LEGACY_COLUMN) + context_user_id: Mapped[str | None] = mapped_column(UNUSED_LEGACY_COLUMN) + 
context_parent_id: Mapped[str | None] = mapped_column(UNUSED_LEGACY_COLUMN) + data_id: Mapped[int | None] = mapped_column( + Integer, ForeignKey("event_data.data_id"), index=True + ) + context_id_bin: Mapped[bytes | None] = mapped_column(CONTEXT_BINARY_TYPE) + context_user_id_bin: Mapped[bytes | None] = mapped_column(CONTEXT_BINARY_TYPE) + context_parent_id_bin: Mapped[bytes | None] = mapped_column(CONTEXT_BINARY_TYPE) + event_type_id: Mapped[int | None] = mapped_column( + Integer, ForeignKey("event_types.event_type_id") + ) + event_data_rel: Mapped[EventData | None] = relationship("EventData") + event_type_rel: Mapped[EventTypes | None] = relationship("EventTypes") + + def __repr__(self) -> str: + """Return string representation of instance for debugging.""" + return ( + "" + ) + + @property + def _time_fired_isotime(self) -> str | None: + """Return time_fired as an isotime string.""" + date_time: datetime | None + if self.time_fired_ts is not None: + date_time = dt_util.utc_from_timestamp(self.time_fired_ts) + else: + date_time = process_timestamp(self.time_fired) + if date_time is None: + return None + return date_time.isoformat(sep=" ", timespec="seconds") + + @staticmethod + def from_event(event: Event) -> Events: + """Create an event database object from a native event.""" + return Events( + event_type=None, + event_data=None, + origin_idx=EVENT_ORIGIN_TO_IDX.get(event.origin), + time_fired=None, + time_fired_ts=event.time_fired_timestamp, + context_id=None, + context_id_bin=ulid_to_bytes_or_none(event.context.id), + context_user_id=None, + context_user_id_bin=uuid_hex_to_bytes_or_none(event.context.user_id), + context_parent_id=None, + context_parent_id_bin=ulid_to_bytes_or_none(event.context.parent_id), + ) + + def to_native(self, validate_entity_id: bool = True) -> Event | None: + """Convert to a native HA Event.""" + context = Context( + id=bytes_to_ulid_or_none(self.context_id_bin), + user_id=bytes_to_uuid_hex_or_none(self.context_user_id_bin), + 
parent_id=bytes_to_ulid_or_none(self.context_parent_id_bin), + ) + try: + return Event( + self.event_type or "", + json_loads_object(self.event_data) if self.event_data else {}, + EventOrigin(self.origin) + if self.origin + else EVENT_ORIGIN_ORDER[self.origin_idx or 0], + dt_util.utc_from_timestamp(self.time_fired_ts or 0), + context=context, + ) + except JSON_DECODE_EXCEPTIONS: + # When json_loads fails + _LOGGER.exception("Error converting to event: %s", self) + return None + + +class EventData(Base): + """Event data history.""" + + __table_args__ = (_DEFAULT_TABLE_ARGS,) + __tablename__ = TABLE_EVENT_DATA + data_id: Mapped[int] = mapped_column(Integer, Identity(), primary_key=True) + hash: Mapped[int | None] = mapped_column(UINT_32_TYPE, index=True) + # Note that this is not named attributes to avoid confusion with the states table + shared_data: Mapped[str | None] = mapped_column( + Text().with_variant(mysql.LONGTEXT, "mysql", "mariadb") + ) + + def __repr__(self) -> str: + """Return string representation of instance for debugging.""" + return ( + "" + ) + + @staticmethod + def shared_data_bytes_from_event( + event: Event, dialect: SupportedDialect | None + ) -> bytes: + """Create shared_data from an event.""" + if dialect == SupportedDialect.POSTGRESQL: + bytes_result = json_bytes_strip_null(event.data) + bytes_result = json_bytes(event.data) + if len(bytes_result) > MAX_EVENT_DATA_BYTES: + _LOGGER.warning( + "Event data for %s exceed maximum size of %s bytes. 
" + "This can cause database performance issues; Event data " + "will not be stored", + event.event_type, + MAX_EVENT_DATA_BYTES, + ) + return b"{}" + return bytes_result + + @staticmethod + def hash_shared_data_bytes(shared_data_bytes: bytes) -> int: + """Return the hash of json encoded shared data.""" + return fnv1a_32(shared_data_bytes) + + def to_native(self) -> dict[str, Any]: + """Convert to an event data dictionary.""" + shared_data = self.shared_data + if shared_data is None: + return {} + try: + return cast(dict[str, Any], json_loads(shared_data)) + except JSON_DECODE_EXCEPTIONS: + _LOGGER.exception("Error converting row to event data: %s", self) + return {} + + +class EventTypes(Base): + """Event type history.""" + + __table_args__ = (_DEFAULT_TABLE_ARGS,) + __tablename__ = TABLE_EVENT_TYPES + event_type_id: Mapped[int] = mapped_column(Integer, Identity(), primary_key=True) + event_type: Mapped[str | None] = mapped_column( + String(MAX_LENGTH_EVENT_EVENT_TYPE), index=True, unique=True + ) + + def __repr__(self) -> str: + """Return string representation of instance for debugging.""" + return ( + "" + ) + + +class States(Base): + """State change history.""" + + __table_args__ = ( + # Used for fetching the state of entities at a specific time + # (get_states in history.py) + Index(METADATA_ID_LAST_UPDATED_INDEX_TS, "metadata_id", "last_updated_ts"), + Index( + STATES_CONTEXT_ID_BIN_INDEX, + "context_id_bin", + mysql_length=CONTEXT_ID_BIN_MAX_LENGTH, + mariadb_length=CONTEXT_ID_BIN_MAX_LENGTH, + ), + _DEFAULT_TABLE_ARGS, + ) + __tablename__ = TABLE_STATES + state_id: Mapped[int] = mapped_column(Integer, Identity(), primary_key=True) + entity_id: Mapped[str | None] = mapped_column(UNUSED_LEGACY_COLUMN) + state: Mapped[str | None] = mapped_column(String(MAX_LENGTH_STATE_STATE)) + attributes: Mapped[str | None] = mapped_column(UNUSED_LEGACY_COLUMN) + event_id: Mapped[int | None] = mapped_column(UNUSED_LEGACY_INTEGER_COLUMN) + last_changed: Mapped[datetime | 
None] = mapped_column(UNUSED_LEGACY_DATETIME_COLUMN) + last_changed_ts: Mapped[float | None] = mapped_column(TIMESTAMP_TYPE) + last_updated: Mapped[datetime | None] = mapped_column(UNUSED_LEGACY_DATETIME_COLUMN) + last_updated_ts: Mapped[float | None] = mapped_column( + TIMESTAMP_TYPE, default=time.time, index=True + ) + old_state_id: Mapped[int | None] = mapped_column( + Integer, ForeignKey("states.state_id"), index=True + ) + attributes_id: Mapped[int | None] = mapped_column( + Integer, ForeignKey("state_attributes.attributes_id"), index=True + ) + context_id: Mapped[str | None] = mapped_column(UNUSED_LEGACY_COLUMN) + context_user_id: Mapped[str | None] = mapped_column(UNUSED_LEGACY_COLUMN) + context_parent_id: Mapped[str | None] = mapped_column(UNUSED_LEGACY_COLUMN) + origin_idx: Mapped[int | None] = mapped_column( + SmallInteger + ) # 0 is local, 1 is remote + old_state: Mapped[States | None] = relationship("States", remote_side=[state_id]) + state_attributes: Mapped[StateAttributes | None] = relationship("StateAttributes") + context_id_bin: Mapped[bytes | None] = mapped_column(CONTEXT_BINARY_TYPE) + context_user_id_bin: Mapped[bytes | None] = mapped_column(CONTEXT_BINARY_TYPE) + context_parent_id_bin: Mapped[bytes | None] = mapped_column(CONTEXT_BINARY_TYPE) + metadata_id: Mapped[int | None] = mapped_column( + Integer, ForeignKey("states_meta.metadata_id") + ) + states_meta_rel: Mapped[StatesMeta | None] = relationship("StatesMeta") + + def __repr__(self) -> str: + """Return string representation of instance for debugging.""" + return ( + f"" + ) + + @property + def _last_updated_isotime(self) -> str | None: + """Return last_updated as an isotime string.""" + date_time: datetime | None + if self.last_updated_ts is not None: + date_time = dt_util.utc_from_timestamp(self.last_updated_ts) + else: + date_time = process_timestamp(self.last_updated) + if date_time is None: + return None + return date_time.isoformat(sep=" ", timespec="seconds") + + @staticmethod + 
def from_event(event: Event) -> States: + """Create object from a state_changed event.""" + entity_id = event.data["entity_id"] + state: State | None = event.data.get("new_state") + dbstate = States( + entity_id=entity_id, + attributes=None, + context_id=None, + context_id_bin=ulid_to_bytes_or_none(event.context.id), + context_user_id=None, + context_user_id_bin=uuid_hex_to_bytes_or_none(event.context.user_id), + context_parent_id=None, + context_parent_id_bin=ulid_to_bytes_or_none(event.context.parent_id), + origin_idx=EVENT_ORIGIN_TO_IDX.get(event.origin), + last_updated=None, + last_changed=None, + ) + # None state means the state was removed from the state machine + if state is None: + dbstate.state = "" + dbstate.last_updated_ts = event.time_fired_timestamp + dbstate.last_changed_ts = None + return dbstate + + dbstate.state = state.state + dbstate.last_updated_ts = state.last_updated_timestamp + if state.last_updated == state.last_changed: + dbstate.last_changed_ts = None + else: + dbstate.last_changed_ts = state.last_changed_timestamp + + return dbstate + + def to_native(self, validate_entity_id: bool = True) -> State | None: + """Convert to an HA state object.""" + context = Context( + id=bytes_to_ulid_or_none(self.context_id_bin), + user_id=bytes_to_uuid_hex_or_none(self.context_user_id_bin), + parent_id=bytes_to_ulid_or_none(self.context_parent_id_bin), + ) + try: + attrs = json_loads_object(self.attributes) if self.attributes else {} + except JSON_DECODE_EXCEPTIONS: + # When json_loads fails + _LOGGER.exception("Error converting row to state: %s", self) + return None + if self.last_changed_ts is None or self.last_changed_ts == self.last_updated_ts: + last_changed = last_updated = dt_util.utc_from_timestamp( + self.last_updated_ts or 0 + ) + else: + last_updated = dt_util.utc_from_timestamp(self.last_updated_ts or 0) + last_changed = dt_util.utc_from_timestamp(self.last_changed_ts or 0) + return State( + self.entity_id or "", + self.state, # type: 
ignore[arg-type] + # Join the state_attributes table on attributes_id to get the attributes + # for newer states + attrs, + last_changed, + last_updated, + context=context, + validate_entity_id=validate_entity_id, + ) + + +class StateAttributes(Base): + """State attribute change history.""" + + __table_args__ = (_DEFAULT_TABLE_ARGS,) + __tablename__ = TABLE_STATE_ATTRIBUTES + attributes_id: Mapped[int] = mapped_column(Integer, Identity(), primary_key=True) + hash: Mapped[int | None] = mapped_column(UINT_32_TYPE, index=True) + # Note that this is not named attributes to avoid confusion with the states table + shared_attrs: Mapped[str | None] = mapped_column( + Text().with_variant(mysql.LONGTEXT, "mysql", "mariadb") + ) + + def __repr__(self) -> str: + """Return string representation of instance for debugging.""" + return ( + f"" + ) + + @staticmethod + def shared_attrs_bytes_from_event( + event: Event, + dialect: SupportedDialect | None, + ) -> bytes: + """Create shared_attrs from a state_changed event.""" + state: State | None = event.data.get("new_state") + # None state means the state was removed from the state machine + if state is None: + return b"{}" + if state_info := state.state_info: + exclude_attrs = { + *ALL_DOMAIN_EXCLUDE_ATTRS, + *state_info["unrecorded_attributes"], + } + else: + exclude_attrs = ALL_DOMAIN_EXCLUDE_ATTRS + encoder = json_bytes_strip_null if dialect == PSQL_DIALECT else json_bytes + bytes_result = encoder( + {k: v for k, v in state.attributes.items() if k not in exclude_attrs} + ) + if len(bytes_result) > MAX_STATE_ATTRS_BYTES: + _LOGGER.warning( + "State attributes for %s exceed maximum size of %s bytes. 
" + "This can cause database performance issues; Attributes " + "will not be stored", + state.entity_id, + MAX_STATE_ATTRS_BYTES, + ) + return b"{}" + return bytes_result + + @staticmethod + def hash_shared_attrs_bytes(shared_attrs_bytes: bytes) -> int: + """Return the hash of json encoded shared attributes.""" + return fnv1a_32(shared_attrs_bytes) + + def to_native(self) -> dict[str, Any]: + """Convert to a state attributes dictionary.""" + shared_attrs = self.shared_attrs + if shared_attrs is None: + return {} + try: + return cast(dict[str, Any], json_loads(shared_attrs)) + except JSON_DECODE_EXCEPTIONS: + # When json_loads fails + _LOGGER.exception("Error converting row to state attributes: %s", self) + return {} + + +class StatesMeta(Base): + """Metadata for states.""" + + __table_args__ = (_DEFAULT_TABLE_ARGS,) + __tablename__ = TABLE_STATES_META + metadata_id: Mapped[int] = mapped_column(Integer, Identity(), primary_key=True) + entity_id: Mapped[str | None] = mapped_column( + String(MAX_LENGTH_STATE_ENTITY_ID), index=True, unique=True + ) + + def __repr__(self) -> str: + """Return string representation of instance for debugging.""" + return ( + "" + ) + + +class StatisticsBase: + """Statistics base class.""" + + id: Mapped[int] = mapped_column(Integer, Identity(), primary_key=True) + created: Mapped[datetime | None] = mapped_column(UNUSED_LEGACY_DATETIME_COLUMN) + created_ts: Mapped[float | None] = mapped_column(TIMESTAMP_TYPE, default=time.time) + metadata_id: Mapped[int | None] = mapped_column( + Integer, + ForeignKey(f"{TABLE_STATISTICS_META}.id", ondelete="CASCADE"), + ) + start: Mapped[datetime | None] = mapped_column(UNUSED_LEGACY_DATETIME_COLUMN) + start_ts: Mapped[float | None] = mapped_column(TIMESTAMP_TYPE, index=True) + mean: Mapped[float | None] = mapped_column(DOUBLE_TYPE) + min: Mapped[float | None] = mapped_column(DOUBLE_TYPE) + max: Mapped[float | None] = mapped_column(DOUBLE_TYPE) + last_reset: Mapped[datetime | None] = 
mapped_column(UNUSED_LEGACY_DATETIME_COLUMN) + last_reset_ts: Mapped[float | None] = mapped_column(TIMESTAMP_TYPE) + state: Mapped[float | None] = mapped_column(DOUBLE_TYPE) + sum: Mapped[float | None] = mapped_column(DOUBLE_TYPE) + + duration: timedelta + + @classmethod + def from_stats(cls, metadata_id: int, stats: StatisticData) -> Self: + """Create object from a statistics with datatime objects.""" + return cls( # type: ignore[call-arg] + metadata_id=metadata_id, + created=None, + created_ts=time.time(), + start=None, + start_ts=dt_util.utc_to_timestamp(stats["start"]), + mean=stats.get("mean"), + min=stats.get("min"), + max=stats.get("max"), + last_reset=None, + last_reset_ts=datetime_to_timestamp_or_none(stats.get("last_reset")), + state=stats.get("state"), + sum=stats.get("sum"), + ) + + @classmethod + def from_stats_ts(cls, metadata_id: int, stats: StatisticDataTimestamp) -> Self: + """Create object from a statistics with timestamps.""" + return cls( # type: ignore[call-arg] + metadata_id=metadata_id, + created=None, + created_ts=time.time(), + start=None, + start_ts=stats["start_ts"], + mean=stats.get("mean"), + min=stats.get("min"), + max=stats.get("max"), + last_reset=None, + last_reset_ts=stats.get("last_reset_ts"), + state=stats.get("state"), + sum=stats.get("sum"), + ) + + +class Statistics(Base, StatisticsBase): + """Long term statistics.""" + + duration = timedelta(hours=1) + + __table_args__ = ( + # Used for fetching statistics for a certain entity at a specific time + Index( + "ix_statistics_statistic_id_start_ts", + "metadata_id", + "start_ts", + unique=True, + ), + ) + __tablename__ = TABLE_STATISTICS + + +class StatisticsShortTerm(Base, StatisticsBase): + """Short term statistics.""" + + duration = timedelta(minutes=5) + + __table_args__ = ( + # Used for fetching statistics for a certain entity at a specific time + Index( + "ix_statistics_short_term_statistic_id_start_ts", + "metadata_id", + "start_ts", + unique=True, + ), + ) + __tablename__ = 
TABLE_STATISTICS_SHORT_TERM + + +class StatisticsMeta(Base): + """Statistics meta data.""" + + __table_args__ = (_DEFAULT_TABLE_ARGS,) + __tablename__ = TABLE_STATISTICS_META + id: Mapped[int] = mapped_column(Integer, Identity(), primary_key=True) + statistic_id: Mapped[str | None] = mapped_column( + String(255), index=True, unique=True + ) + source: Mapped[str | None] = mapped_column(String(32)) + unit_of_measurement: Mapped[str | None] = mapped_column(String(255)) + has_mean: Mapped[bool | None] = mapped_column(Boolean) + has_sum: Mapped[bool | None] = mapped_column(Boolean) + name: Mapped[str | None] = mapped_column(String(255)) + + @staticmethod + def from_meta(meta: StatisticMetaData) -> StatisticsMeta: + """Create object from meta data.""" + return StatisticsMeta(**meta) + + +class RecorderRuns(Base): + """Representation of recorder run.""" + + __table_args__ = (Index("ix_recorder_runs_start_end", "start", "end"),) + __tablename__ = TABLE_RECORDER_RUNS + run_id: Mapped[int] = mapped_column(Integer, Identity(), primary_key=True) + start: Mapped[datetime] = mapped_column(DATETIME_TYPE, default=dt_util.utcnow) + end: Mapped[datetime | None] = mapped_column(DATETIME_TYPE) + closed_incorrect: Mapped[bool] = mapped_column(Boolean, default=False) + created: Mapped[datetime] = mapped_column(DATETIME_TYPE, default=dt_util.utcnow) + + def __repr__(self) -> str: + """Return string representation of instance for debugging.""" + end = ( + f"'{self.end.isoformat(sep=' ', timespec='seconds')}'" if self.end else None + ) + return ( + f"" + ) + + def to_native(self, validate_entity_id: bool = True) -> Self: + """Return self, native format is this model.""" + return self + + +class SchemaChanges(Base): + """Representation of schema version changes.""" + + __tablename__ = TABLE_SCHEMA_CHANGES + change_id: Mapped[int] = mapped_column(Integer, Identity(), primary_key=True) + schema_version: Mapped[int | None] = mapped_column(Integer) + changed: Mapped[datetime] = 
mapped_column(DATETIME_TYPE, default=dt_util.utcnow) + + def __repr__(self) -> str: + """Return string representation of instance for debugging.""" + return ( + "" + ) + + +class StatisticsRuns(Base): + """Representation of statistics run.""" + + __tablename__ = TABLE_STATISTICS_RUNS + run_id: Mapped[int] = mapped_column(Integer, Identity(), primary_key=True) + start: Mapped[datetime] = mapped_column(DATETIME_TYPE, index=True) + + def __repr__(self) -> str: + """Return string representation of instance for debugging.""" + return ( + f"" + ) + + +EVENT_DATA_JSON = type_coerce( + EventData.shared_data.cast(JSONB_VARIANT_CAST), JSONLiteral(none_as_null=True) +) +OLD_FORMAT_EVENT_DATA_JSON = type_coerce( + Events.event_data.cast(JSONB_VARIANT_CAST), JSONLiteral(none_as_null=True) +) + +SHARED_ATTRS_JSON = type_coerce( + StateAttributes.shared_attrs.cast(JSON_VARIANT_CAST), JSON(none_as_null=True) +) +OLD_FORMAT_ATTRS_JSON = type_coerce( + States.attributes.cast(JSON_VARIANT_CAST), JSON(none_as_null=True) +) + +ENTITY_ID_IN_EVENT: ColumnElement = EVENT_DATA_JSON["entity_id"] +OLD_ENTITY_ID_IN_EVENT: ColumnElement = OLD_FORMAT_EVENT_DATA_JSON["entity_id"] +DEVICE_ID_IN_EVENT: ColumnElement = EVENT_DATA_JSON["device_id"] +OLD_STATE = aliased(States, name="old_state") + +SHARED_ATTR_OR_LEGACY_ATTRIBUTES = case( + (StateAttributes.shared_attrs.is_(None), States.attributes), + else_=StateAttributes.shared_attrs, +).label("attributes") +SHARED_DATA_OR_LEGACY_EVENT_DATA = case( + (EventData.shared_data.is_(None), Events.event_data), else_=EventData.shared_data +).label("event_data") diff --git a/tests/components/recorder/test_history_db_schema_42.py b/tests/components/recorder/test_history_db_schema_42.py new file mode 100644 index 00000000000..98ed6089de6 --- /dev/null +++ b/tests/components/recorder/test_history_db_schema_42.py @@ -0,0 +1,1278 @@ +"""The tests the History component.""" + +from __future__ import annotations + +from collections.abc import Callable +from copy 
import copy +from datetime import datetime, timedelta +import json +from unittest.mock import patch, sentinel + +from freezegun import freeze_time +import pytest +from sqlalchemy import text + +from homeassistant.components import recorder +from homeassistant.components.recorder import Recorder, get_instance, history +from homeassistant.components.recorder.filters import Filters +from homeassistant.components.recorder.history import legacy +from homeassistant.components.recorder.models import process_timestamp +from homeassistant.components.recorder.models.legacy import ( + LegacyLazyState, + LegacyLazyStatePreSchema31, +) +from homeassistant.components.recorder.util import session_scope +import homeassistant.core as ha +from homeassistant.core import HomeAssistant, State +from homeassistant.helpers.json import JSONEncoder +import homeassistant.util.dt as dt_util + +from .common import ( + assert_dict_of_states_equal_without_context_and_last_changed, + assert_multiple_states_equal_without_context, + assert_multiple_states_equal_without_context_and_last_changed, + assert_states_equal_without_context, + async_recorder_block_till_done, + async_wait_recording_done, + old_db_schema, + wait_recording_done, +) +from .db_schema_42 import Events, RecorderRuns, StateAttributes, States, StatesMeta + +from tests.typing import RecorderInstanceGenerator + + +@pytest.fixture(autouse=True) +def db_schema_42(): + """Fixture to initialize the db with the old schema 42.""" + with old_db_schema("42"): + yield + + +async def _async_get_states( + hass: HomeAssistant, + utc_point_in_time: datetime, + entity_ids: list[str] | None = None, + run: RecorderRuns | None = None, + no_attributes: bool = False, +): + """Get states from the database.""" + + def _get_states_with_session(): + with session_scope(hass=hass, read_only=True) as session: + attr_cache = {} + pre_31_schema = get_instance(hass).schema_version < 31 + return [ + LegacyLazyStatePreSchema31(row, attr_cache, None) + if 
pre_31_schema + else LegacyLazyState( + row, + attr_cache, + None, + row.entity_id, + ) + for row in legacy._get_rows_with_session( + hass, + session, + utc_point_in_time, + entity_ids, + run, + no_attributes, + ) + ] + + return await recorder.get_instance(hass).async_add_executor_job( + _get_states_with_session + ) + + +def _add_db_entries( + hass: ha.HomeAssistant, point: datetime, entity_ids: list[str] +) -> None: + with session_scope(hass=hass) as session: + for idx, entity_id in enumerate(entity_ids): + session.add( + Events( + event_id=1001 + idx, + event_type="state_changed", + event_data="{}", + origin="LOCAL", + time_fired=point, + ) + ) + session.add( + States( + entity_id=entity_id, + state="on", + attributes='{"name":"the light"}', + last_changed=None, + last_updated=point, + event_id=1001 + idx, + attributes_id=1002 + idx, + ) + ) + session.add( + StateAttributes( + shared_attrs='{"name":"the shared light"}', + hash=1234 + idx, + attributes_id=1002 + idx, + ) + ) + + +def test_get_full_significant_states_with_session_entity_no_matches( + hass_recorder: Callable[..., HomeAssistant], +) -> None: + """Test getting states at a specific point in time for entities that never have been recorded.""" + hass = hass_recorder() + now = dt_util.utcnow() + time_before_recorder_ran = now - timedelta(days=1000) + with session_scope(hass=hass, read_only=True) as session: + assert ( + history.get_full_significant_states_with_session( + hass, session, time_before_recorder_ran, now, entity_ids=["demo.id"] + ) + == {} + ) + assert ( + history.get_full_significant_states_with_session( + hass, + session, + time_before_recorder_ran, + now, + entity_ids=["demo.id", "demo.id2"], + ) + == {} + ) + + +def test_significant_states_with_session_entity_minimal_response_no_matches( + hass_recorder: Callable[..., HomeAssistant], +) -> None: + """Test getting states at a specific point in time for entities that never have been recorded.""" + hass = hass_recorder() + now = 
dt_util.utcnow() + time_before_recorder_ran = now - timedelta(days=1000) + with session_scope(hass=hass, read_only=True) as session: + assert ( + history.get_significant_states_with_session( + hass, + session, + time_before_recorder_ran, + now, + entity_ids=["demo.id"], + minimal_response=True, + ) + == {} + ) + assert ( + history.get_significant_states_with_session( + hass, + session, + time_before_recorder_ran, + now, + entity_ids=["demo.id", "demo.id2"], + minimal_response=True, + ) + == {} + ) + + +def test_significant_states_with_session_single_entity( + hass_recorder: Callable[..., HomeAssistant], +) -> None: + """Test get_significant_states_with_session with a single entity.""" + hass = hass_recorder() + hass.states.set("demo.id", "any", {"attr": True}) + hass.states.set("demo.id", "any2", {"attr": True}) + wait_recording_done(hass) + now = dt_util.utcnow() + with session_scope(hass=hass, read_only=True) as session: + states = history.get_significant_states_with_session( + hass, + session, + now - timedelta(days=1), + now, + entity_ids=["demo.id"], + minimal_response=False, + ) + assert len(states["demo.id"]) == 2 + + +@pytest.mark.parametrize( + ("attributes", "no_attributes", "limit"), + [ + ({"attr": True}, False, 5000), + ({}, True, 5000), + ({"attr": True}, False, 3), + ({}, True, 3), + ], +) +def test_state_changes_during_period( + hass_recorder: Callable[..., HomeAssistant], attributes, no_attributes, limit +) -> None: + """Test state change during period.""" + hass = hass_recorder() + entity_id = "media_player.test" + + def set_state(state): + """Set the state.""" + hass.states.set(entity_id, state, attributes) + wait_recording_done(hass) + return hass.states.get(entity_id) + + start = dt_util.utcnow() + point = start + timedelta(seconds=1) + end = point + timedelta(seconds=1) + + with freeze_time(start) as freezer: + set_state("idle") + set_state("YouTube") + + freezer.move_to(point) + states = [ + set_state("idle"), + set_state("Netflix"), + 
set_state("Plex"), + set_state("YouTube"), + ] + + freezer.move_to(end) + set_state("Netflix") + set_state("Plex") + + hist = history.state_changes_during_period( + hass, start, end, entity_id, no_attributes, limit=limit + ) + + assert_multiple_states_equal_without_context(states[:limit], hist[entity_id]) + + +def test_state_changes_during_period_last_reported( + hass_recorder: Callable[..., HomeAssistant], +) -> None: + """Test state change during period.""" + hass = hass_recorder() + entity_id = "media_player.test" + + def set_state(state): + """Set the state.""" + hass.states.set(entity_id, state) + wait_recording_done(hass) + return ha.State.from_dict(hass.states.get(entity_id).as_dict()) + + start = dt_util.utcnow() + point1 = start + timedelta(seconds=1) + point2 = point1 + timedelta(seconds=1) + end = point2 + timedelta(seconds=1) + + with freeze_time(start) as freezer: + set_state("idle") + + freezer.move_to(point1) + states = [set_state("YouTube")] + + freezer.move_to(point2) + set_state("YouTube") + + freezer.move_to(end) + set_state("Netflix") + + hist = history.state_changes_during_period(hass, start, end, entity_id) + + assert_multiple_states_equal_without_context(states, hist[entity_id]) + + +def test_state_changes_during_period_descending( + hass_recorder: Callable[..., HomeAssistant], +) -> None: + """Test state change during period descending.""" + hass = hass_recorder() + entity_id = "media_player.test" + + def set_state(state): + """Set the state.""" + hass.states.set(entity_id, state, {"any": 1}) + wait_recording_done(hass) + return hass.states.get(entity_id) + + start = dt_util.utcnow().replace(microsecond=0) + point = start + timedelta(seconds=1) + point2 = start + timedelta(seconds=1, microseconds=100) + point3 = start + timedelta(seconds=1, microseconds=200) + point4 = start + timedelta(seconds=1, microseconds=300) + end = point + timedelta(seconds=1, microseconds=400) + + with freeze_time(start) as freezer: + set_state("idle") + 
set_state("YouTube") + + freezer.move_to(point) + states = [set_state("idle")] + + freezer.move_to(point2) + states.append(set_state("Netflix")) + + freezer.move_to(point3) + states.append(set_state("Plex")) + + freezer.move_to(point4) + states.append(set_state("YouTube")) + + freezer.move_to(end) + set_state("Netflix") + set_state("Plex") + + hist = history.state_changes_during_period( + hass, start, end, entity_id, no_attributes=False, descending=False + ) + + assert_multiple_states_equal_without_context(states, hist[entity_id]) + + hist = history.state_changes_during_period( + hass, start, end, entity_id, no_attributes=False, descending=True + ) + assert_multiple_states_equal_without_context( + states, list(reversed(list(hist[entity_id]))) + ) + + start_time = point2 + timedelta(microseconds=10) + hist = history.state_changes_during_period( + hass, + start_time, # Pick a point where we will generate a start time state + end, + entity_id, + no_attributes=False, + descending=True, + include_start_time_state=True, + ) + hist_states = list(hist[entity_id]) + assert hist_states[-1].last_updated == start_time + assert hist_states[-1].last_changed == start_time + assert len(hist_states) == 3 + # Make sure they are in descending order + assert ( + hist_states[0].last_updated + > hist_states[1].last_updated + > hist_states[2].last_updated + ) + assert ( + hist_states[0].last_changed + > hist_states[1].last_changed + > hist_states[2].last_changed + ) + hist = history.state_changes_during_period( + hass, + start_time, # Pick a point where we will generate a start time state + end, + entity_id, + no_attributes=False, + descending=False, + include_start_time_state=True, + ) + hist_states = list(hist[entity_id]) + assert hist_states[0].last_updated == start_time + assert hist_states[0].last_changed == start_time + assert len(hist_states) == 3 + # Make sure they are in ascending order + assert ( + hist_states[0].last_updated + < hist_states[1].last_updated + < 
hist_states[2].last_updated + ) + assert ( + hist_states[0].last_changed + < hist_states[1].last_changed + < hist_states[2].last_changed + ) + + +def test_get_last_state_changes(hass_recorder: Callable[..., HomeAssistant]) -> None: + """Test number of state changes.""" + hass = hass_recorder() + entity_id = "sensor.test" + + def set_state(state): + """Set the state.""" + hass.states.set(entity_id, state) + wait_recording_done(hass) + return hass.states.get(entity_id) + + start = dt_util.utcnow() - timedelta(minutes=2) + point = start + timedelta(minutes=1) + point2 = point + timedelta(minutes=1, seconds=1) + states = [] + + with freeze_time(start) as freezer: + set_state("1") + + freezer.move_to(point) + states.append(set_state("2")) + + freezer.move_to(point2) + states.append(set_state("3")) + + hist = history.get_last_state_changes(hass, 2, entity_id) + + assert_multiple_states_equal_without_context(states, hist[entity_id]) + + +def test_get_last_state_changes_last_reported( + hass_recorder: Callable[..., HomeAssistant], +) -> None: + """Test number of state changes.""" + hass = hass_recorder() + entity_id = "sensor.test" + + def set_state(state): + """Set the state.""" + hass.states.set(entity_id, state) + wait_recording_done(hass) + return ha.State.from_dict(hass.states.get(entity_id).as_dict()) + + start = dt_util.utcnow() - timedelta(minutes=2) + point = start + timedelta(minutes=1) + point2 = point + timedelta(minutes=1, seconds=1) + states = [] + + with freeze_time(start) as freezer: + states.append(set_state("1")) + + freezer.move_to(point) + set_state("1") + + freezer.move_to(point2) + states.append(set_state("2")) + + hist = history.get_last_state_changes(hass, 2, entity_id) + + assert_multiple_states_equal_without_context(states, hist[entity_id]) + + +def test_get_last_state_change(hass_recorder: Callable[..., HomeAssistant]) -> None: + """Test getting the last state change for an entity.""" + hass = hass_recorder() + entity_id = "sensor.test" + + def 
set_state(state): + """Set the state.""" + hass.states.set(entity_id, state) + wait_recording_done(hass) + return hass.states.get(entity_id) + + start = dt_util.utcnow() - timedelta(minutes=2) + point = start + timedelta(minutes=1) + point2 = point + timedelta(minutes=1, seconds=1) + states = [] + + with freeze_time(start) as freezer: + set_state("1") + + freezer.move_to(point) + set_state("2") + + freezer.move_to(point2) + states.append(set_state("3")) + + hist = history.get_last_state_changes(hass, 1, entity_id) + + assert_multiple_states_equal_without_context(states, hist[entity_id]) + + +def test_ensure_state_can_be_copied( + hass_recorder: Callable[..., HomeAssistant], +) -> None: + """Ensure a state can pass though copy(). + + The filter integration uses copy() on states + from history. + """ + hass = hass_recorder() + entity_id = "sensor.test" + + def set_state(state): + """Set the state.""" + hass.states.set(entity_id, state) + wait_recording_done(hass) + return hass.states.get(entity_id) + + start = dt_util.utcnow() - timedelta(minutes=2) + point = start + timedelta(minutes=1) + + with freeze_time(start) as freezer: + set_state("1") + + freezer.move_to(point) + set_state("2") + + hist = history.get_last_state_changes(hass, 2, entity_id) + + assert_states_equal_without_context(copy(hist[entity_id][0]), hist[entity_id][0]) + assert_states_equal_without_context(copy(hist[entity_id][1]), hist[entity_id][1]) + + +def test_get_significant_states(hass_recorder: Callable[..., HomeAssistant]) -> None: + """Test that only significant states are returned. + + We should get back every thermostat change that + includes an attribute change, but only the state updates for + media player (attribute changes are not significant and not returned). 
+ """ + hass = hass_recorder() + zero, four, states = record_states(hass) + hist = history.get_significant_states(hass, zero, four, entity_ids=list(states)) + assert_dict_of_states_equal_without_context_and_last_changed(states, hist) + + +def test_get_significant_states_minimal_response( + hass_recorder: Callable[..., HomeAssistant], +) -> None: + """Test that only significant states are returned. + + When minimal responses is set only the first and + last states return a complete state. + + We should get back every thermostat change that + includes an attribute change, but only the state updates for + media player (attribute changes are not significant and not returned). + """ + hass = hass_recorder() + zero, four, states = record_states(hass) + hist = history.get_significant_states( + hass, zero, four, minimal_response=True, entity_ids=list(states) + ) + entites_with_reducable_states = [ + "media_player.test", + "media_player.test3", + ] + + # All states for media_player.test state are reduced + # down to last_changed and state when minimal_response + # is set except for the first state. + # is set. 
We use JSONEncoder to make sure that are + # pre-encoded last_changed is always the same as what + # will happen with encoding a native state + for entity_id in entites_with_reducable_states: + entity_states = states[entity_id] + for state_idx in range(1, len(entity_states)): + input_state = entity_states[state_idx] + orig_last_changed = orig_last_changed = json.dumps( + process_timestamp(input_state.last_changed), + cls=JSONEncoder, + ).replace('"', "") + orig_state = input_state.state + entity_states[state_idx] = { + "last_changed": orig_last_changed, + "state": orig_state, + } + + assert len(hist) == len(states) + assert_states_equal_without_context( + states["media_player.test"][0], hist["media_player.test"][0] + ) + assert states["media_player.test"][1] == hist["media_player.test"][1] + assert states["media_player.test"][2] == hist["media_player.test"][2] + + assert_multiple_states_equal_without_context( + states["media_player.test2"], hist["media_player.test2"] + ) + assert_states_equal_without_context( + states["media_player.test3"][0], hist["media_player.test3"][0] + ) + assert states["media_player.test3"][1] == hist["media_player.test3"][1] + + assert_multiple_states_equal_without_context( + states["script.can_cancel_this_one"], hist["script.can_cancel_this_one"] + ) + assert_multiple_states_equal_without_context_and_last_changed( + states["thermostat.test"], hist["thermostat.test"] + ) + assert_multiple_states_equal_without_context_and_last_changed( + states["thermostat.test2"], hist["thermostat.test2"] + ) + + +@pytest.mark.parametrize("time_zone", ["Europe/Berlin", "US/Hawaii", "UTC"]) +def test_get_significant_states_with_initial( + time_zone, hass_recorder: Callable[..., HomeAssistant] +) -> None: + """Test that only significant states are returned. + + We should get back every thermostat change that + includes an attribute change, but only the state updates for + media player (attribute changes are not significant and not returned). 
+ """ + hass = hass_recorder() + hass.config.set_time_zone(time_zone) + zero, four, states = record_states(hass) + one_and_half = zero + timedelta(seconds=1.5) + for entity_id in states: + if entity_id == "media_player.test": + states[entity_id] = states[entity_id][1:] + for state in states[entity_id]: + # If the state is recorded before the start time + # start it will have its last_updated and last_changed + # set to the start time. + if state.last_updated < one_and_half: + state.last_updated = one_and_half + state.last_changed = one_and_half + + hist = history.get_significant_states( + hass, one_and_half, four, include_start_time_state=True, entity_ids=list(states) + ) + assert_dict_of_states_equal_without_context_and_last_changed(states, hist) + + +def test_get_significant_states_without_initial( + hass_recorder: Callable[..., HomeAssistant], +) -> None: + """Test that only significant states are returned. + + We should get back every thermostat change that + includes an attribute change, but only the state updates for + media player (attribute changes are not significant and not returned). 
+ """ + hass = hass_recorder() + zero, four, states = record_states(hass) + one = zero + timedelta(seconds=1) + one_with_microsecond = zero + timedelta(seconds=1, microseconds=1) + one_and_half = zero + timedelta(seconds=1.5) + for entity_id in states: + states[entity_id] = list( + filter( + lambda s: s.last_changed != one + and s.last_changed != one_with_microsecond, + states[entity_id], + ) + ) + del states["media_player.test2"] + del states["thermostat.test3"] + + hist = history.get_significant_states( + hass, + one_and_half, + four, + include_start_time_state=False, + entity_ids=list(states), + ) + assert_dict_of_states_equal_without_context_and_last_changed(states, hist) + + +def test_get_significant_states_entity_id( + hass_recorder: Callable[..., HomeAssistant], +) -> None: + """Test that only significant states are returned for one entity.""" + hass = hass_recorder() + zero, four, states = record_states(hass) + del states["media_player.test2"] + del states["media_player.test3"] + del states["thermostat.test"] + del states["thermostat.test2"] + del states["thermostat.test3"] + del states["script.can_cancel_this_one"] + + hist = history.get_significant_states(hass, zero, four, ["media_player.test"]) + assert_dict_of_states_equal_without_context_and_last_changed(states, hist) + + +def test_get_significant_states_multiple_entity_ids( + hass_recorder: Callable[..., HomeAssistant], +) -> None: + """Test that only significant states are returned for one entity.""" + hass = hass_recorder() + zero, four, states = record_states(hass) + + hist = history.get_significant_states( + hass, + zero, + four, + ["media_player.test", "thermostat.test"], + ) + + assert_multiple_states_equal_without_context_and_last_changed( + states["media_player.test"], hist["media_player.test"] + ) + assert_multiple_states_equal_without_context_and_last_changed( + states["thermostat.test"], hist["thermostat.test"] + ) + + +def test_get_significant_states_are_ordered( + hass_recorder: 
Callable[..., HomeAssistant], +) -> None: + """Test order of results from get_significant_states. + + When entity ids are given, the results should be returned with the data + in the same order. + """ + hass = hass_recorder() + zero, four, _states = record_states(hass) + entity_ids = ["media_player.test", "media_player.test2"] + hist = history.get_significant_states(hass, zero, four, entity_ids) + assert list(hist.keys()) == entity_ids + entity_ids = ["media_player.test2", "media_player.test"] + hist = history.get_significant_states(hass, zero, four, entity_ids) + assert list(hist.keys()) == entity_ids + + +def test_get_significant_states_only( + hass_recorder: Callable[..., HomeAssistant], +) -> None: + """Test significant states when significant_states_only is set.""" + hass = hass_recorder() + entity_id = "sensor.test" + + def set_state(state, **kwargs): + """Set the state.""" + hass.states.set(entity_id, state, **kwargs) + wait_recording_done(hass) + return hass.states.get(entity_id) + + start = dt_util.utcnow() - timedelta(minutes=4) + points = [start + timedelta(minutes=i) for i in range(1, 4)] + + states = [] + with freeze_time(start) as freezer: + set_state("123", attributes={"attribute": 10.64}) + + freezer.move_to(points[0]) + # Attributes are different, state not + states.append(set_state("123", attributes={"attribute": 21.42})) + + freezer.move_to(points[1]) + # state is different, attributes not + states.append(set_state("32", attributes={"attribute": 21.42})) + + freezer.move_to(points[2]) + # everything is different + states.append(set_state("412", attributes={"attribute": 54.23})) + + hist = history.get_significant_states( + hass, + start, + significant_changes_only=True, + entity_ids=list({state.entity_id for state in states}), + ) + + assert len(hist[entity_id]) == 2 + assert not any( + state.last_updated == states[0].last_updated for state in hist[entity_id] + ) + assert any( + state.last_updated == states[1].last_updated for state in 
hist[entity_id] + ) + assert any( + state.last_updated == states[2].last_updated for state in hist[entity_id] + ) + + hist = history.get_significant_states( + hass, + start, + significant_changes_only=False, + entity_ids=list({state.entity_id for state in states}), + ) + + assert len(hist[entity_id]) == 3 + assert_multiple_states_equal_without_context_and_last_changed( + states, hist[entity_id] + ) + + +async def test_get_significant_states_only_minimal_response( + recorder_mock: Recorder, hass: HomeAssistant +) -> None: + """Test significant states when significant_states_only is True.""" + now = dt_util.utcnow() + await async_recorder_block_till_done(hass) + hass.states.async_set("sensor.test", "on", attributes={"any": "attr"}) + await async_recorder_block_till_done(hass) + hass.states.async_set("sensor.test", "off", attributes={"any": "attr"}) + await async_recorder_block_till_done(hass) + hass.states.async_set("sensor.test", "off", attributes={"any": "changed"}) + await async_recorder_block_till_done(hass) + hass.states.async_set("sensor.test", "off", attributes={"any": "again"}) + await async_recorder_block_till_done(hass) + hass.states.async_set("sensor.test", "on", attributes={"any": "attr"}) + await async_wait_recording_done(hass) + + hist = history.get_significant_states( + hass, + now, + minimal_response=True, + significant_changes_only=False, + entity_ids=["sensor.test"], + ) + assert len(hist["sensor.test"]) == 3 + + +def record_states(hass) -> tuple[datetime, datetime, dict[str, list[State]]]: + """Record some test states. + + We inject a bunch of state updates from media player, zone and + thermostat. 
+ """ + mp = "media_player.test" + mp2 = "media_player.test2" + mp3 = "media_player.test3" + therm = "thermostat.test" + therm2 = "thermostat.test2" + therm3 = "thermostat.test3" + zone = "zone.home" + script_c = "script.can_cancel_this_one" + + def set_state(entity_id, state, **kwargs): + """Set the state.""" + hass.states.set(entity_id, state, **kwargs) + wait_recording_done(hass) + return hass.states.get(entity_id) + + zero = dt_util.utcnow() + one = zero + timedelta(seconds=1) + two = one + timedelta(seconds=1) + three = two + timedelta(seconds=1) + four = three + timedelta(seconds=1) + + states = {therm: [], therm2: [], therm3: [], mp: [], mp2: [], mp3: [], script_c: []} + with freeze_time(one) as freezer: + states[mp].append( + set_state(mp, "idle", attributes={"media_title": str(sentinel.mt1)}) + ) + states[mp2].append( + set_state(mp2, "YouTube", attributes={"media_title": str(sentinel.mt2)}) + ) + states[mp3].append( + set_state(mp3, "idle", attributes={"media_title": str(sentinel.mt1)}) + ) + states[therm].append( + set_state(therm, 20, attributes={"current_temperature": 19.5}) + ) + # This state will be updated + set_state(therm3, 20, attributes={"current_temperature": 19.5}) + + freezer.move_to(one + timedelta(microseconds=1)) + states[mp].append( + set_state(mp, "YouTube", attributes={"media_title": str(sentinel.mt2)}) + ) + + freezer.move_to(two) + # This state will be skipped only different in time + set_state(mp, "YouTube", attributes={"media_title": str(sentinel.mt3)}) + # This state will be skipped because domain is excluded + set_state(zone, "zoning") + states[script_c].append( + set_state(script_c, "off", attributes={"can_cancel": True}) + ) + states[therm].append( + set_state(therm, 21, attributes={"current_temperature": 19.8}) + ) + states[therm2].append( + set_state(therm2, 20, attributes={"current_temperature": 19}) + ) + # This state will be updated + set_state(therm3, 20, attributes={"current_temperature": 19.5}) + + freezer.move_to(three) 
+ states[mp].append( + set_state(mp, "Netflix", attributes={"media_title": str(sentinel.mt4)}) + ) + states[mp3].append( + set_state(mp3, "Netflix", attributes={"media_title": str(sentinel.mt3)}) + ) + # Attributes changed even though state is the same + states[therm].append( + set_state(therm, 21, attributes={"current_temperature": 20}) + ) + states[therm3].append( + set_state(therm3, 20, attributes={"current_temperature": 19.5}) + ) + + return zero, four, states + + +async def test_state_changes_during_period_query_during_migration_to_schema_25( + async_setup_recorder_instance: RecorderInstanceGenerator, + hass: HomeAssistant, + recorder_db_url: str, +) -> None: + """Test we can query data prior to schema 25 and during migration to schema 25.""" + if recorder_db_url.startswith(("mysql://", "postgresql://")): + # This test doesn't run on MySQL / MariaDB / Postgresql; we can't drop table state_attributes + return + + instance = await async_setup_recorder_instance(hass, {}) + + with patch.object(instance.states_meta_manager, "active", False): + start = dt_util.utcnow() + point = start + timedelta(seconds=1) + end = point + timedelta(seconds=1) + entity_id = "light.test" + await recorder.get_instance(hass).async_add_executor_job( + _add_db_entries, hass, point, [entity_id] + ) + + no_attributes = True + hist = history.state_changes_during_period( + hass, start, end, entity_id, no_attributes, include_start_time_state=False + ) + state = hist[entity_id][0] + assert state.attributes == {} + + no_attributes = False + hist = history.state_changes_during_period( + hass, start, end, entity_id, no_attributes, include_start_time_state=False + ) + state = hist[entity_id][0] + assert state.attributes == {"name": "the shared light"} + + with instance.engine.connect() as conn: + conn.execute(text("update states set attributes_id=NULL;")) + conn.execute(text("drop table state_attributes;")) + conn.commit() + + with patch.object(instance, "schema_version", 24): + 
instance.states_meta_manager.active = False + no_attributes = True + hist = history.state_changes_during_period( + hass, + start, + end, + entity_id, + no_attributes, + include_start_time_state=False, + ) + state = hist[entity_id][0] + assert state.attributes == {} + + no_attributes = False + hist = history.state_changes_during_period( + hass, + start, + end, + entity_id, + no_attributes, + include_start_time_state=False, + ) + state = hist[entity_id][0] + assert state.attributes == {"name": "the light"} + + +async def test_get_states_query_during_migration_to_schema_25( + async_setup_recorder_instance: RecorderInstanceGenerator, + hass: HomeAssistant, + recorder_db_url: str, +) -> None: + """Test we can query data prior to schema 25 and during migration to schema 25.""" + if recorder_db_url.startswith(("mysql://", "postgresql://")): + # This test doesn't run on MySQL / MariaDB / Postgresql; we can't drop table state_attributes + return + + instance = await async_setup_recorder_instance(hass, {}) + + start = dt_util.utcnow() + point = start + timedelta(seconds=1) + end = point + timedelta(seconds=1) + entity_id = "light.test" + await instance.async_add_executor_job(_add_db_entries, hass, point, [entity_id]) + assert instance.states_meta_manager.active + + no_attributes = True + hist = await _async_get_states(hass, end, [entity_id], no_attributes=no_attributes) + state = hist[0] + assert state.attributes == {} + + no_attributes = False + hist = await _async_get_states(hass, end, [entity_id], no_attributes=no_attributes) + state = hist[0] + assert state.attributes == {"name": "the shared light"} + + with instance.engine.connect() as conn: + conn.execute(text("update states set attributes_id=NULL;")) + conn.execute(text("drop table state_attributes;")) + conn.commit() + + with patch.object(instance, "schema_version", 24): + instance.states_meta_manager.active = False + no_attributes = True + hist = await _async_get_states( + hass, end, [entity_id], 
no_attributes=no_attributes + ) + state = hist[0] + assert state.attributes == {} + + no_attributes = False + hist = await _async_get_states( + hass, end, [entity_id], no_attributes=no_attributes + ) + state = hist[0] + assert state.attributes == {"name": "the light"} + + +async def test_get_states_query_during_migration_to_schema_25_multiple_entities( + async_setup_recorder_instance: RecorderInstanceGenerator, + hass: HomeAssistant, + recorder_db_url: str, +) -> None: + """Test we can query data prior to schema 25 and during migration to schema 25.""" + if recorder_db_url.startswith(("mysql://", "postgresql://")): + # This test doesn't run on MySQL / MariaDB / Postgresql; we can't drop table state_attributes + return + + instance = await async_setup_recorder_instance(hass, {}) + + start = dt_util.utcnow() + point = start + timedelta(seconds=1) + end = point + timedelta(seconds=1) + entity_id_1 = "light.test" + entity_id_2 = "switch.test" + entity_ids = [entity_id_1, entity_id_2] + + await instance.async_add_executor_job(_add_db_entries, hass, point, entity_ids) + assert instance.states_meta_manager.active + + no_attributes = True + hist = await _async_get_states(hass, end, entity_ids, no_attributes=no_attributes) + assert hist[0].attributes == {} + assert hist[1].attributes == {} + + no_attributes = False + hist = await _async_get_states(hass, end, entity_ids, no_attributes=no_attributes) + assert hist[0].attributes == {"name": "the shared light"} + assert hist[1].attributes == {"name": "the shared light"} + + with instance.engine.connect() as conn: + conn.execute(text("update states set attributes_id=NULL;")) + conn.execute(text("drop table state_attributes;")) + conn.commit() + + with patch.object(instance, "schema_version", 24): + instance.states_meta_manager.active = False + no_attributes = True + hist = await _async_get_states( + hass, end, entity_ids, no_attributes=no_attributes + ) + assert hist[0].attributes == {} + assert hist[1].attributes == {} + + 
no_attributes = False + hist = await _async_get_states( + hass, end, entity_ids, no_attributes=no_attributes + ) + assert hist[0].attributes == {"name": "the light"} + assert hist[1].attributes == {"name": "the light"} + + +async def test_get_full_significant_states_handles_empty_last_changed( + async_setup_recorder_instance: RecorderInstanceGenerator, + hass: HomeAssistant, +) -> None: + """Test getting states when last_changed is null.""" + await async_setup_recorder_instance(hass, {}) + + now = dt_util.utcnow() + hass.states.async_set("sensor.one", "on", {"attr": "original"}) + state0 = hass.states.get("sensor.one") + await hass.async_block_till_done() + hass.states.async_set("sensor.one", "on", {"attr": "new"}) + state1 = hass.states.get("sensor.one") + + assert state0.last_changed == state1.last_changed + assert state0.last_updated != state1.last_updated + await async_wait_recording_done(hass) + + def _get_entries(): + with session_scope(hass=hass, read_only=True) as session: + return history.get_full_significant_states_with_session( + hass, + session, + now, + dt_util.utcnow(), + entity_ids=["sensor.one"], + significant_changes_only=False, + ) + + states = await recorder.get_instance(hass).async_add_executor_job(_get_entries) + sensor_one_states: list[State] = states["sensor.one"] + assert_states_equal_without_context(sensor_one_states[0], state0) + assert_states_equal_without_context(sensor_one_states[1], state1) + assert sensor_one_states[0].last_changed == sensor_one_states[1].last_changed + assert sensor_one_states[0].last_updated != sensor_one_states[1].last_updated + + def _fetch_native_states() -> list[State]: + with session_scope(hass=hass, read_only=True) as session: + native_states = [] + db_state_attributes = { + state_attributes.attributes_id: state_attributes + for state_attributes in session.query(StateAttributes) + } + metadata_id_to_entity_id = { + states_meta.metadata_id: states_meta + for states_meta in session.query(StatesMeta) + } + for 
db_state in session.query(States): + db_state.entity_id = metadata_id_to_entity_id[ + db_state.metadata_id + ].entity_id + state = db_state.to_native() + state.attributes = db_state_attributes[ + db_state.attributes_id + ].to_native() + native_states.append(state) + return native_states + + native_sensor_one_states = await recorder.get_instance(hass).async_add_executor_job( + _fetch_native_states + ) + assert_states_equal_without_context(native_sensor_one_states[0], state0) + assert_states_equal_without_context(native_sensor_one_states[1], state1) + assert ( + native_sensor_one_states[0].last_changed + == native_sensor_one_states[1].last_changed + ) + assert ( + native_sensor_one_states[0].last_updated + != native_sensor_one_states[1].last_updated + ) + + def _fetch_db_states() -> list[States]: + with session_scope(hass=hass, read_only=True) as session: + states = list(session.query(States)) + session.expunge_all() + return states + + db_sensor_one_states = await recorder.get_instance(hass).async_add_executor_job( + _fetch_db_states + ) + assert db_sensor_one_states[0].last_changed is None + assert db_sensor_one_states[0].last_changed_ts is None + + assert ( + process_timestamp( + dt_util.utc_from_timestamp(db_sensor_one_states[1].last_changed_ts) + ) + == state0.last_changed + ) + assert db_sensor_one_states[0].last_updated_ts is not None + assert db_sensor_one_states[1].last_updated_ts is not None + assert ( + db_sensor_one_states[0].last_updated_ts + != db_sensor_one_states[1].last_updated_ts + ) + + +def test_state_changes_during_period_multiple_entities_single_test( + hass_recorder: Callable[..., HomeAssistant], +) -> None: + """Test state change during period with multiple entities in the same test. + + This test ensures the sqlalchemy query cache does not + generate incorrect results. 
+ """ + hass = hass_recorder() + start = dt_util.utcnow() + test_entites = {f"sensor.{i}": str(i) for i in range(30)} + for entity_id, value in test_entites.items(): + hass.states.set(entity_id, value) + + wait_recording_done(hass) + end = dt_util.utcnow() + + for entity_id, value in test_entites.items(): + hist = history.state_changes_during_period(hass, start, end, entity_id) + assert len(hist) == 1 + assert hist[entity_id][0].state == value + + +@pytest.mark.freeze_time("2039-01-19 03:14:07.555555-00:00") +async def test_get_full_significant_states_past_year_2038( + async_setup_recorder_instance: RecorderInstanceGenerator, + hass: HomeAssistant, +) -> None: + """Test we can store times past year 2038.""" + await async_setup_recorder_instance(hass, {}) + past_2038_time = dt_util.parse_datetime("2039-01-19 03:14:07.555555-00:00") + hass.states.async_set("sensor.one", "on", {"attr": "original"}) + state0 = hass.states.get("sensor.one") + await hass.async_block_till_done() + + hass.states.async_set("sensor.one", "on", {"attr": "new"}) + state1 = hass.states.get("sensor.one") + + await async_wait_recording_done(hass) + + def _get_entries(): + with session_scope(hass=hass, read_only=True) as session: + return history.get_full_significant_states_with_session( + hass, + session, + past_2038_time - timedelta(days=365), + past_2038_time + timedelta(days=365), + entity_ids=["sensor.one"], + significant_changes_only=False, + ) + + states = await recorder.get_instance(hass).async_add_executor_job(_get_entries) + sensor_one_states: list[State] = states["sensor.one"] + assert_states_equal_without_context(sensor_one_states[0], state0) + assert_states_equal_without_context(sensor_one_states[1], state1) + assert sensor_one_states[0].last_changed == past_2038_time + assert sensor_one_states[0].last_updated == past_2038_time + + +def test_get_significant_states_without_entity_ids_raises( + hass_recorder: Callable[..., HomeAssistant], +) -> None: + """Test at least one entity id is 
required for get_significant_states.""" + hass = hass_recorder() + now = dt_util.utcnow() + with pytest.raises(ValueError, match="entity_ids must be provided"): + history.get_significant_states(hass, now, None) + + +def test_state_changes_during_period_without_entity_ids_raises( + hass_recorder: Callable[..., HomeAssistant], +) -> None: + """Test at least one entity id is required for state_changes_during_period.""" + hass = hass_recorder() + now = dt_util.utcnow() + with pytest.raises(ValueError, match="entity_id must be provided"): + history.state_changes_during_period(hass, now, None) + + +def test_get_significant_states_with_filters_raises( + hass_recorder: Callable[..., HomeAssistant], +) -> None: + """Test passing filters is no longer supported.""" + hass = hass_recorder() + now = dt_util.utcnow() + with pytest.raises(NotImplementedError, match="Filters are no longer supported"): + history.get_significant_states( + hass, now, None, ["media_player.test"], Filters() + ) + + +def test_get_significant_states_with_non_existent_entity_ids_returns_empty( + hass_recorder: Callable[..., HomeAssistant], +) -> None: + """Test get_significant_states returns an empty dict when entities not in the db.""" + hass = hass_recorder() + now = dt_util.utcnow() + assert history.get_significant_states(hass, now, None, ["nonexistent.entity"]) == {} + + +def test_state_changes_during_period_with_non_existent_entity_ids_returns_empty( + hass_recorder: Callable[..., HomeAssistant], +) -> None: + """Test state_changes_during_period returns an empty dict when entities not in the db.""" + hass = hass_recorder() + now = dt_util.utcnow() + assert ( + history.state_changes_during_period(hass, now, None, "nonexistent.entity") == {} + ) + + +def test_get_last_state_changes_with_non_existent_entity_ids_returns_empty( + hass_recorder: Callable[..., HomeAssistant], +) -> None: + """Test get_last_state_changes returns an empty dict when entities not in the db.""" + hass = hass_recorder() + assert 
history.get_last_state_changes(hass, 1, "nonexistent.entity") == {} From 53cc4b8c37feae46f89cf1c0c09ea9e4e5eb8f30 Mon Sep 17 00:00:00 2001 From: Robert Resch Date: Wed, 27 Mar 2024 22:52:02 +0100 Subject: [PATCH 008/426] Download translations only once in the build pipeline (#114335) --- .github/workflows/builder.yml | 63 ++++++++++++++++++++++++----------- 1 file changed, 44 insertions(+), 19 deletions(-) diff --git a/.github/workflows/builder.yml b/.github/workflows/builder.yml index 1dc6f7a3938..5dc01eee21e 100644 --- a/.github/workflows/builder.yml +++ b/.github/workflows/builder.yml @@ -51,6 +51,32 @@ jobs: with: ignore-dev: true + - name: Fail if translations files are checked in + run: | + files=$(find homeassistant/components/*/translations -type f) + + if [ -n "$files" ]; then + echo "Translations files are checked in, please remove the following files:" + echo "$files" + exit 1 + fi + + - name: Download Translations + run: python3 -m script.translations download + env: + LOKALISE_TOKEN: ${{ secrets.LOKALISE_TOKEN }} + + - name: Archive translations + shell: bash + run: find ./homeassistant/components/*/translations -name "*.json" | tar zcvf translations.tar.gz -T - + + - name: Upload translations + uses: actions/upload-artifact@v4.3.1 + with: + name: translations + path: translations.tar.gz + if-no-files-found: error + build_base: name: Build ${{ matrix.arch }} base core image if: github.repository_owner == 'home-assistant' @@ -159,10 +185,15 @@ jobs: # are not available. 
sed -i "s|aiohttp-zlib-ng|aiohttp-zlib-ng\[isal\]|g" requirements_all.txt - - name: Download Translations - run: python3 -m script.translations download - env: - LOKALISE_TOKEN: ${{ secrets.LOKALISE_TOKEN }} + - name: Download translations + uses: actions/download-artifact@v4.1.4 + with: + name: translations + + - name: Extract translations + run: | + tar xvf translations.tar.gz + rm translations.tar.gz - name: Write meta info file shell: bash @@ -186,17 +217,6 @@ jobs: --target /data \ --generic ${{ needs.init.outputs.version }} - - name: Archive translations - shell: bash - run: find ./homeassistant/components/*/translations -name "*.json" | tar zcvf translations.tar.gz -T - - - - name: Upload translations - uses: actions/upload-artifact@v3 - with: - name: translations - path: translations.tar.gz - if-no-files-found: error - build_machine: name: Build ${{ matrix.machine }} machine core image if: github.repository_owner == 'home-assistant' @@ -448,10 +468,15 @@ jobs: with: python-version: ${{ env.DEFAULT_PYTHON }} - - name: Download Translations - run: python3 -m script.translations download - env: - LOKALISE_TOKEN: ${{ secrets.LOKALISE_TOKEN }} + - name: Download translations + uses: actions/download-artifact@v4.1.4 + with: + name: translations + + - name: Extract translations + run: | + tar xvf translations.tar.gz + rm translations.tar.gz - name: Build package shell: bash From 824d6afa249fcffaa75c3053f401a7305c300d12 Mon Sep 17 00:00:00 2001 From: Robert Resch Date: Wed, 27 Mar 2024 22:33:06 +0100 Subject: [PATCH 009/426] Remove checked in translations (#114336) --- .../components/devialet/translations/en.json | 22 ------------------- 1 file changed, 22 deletions(-) delete mode 100644 homeassistant/components/devialet/translations/en.json diff --git a/homeassistant/components/devialet/translations/en.json b/homeassistant/components/devialet/translations/en.json deleted file mode 100644 index af0cfc4c122..00000000000 --- 
a/homeassistant/components/devialet/translations/en.json +++ /dev/null @@ -1,22 +0,0 @@ -{ - "config": { - "abort": { - "already_configured": "Service is already configured" - }, - "error": { - "cannot_connect": "Failed to connect" - }, - "flow_title": "{title}", - "step": { - "confirm": { - "description": "Do you want to set up Devialet device {device}?" - }, - "user": { - "data": { - "host": "Host" - }, - "description": "Please enter the host name or IP address of the Devialet device." - } - } - } -} \ No newline at end of file From 541a6c5f64dad743dec43f07cd0f745462f840db Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Wed, 27 Mar 2024 20:58:07 -1000 Subject: [PATCH 010/426] Revert velocity change in powerview (#114337) --- .../components/hunterdouglas_powerview/number.py | 2 +- .../hunterdouglas_powerview/shade_data.py | 13 +++++++++---- 2 files changed, 10 insertions(+), 5 deletions(-) diff --git a/homeassistant/components/hunterdouglas_powerview/number.py b/homeassistant/components/hunterdouglas_powerview/number.py index 8551a11337e..b37331c08df 100644 --- a/homeassistant/components/hunterdouglas_powerview/number.py +++ b/homeassistant/components/hunterdouglas_powerview/number.py @@ -41,7 +41,7 @@ def store_velocity( value: float | None, ) -> None: """Store the desired shade velocity in the coordinator.""" - coordinator.data.update_shade_position(shade_id, ShadePosition(velocity=value)) + coordinator.data.update_shade_velocity(shade_id, ShadePosition(velocity=value)) NUMBERS: Final = ( diff --git a/homeassistant/components/hunterdouglas_powerview/shade_data.py b/homeassistant/components/hunterdouglas_powerview/shade_data.py index e6b20312f27..fd2f0466467 100644 --- a/homeassistant/components/hunterdouglas_powerview/shade_data.py +++ b/homeassistant/components/hunterdouglas_powerview/shade_data.py @@ -13,14 +13,11 @@ from .util import async_map_data_by_id _LOGGER = logging.getLogger(__name__) -POSITION_FIELDS = fields(ShadePosition) +POSITION_FIELDS = 
[field for field in fields(ShadePosition) if field.name != "velocity"] def copy_position_data(source: ShadePosition, target: ShadePosition) -> ShadePosition: """Copy position data from source to target for None values only.""" - # the hub will always return a velocity of 0 on initial connect, - # separate definition to store consistent value in HA - # this value is purely driven from HA for field in POSITION_FIELDS: if (value := getattr(source, field.name)) is not None: setattr(target, field.name, value) @@ -76,3 +73,11 @@ class PowerviewShadeData: def update_shade_position(self, shade_id: int, new_position: ShadePosition) -> None: """Update a single shades position.""" copy_position_data(new_position, self.get_shade_position(shade_id)) + + def update_shade_velocity(self, shade_id: int, shade_data: ShadePosition) -> None: + """Update a single shades velocity.""" + # the hub will always return a velocity of 0 on initial connect, + # separate definition to store consistent value in HA + # this value is purely driven from HA + if shade_data.velocity is not None: + self.get_shade_position(shade_id).velocity = shade_data.velocity From 04bfb1de3cb3e826a7c534327cb661e9e5a929db Mon Sep 17 00:00:00 2001 From: Michael Hansen Date: Wed, 27 Mar 2024 17:19:34 -0500 Subject: [PATCH 011/426] Add more Ollama models (#114339) Add more models --- homeassistant/components/ollama/const.py | 41 ++++++++++++++++++++++++ 1 file changed, 41 insertions(+) diff --git a/homeassistant/components/ollama/const.py b/homeassistant/components/ollama/const.py index 59f1888cfc7..853370066dc 100644 --- a/homeassistant/components/ollama/const.py +++ b/homeassistant/components/ollama/const.py @@ -110,5 +110,46 @@ MODEL_NAMES = [ # https://ollama.com/library "starcoder", "phind-codellama", "starcoder2", + "yi", + "orca2", + "falcon", + "wizard-math", + "dolphin-phi", + "starling-lm", + "nous-hermes", + "stable-code", + "medllama2", + "bakllava", + "codeup", + "wizardlm-uncensored", + "solar", + 
"everythinglm", + "sqlcoder", + "dolphincoder", + "nous-hermes2-mixtral", + "stable-beluga", + "yarn-mistral", + "stablelm2", + "samantha-mistral", + "meditron", + "stablelm-zephyr", + "magicoder", + "yarn-llama2", + "llama-pro", + "deepseek-llm", + "wizard-vicuna", + "codebooga", + "mistrallite", + "all-minilm", + "nexusraven", + "open-orca-platypus2", + "goliath", + "notux", + "megadolphin", + "alfred", + "xwinlm", + "wizardlm", + "duckdb-nsql", + "notus", ] DEFAULT_MODEL = "llama2:latest" From f141be73c77619e39a9dd576f34fd5be4f16e4cc Mon Sep 17 00:00:00 2001 From: Joakim Plate Date: Thu, 28 Mar 2024 06:57:02 +0100 Subject: [PATCH 012/426] Bump fjaraskupan to 2.3.0 (#114344) Update fjarakupen to 2.3.0 - Support delayed disconnection - Speed up on/off transitions --- homeassistant/components/fjaraskupan/light.py | 5 +++-- homeassistant/components/fjaraskupan/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 4 files changed, 6 insertions(+), 5 deletions(-) diff --git a/homeassistant/components/fjaraskupan/light.py b/homeassistant/components/fjaraskupan/light.py index 7f33d7806ee..b33904c805d 100644 --- a/homeassistant/components/fjaraskupan/light.py +++ b/homeassistant/components/fjaraskupan/light.py @@ -54,13 +54,14 @@ class Light(CoordinatorEntity[FjaraskupanCoordinator], LightEntity): async with self.coordinator.async_connect_and_update() as device: if ATTR_BRIGHTNESS in kwargs: await device.send_dim(int(kwargs[ATTR_BRIGHTNESS] * (100.0 / 255.0))) - elif not self.is_on: - await device.send_command(COMMAND_LIGHT_ON_OFF) + else: + await device.send_dim(100) async def async_turn_off(self, **kwargs: Any) -> None: """Turn the entity off.""" if self.is_on: async with self.coordinator.async_connect_and_update() as device: + await device.send_dim(0) await device.send_command(COMMAND_LIGHT_ON_OFF) @property diff --git a/homeassistant/components/fjaraskupan/manifest.json b/homeassistant/components/fjaraskupan/manifest.json index 
f7ad701a756..91c74b68e01 100644 --- a/homeassistant/components/fjaraskupan/manifest.json +++ b/homeassistant/components/fjaraskupan/manifest.json @@ -14,5 +14,5 @@ "documentation": "https://www.home-assistant.io/integrations/fjaraskupan", "iot_class": "local_polling", "loggers": ["bleak", "fjaraskupan"], - "requirements": ["fjaraskupan==2.2.0"] + "requirements": ["fjaraskupan==2.3.0"] } diff --git a/requirements_all.txt b/requirements_all.txt index a93913d7272..810a73b7fc7 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -864,7 +864,7 @@ fivem-api==0.1.2 fixerio==1.0.0a0 # homeassistant.components.fjaraskupan -fjaraskupan==2.2.0 +fjaraskupan==2.3.0 # homeassistant.components.flexit_bacnet flexit_bacnet==2.1.0 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 106b8debcdf..6f67e8c8b12 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -702,7 +702,7 @@ fitbit==0.3.1 fivem-api==0.1.2 # homeassistant.components.fjaraskupan -fjaraskupan==2.2.0 +fjaraskupan==2.3.0 # homeassistant.components.flexit_bacnet flexit_bacnet==2.1.0 From f204faf20203b333789ba84b2535191200521dc8 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Wed, 27 Mar 2024 18:29:43 -1000 Subject: [PATCH 013/426] Fix empty delays in script helper (#114346) fixes ``` Logger: homeassistant.components.automation.kamermaster_knop_4_acties_licht Bron: components/automation/__init__.py:726 integratie: Automatisering (documentatie, problemen) Eerst voorgekomen: 22:17:29 (5 gebeurtenissen) Laatst gelogd: 22:59:24 While executing automation automation.kamermaster_knop_4_acties_licht Traceback (most recent call last): File "/usr/src/homeassistant/homeassistant/components/automation/__init__.py", line 726, in async_trigger return await self.action_script.async_run( ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "/usr/src/homeassistant/homeassistant/helpers/script.py", line 1645, in async_run return await asyncio.shield(create_eager_task(run.async_run())) ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "/usr/src/homeassistant/homeassistant/helpers/script.py", line 454, in async_run await self._async_step(log_exceptions=False) File "/usr/src/homeassistant/homeassistant/helpers/script.py", line 506, in _async_step self._handle_exception( File "/usr/src/homeassistant/homeassistant/helpers/script.py", line 536, in _handle_exception raise exception File "/usr/src/homeassistant/homeassistant/helpers/script.py", line 504, in _async_step await getattr(self, handler)() File "/usr/src/homeassistant/homeassistant/helpers/script.py", line 626, in _async_delay_step if timeout_future.done(): ^^^^^^^^^^^^^^^^^^^ AttributeError: 'NoneType' object has no attribute 'done' ``` --- homeassistant/helpers/script.py | 5 +++++ tests/helpers/test_script.py | 25 +++++++++++++++++++++++++ 2 files changed, 30 insertions(+) diff --git a/homeassistant/helpers/script.py b/homeassistant/helpers/script.py index 560f3227c4f..a86df259f11 100644 --- a/homeassistant/helpers/script.py +++ b/homeassistant/helpers/script.py @@ -615,6 +615,11 @@ class _ScriptRun: delay = delay_delta.total_seconds() self._changed() + if not delay: + # Handle an 
empty delay + trace_set_result(delay=delay, done=True) + return + trace_set_result(delay=delay, done=False) futures, timeout_handle, timeout_future = self._async_futures_with_timeout( delay diff --git a/tests/helpers/test_script.py b/tests/helpers/test_script.py index c1462ccfc2f..86fb84eb582 100644 --- a/tests/helpers/test_script.py +++ b/tests/helpers/test_script.py @@ -672,6 +672,31 @@ async def test_delay_basic(hass: HomeAssistant) -> None: ) +async def test_empty_delay(hass: HomeAssistant) -> None: + """Test an empty delay.""" + delay_alias = "delay step" + sequence = cv.SCRIPT_SCHEMA({"delay": {"seconds": 0}, "alias": delay_alias}) + script_obj = script.Script(hass, sequence, "Test Name", "test_domain") + delay_started_flag = async_watch_for_action(script_obj, delay_alias) + + try: + await script_obj.async_run(context=Context()) + await asyncio.wait_for(delay_started_flag.wait(), 1) + except (AssertionError, TimeoutError): + await script_obj.async_stop() + raise + else: + await hass.async_block_till_done() + assert not script_obj.is_running + assert script_obj.last_action is None + + assert_action_trace( + { + "0": [{"result": {"delay": 0.0, "done": True}}], + } + ) + + async def test_multiple_runs_delay(hass: HomeAssistant) -> None: """Test multiple runs with delay in script.""" event = "test_event" From 737e5e70ec2df9576e807f309711d28f49f7ac21 Mon Sep 17 00:00:00 2001 From: Christopher Bailey Date: Thu, 28 Mar 2024 13:48:51 -0400 Subject: [PATCH 014/426] Bump pyunifiprotect to 5.1.2 (#114348) --- homeassistant/components/unifiprotect/config_flow.py | 3 ++- homeassistant/components/unifiprotect/manifest.json | 2 +- homeassistant/components/unifiprotect/utils.py | 3 ++- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 5 files changed, 7 insertions(+), 5 deletions(-) diff --git a/homeassistant/components/unifiprotect/config_flow.py b/homeassistant/components/unifiprotect/config_flow.py index 555ddcb8d5e..19561a6003d 100644 --- 
a/homeassistant/components/unifiprotect/config_flow.py +++ b/homeassistant/components/unifiprotect/config_flow.py @@ -261,7 +261,8 @@ class ProtectFlowHandler(ConfigFlow, domain=DOMAIN): username=user_input[CONF_USERNAME], password=user_input[CONF_PASSWORD], verify_ssl=verify_ssl, - cache_dir=Path(self.hass.config.path(STORAGE_DIR, "unifiprotect_cache")), + cache_dir=Path(self.hass.config.path(STORAGE_DIR, "unifiprotect")), + config_dir=Path(self.hass.config.path(STORAGE_DIR, "unifiprotect")), ) errors = {} diff --git a/homeassistant/components/unifiprotect/manifest.json b/homeassistant/components/unifiprotect/manifest.json index 7cfb0ddcc9e..a26fab2e80b 100644 --- a/homeassistant/components/unifiprotect/manifest.json +++ b/homeassistant/components/unifiprotect/manifest.json @@ -41,7 +41,7 @@ "iot_class": "local_push", "loggers": ["pyunifiprotect", "unifi_discovery"], "quality_scale": "platinum", - "requirements": ["pyunifiprotect==5.0.2", "unifi-discovery==1.1.8"], + "requirements": ["pyunifiprotect==5.1.2", "unifi-discovery==1.1.8"], "ssdp": [ { "manufacturer": "Ubiquiti Networks", diff --git a/homeassistant/components/unifiprotect/utils.py b/homeassistant/components/unifiprotect/utils.py index 58474e6a531..8199d729943 100644 --- a/homeassistant/components/unifiprotect/utils.py +++ b/homeassistant/components/unifiprotect/utils.py @@ -145,7 +145,8 @@ def async_create_api_client( override_connection_host=entry.options.get(CONF_OVERRIDE_CHOST, False), ignore_stats=not entry.options.get(CONF_ALL_UPDATES, False), ignore_unadopted=False, - cache_dir=Path(hass.config.path(STORAGE_DIR, "unifiprotect_cache")), + cache_dir=Path(hass.config.path(STORAGE_DIR, "unifiprotect")), + config_dir=Path(hass.config.path(STORAGE_DIR, "unifiprotect")), ) diff --git a/requirements_all.txt b/requirements_all.txt index 810a73b7fc7..ffa57398564 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2339,7 +2339,7 @@ pytrydan==0.4.0 pyudev==0.23.2 # 
homeassistant.components.unifiprotect -pyunifiprotect==5.0.2 +pyunifiprotect==5.1.2 # homeassistant.components.uptimerobot pyuptimerobot==22.2.0 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 6f67e8c8b12..b35ef776b47 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1806,7 +1806,7 @@ pytrydan==0.4.0 pyudev==0.23.2 # homeassistant.components.unifiprotect -pyunifiprotect==5.0.2 +pyunifiprotect==5.1.2 # homeassistant.components.uptimerobot pyuptimerobot==22.2.0 From 21bff95bd7ad7160802979791666f0643361173a Mon Sep 17 00:00:00 2001 From: Robert Resch Date: Thu, 28 Mar 2024 09:11:02 +0100 Subject: [PATCH 015/426] Fix script for checking on existing translations (#114354) --- .github/workflows/builder.yml | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/.github/workflows/builder.yml b/.github/workflows/builder.yml index 5dc01eee21e..217093793d1 100644 --- a/.github/workflows/builder.yml +++ b/.github/workflows/builder.yml @@ -53,11 +53,9 @@ jobs: - name: Fail if translations files are checked in run: | - files=$(find homeassistant/components/*/translations -type f) - - if [ -n "$files" ]; then + if [ -n "$(find homeassistant/components/*/translations -type f)" ]; then echo "Translations files are checked in, please remove the following files:" - echo "$files" + find homeassistant/components/*/translations -type f exit 1 fi From 42580a1113271911da4b953d9522efc116abeb74 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Thu, 28 Mar 2024 10:42:52 +0100 Subject: [PATCH 016/426] Improve utility meter restore state tests (#114356) --- tests/components/utility_meter/test_sensor.py | 91 +++++++++++-------- 1 file changed, 53 insertions(+), 38 deletions(-) diff --git a/tests/components/utility_meter/test_sensor.py b/tests/components/utility_meter/test_sensor.py index c250a66b87a..13b367b1fb7 100644 --- a/tests/components/utility_meter/test_sensor.py +++ b/tests/components/utility_meter/test_sensor.py @@ 
-610,7 +610,7 @@ async def test_device_class( "utility_meter": { "energy_bill": { "source": "sensor.energy", - "tariffs": ["onpeak", "midpeak", "offpeak", "superpeak"], + "tariffs": ["tariff1", "tariff2", "tariff3", "tariff4"], } } }, @@ -626,7 +626,7 @@ async def test_device_class( "offset": 0, "periodically_resetting": True, "source": "sensor.energy", - "tariffs": ["onpeak", "midpeak", "offpeak", "superpeak"], + "tariffs": ["tariff1", "tariff2", "tariff3", "tariff4"], }, ), ], @@ -638,82 +638,89 @@ async def test_restore_state( # Home assistant is not runnit yet hass.set_state(CoreState.not_running) - last_reset = "2020-12-21T00:00:00.013073+00:00" + last_reset_1 = "2020-12-21T00:00:00.013073+00:00" + last_reset_2 = "2020-12-22T00:00:00.013073+00:00" mock_restore_cache_with_extra_data( hass, [ + # sensor.energy_bill_tariff1 is restored as expected ( State( - "sensor.energy_bill_onpeak", - "3", + "sensor.energy_bill_tariff1", + "1.1", attributes={ ATTR_STATUS: PAUSED, - ATTR_LAST_RESET: last_reset, - ATTR_UNIT_OF_MEASUREMENT: UnitOfEnergy.KILO_WATT_HOUR, + ATTR_LAST_RESET: last_reset_1, + ATTR_UNIT_OF_MEASUREMENT: UnitOfEnergy.MEGA_WATT_HOUR, }, ), { "native_value": { "__type": "", - "decimal_str": "3", + "decimal_str": "1.2", }, "native_unit_of_measurement": "kWh", - "last_reset": last_reset, - "last_period": "7", - "last_valid_state": "None", + "last_reset": last_reset_2, + "last_period": "1.3", + "last_valid_state": None, "status": "paused", }, ), + # sensor.energy_bill_tariff2 has missing keys and falls back to + # saved state ( State( - "sensor.energy_bill_midpeak", - "5", + "sensor.energy_bill_tariff2", + "2.1", attributes={ ATTR_STATUS: PAUSED, - ATTR_LAST_RESET: last_reset, + ATTR_LAST_RESET: last_reset_1, ATTR_LAST_VALID_STATE: None, - ATTR_UNIT_OF_MEASUREMENT: UnitOfEnergy.KILO_WATT_HOUR, + ATTR_UNIT_OF_MEASUREMENT: UnitOfEnergy.MEGA_WATT_HOUR, }, ), { "native_value": { "__type": "", - "decimal_str": "3", + "decimal_str": "2.2", }, 
"native_unit_of_measurement": "kWh", "last_valid_state": "None", }, ), + # sensor.energy_bill_tariff3 has invalid data and falls back to + # saved state ( State( - "sensor.energy_bill_offpeak", - "6", + "sensor.energy_bill_tariff3", + "3.1", attributes={ ATTR_STATUS: COLLECTING, - ATTR_LAST_RESET: last_reset, + ATTR_LAST_RESET: last_reset_1, ATTR_LAST_VALID_STATE: None, - ATTR_UNIT_OF_MEASUREMENT: UnitOfEnergy.KILO_WATT_HOUR, + ATTR_UNIT_OF_MEASUREMENT: UnitOfEnergy.MEGA_WATT_HOUR, }, ), { "native_value": { "__type": "", - "decimal_str": "3f", + "decimal_str": "3f", # Invalid }, "native_unit_of_measurement": "kWh", "last_valid_state": "None", }, ), + # No extra saved data, fall back to saved state ( State( - "sensor.energy_bill_superpeak", + "sensor.energy_bill_tariff4", "error", attributes={ ATTR_STATUS: COLLECTING, - ATTR_LAST_RESET: last_reset, + ATTR_LAST_RESET: last_reset_1, ATTR_LAST_VALID_STATE: None, - ATTR_UNIT_OF_MEASUREMENT: UnitOfEnergy.KILO_WATT_HOUR, + ATTR_UNIT_OF_MEASUREMENT: UnitOfEnergy.MEGA_WATT_HOUR, }, ), {}, @@ -736,25 +743,28 @@ async def test_restore_state( await hass.async_block_till_done() # restore from cache - state = hass.states.get("sensor.energy_bill_onpeak") - assert state.state == "3" + state = hass.states.get("sensor.energy_bill_tariff1") + assert state.state == "1.2" assert state.attributes.get("status") == PAUSED - assert state.attributes.get("last_reset") == last_reset + assert state.attributes.get("last_reset") == last_reset_2 assert state.attributes.get("last_valid_state") == "None" assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UnitOfEnergy.KILO_WATT_HOUR - state = hass.states.get("sensor.energy_bill_midpeak") - assert state.state == "5" + state = hass.states.get("sensor.energy_bill_tariff2") + assert state.state == "2.1" + assert state.attributes.get("status") == PAUSED + assert state.attributes.get("last_reset") == last_reset_1 assert state.attributes.get("last_valid_state") == "None" + assert 
state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UnitOfEnergy.MEGA_WATT_HOUR - state = hass.states.get("sensor.energy_bill_offpeak") - assert state.state == "6" + state = hass.states.get("sensor.energy_bill_tariff3") + assert state.state == "3.1" assert state.attributes.get("status") == COLLECTING - assert state.attributes.get("last_reset") == last_reset + assert state.attributes.get("last_reset") == last_reset_1 assert state.attributes.get("last_valid_state") == "None" - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UnitOfEnergy.KILO_WATT_HOUR + assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UnitOfEnergy.MEGA_WATT_HOUR - state = hass.states.get("sensor.energy_bill_superpeak") + state = hass.states.get("sensor.energy_bill_tariff4") assert state.state == STATE_UNKNOWN # utility_meter is loaded, now set sensors according to utility_meter: @@ -764,13 +774,18 @@ async def test_restore_state( await hass.async_block_till_done() state = hass.states.get("select.energy_bill") - assert state.state == "onpeak" + assert state.state == "tariff1" - state = hass.states.get("sensor.energy_bill_onpeak") + state = hass.states.get("sensor.energy_bill_tariff1") assert state.attributes.get("status") == COLLECTING - state = hass.states.get("sensor.energy_bill_offpeak") - assert state.attributes.get("status") == PAUSED + for entity_id in ( + "sensor.energy_bill_tariff2", + "sensor.energy_bill_tariff3", + "sensor.energy_bill_tariff4", + ): + state = hass.states.get(entity_id) + assert state.attributes.get("status") == PAUSED @pytest.mark.parametrize( From b143390d8802b0ab69136785ebd3b5d44db78211 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Thu, 28 Mar 2024 13:24:44 +0100 Subject: [PATCH 017/426] Improve device class of utility meter (#114368) --- .../components/utility_meter/sensor.py | 37 +++-- tests/components/utility_meter/test_sensor.py | 138 +++++++++++++++--- 2 files changed, 146 insertions(+), 29 deletions(-) diff --git 
a/homeassistant/components/utility_meter/sensor.py b/homeassistant/components/utility_meter/sensor.py index 4e9be403cf7..26582df1b44 100644 --- a/homeassistant/components/utility_meter/sensor.py +++ b/homeassistant/components/utility_meter/sensor.py @@ -2,6 +2,7 @@ from __future__ import annotations +from collections.abc import Mapping from dataclasses import dataclass from datetime import datetime, timedelta from decimal import Decimal, DecimalException, InvalidOperation @@ -13,6 +14,7 @@ import voluptuous as vol from homeassistant.components.sensor import ( ATTR_LAST_RESET, + DEVICE_CLASS_UNITS, RestoreSensor, SensorDeviceClass, SensorExtraStoredData, @@ -21,12 +23,12 @@ from homeassistant.components.sensor import ( from homeassistant.components.sensor.recorder import _suggest_report_issue from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( + ATTR_DEVICE_CLASS, ATTR_UNIT_OF_MEASUREMENT, CONF_NAME, CONF_UNIQUE_ID, STATE_UNAVAILABLE, STATE_UNKNOWN, - UnitOfEnergy, ) from homeassistant.core import Event, HomeAssistant, State, callback from homeassistant.helpers import ( @@ -47,6 +49,7 @@ from homeassistant.helpers.template import is_number from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType from homeassistant.util import slugify import homeassistant.util.dt as dt_util +from homeassistant.util.enum import try_parse_enum from .const import ( ATTR_CRON_PATTERN, @@ -97,12 +100,6 @@ ATTR_LAST_PERIOD = "last_period" ATTR_LAST_VALID_STATE = "last_valid_state" ATTR_TARIFF = "tariff" -DEVICE_CLASS_MAP = { - UnitOfEnergy.WATT_HOUR: SensorDeviceClass.ENERGY, - UnitOfEnergy.KILO_WATT_HOUR: SensorDeviceClass.ENERGY, -} - - PRECISION = 3 PAUSED = "paused" COLLECTING = "collecting" @@ -313,6 +310,7 @@ class UtilitySensorExtraStoredData(SensorExtraStoredData): last_reset: datetime | None last_valid_state: Decimal | None status: str + input_device_class: SensorDeviceClass | None def as_dict(self) -> dict[str, Any]: """Return a 
dict representation of the utility sensor data.""" @@ -324,6 +322,7 @@ class UtilitySensorExtraStoredData(SensorExtraStoredData): str(self.last_valid_state) if self.last_valid_state else None ) data["status"] = self.status + data["input_device_class"] = str(self.input_device_class) return data @@ -343,6 +342,9 @@ class UtilitySensorExtraStoredData(SensorExtraStoredData): else None ) status: str = restored["status"] + input_device_class = try_parse_enum( + SensorDeviceClass, restored.get("input_device_class") + ) except KeyError: # restored is a dict, but does not have all values return None @@ -357,6 +359,7 @@ class UtilitySensorExtraStoredData(SensorExtraStoredData): last_reset, last_valid_state, status, + input_device_class, ) @@ -397,6 +400,7 @@ class UtilityMeterSensor(RestoreSensor): self._last_valid_state = None self._collecting = None self._name = name + self._input_device_class = None self._unit_of_measurement = None self._period = meter_type if meter_type is not None: @@ -416,9 +420,10 @@ class UtilityMeterSensor(RestoreSensor): self._tariff = tariff self._tariff_entity = tariff_entity - def start(self, unit): + def start(self, attributes: Mapping[str, Any]) -> None: """Initialize unit and state upon source initial update.""" - self._unit_of_measurement = unit + self._input_device_class = attributes.get(ATTR_DEVICE_CLASS) + self._unit_of_measurement = attributes.get(ATTR_UNIT_OF_MEASUREMENT) self._state = 0 self.async_write_ha_state() @@ -482,6 +487,7 @@ class UtilityMeterSensor(RestoreSensor): new_state = event.data["new_state"] if new_state is None: return + new_state_attributes: Mapping[str, Any] = new_state.attributes or {} # First check if the new_state is valid (see discussion in PR #88446) if (new_state_val := self._validate_state(new_state)) is None: @@ -498,7 +504,7 @@ class UtilityMeterSensor(RestoreSensor): for sensor in self.hass.data[DATA_UTILITY][self._parent_meter][ DATA_TARIFF_SENSORS ]: - 
sensor.start(new_state.attributes.get(ATTR_UNIT_OF_MEASUREMENT)) + sensor.start(new_state_attributes) if self._unit_of_measurement is None: _LOGGER.warning( "Source sensor %s has no unit of measurement. Please %s", @@ -512,7 +518,8 @@ class UtilityMeterSensor(RestoreSensor): # If net_consumption is off, the adjustment must be non-negative self._state += adjustment # type: ignore[operator] # self._state will be set to by the start function if it is None, therefore it always has a valid Decimal value at this line - self._unit_of_measurement = new_state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) + self._input_device_class = new_state_attributes.get(ATTR_DEVICE_CLASS) + self._unit_of_measurement = new_state_attributes.get(ATTR_UNIT_OF_MEASUREMENT) self._last_valid_state = new_state_val self.async_write_ha_state() @@ -600,6 +607,7 @@ class UtilityMeterSensor(RestoreSensor): if (last_sensor_data := await self.async_get_last_sensor_data()) is not None: # new introduced in 2022.04 self._state = last_sensor_data.native_value + self._input_device_class = last_sensor_data.input_device_class self._unit_of_measurement = last_sensor_data.native_unit_of_measurement self._last_period = last_sensor_data.last_period self._last_reset = last_sensor_data.last_reset @@ -693,7 +701,11 @@ class UtilityMeterSensor(RestoreSensor): @property def device_class(self): """Return the device class of the sensor.""" - return DEVICE_CLASS_MAP.get(self._unit_of_measurement) + if self._input_device_class is not None: + return self._input_device_class + if self._unit_of_measurement in DEVICE_CLASS_UNITS[SensorDeviceClass.ENERGY]: + return SensorDeviceClass.ENERGY + return None @property def state_class(self): @@ -744,6 +756,7 @@ class UtilityMeterSensor(RestoreSensor): self._last_reset, self._last_valid_state, PAUSED if self._collecting is None else COLLECTING, + self._input_device_class, ) async def async_get_last_sensor_data(self) -> UtilitySensorExtraStoredData | None: diff --git 
a/tests/components/utility_meter/test_sensor.py b/tests/components/utility_meter/test_sensor.py index 13b367b1fb7..99a63809329 100644 --- a/tests/components/utility_meter/test_sensor.py +++ b/tests/components/utility_meter/test_sensor.py @@ -40,6 +40,7 @@ from homeassistant.const import ( STATE_UNAVAILABLE, STATE_UNKNOWN, UnitOfEnergy, + UnitOfVolume, ) from homeassistant.core import CoreState, HomeAssistant, State from homeassistant.helpers import device_registry as dr, entity_registry as er @@ -553,8 +554,66 @@ async def test_entity_name(hass: HomeAssistant, yaml_config, entity_id, name) -> ), ], ) +@pytest.mark.parametrize( + ( + "energy_sensor_attributes", + "gas_sensor_attributes", + "energy_meter_attributes", + "gas_meter_attributes", + ), + [ + ( + {ATTR_UNIT_OF_MEASUREMENT: UnitOfEnergy.KILO_WATT_HOUR}, + {ATTR_UNIT_OF_MEASUREMENT: "some_archaic_unit"}, + { + ATTR_DEVICE_CLASS: SensorDeviceClass.ENERGY, + ATTR_UNIT_OF_MEASUREMENT: UnitOfEnergy.KILO_WATT_HOUR, + }, + { + ATTR_DEVICE_CLASS: None, + ATTR_UNIT_OF_MEASUREMENT: "some_archaic_unit", + }, + ), + ( + {}, + {}, + { + ATTR_DEVICE_CLASS: None, + ATTR_UNIT_OF_MEASUREMENT: None, + }, + { + ATTR_DEVICE_CLASS: None, + ATTR_UNIT_OF_MEASUREMENT: None, + }, + ), + ( + { + ATTR_DEVICE_CLASS: SensorDeviceClass.GAS, + ATTR_UNIT_OF_MEASUREMENT: UnitOfEnergy.KILO_WATT_HOUR, + }, + { + ATTR_DEVICE_CLASS: SensorDeviceClass.WATER, + ATTR_UNIT_OF_MEASUREMENT: "some_archaic_unit", + }, + { + ATTR_DEVICE_CLASS: SensorDeviceClass.GAS, + ATTR_UNIT_OF_MEASUREMENT: UnitOfEnergy.KILO_WATT_HOUR, + }, + { + ATTR_DEVICE_CLASS: SensorDeviceClass.WATER, + ATTR_UNIT_OF_MEASUREMENT: "some_archaic_unit", + }, + ), + ], +) async def test_device_class( - hass: HomeAssistant, yaml_config, config_entry_configs + hass: HomeAssistant, + yaml_config, + config_entry_configs, + energy_sensor_attributes, + gas_sensor_attributes, + energy_meter_attributes, + gas_meter_attributes, ) -> None: """Test utility device_class.""" if yaml_config: @@ 
-579,27 +638,23 @@ async def test_device_class( await hass.async_block_till_done() - hass.states.async_set( - entity_id_energy, 2, {ATTR_UNIT_OF_MEASUREMENT: UnitOfEnergy.KILO_WATT_HOUR} - ) - hass.states.async_set( - entity_id_gas, 2, {ATTR_UNIT_OF_MEASUREMENT: "some_archaic_unit"} - ) + hass.states.async_set(entity_id_energy, 2, energy_sensor_attributes) + hass.states.async_set(entity_id_gas, 2, gas_sensor_attributes) await hass.async_block_till_done() state = hass.states.get("sensor.energy_meter") assert state is not None assert state.state == "0" - assert state.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.ENERGY assert state.attributes.get(ATTR_STATE_CLASS) is SensorStateClass.TOTAL - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UnitOfEnergy.KILO_WATT_HOUR + for attr, value in energy_meter_attributes.items(): + assert state.attributes.get(attr) == value state = hass.states.get("sensor.gas_meter") assert state is not None assert state.state == "0" - assert state.attributes.get(ATTR_DEVICE_CLASS) is None assert state.attributes.get(ATTR_STATE_CLASS) is SensorStateClass.TOTAL_INCREASING - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == "some_archaic_unit" + for attr, value in gas_meter_attributes.items(): + assert state.attributes.get(attr) == value @pytest.mark.parametrize( @@ -610,7 +665,13 @@ async def test_device_class( "utility_meter": { "energy_bill": { "source": "sensor.energy", - "tariffs": ["tariff1", "tariff2", "tariff3", "tariff4"], + "tariffs": [ + "tariff0", + "tariff1", + "tariff2", + "tariff3", + "tariff4", + ], } } }, @@ -626,7 +687,13 @@ async def test_device_class( "offset": 0, "periodically_resetting": True, "source": "sensor.energy", - "tariffs": ["tariff1", "tariff2", "tariff3", "tariff4"], + "tariffs": [ + "tariff0", + "tariff1", + "tariff2", + "tariff3", + "tariff4", + ], }, ), ], @@ -644,7 +711,33 @@ async def test_restore_state( mock_restore_cache_with_extra_data( hass, [ - # sensor.energy_bill_tariff1 is 
restored as expected + # sensor.energy_bill_tariff0 is restored as expected, including device + # class + ( + State( + "sensor.energy_bill_tariff0", + "0.1", + attributes={ + ATTR_STATUS: PAUSED, + ATTR_LAST_RESET: last_reset_1, + ATTR_UNIT_OF_MEASUREMENT: UnitOfVolume.CUBIC_METERS, + }, + ), + { + "native_value": { + "__type": "", + "decimal_str": "0.2", + }, + "native_unit_of_measurement": "gal", + "last_reset": last_reset_2, + "last_period": "1.3", + "last_valid_state": None, + "status": "collecting", + "input_device_class": "water", + }, + ), + # sensor.energy_bill_tariff1 is restored as expected, except device + # class ( State( "sensor.energy_bill_tariff1", @@ -743,12 +836,21 @@ async def test_restore_state( await hass.async_block_till_done() # restore from cache + state = hass.states.get("sensor.energy_bill_tariff0") + assert state.state == "0.2" + assert state.attributes.get("status") == COLLECTING + assert state.attributes.get("last_reset") == last_reset_2 + assert state.attributes.get("last_valid_state") == "None" + assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UnitOfVolume.GALLONS + assert state.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.WATER + state = hass.states.get("sensor.energy_bill_tariff1") assert state.state == "1.2" assert state.attributes.get("status") == PAUSED assert state.attributes.get("last_reset") == last_reset_2 assert state.attributes.get("last_valid_state") == "None" assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UnitOfEnergy.KILO_WATT_HOUR + assert state.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.ENERGY state = hass.states.get("sensor.energy_bill_tariff2") assert state.state == "2.1" @@ -756,6 +858,7 @@ async def test_restore_state( assert state.attributes.get("last_reset") == last_reset_1 assert state.attributes.get("last_valid_state") == "None" assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UnitOfEnergy.MEGA_WATT_HOUR + assert state.attributes.get(ATTR_DEVICE_CLASS) == 
SensorDeviceClass.ENERGY state = hass.states.get("sensor.energy_bill_tariff3") assert state.state == "3.1" @@ -763,6 +866,7 @@ async def test_restore_state( assert state.attributes.get("last_reset") == last_reset_1 assert state.attributes.get("last_valid_state") == "None" assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UnitOfEnergy.MEGA_WATT_HOUR + assert state.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.ENERGY state = hass.states.get("sensor.energy_bill_tariff4") assert state.state == STATE_UNKNOWN @@ -770,16 +874,16 @@ async def test_restore_state( # utility_meter is loaded, now set sensors according to utility_meter: hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED) - await hass.async_block_till_done() state = hass.states.get("select.energy_bill") - assert state.state == "tariff1" + assert state.state == "tariff0" - state = hass.states.get("sensor.energy_bill_tariff1") + state = hass.states.get("sensor.energy_bill_tariff0") assert state.attributes.get("status") == COLLECTING for entity_id in ( + "sensor.energy_bill_tariff1", "sensor.energy_bill_tariff2", "sensor.energy_bill_tariff3", "sensor.energy_bill_tariff4", From 1c6689be41f0342b2b0918307242483c11128163 Mon Sep 17 00:00:00 2001 From: Marc Mueller <30130371+cdce8p@users.noreply.github.com> Date: Thu, 28 Mar 2024 11:37:57 +0100 Subject: [PATCH 018/426] Update pytile to 2023.12.0 (#114370) --- homeassistant/components/tile/manifest.json | 2 +- pyproject.toml | 2 -- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 4 files changed, 3 insertions(+), 5 deletions(-) diff --git a/homeassistant/components/tile/manifest.json b/homeassistant/components/tile/manifest.json index 6f311fc5593..8dceddcb77f 100644 --- a/homeassistant/components/tile/manifest.json +++ b/homeassistant/components/tile/manifest.json @@ -7,5 +7,5 @@ "integration_type": "hub", "iot_class": "cloud_polling", "loggers": ["pytile"], - "requirements": ["pytile==2023.04.0"] + "requirements": ["pytile==2023.12.0"] } diff 
--git a/pyproject.toml b/pyproject.toml index c84405c2764..40b1f36a58b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -504,8 +504,6 @@ filterwarnings = [ # https://github.com/eclipse/paho.mqtt.python/issues/653 - >=2.0.0 # https://github.com/eclipse/paho.mqtt.python/pull/665 "ignore:ssl.PROTOCOL_TLS is deprecated:DeprecationWarning:paho.mqtt.client", - # https://github.com/bachya/pytile/pull/280 - >=2023.10.0 - "ignore:datetime.*utcfromtimestamp\\(\\) is deprecated and scheduled for removal:DeprecationWarning:pytile.tile", # https://github.com/rytilahti/python-miio/pull/1809 - >=0.6.0.dev0 "ignore:datetime.*utcnow\\(\\) is deprecated and scheduled for removal:DeprecationWarning:miio.protocol", "ignore:datetime.*utcnow\\(\\) is deprecated and scheduled for removal:DeprecationWarning:miio.miioprotocol", diff --git a/requirements_all.txt b/requirements_all.txt index ffa57398564..37fcf49a5e5 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2311,7 +2311,7 @@ python-vlc==3.0.18122 pythonegardia==1.0.52 # homeassistant.components.tile -pytile==2023.04.0 +pytile==2023.12.0 # homeassistant.components.tomorrowio pytomorrowio==0.3.6 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index b35ef776b47..60ac93d37c5 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1781,7 +1781,7 @@ python-technove==1.2.2 python-telegram-bot[socks]==21.0.1 # homeassistant.components.tile -pytile==2023.04.0 +pytile==2023.12.0 # homeassistant.components.tomorrowio pytomorrowio==0.3.6 From 5e0a0718e32b70c6f3d9c029f0a8a4e0c22d449e Mon Sep 17 00:00:00 2001 From: Joost Lekkerkerker Date: Thu, 28 Mar 2024 11:09:59 +0100 Subject: [PATCH 019/426] Fix streamlabswater feedback (#114371) --- homeassistant/components/streamlabswater/__init__.py | 4 ++-- homeassistant/components/streamlabswater/strings.json | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/streamlabswater/__init__.py 
b/homeassistant/components/streamlabswater/__init__.py index c3bbe5a96d4..46acc443d2e 100644 --- a/homeassistant/components/streamlabswater/__init__.py +++ b/homeassistant/components/streamlabswater/__init__.py @@ -81,12 +81,12 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: async_create_issue( hass, DOMAIN, - f"deprecated_yaml_import_issue_${result['reason']}", + f"deprecated_yaml_import_issue_{result['reason']}", breaks_in_ha_version="2024.7.0", is_fixable=False, issue_domain=DOMAIN, severity=IssueSeverity.WARNING, - translation_key=f"deprecated_yaml_import_issue_${result['reason']}", + translation_key=f"deprecated_yaml_import_issue_{result['reason']}", translation_placeholders=ISSUE_PLACEHOLDER, ) return True diff --git a/homeassistant/components/streamlabswater/strings.json b/homeassistant/components/streamlabswater/strings.json index 204f7e831ef..872a0d1f6ac 100644 --- a/homeassistant/components/streamlabswater/strings.json +++ b/homeassistant/components/streamlabswater/strings.json @@ -52,7 +52,7 @@ "issues": { "deprecated_yaml_import_issue_cannot_connect": { "title": "The Streamlabs water YAML configuration import failed", - "description": "Configuring Streamlabs water using YAML is being removed but there was an connection error importing your YAML configuration.\n\nEnsure connection to Streamlabs water works and restart Home Assistant to try again or remove the Streamlabs water YAML configuration from your configuration.yaml file and continue to [set up the integration]({url}) manually." + "description": "Configuring Streamlabs water using YAML is being removed but there was a connection error importing your YAML configuration.\n\nEnsure connection to Streamlabs water works and restart Home Assistant to try again or remove the Streamlabs water YAML configuration from your configuration.yaml file and continue to [set up the integration]({url}) manually." 
}, "deprecated_yaml_import_issue_unknown": { "title": "The Streamlabs water YAML configuration import failed", From ba12652cbc663c2d6c398e299fdc5591cfde923b Mon Sep 17 00:00:00 2001 From: Joost Lekkerkerker Date: Thu, 28 Mar 2024 11:10:46 +0100 Subject: [PATCH 020/426] Fix Suez water feedback (#114372) --- homeassistant/components/suez_water/sensor.py | 4 ++-- homeassistant/components/suez_water/strings.json | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/suez_water/sensor.py b/homeassistant/components/suez_water/sensor.py index 7060339250c..f48e78bb153 100644 --- a/homeassistant/components/suez_water/sensor.py +++ b/homeassistant/components/suez_water/sensor.py @@ -74,12 +74,12 @@ async def async_setup_platform( async_create_issue( hass, DOMAIN, - f"deprecated_yaml_import_issue_${result['reason']}", + f"deprecated_yaml_import_issue_{result['reason']}", breaks_in_ha_version="2024.7.0", is_fixable=False, issue_domain=DOMAIN, severity=IssueSeverity.WARNING, - translation_key=f"deprecated_yaml_import_issue_${result['reason']}", + translation_key=f"deprecated_yaml_import_issue_{result['reason']}", translation_placeholders=ISSUE_PLACEHOLDER, ) diff --git a/homeassistant/components/suez_water/strings.json b/homeassistant/components/suez_water/strings.json index b4b81a788b5..fd85565d297 100644 --- a/homeassistant/components/suez_water/strings.json +++ b/homeassistant/components/suez_water/strings.json @@ -32,7 +32,7 @@ }, "deprecated_yaml_import_issue_cannot_connect": { "title": "The Suez water YAML configuration import failed", - "description": "Configuring Suez water using YAML is being removed but there was an connection error importing your YAML configuration.\n\nEnsure connection to Suez water works and restart Home Assistant to try again or remove the Suez water YAML configuration from your configuration.yaml file and continue to [set up the integration]({url}) manually." 
+ "description": "Configuring Suez water using YAML is being removed but there was a connection error importing your YAML configuration.\n\nEnsure connection to Suez water works and restart Home Assistant to try again or remove the Suez water YAML configuration from your configuration.yaml file and continue to [set up the integration]({url}) manually." }, "deprecated_yaml_import_issue_unknown": { "title": "The Suez water YAML configuration import failed", From 99282d27c6c031be948e6c420f53a4f205c74be5 Mon Sep 17 00:00:00 2001 From: Joost Lekkerkerker Date: Thu, 28 Mar 2024 11:11:28 +0100 Subject: [PATCH 021/426] Fix Swiss public transport feedback (#114373) --- homeassistant/components/swiss_public_transport/sensor.py | 4 ++-- homeassistant/components/swiss_public_transport/strings.json | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/swiss_public_transport/sensor.py b/homeassistant/components/swiss_public_transport/sensor.py index 7c712c8c189..a4a9605a603 100644 --- a/homeassistant/components/swiss_public_transport/sensor.py +++ b/homeassistant/components/swiss_public_transport/sensor.py @@ -131,12 +131,12 @@ async def async_setup_platform( async_create_issue( hass, DOMAIN, - f"deprecated_yaml_import_issue_${result['reason']}", + f"deprecated_yaml_import_issue_{result['reason']}", breaks_in_ha_version="2024.7.0", is_fixable=False, issue_domain=DOMAIN, severity=IssueSeverity.WARNING, - translation_key=f"deprecated_yaml_import_issue_${result['reason']}", + translation_key=f"deprecated_yaml_import_issue_{result['reason']}", translation_placeholders=PLACEHOLDERS, ) diff --git a/homeassistant/components/swiss_public_transport/strings.json b/homeassistant/components/swiss_public_transport/strings.json index c0e88f08b8d..c080e785f2c 100644 --- a/homeassistant/components/swiss_public_transport/strings.json +++ b/homeassistant/components/swiss_public_transport/strings.json @@ -38,7 +38,7 @@ "issues": { 
"deprecated_yaml_import_issue_cannot_connect": { "title": "The swiss public transport YAML configuration import cannot connect to server", - "description": "Configuring swiss public transport using YAML is being removed but there was an connection error importing your YAML configuration.\n\nMake sure your home assistant can reach the [opendata server]({opendata_url}). In case the server is down, try again later." + "description": "Configuring swiss public transport using YAML is being removed but there was a connection error importing your YAML configuration.\n\nMake sure your home assistant can reach the [opendata server]({opendata_url}). In case the server is down, try again later." }, "deprecated_yaml_import_issue_bad_config": { "title": "The swiss public transport YAML configuration import request failed due to bad config", From 80273b4873a6ca9d02f64cb93154556eaac2445d Mon Sep 17 00:00:00 2001 From: Joost Lekkerkerker Date: Thu, 28 Mar 2024 11:12:02 +0100 Subject: [PATCH 022/426] Fix Lupusec feedback (#114374) --- homeassistant/components/lupusec/__init__.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/lupusec/__init__.py b/homeassistant/components/lupusec/__init__.py index c471902813a..51bba44aef0 100644 --- a/homeassistant/components/lupusec/__init__.py +++ b/homeassistant/components/lupusec/__init__.py @@ -79,12 +79,12 @@ async def handle_async_init_result(hass: HomeAssistant, domain: str, conf: dict) async_create_issue( hass, DOMAIN, - f"deprecated_yaml_import_issue_${result['reason']}", + f"deprecated_yaml_import_issue_{result['reason']}", breaks_in_ha_version="2024.8.0", is_fixable=False, issue_domain=DOMAIN, severity=IssueSeverity.WARNING, - translation_key=f"deprecated_yaml_import_issue_${result['reason']}", + translation_key=f"deprecated_yaml_import_issue_{result['reason']}", translation_placeholders=ISSUE_PLACEHOLDER, ) From aa301942493c8f5c81ea7c6b0b1711a6b9bdf15a Mon Sep 17 00:00:00 2001 From: Maciej 
Bieniek Date: Thu, 28 Mar 2024 13:25:01 +0100 Subject: [PATCH 023/426] Adapt Tractive integration the latest API changes (#114380) Co-authored-by: Maciej Bieniek <478555+bieniu@users.noreply.github.com> --- homeassistant/components/tractive/__init__.py | 15 ++------------- homeassistant/components/tractive/const.py | 1 - homeassistant/components/tractive/sensor.py | 5 ++--- 3 files changed, 4 insertions(+), 17 deletions(-) diff --git a/homeassistant/components/tractive/__init__.py b/homeassistant/components/tractive/__init__.py index 41e691f783e..136e8b3632a 100644 --- a/homeassistant/components/tractive/__init__.py +++ b/homeassistant/components/tractive/__init__.py @@ -40,7 +40,6 @@ from .const import ( SERVER_UNAVAILABLE, SWITCH_KEY_MAP, TRACKABLES, - TRACKER_ACTIVITY_STATUS_UPDATED, TRACKER_HARDWARE_STATUS_UPDATED, TRACKER_POSITION_UPDATED, TRACKER_SWITCH_STATUS_UPDATED, @@ -220,9 +219,6 @@ class TractiveClient: if server_was_unavailable: _LOGGER.debug("Tractive is back online") server_was_unavailable = False - if event["message"] == "activity_update": - self._send_activity_update(event) - continue if event["message"] == "wellness_overview": self._send_wellness_update(event) continue @@ -291,15 +287,6 @@ class TractiveClient: TRACKER_SWITCH_STATUS_UPDATED, event["tracker_id"], payload ) - def _send_activity_update(self, event: dict[str, Any]) -> None: - payload = { - ATTR_MINUTES_ACTIVE: event["progress"]["achieved_minutes"], - ATTR_DAILY_GOAL: event["progress"]["goal_minutes"], - } - self._dispatch_tracker_event( - TRACKER_ACTIVITY_STATUS_UPDATED, event["pet_id"], payload - ) - def _send_wellness_update(self, event: dict[str, Any]) -> None: sleep_day = None sleep_night = None @@ -309,6 +296,8 @@ class TractiveClient: payload = { ATTR_ACTIVITY_LABEL: event["wellness"].get("activity_label"), ATTR_CALORIES: event["activity"]["calories"], + ATTR_DAILY_GOAL: event["activity"]["minutes_goal"], + ATTR_MINUTES_ACTIVE: event["activity"]["minutes_active"], 
ATTR_MINUTES_DAY_SLEEP: sleep_day, ATTR_MINUTES_NIGHT_SLEEP: sleep_night, ATTR_MINUTES_REST: event["activity"]["minutes_rest"], diff --git a/homeassistant/components/tractive/const.py b/homeassistant/components/tractive/const.py index acb4f6f7487..f26c0ee2345 100644 --- a/homeassistant/components/tractive/const.py +++ b/homeassistant/components/tractive/const.py @@ -26,7 +26,6 @@ CLIENT_ID = "625e5349c3c3b41c28a669f1" CLIENT = "client" TRACKABLES = "trackables" -TRACKER_ACTIVITY_STATUS_UPDATED = f"{DOMAIN}_tracker_activity_updated" TRACKER_HARDWARE_STATUS_UPDATED = f"{DOMAIN}_tracker_hardware_status_updated" TRACKER_POSITION_UPDATED = f"{DOMAIN}_tracker_position_updated" TRACKER_SWITCH_STATUS_UPDATED = f"{DOMAIN}_tracker_switch_updated" diff --git a/homeassistant/components/tractive/sensor.py b/homeassistant/components/tractive/sensor.py index b73b5faba05..5e2f3288f57 100644 --- a/homeassistant/components/tractive/sensor.py +++ b/homeassistant/components/tractive/sensor.py @@ -37,7 +37,6 @@ from .const import ( CLIENT, DOMAIN, TRACKABLES, - TRACKER_ACTIVITY_STATUS_UPDATED, TRACKER_HARDWARE_STATUS_UPDATED, TRACKER_WELLNESS_STATUS_UPDATED, ) @@ -118,7 +117,7 @@ SENSOR_TYPES: tuple[TractiveSensorEntityDescription, ...] = ( key=ATTR_MINUTES_ACTIVE, translation_key="activity_time", native_unit_of_measurement=UnitOfTime.MINUTES, - signal_prefix=TRACKER_ACTIVITY_STATUS_UPDATED, + signal_prefix=TRACKER_WELLNESS_STATUS_UPDATED, state_class=SensorStateClass.TOTAL, ), TractiveSensorEntityDescription( @@ -139,7 +138,7 @@ SENSOR_TYPES: tuple[TractiveSensorEntityDescription, ...] 
= ( key=ATTR_DAILY_GOAL, translation_key="daily_goal", native_unit_of_measurement=UnitOfTime.MINUTES, - signal_prefix=TRACKER_ACTIVITY_STATUS_UPDATED, + signal_prefix=TRACKER_WELLNESS_STATUS_UPDATED, ), TractiveSensorEntityDescription( key=ATTR_MINUTES_DAY_SLEEP, From 8cd871885519c50ac89b8da86faaa8a0276b3203 Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Thu, 28 Mar 2024 16:20:20 +0100 Subject: [PATCH 024/426] Fix hassfest service icons check for custom integrations (#114389) --- script/hassfest/services.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/script/hassfest/services.py b/script/hassfest/services.py index 34f9b906fb5..c962d84e6e1 100644 --- a/script/hassfest/services.py +++ b/script/hassfest/services.py @@ -168,7 +168,8 @@ def validate_services(config: Config, integration: Integration) -> None: # 2. Check if the service has an icon set in icons.json. # raise an error if not., for service_name, service_schema in services.items(): - if service_name not in service_icons: + if integration.core and service_name not in service_icons: + # This is enforced for Core integrations only integration.add_error( "services", f"Service {service_name} has no icon in icons.json.", From c81e9447f9ccd334936d1d0164fd3459d8e1cd80 Mon Sep 17 00:00:00 2001 From: Michael Hansen Date: Thu, 28 Mar 2024 11:09:15 -0500 Subject: [PATCH 025/426] Filter preferred TTS format options if not supported (#114392) Filter preferred format options if not supported --- homeassistant/components/tts/__init__.py | 74 ++++++++---- tests/components/assist_pipeline/conftest.py | 3 +- tests/components/assist_pipeline/test_init.py | 105 ++++++++++++++++-- 3 files changed, 150 insertions(+), 32 deletions(-) diff --git a/homeassistant/components/tts/__init__.py b/homeassistant/components/tts/__init__.py index c88e0e83334..8ea4617bbf3 100644 --- a/homeassistant/components/tts/__init__.py +++ b/homeassistant/components/tts/__init__.py @@ -16,7 +16,7 @@ import os import re import 
subprocess import tempfile -from typing import Any, TypedDict, final +from typing import Any, Final, TypedDict, final from aiohttp import web import mutagen @@ -99,6 +99,13 @@ ATTR_PREFERRED_SAMPLE_CHANNELS = "preferred_sample_channels" ATTR_MEDIA_PLAYER_ENTITY_ID = "media_player_entity_id" ATTR_VOICE = "voice" +_DEFAULT_FORMAT = "mp3" +_PREFFERED_FORMAT_OPTIONS: Final[set[str]] = { + ATTR_PREFERRED_FORMAT, + ATTR_PREFERRED_SAMPLE_RATE, + ATTR_PREFERRED_SAMPLE_CHANNELS, +} + CONF_LANG = "language" SERVICE_CLEAR_CACHE = "clear_cache" @@ -569,25 +576,23 @@ class SpeechManager: ): raise HomeAssistantError(f"Language '{language}' not supported") + options = options or {} + supported_options = engine_instance.supported_options or [] + # Update default options with provided options + invalid_opts: list[str] = [] merged_options = dict(engine_instance.default_options or {}) - merged_options.update(options or {}) + for option_name, option_value in options.items(): + # Only count an option as invalid if it's not a "preferred format" + # option. These are used as hints to the TTS system if supported, + # and otherwise as parameters to ffmpeg conversion. + if (option_name in supported_options) or ( + option_name in _PREFFERED_FORMAT_OPTIONS + ): + merged_options[option_name] = option_value + else: + invalid_opts.append(option_name) - supported_options = list(engine_instance.supported_options or []) - - # ATTR_PREFERRED_* options are always "supported" since they're used to - # convert audio after the TTS has run (if necessary). - supported_options.extend( - ( - ATTR_PREFERRED_FORMAT, - ATTR_PREFERRED_SAMPLE_RATE, - ATTR_PREFERRED_SAMPLE_CHANNELS, - ) - ) - - invalid_opts = [ - opt_name for opt_name in merged_options if opt_name not in supported_options - ] if invalid_opts: raise HomeAssistantError(f"Invalid options found: {invalid_opts}") @@ -687,10 +692,31 @@ class SpeechManager: This method is a coroutine. 
""" - options = options or {} + options = dict(options or {}) + supported_options = engine_instance.supported_options or [] - # Default to MP3 unless a different format is preferred - final_extension = options.get(ATTR_PREFERRED_FORMAT, "mp3") + # Extract preferred format options. + # + # These options are used by Assist pipelines, etc. to get a format that + # the voice satellite will support. + # + # The TTS system ideally supports options directly so we won't have + # to convert with ffmpeg later. If not, we pop the options here and + # perform the conversation after receiving the audio. + if ATTR_PREFERRED_FORMAT in supported_options: + final_extension = options.get(ATTR_PREFERRED_FORMAT, _DEFAULT_FORMAT) + else: + final_extension = options.pop(ATTR_PREFERRED_FORMAT, _DEFAULT_FORMAT) + + if ATTR_PREFERRED_SAMPLE_RATE in supported_options: + sample_rate = options.get(ATTR_PREFERRED_SAMPLE_RATE) + else: + sample_rate = options.pop(ATTR_PREFERRED_SAMPLE_RATE, None) + + if ATTR_PREFERRED_SAMPLE_CHANNELS in supported_options: + sample_channels = options.get(ATTR_PREFERRED_SAMPLE_CHANNELS) + else: + sample_channels = options.pop(ATTR_PREFERRED_SAMPLE_CHANNELS, None) async def get_tts_data() -> str: """Handle data available.""" @@ -716,8 +742,8 @@ class SpeechManager: # rate/format/channel count is requested. 
needs_conversion = ( (final_extension != extension) - or (ATTR_PREFERRED_SAMPLE_RATE in options) - or (ATTR_PREFERRED_SAMPLE_CHANNELS in options) + or (sample_rate is not None) + or (sample_channels is not None) ) if needs_conversion: @@ -726,8 +752,8 @@ class SpeechManager: extension, data, to_extension=final_extension, - to_sample_rate=options.get(ATTR_PREFERRED_SAMPLE_RATE), - to_sample_channels=options.get(ATTR_PREFERRED_SAMPLE_CHANNELS), + to_sample_rate=sample_rate, + to_sample_channels=sample_channels, ) # Create file infos diff --git a/tests/components/assist_pipeline/conftest.py b/tests/components/assist_pipeline/conftest.py index 8c5cfe9d599..9f098150288 100644 --- a/tests/components/assist_pipeline/conftest.py +++ b/tests/components/assist_pipeline/conftest.py @@ -111,6 +111,7 @@ class MockTTSProvider(tts.Provider): tts.Voice("fran_drescher", "Fran Drescher"), ] } + _supported_options = ["voice", "age", tts.ATTR_AUDIO_OUTPUT] @property def default_language(self) -> str: @@ -130,7 +131,7 @@ class MockTTSProvider(tts.Provider): @property def supported_options(self) -> list[str]: """Return list of supported options like voice, emotions.""" - return ["voice", "age", tts.ATTR_AUDIO_OUTPUT] + return self._supported_options def get_tts_audio( self, message: str, language: str, options: dict[str, Any] diff --git a/tests/components/assist_pipeline/test_init.py b/tests/components/assist_pipeline/test_init.py index 81347e96235..c6f45044cb3 100644 --- a/tests/components/assist_pipeline/test_init.py +++ b/tests/components/assist_pipeline/test_init.py @@ -11,7 +11,7 @@ import wave import pytest from syrupy.assertion import SnapshotAssertion -from homeassistant.components import assist_pipeline, stt, tts +from homeassistant.components import assist_pipeline, media_source, stt, tts from homeassistant.components.assist_pipeline.const import ( CONF_DEBUG_RECORDING_DIR, DOMAIN, @@ -19,9 +19,14 @@ from homeassistant.components.assist_pipeline.const import ( from 
homeassistant.core import Context, HomeAssistant from homeassistant.setup import async_setup_component -from .conftest import MockSttProvider, MockSttProviderEntity, MockWakeWordEntity +from .conftest import ( + MockSttProvider, + MockSttProviderEntity, + MockTTSProvider, + MockWakeWordEntity, +) -from tests.typing import WebSocketGenerator +from tests.typing import ClientSessionGenerator, WebSocketGenerator BYTES_ONE_SECOND = 16000 * 2 @@ -729,15 +734,17 @@ def test_pipeline_run_equality(hass: HomeAssistant, init_components) -> None: async def test_tts_audio_output( hass: HomeAssistant, - mock_stt_provider: MockSttProvider, + hass_client: ClientSessionGenerator, + mock_tts_provider: MockTTSProvider, init_components, pipeline_data: assist_pipeline.pipeline.PipelineData, snapshot: SnapshotAssertion, ) -> None: """Test using tts_audio_output with wav sets options correctly.""" + client = await hass_client() + assert await async_setup_component(hass, media_source.DOMAIN, {}) - def event_callback(event): - pass + events: list[assist_pipeline.PipelineEvent] = [] pipeline_store = pipeline_data.pipeline_store pipeline_id = pipeline_store.async_get_preferred_item() @@ -753,7 +760,7 @@ async def test_tts_audio_output( pipeline=pipeline, start_stage=assist_pipeline.PipelineStage.TTS, end_stage=assist_pipeline.PipelineStage.TTS, - event_callback=event_callback, + event_callback=events.append, tts_audio_output="wav", ), ) @@ -764,3 +771,87 @@ async def test_tts_audio_output( assert pipeline_input.run.tts_options.get(tts.ATTR_PREFERRED_FORMAT) == "wav" assert pipeline_input.run.tts_options.get(tts.ATTR_PREFERRED_SAMPLE_RATE) == 16000 assert pipeline_input.run.tts_options.get(tts.ATTR_PREFERRED_SAMPLE_CHANNELS) == 1 + + with patch.object(mock_tts_provider, "get_tts_audio") as mock_get_tts_audio: + await pipeline_input.execute() + + for event in events: + if event.type == assist_pipeline.PipelineEventType.TTS_END: + # We must fetch the media URL to trigger the TTS + assert 
event.data + media_id = event.data["tts_output"]["media_id"] + resolved = await media_source.async_resolve_media(hass, media_id, None) + await client.get(resolved.url) + + # Ensure that no unsupported options were passed in + assert mock_get_tts_audio.called + options = mock_get_tts_audio.call_args_list[0].kwargs["options"] + extra_options = set(options).difference(mock_tts_provider.supported_options) + assert len(extra_options) == 0, extra_options + + +async def test_tts_supports_preferred_format( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + mock_tts_provider: MockTTSProvider, + init_components, + pipeline_data: assist_pipeline.pipeline.PipelineData, + snapshot: SnapshotAssertion, +) -> None: + """Test that preferred format options are given to the TTS system if supported.""" + client = await hass_client() + assert await async_setup_component(hass, media_source.DOMAIN, {}) + + events: list[assist_pipeline.PipelineEvent] = [] + + pipeline_store = pipeline_data.pipeline_store + pipeline_id = pipeline_store.async_get_preferred_item() + pipeline = assist_pipeline.pipeline.async_get_pipeline(hass, pipeline_id) + + pipeline_input = assist_pipeline.pipeline.PipelineInput( + tts_input="This is a test.", + conversation_id=None, + device_id=None, + run=assist_pipeline.pipeline.PipelineRun( + hass, + context=Context(), + pipeline=pipeline, + start_stage=assist_pipeline.PipelineStage.TTS, + end_stage=assist_pipeline.PipelineStage.TTS, + event_callback=events.append, + tts_audio_output="wav", + ), + ) + await pipeline_input.validate() + + # Make the TTS provider support preferred format options + supported_options = list(mock_tts_provider.supported_options or []) + supported_options.extend( + [ + tts.ATTR_PREFERRED_FORMAT, + tts.ATTR_PREFERRED_SAMPLE_RATE, + tts.ATTR_PREFERRED_SAMPLE_CHANNELS, + ] + ) + + with ( + patch.object(mock_tts_provider, "_supported_options", supported_options), + patch.object(mock_tts_provider, "get_tts_audio") as 
mock_get_tts_audio, + ): + await pipeline_input.execute() + + for event in events: + if event.type == assist_pipeline.PipelineEventType.TTS_END: + # We must fetch the media URL to trigger the TTS + assert event.data + media_id = event.data["tts_output"]["media_id"] + resolved = await media_source.async_resolve_media(hass, media_id, None) + await client.get(resolved.url) + + assert mock_get_tts_audio.called + options = mock_get_tts_audio.call_args_list[0].kwargs["options"] + + # We should have received preferred format options in get_tts_audio + assert tts.ATTR_PREFERRED_FORMAT in options + assert tts.ATTR_PREFERRED_SAMPLE_RATE in options + assert tts.ATTR_PREFERRED_SAMPLE_CHANNELS in options From 53ba732ed04fb51b05cb6f30cf01c865008b5ef8 Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Thu, 28 Mar 2024 16:57:29 +0100 Subject: [PATCH 026/426] Fix area search for entities of devices (#114394) --- homeassistant/components/search/__init__.py | 9 +++++--- tests/components/search/test_init.py | 24 ++++++++++++++++++--- 2 files changed, 27 insertions(+), 6 deletions(-) diff --git a/homeassistant/components/search/__init__.py b/homeassistant/components/search/__init__.py index 71b51210a25..a85a21e8102 100644 --- a/homeassistant/components/search/__init__.py +++ b/homeassistant/components/search/__init__.py @@ -136,6 +136,9 @@ class Searcher: # Scripts referencing this area self._add(ItemType.SCRIPT, script.scripts_with_area(self.hass, area_id)) + # Entity in this area, will extend this with the entities of the devices in this area + entity_entries = er.async_entries_for_area(self._entity_registry, area_id) + # Devices in this area for device in dr.async_entries_for_area(self._device_registry, area_id): self._add(ItemType.DEVICE, device.id) @@ -160,10 +163,10 @@ class Searcher: # Skip the entity if it's in a different area if entity_entry.area_id is not None: continue - self._add(ItemType.ENTITY, entity_entry.entity_id) + entity_entries.append(entity_entry) - # Entities in 
this area - for entity_entry in er.async_entries_for_area(self._entity_registry, area_id): + # Process entities in this area + for entity_entry in entity_entries: self._add(ItemType.ENTITY, entity_entry.entity_id) # If this entity also exists as a resource, we add it. diff --git a/tests/components/search/test_init.py b/tests/components/search/test_init.py index ee7b60dc9ac..a817fbfc39e 100644 --- a/tests/components/search/test_init.py +++ b/tests/components/search/test_init.py @@ -496,11 +496,14 @@ async def test_search( ItemType.SCRIPT: {script_scene_entity.entity_id, "script.nested"}, } assert search(ItemType.AREA, living_room_area.id) == { - ItemType.AUTOMATION: {"automation.wled_device"}, + ItemType.AUTOMATION: {"automation.wled_device", "automation.wled_entity"}, ItemType.CONFIG_ENTRY: {wled_config_entry.entry_id}, ItemType.DEVICE: {wled_device.id}, ItemType.ENTITY: {wled_segment_1_entity.entity_id}, ItemType.FLOOR: {first_floor.floor_id}, + ItemType.GROUP: {"group.wled", "group.wled_hue"}, + ItemType.SCENE: {"scene.scene_wled_seg_1", scene_wled_hue_entity.entity_id}, + ItemType.SCRIPT: {"script.wled"}, } assert search(ItemType.AREA, kitchen_area.id) == { ItemType.AUTOMATION: {"automation.area"}, @@ -511,7 +514,9 @@ async def test_search( hue_segment_2_entity.entity_id, }, ItemType.FLOOR: {first_floor.floor_id}, - ItemType.SCRIPT: {"script.area", "script.device"}, + ItemType.GROUP: {"group.hue", "group.wled_hue"}, + ItemType.SCENE: {"scene.scene_hue_seg_1", scene_wled_hue_entity.entity_id}, + ItemType.SCRIPT: {"script.area", "script.device", "script.hue"}, } assert not search(ItemType.AUTOMATION, "automation.unknown") @@ -726,6 +731,7 @@ async def test_search( "automation.area", "automation.floor", "automation.wled_device", + "automation.wled_entity", }, ItemType.CONFIG_ENTRY: {hue_config_entry.entry_id, wled_config_entry.entry_id}, ItemType.DEVICE: {hue_device.id, wled_device.id}, @@ -734,7 +740,19 @@ async def test_search( hue_segment_1_entity.entity_id, 
hue_segment_2_entity.entity_id, }, - ItemType.SCRIPT: {"script.device", "script.area", "script.floor"}, + ItemType.GROUP: {"group.hue", "group.wled", "group.wled_hue"}, + ItemType.SCENE: { + "scene.scene_hue_seg_1", + "scene.scene_wled_seg_1", + scene_wled_hue_entity.entity_id, + }, + ItemType.SCRIPT: { + "script.device", + "script.area", + "script.floor", + "script.hue", + "script.wled", + }, } assert search(ItemType.FLOOR, second_floor.floor_id) == { ItemType.AREA: {bedroom_area.id}, From 8e4cf4e4a7fbcefa17b7b8861d4edaf6e550a6de Mon Sep 17 00:00:00 2001 From: Paul Bottein Date: Thu, 28 Mar 2024 20:38:12 +0100 Subject: [PATCH 027/426] Update frontend to 20240328.0 (#114396) --- homeassistant/components/frontend/manifest.json | 2 +- homeassistant/package_constraints.txt | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/frontend/manifest.json b/homeassistant/components/frontend/manifest.json index 10917bb7f70..9e86436bd68 100644 --- a/homeassistant/components/frontend/manifest.json +++ b/homeassistant/components/frontend/manifest.json @@ -20,5 +20,5 @@ "documentation": "https://www.home-assistant.io/integrations/frontend", "integration_type": "system", "quality_scale": "internal", - "requirements": ["home-assistant-frontend==20240327.0"] + "requirements": ["home-assistant-frontend==20240328.0"] } diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index 9af8c2f3e0a..b7db1514cba 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -30,7 +30,7 @@ habluetooth==2.4.2 hass-nabucasa==0.79.0 hassil==1.6.1 home-assistant-bluetooth==1.12.0 -home-assistant-frontend==20240327.0 +home-assistant-frontend==20240328.0 home-assistant-intents==2024.3.27 httpx==0.27.0 ifaddr==0.2.0 diff --git a/requirements_all.txt b/requirements_all.txt index 37fcf49a5e5..9c9e18a6ff3 100644 --- 
a/requirements_all.txt +++ b/requirements_all.txt @@ -1077,7 +1077,7 @@ hole==0.8.0 holidays==0.45 # homeassistant.components.frontend -home-assistant-frontend==20240327.0 +home-assistant-frontend==20240328.0 # homeassistant.components.conversation home-assistant-intents==2024.3.27 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 60ac93d37c5..e3cd33e994e 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -876,7 +876,7 @@ hole==0.8.0 holidays==0.45 # homeassistant.components.frontend -home-assistant-frontend==20240327.0 +home-assistant-frontend==20240328.0 # homeassistant.components.conversation home-assistant-intents==2024.3.27 From 7a53ea4b92a7e810a73915f37c834af55cb1b72b Mon Sep 17 00:00:00 2001 From: Scott K Logan Date: Thu, 28 Mar 2024 12:52:17 -0500 Subject: [PATCH 028/426] Bump aioraven to 0.5.3 (#114397) --- homeassistant/components/rainforest_raven/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/rainforest_raven/manifest.json b/homeassistant/components/rainforest_raven/manifest.json index ad161d32201..a2717f0e886 100644 --- a/homeassistant/components/rainforest_raven/manifest.json +++ b/homeassistant/components/rainforest_raven/manifest.json @@ -6,7 +6,7 @@ "dependencies": ["usb"], "documentation": "https://www.home-assistant.io/integrations/rainforest_raven", "iot_class": "local_polling", - "requirements": ["aioraven==0.5.2"], + "requirements": ["aioraven==0.5.3"], "usb": [ { "vid": "0403", diff --git a/requirements_all.txt b/requirements_all.txt index 9c9e18a6ff3..10c044009be 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -350,7 +350,7 @@ aiopyarr==23.4.0 aioqsw==0.3.5 # homeassistant.components.rainforest_raven -aioraven==0.5.2 +aioraven==0.5.3 # homeassistant.components.recollect_waste aiorecollect==2023.09.0 diff --git a/requirements_test_all.txt 
b/requirements_test_all.txt index e3cd33e994e..dd555cfebc8 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -323,7 +323,7 @@ aiopyarr==23.4.0 aioqsw==0.3.5 # homeassistant.components.rainforest_raven -aioraven==0.5.2 +aioraven==0.5.3 # homeassistant.components.recollect_waste aiorecollect==2023.09.0 From 612988cf3e07c56ebd67293335ebf3ee378b8b9e Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Thu, 28 Mar 2024 20:43:23 +0100 Subject: [PATCH 029/426] Bump version to 2024.4.0b1 --- homeassistant/const.py | 2 +- pyproject.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/homeassistant/const.py b/homeassistant/const.py index d458a66b865..f9a9b6324f8 100644 --- a/homeassistant/const.py +++ b/homeassistant/const.py @@ -18,7 +18,7 @@ from .util.signal_type import SignalType APPLICATION_NAME: Final = "HomeAssistant" MAJOR_VERSION: Final = 2024 MINOR_VERSION: Final = 4 -PATCH_VERSION: Final = "0b0" +PATCH_VERSION: Final = "0b1" __short_version__: Final = f"{MAJOR_VERSION}.{MINOR_VERSION}" __version__: Final = f"{__short_version__}.{PATCH_VERSION}" REQUIRED_PYTHON_VER: Final[tuple[int, int, int]] = (3, 12, 0) diff --git a/pyproject.toml b/pyproject.toml index 40b1f36a58b..48f520a878c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta" [project] name = "homeassistant" -version = "2024.4.0b0" +version = "2024.4.0b1" license = {text = "Apache-2.0"} description = "Open-source home automation platform running on Python 3." 
readme = "README.rst" From b8a2c148131a3b1913871aea07d3cd4e43a9317a Mon Sep 17 00:00:00 2001 From: Jeremy TRUFIER Date: Fri, 29 Mar 2024 14:51:44 +0100 Subject: [PATCH 030/426] Follow real AtlanticPassAPCZoneControlZone physical mode on Overkiz (HEAT, COOL or HEAT_COOL) (#111830) * Support HEAT_COOL when mode is Auto on overkiz AtlanticPassAPCZoneControlZone * Refactor ZoneControlZone to simplify usic by only using a single hvac mode * Fix linting issues * Makes more sense to use halves there * Fix PR feedback --- homeassistant/components/overkiz/climate.py | 25 +- .../atlantic_pass_apc_heating_zone.py | 4 +- .../atlantic_pass_apc_zone_control_zone.py | 394 ++++++++++++++---- 3 files changed, 325 insertions(+), 98 deletions(-) diff --git a/homeassistant/components/overkiz/climate.py b/homeassistant/components/overkiz/climate.py index e23403c2162..b569d05d2d7 100644 --- a/homeassistant/components/overkiz/climate.py +++ b/homeassistant/components/overkiz/climate.py @@ -7,6 +7,7 @@ from typing import cast from homeassistant.config_entries import ConfigEntry from homeassistant.const import Platform from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity import Entity from homeassistant.helpers.entity_platform import AddEntitiesCallback from . import HomeAssistantOverkizData @@ -27,15 +28,16 @@ async def async_setup_entry( """Set up the Overkiz climate from a config entry.""" data: HomeAssistantOverkizData = hass.data[DOMAIN][entry.entry_id] - async_add_entities( + # Match devices based on the widget. + entities_based_on_widget: list[Entity] = [ WIDGET_TO_CLIMATE_ENTITY[device.widget](device.device_url, data.coordinator) for device in data.platforms[Platform.CLIMATE] if device.widget in WIDGET_TO_CLIMATE_ENTITY - ) + ] - # Match devices based on the widget and controllableName - # This is for example used for Atlantic APC, where devices with different functionality share the same uiClass and widget. 
- async_add_entities( + # Match devices based on the widget and controllableName. + # ie Atlantic APC + entities_based_on_widget_and_controllable: list[Entity] = [ WIDGET_AND_CONTROLLABLE_TO_CLIMATE_ENTITY[device.widget][ cast(Controllable, device.controllable_name) ](device.device_url, data.coordinator) @@ -43,14 +45,21 @@ async def async_setup_entry( if device.widget in WIDGET_AND_CONTROLLABLE_TO_CLIMATE_ENTITY and device.controllable_name in WIDGET_AND_CONTROLLABLE_TO_CLIMATE_ENTITY[device.widget] - ) + ] - # Hitachi Air To Air Heat Pumps - async_add_entities( + # Match devices based on the widget and protocol. + # #ie Hitachi Air To Air Heat Pumps + entities_based_on_widget_and_protocol: list[Entity] = [ WIDGET_AND_PROTOCOL_TO_CLIMATE_ENTITY[device.widget][device.protocol]( device.device_url, data.coordinator ) for device in data.platforms[Platform.CLIMATE] if device.widget in WIDGET_AND_PROTOCOL_TO_CLIMATE_ENTITY and device.protocol in WIDGET_AND_PROTOCOL_TO_CLIMATE_ENTITY[device.widget] + ] + + async_add_entities( + entities_based_on_widget + + entities_based_on_widget_and_controllable + + entities_based_on_widget_and_protocol ) diff --git a/homeassistant/components/overkiz/climate_entities/atlantic_pass_apc_heating_zone.py b/homeassistant/components/overkiz/climate_entities/atlantic_pass_apc_heating_zone.py index bf6aa43644e..3da2ccc922b 100644 --- a/homeassistant/components/overkiz/climate_entities/atlantic_pass_apc_heating_zone.py +++ b/homeassistant/components/overkiz/climate_entities/atlantic_pass_apc_heating_zone.py @@ -159,7 +159,7 @@ class AtlanticPassAPCHeatingZone(OverkizEntity, ClimateEntity): await self.async_set_heating_mode(PRESET_MODES_TO_OVERKIZ[preset_mode]) @property - def preset_mode(self) -> str: + def preset_mode(self) -> str | None: """Return the current preset mode, e.g., home, away, temp.""" heating_mode = cast( str, self.executor.select_state(OverkizState.IO_PASS_APC_HEATING_MODE) @@ -179,7 +179,7 @@ class 
AtlanticPassAPCHeatingZone(OverkizEntity, ClimateEntity): return OVERKIZ_TO_PRESET_MODES[heating_mode] @property - def target_temperature(self) -> float: + def target_temperature(self) -> float | None: """Return hvac target temperature.""" current_heating_profile = self.current_heating_profile if current_heating_profile in OVERKIZ_TEMPERATURE_STATE_BY_PROFILE: diff --git a/homeassistant/components/overkiz/climate_entities/atlantic_pass_apc_zone_control_zone.py b/homeassistant/components/overkiz/climate_entities/atlantic_pass_apc_zone_control_zone.py index 261acc2838c..f18edd0cfe6 100644 --- a/homeassistant/components/overkiz/climate_entities/atlantic_pass_apc_zone_control_zone.py +++ b/homeassistant/components/overkiz/climate_entities/atlantic_pass_apc_zone_control_zone.py @@ -3,16 +3,24 @@ from __future__ import annotations from asyncio import sleep +from functools import cached_property from typing import Any, cast from pyoverkiz.enums import OverkizCommand, OverkizCommandParam, OverkizState -from homeassistant.components.climate import PRESET_NONE, HVACMode -from homeassistant.const import ATTR_TEMPERATURE +from homeassistant.components.climate import ( + ATTR_TARGET_TEMP_HIGH, + ATTR_TARGET_TEMP_LOW, + PRESET_NONE, + ClimateEntityFeature, + HVACAction, + HVACMode, +) +from homeassistant.const import ATTR_TEMPERATURE, PRECISION_HALVES from ..coordinator import OverkizDataUpdateCoordinator +from ..executor import OverkizExecutor from .atlantic_pass_apc_heating_zone import AtlanticPassAPCHeatingZone -from .atlantic_pass_apc_zone_control import OVERKIZ_TO_HVAC_MODE PRESET_SCHEDULE = "schedule" PRESET_MANUAL = "manual" @@ -24,32 +32,127 @@ OVERKIZ_MODE_TO_PRESET_MODES: dict[str, str] = { PRESET_MODES_TO_OVERKIZ = {v: k for k, v in OVERKIZ_MODE_TO_PRESET_MODES.items()} -TEMPERATURE_ZONECONTROL_DEVICE_INDEX = 20 +# Maps the HVAC current ZoneControl system operating mode. 
+OVERKIZ_TO_HVAC_ACTION: dict[str, HVACAction] = { + OverkizCommandParam.COOLING: HVACAction.COOLING, + OverkizCommandParam.DRYING: HVACAction.DRYING, + OverkizCommandParam.HEATING: HVACAction.HEATING, + # There is no known way to differentiate OFF from Idle. + OverkizCommandParam.STOP: HVACAction.OFF, +} + +HVAC_ACTION_TO_OVERKIZ_PROFILE_STATE: dict[HVACAction, OverkizState] = { + HVACAction.COOLING: OverkizState.IO_PASS_APC_COOLING_PROFILE, + HVACAction.HEATING: OverkizState.IO_PASS_APC_HEATING_PROFILE, +} + +HVAC_ACTION_TO_OVERKIZ_MODE_STATE: dict[HVACAction, OverkizState] = { + HVACAction.COOLING: OverkizState.IO_PASS_APC_COOLING_MODE, + HVACAction.HEATING: OverkizState.IO_PASS_APC_HEATING_MODE, +} + +TEMPERATURE_ZONECONTROL_DEVICE_INDEX = 1 + +SUPPORTED_FEATURES: ClimateEntityFeature = ( + ClimateEntityFeature.PRESET_MODE + | ClimateEntityFeature.TURN_OFF + | ClimateEntityFeature.TURN_ON +) + +OVERKIZ_THERMAL_CONFIGURATION_TO_HVAC_MODE: dict[ + OverkizCommandParam, tuple[HVACMode, ClimateEntityFeature] +] = { + OverkizCommandParam.COOLING: ( + HVACMode.COOL, + SUPPORTED_FEATURES | ClimateEntityFeature.TARGET_TEMPERATURE, + ), + OverkizCommandParam.HEATING: ( + HVACMode.HEAT, + SUPPORTED_FEATURES | ClimateEntityFeature.TARGET_TEMPERATURE, + ), + OverkizCommandParam.HEATING_AND_COOLING: ( + HVACMode.HEAT_COOL, + SUPPORTED_FEATURES | ClimateEntityFeature.TARGET_TEMPERATURE_RANGE, + ), +} -# Those device depends on a main probe that choose the operating mode (heating, cooling, ...) +# Those device depends on a main probe that choose the operating mode (heating, cooling, ...). 
class AtlanticPassAPCZoneControlZone(AtlanticPassAPCHeatingZone): """Representation of Atlantic Pass APC Heating And Cooling Zone Control.""" + _attr_target_temperature_step = PRECISION_HALVES + def __init__( self, device_url: str, coordinator: OverkizDataUpdateCoordinator ) -> None: """Init method.""" super().__init__(device_url, coordinator) - # There is less supported functions, because they depend on the ZoneControl. - if not self.is_using_derogated_temperature_fallback: - # Modes are not configurable, they will follow current HVAC Mode of Zone Control. - self._attr_hvac_modes = [] + # When using derogated temperature, we fallback to legacy behavior. + if self.is_using_derogated_temperature_fallback: + return - # Those are available and tested presets on Shogun. - self._attr_preset_modes = [*PRESET_MODES_TO_OVERKIZ] + self._attr_hvac_modes = [] + self._attr_supported_features = ClimateEntityFeature(0) + + # Modes depends on device capabilities. + if (thermal_configuration := self.thermal_configuration) is not None: + ( + device_hvac_mode, + climate_entity_feature, + ) = thermal_configuration + self._attr_hvac_modes = [device_hvac_mode, HVACMode.OFF] + self._attr_supported_features = climate_entity_feature + + # Those are available and tested presets on Shogun. + self._attr_preset_modes = [*PRESET_MODES_TO_OVERKIZ] # Those APC Heating and Cooling probes depends on the zone control device (main probe). # Only the base device (#1) can be used to get/set some states. # Like to retrieve and set the current operating mode (heating, cooling, drying, off). 
- self.zone_control_device = self.executor.linked_device( - TEMPERATURE_ZONECONTROL_DEVICE_INDEX + + self.zone_control_executor: OverkizExecutor | None = None + + if ( + zone_control_device := self.executor.linked_device( + TEMPERATURE_ZONECONTROL_DEVICE_INDEX + ) + ) is not None: + self.zone_control_executor = OverkizExecutor( + zone_control_device.device_url, + coordinator, + ) + + @cached_property + def thermal_configuration(self) -> tuple[HVACMode, ClimateEntityFeature] | None: + """Retrieve thermal configuration for this devices.""" + + if ( + ( + state_thermal_configuration := cast( + OverkizCommandParam | None, + self.executor.select_state(OverkizState.CORE_THERMAL_CONFIGURATION), + ) + ) + is not None + and state_thermal_configuration + in OVERKIZ_THERMAL_CONFIGURATION_TO_HVAC_MODE + ): + return OVERKIZ_THERMAL_CONFIGURATION_TO_HVAC_MODE[ + state_thermal_configuration + ] + + return None + + @cached_property + def device_hvac_mode(self) -> HVACMode | None: + """ZoneControlZone device has a single possible mode.""" + + return ( + None + if self.thermal_configuration is None + else self.thermal_configuration[0] ) @property @@ -61,21 +164,37 @@ class AtlanticPassAPCZoneControlZone(AtlanticPassAPCHeatingZone): ) @property - def zone_control_hvac_mode(self) -> HVACMode: + def zone_control_hvac_action(self) -> HVACAction: """Return hvac operation ie. 
heat, cool, dry, off mode.""" - if ( - self.zone_control_device is not None - and ( - state := self.zone_control_device.states[ + if self.zone_control_executor is not None and ( + ( + state := self.zone_control_executor.select_state( OverkizState.IO_PASS_APC_OPERATING_MODE - ] + ) ) is not None - and (value := state.value_as_str) is not None ): - return OVERKIZ_TO_HVAC_MODE[value] - return HVACMode.OFF + return OVERKIZ_TO_HVAC_ACTION[cast(str, state)] + + return HVACAction.OFF + + @property + def hvac_action(self) -> HVACAction | None: + """Return the current running hvac operation.""" + + # When ZoneControl action is heating/cooling but Zone is stopped, means the zone is idle. + if ( + hvac_action := self.zone_control_hvac_action + ) in HVAC_ACTION_TO_OVERKIZ_PROFILE_STATE and cast( + str, + self.executor.select_state( + HVAC_ACTION_TO_OVERKIZ_PROFILE_STATE[hvac_action] + ), + ) == OverkizCommandParam.STOP: + return HVACAction.IDLE + + return hvac_action @property def hvac_mode(self) -> HVACMode: @@ -84,30 +203,32 @@ class AtlanticPassAPCZoneControlZone(AtlanticPassAPCHeatingZone): if self.is_using_derogated_temperature_fallback: return super().hvac_mode - zone_control_hvac_mode = self.zone_control_hvac_mode + if (device_hvac_mode := self.device_hvac_mode) is None: + return HVACMode.OFF - # Should be same, because either thermostat or this integration change both. - on_off_state = cast( + cooling_is_off = cast( str, - self.executor.select_state( - OverkizState.CORE_COOLING_ON_OFF - if zone_control_hvac_mode == HVACMode.COOL - else OverkizState.CORE_HEATING_ON_OFF - ), - ) + self.executor.select_state(OverkizState.CORE_COOLING_ON_OFF), + ) in (OverkizCommandParam.OFF, None) + + heating_is_off = cast( + str, + self.executor.select_state(OverkizState.CORE_HEATING_ON_OFF), + ) in (OverkizCommandParam.OFF, None) # Device is Stopped, it means the air flux is flowing but its venting door is closed. 
- if on_off_state == OverkizCommandParam.OFF: - hvac_mode = HVACMode.OFF - else: - hvac_mode = zone_control_hvac_mode + if ( + (device_hvac_mode == HVACMode.COOL and cooling_is_off) + or (device_hvac_mode == HVACMode.HEAT and heating_is_off) + or ( + device_hvac_mode == HVACMode.HEAT_COOL + and cooling_is_off + and heating_is_off + ) + ): + return HVACMode.OFF - # It helps keep it consistent with the Zone Control, within the interface. - if self._attr_hvac_modes != [zone_control_hvac_mode, HVACMode.OFF]: - self._attr_hvac_modes = [zone_control_hvac_mode, HVACMode.OFF] - self.async_write_ha_state() - - return hvac_mode + return device_hvac_mode async def async_set_hvac_mode(self, hvac_mode: HVACMode) -> None: """Set new target hvac mode.""" @@ -118,46 +239,49 @@ class AtlanticPassAPCZoneControlZone(AtlanticPassAPCHeatingZone): # They are mainly managed by the Zone Control device # However, it make sense to map the OFF Mode to the Overkiz STOP Preset - if hvac_mode == HVACMode.OFF: - await self.executor.async_execute_command( - OverkizCommand.SET_COOLING_ON_OFF, - OverkizCommandParam.OFF, - ) - await self.executor.async_execute_command( - OverkizCommand.SET_HEATING_ON_OFF, - OverkizCommandParam.OFF, - ) - else: - await self.executor.async_execute_command( - OverkizCommand.SET_COOLING_ON_OFF, - OverkizCommandParam.ON, - ) - await self.executor.async_execute_command( - OverkizCommand.SET_HEATING_ON_OFF, - OverkizCommandParam.ON, - ) + on_off_target_command_param = ( + OverkizCommandParam.OFF + if hvac_mode == HVACMode.OFF + else OverkizCommandParam.ON + ) + + await self.executor.async_execute_command( + OverkizCommand.SET_COOLING_ON_OFF, + on_off_target_command_param, + ) + await self.executor.async_execute_command( + OverkizCommand.SET_HEATING_ON_OFF, + on_off_target_command_param, + ) await self.async_refresh_modes() @property - def preset_mode(self) -> str: + def preset_mode(self) -> str | None: """Return the current preset mode, e.g., schedule, manual.""" if 
self.is_using_derogated_temperature_fallback: return super().preset_mode - mode = OVERKIZ_MODE_TO_PRESET_MODES[ - cast( - str, - self.executor.select_state( - OverkizState.IO_PASS_APC_COOLING_MODE - if self.zone_control_hvac_mode == HVACMode.COOL - else OverkizState.IO_PASS_APC_HEATING_MODE - ), + if ( + self.zone_control_hvac_action in HVAC_ACTION_TO_OVERKIZ_MODE_STATE + and ( + mode_state := HVAC_ACTION_TO_OVERKIZ_MODE_STATE[ + self.zone_control_hvac_action + ] ) - ] + and ( + ( + mode := OVERKIZ_MODE_TO_PRESET_MODES[ + cast(str, self.executor.select_state(mode_state)) + ] + ) + is not None + ) + ): + return mode - return mode if mode is not None else PRESET_NONE + return PRESET_NONE async def async_set_preset_mode(self, preset_mode: str) -> None: """Set new preset mode.""" @@ -178,13 +302,18 @@ class AtlanticPassAPCZoneControlZone(AtlanticPassAPCHeatingZone): await self.async_refresh_modes() @property - def target_temperature(self) -> float: + def target_temperature(self) -> float | None: """Return hvac target temperature.""" if self.is_using_derogated_temperature_fallback: return super().target_temperature - if self.zone_control_hvac_mode == HVACMode.COOL: + device_hvac_mode = self.device_hvac_mode + + if device_hvac_mode == HVACMode.HEAT_COOL: + return None + + if device_hvac_mode == HVACMode.COOL: return cast( float, self.executor.select_state( @@ -192,7 +321,7 @@ class AtlanticPassAPCZoneControlZone(AtlanticPassAPCHeatingZone): ), ) - if self.zone_control_hvac_mode == HVACMode.HEAT: + if device_hvac_mode == HVACMode.HEAT: return cast( float, self.executor.select_state( @@ -204,32 +333,73 @@ class AtlanticPassAPCZoneControlZone(AtlanticPassAPCHeatingZone): float, self.executor.select_state(OverkizState.CORE_TARGET_TEMPERATURE) ) + @property + def target_temperature_high(self) -> float | None: + """Return the highbound target temperature we try to reach (cooling).""" + + if self.device_hvac_mode != HVACMode.HEAT_COOL: + return None + + return cast( + float, + 
self.executor.select_state(OverkizState.CORE_COOLING_TARGET_TEMPERATURE), + ) + + @property + def target_temperature_low(self) -> float | None: + """Return the lowbound target temperature we try to reach (heating).""" + + if self.device_hvac_mode != HVACMode.HEAT_COOL: + return None + + return cast( + float, + self.executor.select_state(OverkizState.CORE_HEATING_TARGET_TEMPERATURE), + ) + async def async_set_temperature(self, **kwargs: Any) -> None: """Set new temperature.""" if self.is_using_derogated_temperature_fallback: return await super().async_set_temperature(**kwargs) - temperature = kwargs[ATTR_TEMPERATURE] + target_temperature = kwargs.get(ATTR_TEMPERATURE) + target_temp_low = kwargs.get(ATTR_TARGET_TEMP_LOW) + target_temp_high = kwargs.get(ATTR_TARGET_TEMP_HIGH) + hvac_mode = self.hvac_mode + + if hvac_mode == HVACMode.HEAT_COOL: + if target_temp_low is not None: + await self.executor.async_execute_command( + OverkizCommand.SET_HEATING_TARGET_TEMPERATURE, + target_temp_low, + ) + + if target_temp_high is not None: + await self.executor.async_execute_command( + OverkizCommand.SET_COOLING_TARGET_TEMPERATURE, + target_temp_high, + ) + + elif target_temperature is not None: + if hvac_mode == HVACMode.HEAT: + await self.executor.async_execute_command( + OverkizCommand.SET_HEATING_TARGET_TEMPERATURE, + target_temperature, + ) + + elif hvac_mode == HVACMode.COOL: + await self.executor.async_execute_command( + OverkizCommand.SET_COOLING_TARGET_TEMPERATURE, + target_temperature, + ) - # Change both (heating/cooling) temperature is a good way to have consistency - await self.executor.async_execute_command( - OverkizCommand.SET_HEATING_TARGET_TEMPERATURE, - temperature, - ) - await self.executor.async_execute_command( - OverkizCommand.SET_COOLING_TARGET_TEMPERATURE, - temperature, - ) await self.executor.async_execute_command( OverkizCommand.SET_DEROGATION_ON_OFF_STATE, - OverkizCommandParam.OFF, + OverkizCommandParam.ON, ) - # Target temperature may take up to 1 
minute to get refreshed. - await self.executor.async_execute_command( - OverkizCommand.REFRESH_TARGET_TEMPERATURE - ) + await self.async_refresh_modes() async def async_refresh_modes(self) -> None: """Refresh the device modes to have new states.""" @@ -256,3 +426,51 @@ class AtlanticPassAPCZoneControlZone(AtlanticPassAPCHeatingZone): await self.executor.async_execute_command( OverkizCommand.REFRESH_TARGET_TEMPERATURE ) + + @property + def min_temp(self) -> float: + """Return Minimum Temperature for AC of this group.""" + + device_hvac_mode = self.device_hvac_mode + + if device_hvac_mode in (HVACMode.HEAT, HVACMode.HEAT_COOL): + return cast( + float, + self.executor.select_state( + OverkizState.CORE_MINIMUM_HEATING_TARGET_TEMPERATURE + ), + ) + + if device_hvac_mode == HVACMode.COOL: + return cast( + float, + self.executor.select_state( + OverkizState.CORE_MINIMUM_COOLING_TARGET_TEMPERATURE + ), + ) + + return super().min_temp + + @property + def max_temp(self) -> float: + """Return Max Temperature for AC of this group.""" + + device_hvac_mode = self.device_hvac_mode + + if device_hvac_mode == HVACMode.HEAT: + return cast( + float, + self.executor.select_state( + OverkizState.CORE_MAXIMUM_HEATING_TARGET_TEMPERATURE + ), + ) + + if device_hvac_mode in (HVACMode.COOL, HVACMode.HEAT_COOL): + return cast( + float, + self.executor.select_state( + OverkizState.CORE_MAXIMUM_COOLING_TARGET_TEMPERATURE + ), + ) + + return super().max_temp From 65d25bd7803619123eaabb6aeef6bf87ff894083 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alexey=20ALERT=20Rubash=D1=91ff?= Date: Fri, 29 Mar 2024 15:07:22 +0200 Subject: [PATCH 031/426] Add overkiz heating status, absence mode, and boost mode binary sensors for Atlantic Water Heater (#114184) * Adds heating status, absense mode, and boost mode binary sensors for Atlantic water heater * Renamed absence mode and boost mode binary sensors * Update homeassistant/components/overkiz/binary_sensor.py Co-authored-by: TheJulianJES * Update 
homeassistant/components/overkiz/binary_sensor.py Co-authored-by: TheJulianJES * Update homeassistant/components/overkiz/binary_sensor.py Co-authored-by: TheJulianJES --------- Co-authored-by: TheJulianJES --- .../components/overkiz/binary_sensor.py | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) diff --git a/homeassistant/components/overkiz/binary_sensor.py b/homeassistant/components/overkiz/binary_sensor.py index 871a70b3e0a..c37afc9cb0c 100644 --- a/homeassistant/components/overkiz/binary_sensor.py +++ b/homeassistant/components/overkiz/binary_sensor.py @@ -105,6 +105,22 @@ BINARY_SENSOR_DESCRIPTIONS: list[OverkizBinarySensorDescription] = [ ) == 1, ), + OverkizBinarySensorDescription( + key=OverkizState.CORE_HEATING_STATUS, + name="Heating status", + device_class=BinarySensorDeviceClass.HEAT, + value_fn=lambda state: state == OverkizCommandParam.ON, + ), + OverkizBinarySensorDescription( + key=OverkizState.MODBUSLINK_DHW_ABSENCE_MODE, + name="Absence mode", + value_fn=lambda state: state == OverkizCommandParam.ON, + ), + OverkizBinarySensorDescription( + key=OverkizState.MODBUSLINK_DHW_BOOST_MODE, + name="Boost mode", + value_fn=lambda state: state == OverkizCommandParam.ON, + ), ] SUPPORTED_STATES = { From 35e582a240edb66ed93d5c93d88a76bee9f6c383 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alexey=20ALERT=20Rubash=D1=91ff?= Date: Fri, 29 Mar 2024 20:33:13 +0200 Subject: [PATCH 032/426] Add overkiz water targets temperature numbers for Atlantic water heater (#114185) * Adds water targets temperature numbers for Atlantic water heater * Update homeassistant/components/overkiz/number.py Co-authored-by: Mick Vleeshouwer * Update homeassistant/components/overkiz/number.py Co-authored-by: Mick Vleeshouwer * ruff formatting reverted * Update homeassistant/components/overkiz/number.py Co-authored-by: TheJulianJES * Update homeassistant/components/overkiz/number.py Co-authored-by: TheJulianJES * changed command hardcode to a constant --------- Co-authored-by: Mick 
Vleeshouwer Co-authored-by: TheJulianJES --- homeassistant/components/overkiz/number.py | 22 ++++++++++++++++++++++ 1 file changed, 22 insertions(+) diff --git a/homeassistant/components/overkiz/number.py b/homeassistant/components/overkiz/number.py index f81ed82f7b1..494d430c393 100644 --- a/homeassistant/components/overkiz/number.py +++ b/homeassistant/components/overkiz/number.py @@ -97,6 +97,28 @@ NUMBER_DESCRIPTIONS: list[OverkizNumberDescription] = [ max_value_state_name=OverkizState.CORE_MAXIMAL_SHOWER_MANUAL_MODE, entity_category=EntityCategory.CONFIG, ), + OverkizNumberDescription( + key=OverkizState.CORE_TARGET_DWH_TEMPERATURE, + name="Target temperature", + device_class=NumberDeviceClass.TEMPERATURE, + command=OverkizCommand.SET_TARGET_DHW_TEMPERATURE, + native_min_value=50, + native_max_value=65, + min_value_state_name=OverkizState.CORE_MINIMAL_TEMPERATURE_MANUAL_MODE, + max_value_state_name=OverkizState.CORE_MAXIMAL_TEMPERATURE_MANUAL_MODE, + entity_category=EntityCategory.CONFIG, + ), + OverkizNumberDescription( + key=OverkizState.CORE_WATER_TARGET_TEMPERATURE, + name="Water target temperature", + device_class=NumberDeviceClass.TEMPERATURE, + command=OverkizCommand.SET_WATER_TARGET_TEMPERATURE, + native_min_value=50, + native_max_value=65, + min_value_state_name=OverkizState.CORE_MINIMAL_TEMPERATURE_MANUAL_MODE, + max_value_state_name=OverkizState.CORE_MAXIMAL_TEMPERATURE_MANUAL_MODE, + entity_category=EntityCategory.CONFIG, + ), # SomfyHeatingTemperatureInterface OverkizNumberDescription( key=OverkizState.CORE_ECO_ROOM_TEMPERATURE, From bf4e527f4485e9b4b85103b4d0ed229dce51b7ad Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alexey=20ALERT=20Rubash=D1=91ff?= Date: Fri, 29 Mar 2024 15:05:18 +0200 Subject: [PATCH 033/426] Add overkiz bottom tank water temperature and core control water temperature for Atlantic Water Heater (#114186) * Adds bottom tank water temperature and core conrol water temperature sensors for Atlantic water heater * Update 
homeassistant/components/overkiz/sensor.py Co-authored-by: TheJulianJES * Update homeassistant/components/overkiz/sensor.py Co-authored-by: TheJulianJES --------- Co-authored-by: TheJulianJES --- homeassistant/components/overkiz/sensor.py | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/homeassistant/components/overkiz/sensor.py b/homeassistant/components/overkiz/sensor.py index 2b0a222f96f..c62840eea97 100644 --- a/homeassistant/components/overkiz/sensor.py +++ b/homeassistant/components/overkiz/sensor.py @@ -399,6 +399,20 @@ SENSOR_DESCRIPTIONS: list[OverkizSensorDescription] = [ native_unit_of_measurement=UnitOfTime.SECONDS, entity_category=EntityCategory.DIAGNOSTIC, ), + OverkizSensorDescription( + key=OverkizState.CORE_BOTTOM_TANK_WATER_TEMPERATURE, + name="Bottom tank water temperature", + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + ), + OverkizSensorDescription( + key=OverkizState.CORE_CONTROL_WATER_TARGET_TEMPERATURE, + name="Control water target temperature", + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + ), # Cover OverkizSensorDescription( key=OverkizState.CORE_TARGET_CLOSURE, From bc740f95c9fb64ec56bf04dfb2ff07ea4fa29f51 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Fri, 29 Mar 2024 05:20:26 -1000 Subject: [PATCH 034/426] Avoid concurrent radio operations with powerview hubs (#114399) Co-authored-by: kingy444 --- .../components/hunterdouglas_powerview/button.py | 3 ++- .../components/hunterdouglas_powerview/coordinator.py | 5 +++++ .../components/hunterdouglas_powerview/cover.py | 11 ++++++++--- .../components/hunterdouglas_powerview/select.py | 3 ++- .../components/hunterdouglas_powerview/sensor.py | 3 ++- 5 files changed, 19 insertions(+), 6 deletions(-) diff --git a/homeassistant/components/hunterdouglas_powerview/button.py b/homeassistant/components/hunterdouglas_powerview/button.py index f7c90f3420b..ecb71f9653a 100644 --- a/homeassistant/components/hunterdouglas_powerview/button.py +++ b/homeassistant/components/hunterdouglas_powerview/button.py @@ -119,4 +119,5 @@ class PowerviewShadeButton(ShadeEntity, ButtonEntity): async def async_press(self) -> None: """Handle the button press.""" - await self.entity_description.press_action(self._shade) + async with self.coordinator.radio_operation_lock: + await self.entity_description.press_action(self._shade) diff --git a/homeassistant/components/hunterdouglas_powerview/coordinator.py b/homeassistant/components/hunterdouglas_powerview/coordinator.py index 1ea47ca9d1f..f074b06b2bc 100644 --- a/homeassistant/components/hunterdouglas_powerview/coordinator.py +++ b/homeassistant/components/hunterdouglas_powerview/coordinator.py @@ -2,6 +2,7 @@ from __future__ import annotations +import asyncio from datetime import timedelta import logging @@ -25,6 +26,10 @@ class PowerviewShadeUpdateCoordinator(DataUpdateCoordinator[PowerviewShadeData]) """Initialize DataUpdateCoordinator to gather data for specific Powerview Hub.""" self.shades = shades self.hub = hub + # The hub tends to crash if there are multiple radio operations at the same time + # but it seems to handle all other requests that do not use RF without issue + # so we have a lock to prevent multiple radio 
operations at the same time + self.radio_operation_lock = asyncio.Lock() super().__init__( hass, _LOGGER, diff --git a/homeassistant/components/hunterdouglas_powerview/cover.py b/homeassistant/components/hunterdouglas_powerview/cover.py index 453d5c4e920..57409f37ac9 100644 --- a/homeassistant/components/hunterdouglas_powerview/cover.py +++ b/homeassistant/components/hunterdouglas_powerview/cover.py @@ -67,7 +67,8 @@ async def async_setup_entry( for shade in pv_entry.shade_data.values(): _LOGGER.debug("Initial refresh of shade: %s", shade.name) - await shade.refresh(suppress_timeout=True) # default 15 second timeout + async with coordinator.radio_operation_lock: + await shade.refresh(suppress_timeout=True) # default 15 second timeout entities: list[ShadeEntity] = [] for shade in pv_entry.shade_data.values(): @@ -207,7 +208,8 @@ class PowerViewShadeBase(ShadeEntity, CoverEntity): async def _async_execute_move(self, move: ShadePosition) -> None: """Execute a move that can affect multiple positions.""" _LOGGER.debug("Move request %s: %s", self.name, move) - response = await self._shade.move(move) + async with self.coordinator.radio_operation_lock: + response = await self._shade.move(move) _LOGGER.debug("Move response %s: %s", self.name, response) # Process the response from the hub (including new positions) @@ -318,7 +320,10 @@ class PowerViewShadeBase(ShadeEntity, CoverEntity): # error if are already have one in flight return # suppress timeouts caused by hub nightly reboot - await self._shade.refresh(suppress_timeout=True) # default 15 second timeout + async with self.coordinator.radio_operation_lock: + await self._shade.refresh( + suppress_timeout=True + ) # default 15 second timeout _LOGGER.debug("Process update %s: %s", self.name, self._shade.current_position) self._async_update_shade_data(self._shade.current_position) diff --git a/homeassistant/components/hunterdouglas_powerview/select.py b/homeassistant/components/hunterdouglas_powerview/select.py index 
66207f6da7c..f1e9c491659 100644 --- a/homeassistant/components/hunterdouglas_powerview/select.py +++ b/homeassistant/components/hunterdouglas_powerview/select.py @@ -114,5 +114,6 @@ class PowerViewSelect(ShadeEntity, SelectEntity): """Change the selected option.""" await self.entity_description.select_fn(self._shade, option) # force update data to ensure new info is in coordinator - await self._shade.refresh() + async with self.coordinator.radio_operation_lock: + await self._shade.refresh(suppress_timeout=True) self.async_write_ha_state() diff --git a/homeassistant/components/hunterdouglas_powerview/sensor.py b/homeassistant/components/hunterdouglas_powerview/sensor.py index bca87189e56..b24193ac438 100644 --- a/homeassistant/components/hunterdouglas_powerview/sensor.py +++ b/homeassistant/components/hunterdouglas_powerview/sensor.py @@ -153,5 +153,6 @@ class PowerViewSensor(ShadeEntity, SensorEntity): async def async_update(self) -> None: """Refresh sensor entity.""" - await self.entity_description.update_fn(self._shade) + async with self.coordinator.radio_operation_lock: + await self.entity_description.update_fn(self._shade) self.async_write_ha_state() From 906febadef43171da7df58e570f2134073209fa0 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Thu, 28 Mar 2024 20:36:33 -1000 Subject: [PATCH 035/426] Cleanup some plex tasks that delayed startup (#114418) --- homeassistant/components/plex/__init__.py | 19 ++++--------------- homeassistant/components/plex/const.py | 1 - homeassistant/components/plex/helpers.py | 2 -- homeassistant/components/plex/server.py | 1 + 4 files changed, 5 insertions(+), 18 deletions(-) diff --git a/homeassistant/components/plex/__init__.py b/homeassistant/components/plex/__init__.py index 4e17e4032aa..eb57dc46727 100644 --- a/homeassistant/components/plex/__init__.py +++ b/homeassistant/components/plex/__init__.py @@ -42,7 +42,6 @@ from .const import ( DOMAIN, INVALID_TOKEN_MESSAGE, PLATFORMS, - PLATFORMS_COMPLETED, PLEX_SERVER_CONFIG, PLEX_UPDATE_LIBRARY_SIGNAL, PLEX_UPDATE_PLATFORMS_SIGNAL, @@ -94,18 +93,13 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: gdm.scan(scan_for_clients=True) debouncer = Debouncer[None]( - hass, - _LOGGER, - cooldown=10, - immediate=True, - function=gdm_scan, + hass, _LOGGER, cooldown=10, immediate=True, function=gdm_scan, background=True ).async_call hass_data = PlexData( servers={}, dispatchers={}, websockets={}, - platforms_completed={}, gdm_scanner=gdm, gdm_debouncer=debouncer, ) @@ -180,7 +174,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: server_id = plex_server.machine_identifier hass_data = get_plex_data(hass) hass_data[SERVERS][server_id] = plex_server - hass_data[PLATFORMS_COMPLETED][server_id] = set() entry.add_update_listener(async_options_updated) @@ -233,11 +226,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: ) hass_data[WEBSOCKETS][server_id] = websocket - def start_websocket_session(platform): - hass_data[PLATFORMS_COMPLETED][server_id].add(platform) - if hass_data[PLATFORMS_COMPLETED][server_id] == PLATFORMS: - hass.loop.create_task(websocket.listen()) - def close_websocket_session(_): websocket.close() @@ -248,8 +236,9 
@@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) - for platform in PLATFORMS: - start_websocket_session(platform) + entry.async_create_background_task( + hass, websocket.listen(), f"plex websocket listener {entry.entry_id}" + ) async_cleanup_plex_devices(hass, entry) diff --git a/homeassistant/components/plex/const.py b/homeassistant/components/plex/const.py index 8dc75a447af..d5d70219471 100644 --- a/homeassistant/components/plex/const.py +++ b/homeassistant/components/plex/const.py @@ -24,7 +24,6 @@ GDM_SCANNER: Final = "gdm_scanner" PLATFORMS = frozenset( [Platform.BUTTON, Platform.MEDIA_PLAYER, Platform.SENSOR, Platform.UPDATE] ) -PLATFORMS_COMPLETED: Final = "platforms_completed" PLAYER_SOURCE = "player_source" SERVERS: Final = "servers" WEBSOCKETS: Final = "websockets" diff --git a/homeassistant/components/plex/helpers.py b/homeassistant/components/plex/helpers.py index f51350ac597..3c7ff8180c8 100644 --- a/homeassistant/components/plex/helpers.py +++ b/homeassistant/components/plex/helpers.py @@ -8,7 +8,6 @@ from typing import TYPE_CHECKING, Any, TypedDict from plexapi.gdm import GDM from plexwebsocket import PlexWebsocket -from homeassistant.const import Platform from homeassistant.core import CALLBACK_TYPE, HomeAssistant from .const import DOMAIN, SERVERS @@ -23,7 +22,6 @@ class PlexData(TypedDict): servers: dict[str, PlexServer] dispatchers: dict[str, list[CALLBACK_TYPE]] websockets: dict[str, PlexWebsocket] - platforms_completed: dict[str, set[Platform]] gdm_scanner: GDM gdm_debouncer: Callable[[], Coroutine[Any, Any, None]] diff --git a/homeassistant/components/plex/server.py b/homeassistant/components/plex/server.py index 9e2bf63ce55..584378d51f9 100644 --- a/homeassistant/components/plex/server.py +++ b/homeassistant/components/plex/server.py @@ -97,6 +97,7 @@ class PlexServer: cooldown=DEBOUNCE_TIMEOUT, immediate=True, 
function=self._async_update_platforms, + background=True, ).async_call self.thumbnail_cache = {} From db7d0a0ee91e7dcf60ed31c95bd3907fc3ed4db9 Mon Sep 17 00:00:00 2001 From: Steven B <51370195+sdb9696@users.noreply.github.com> Date: Fri, 29 Mar 2024 11:47:21 +0000 Subject: [PATCH 036/426] Bump python-ring-doorbell to 0.8.8 (#114431) * Bump ring_doorbell to 0.8.8 * Fix intercom history test for new library version --------- Co-authored-by: Joost Lekkerkerker --- homeassistant/components/ring/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- tests/components/ring/test_sensor.py | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/ring/manifest.json b/homeassistant/components/ring/manifest.json index 0390db640e5..764557a3a1d 100644 --- a/homeassistant/components/ring/manifest.json +++ b/homeassistant/components/ring/manifest.json @@ -13,5 +13,5 @@ "documentation": "https://www.home-assistant.io/integrations/ring", "iot_class": "cloud_polling", "loggers": ["ring_doorbell"], - "requirements": ["ring-doorbell[listen]==0.8.7"] + "requirements": ["ring-doorbell[listen]==0.8.8"] } diff --git a/requirements_all.txt b/requirements_all.txt index 10c044009be..60681ce093f 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2444,7 +2444,7 @@ rfk101py==0.0.1 rflink==0.0.66 # homeassistant.components.ring -ring-doorbell[listen]==0.8.7 +ring-doorbell[listen]==0.8.8 # homeassistant.components.fleetgo ritassist==0.9.2 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index dd555cfebc8..68e82ca22e1 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1887,7 +1887,7 @@ reolink-aio==0.8.9 rflink==0.0.66 # homeassistant.components.ring -ring-doorbell[listen]==0.8.7 +ring-doorbell[listen]==0.8.8 # homeassistant.components.roku rokuecp==0.19.2 diff --git a/tests/components/ring/test_sensor.py b/tests/components/ring/test_sensor.py index aadea6f0ba1..2c866586c6c 100644 
--- a/tests/components/ring/test_sensor.py +++ b/tests/components/ring/test_sensor.py @@ -87,7 +87,7 @@ async def test_history( assert front_door_last_activity_state.state == "2017-03-05T15:03:40+00:00" ingress_last_activity_state = hass.states.get("sensor.ingress_last_activity") - assert ingress_last_activity_state.state == "unknown" + assert ingress_last_activity_state.state == "2024-02-02T11:21:24+00:00" async def test_only_chime_devices( From c7ce53cc4954190e9b7a6b11537190a565599416 Mon Sep 17 00:00:00 2001 From: Mick Vleeshouwer Date: Fri, 29 Mar 2024 17:46:21 +0100 Subject: [PATCH 037/426] Bump pyoverkiz to 1.13.9 (#114442) --- homeassistant/components/overkiz/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/overkiz/manifest.json b/homeassistant/components/overkiz/manifest.json index db24a299f2a..2ef0f0ebef4 100644 --- a/homeassistant/components/overkiz/manifest.json +++ b/homeassistant/components/overkiz/manifest.json @@ -19,7 +19,7 @@ "integration_type": "hub", "iot_class": "local_polling", "loggers": ["boto3", "botocore", "pyhumps", "pyoverkiz", "s3transfer"], - "requirements": ["pyoverkiz==1.13.8"], + "requirements": ["pyoverkiz==1.13.9"], "zeroconf": [ { "type": "_kizbox._tcp.local.", diff --git a/requirements_all.txt b/requirements_all.txt index 60681ce093f..bc52a8a1c54 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2035,7 +2035,7 @@ pyotgw==2.1.3 pyotp==2.8.0 # homeassistant.components.overkiz -pyoverkiz==1.13.8 +pyoverkiz==1.13.9 # homeassistant.components.openweathermap pyowm==3.2.0 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 68e82ca22e1..855eebb9912 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1583,7 +1583,7 @@ pyotgw==2.1.3 pyotp==2.8.0 # homeassistant.components.overkiz -pyoverkiz==1.13.8 +pyoverkiz==1.13.9 # homeassistant.components.openweathermap 
pyowm==3.2.0 From cdd7ce435ab71ddcae228199dcce42f1fe240da0 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Fri, 29 Mar 2024 16:20:43 +0100 Subject: [PATCH 038/426] Log warnings in Renault initialisation (#114445) --- homeassistant/components/renault/renault_vehicle.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/renault/renault_vehicle.py b/homeassistant/components/renault/renault_vehicle.py index 55a5574a444..59e1826ce1b 100644 --- a/homeassistant/components/renault/renault_vehicle.py +++ b/homeassistant/components/renault/renault_vehicle.py @@ -125,16 +125,16 @@ class RenaultVehicleProxy: coordinator = self.coordinators[key] if coordinator.not_supported: # Remove endpoint as it is not supported for this vehicle. - LOGGER.info( - "Ignoring endpoint %s as it is not supported for this vehicle: %s", + LOGGER.warning( + "Ignoring endpoint %s as it is not supported: %s", coordinator.name, coordinator.last_exception, ) del self.coordinators[key] elif coordinator.access_denied: # Remove endpoint as it is denied for this vehicle. 
- LOGGER.info( - "Ignoring endpoint %s as it is denied for this vehicle: %s", + LOGGER.warning( + "Ignoring endpoint %s as it is denied: %s", coordinator.name, coordinator.last_exception, ) From e4d973e8a2439f0fa23377eaa445f300b213418a Mon Sep 17 00:00:00 2001 From: Steven Looman Date: Fri, 29 Mar 2024 19:08:07 +0100 Subject: [PATCH 039/426] Bump async-upnp-client to 0.38.3 (#114447) --- homeassistant/components/dlna_dmr/manifest.json | 2 +- homeassistant/components/dlna_dms/manifest.json | 2 +- homeassistant/components/samsungtv/manifest.json | 2 +- homeassistant/components/ssdp/manifest.json | 2 +- homeassistant/components/upnp/manifest.json | 2 +- homeassistant/components/yeelight/manifest.json | 2 +- homeassistant/package_constraints.txt | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 9 files changed, 9 insertions(+), 9 deletions(-) diff --git a/homeassistant/components/dlna_dmr/manifest.json b/homeassistant/components/dlna_dmr/manifest.json index 128822cf289..41fa49f1a94 100644 --- a/homeassistant/components/dlna_dmr/manifest.json +++ b/homeassistant/components/dlna_dmr/manifest.json @@ -8,7 +8,7 @@ "documentation": "https://www.home-assistant.io/integrations/dlna_dmr", "iot_class": "local_push", "loggers": ["async_upnp_client"], - "requirements": ["async-upnp-client==0.38.2", "getmac==0.9.4"], + "requirements": ["async-upnp-client==0.38.3", "getmac==0.9.4"], "ssdp": [ { "deviceType": "urn:schemas-upnp-org:device:MediaRenderer:1", diff --git a/homeassistant/components/dlna_dms/manifest.json b/homeassistant/components/dlna_dms/manifest.json index aaa6e1ee7de..c87e5e87779 100644 --- a/homeassistant/components/dlna_dms/manifest.json +++ b/homeassistant/components/dlna_dms/manifest.json @@ -8,7 +8,7 @@ "documentation": "https://www.home-assistant.io/integrations/dlna_dms", "iot_class": "local_polling", "quality_scale": "platinum", - "requirements": ["async-upnp-client==0.38.2"], + "requirements": ["async-upnp-client==0.38.3"], "ssdp": [ { 
"deviceType": "urn:schemas-upnp-org:device:MediaServer:1", diff --git a/homeassistant/components/samsungtv/manifest.json b/homeassistant/components/samsungtv/manifest.json index 00b8fec8e6a..460e191828e 100644 --- a/homeassistant/components/samsungtv/manifest.json +++ b/homeassistant/components/samsungtv/manifest.json @@ -39,7 +39,7 @@ "samsungctl[websocket]==0.7.1", "samsungtvws[async,encrypted]==2.6.0", "wakeonlan==2.1.0", - "async-upnp-client==0.38.2" + "async-upnp-client==0.38.3" ], "ssdp": [ { diff --git a/homeassistant/components/ssdp/manifest.json b/homeassistant/components/ssdp/manifest.json index a9ef8af8c90..5e549c31806 100644 --- a/homeassistant/components/ssdp/manifest.json +++ b/homeassistant/components/ssdp/manifest.json @@ -8,5 +8,5 @@ "iot_class": "local_push", "loggers": ["async_upnp_client"], "quality_scale": "internal", - "requirements": ["async-upnp-client==0.38.2"] + "requirements": ["async-upnp-client==0.38.3"] } diff --git a/homeassistant/components/upnp/manifest.json b/homeassistant/components/upnp/manifest.json index edfde84a2ac..7d353a475c7 100644 --- a/homeassistant/components/upnp/manifest.json +++ b/homeassistant/components/upnp/manifest.json @@ -8,7 +8,7 @@ "integration_type": "device", "iot_class": "local_polling", "loggers": ["async_upnp_client"], - "requirements": ["async-upnp-client==0.38.2", "getmac==0.9.4"], + "requirements": ["async-upnp-client==0.38.3", "getmac==0.9.4"], "ssdp": [ { "st": "urn:schemas-upnp-org:device:InternetGatewayDevice:1" diff --git a/homeassistant/components/yeelight/manifest.json b/homeassistant/components/yeelight/manifest.json index 20f8ed3ed4d..e9f304d38cb 100644 --- a/homeassistant/components/yeelight/manifest.json +++ b/homeassistant/components/yeelight/manifest.json @@ -17,7 +17,7 @@ "iot_class": "local_push", "loggers": ["async_upnp_client", "yeelight"], "quality_scale": "platinum", - "requirements": ["yeelight==0.7.14", "async-upnp-client==0.38.2"], + "requirements": ["yeelight==0.7.14", 
"async-upnp-client==0.38.3"], "zeroconf": [ { "type": "_miio._udp.local.", diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index b7db1514cba..c757aceee3c 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -8,7 +8,7 @@ aiohttp==3.9.3 aiohttp_cors==0.7.0 astral==2.2 async-interrupt==1.1.1 -async-upnp-client==0.38.2 +async-upnp-client==0.38.3 atomicwrites-homeassistant==1.4.1 attrs==23.2.0 awesomeversion==24.2.0 diff --git a/requirements_all.txt b/requirements_all.txt index bc52a8a1c54..f28d2bf3e25 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -487,7 +487,7 @@ asterisk_mbox==0.5.0 # homeassistant.components.ssdp # homeassistant.components.upnp # homeassistant.components.yeelight -async-upnp-client==0.38.2 +async-upnp-client==0.38.3 # homeassistant.components.keyboard_remote asyncinotify==4.0.2 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 855eebb9912..987f0be178b 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -442,7 +442,7 @@ asterisk_mbox==0.5.0 # homeassistant.components.ssdp # homeassistant.components.upnp # homeassistant.components.yeelight -async-upnp-client==0.38.2 +async-upnp-client==0.38.3 # homeassistant.components.sleepiq asyncsleepiq==1.5.2 From 84901f1983fa6158da1037d307e07031bf591c0a Mon Sep 17 00:00:00 2001 From: Paul Bottein Date: Fri, 29 Mar 2024 19:34:16 +0100 Subject: [PATCH 040/426] Update frontend to 20240329.0 (#114452) --- homeassistant/components/frontend/manifest.json | 2 +- homeassistant/package_constraints.txt | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/frontend/manifest.json b/homeassistant/components/frontend/manifest.json index 9e86436bd68..a8f14187d48 100644 --- a/homeassistant/components/frontend/manifest.json +++ b/homeassistant/components/frontend/manifest.json @@ 
-20,5 +20,5 @@ "documentation": "https://www.home-assistant.io/integrations/frontend", "integration_type": "system", "quality_scale": "internal", - "requirements": ["home-assistant-frontend==20240328.0"] + "requirements": ["home-assistant-frontend==20240329.0"] } diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index c757aceee3c..2ebb82d2c75 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -30,7 +30,7 @@ habluetooth==2.4.2 hass-nabucasa==0.79.0 hassil==1.6.1 home-assistant-bluetooth==1.12.0 -home-assistant-frontend==20240328.0 +home-assistant-frontend==20240329.0 home-assistant-intents==2024.3.27 httpx==0.27.0 ifaddr==0.2.0 diff --git a/requirements_all.txt b/requirements_all.txt index f28d2bf3e25..dd59129a7e3 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1077,7 +1077,7 @@ hole==0.8.0 holidays==0.45 # homeassistant.components.frontend -home-assistant-frontend==20240328.0 +home-assistant-frontend==20240329.0 # homeassistant.components.conversation home-assistant-intents==2024.3.27 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 987f0be178b..c46bf2e5a59 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -876,7 +876,7 @@ hole==0.8.0 holidays==0.45 # homeassistant.components.frontend -home-assistant-frontend==20240328.0 +home-assistant-frontend==20240329.0 # homeassistant.components.conversation home-assistant-intents==2024.3.27 From e53672250fe7a88d0139e383ca820fc825b88bc9 Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Fri, 29 Mar 2024 19:35:52 +0100 Subject: [PATCH 041/426] Bump version to 2024.4.0b2 --- homeassistant/const.py | 2 +- pyproject.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/homeassistant/const.py b/homeassistant/const.py index f9a9b6324f8..4fca9fa50c3 100644 --- a/homeassistant/const.py +++ b/homeassistant/const.py @@ -18,7 +18,7 @@ from .util.signal_type import 
SignalType APPLICATION_NAME: Final = "HomeAssistant" MAJOR_VERSION: Final = 2024 MINOR_VERSION: Final = 4 -PATCH_VERSION: Final = "0b1" +PATCH_VERSION: Final = "0b2" __short_version__: Final = f"{MAJOR_VERSION}.{MINOR_VERSION}" __version__: Final = f"{__short_version__}.{PATCH_VERSION}" REQUIRED_PYTHON_VER: Final[tuple[int, int, int]] = (3, 12, 0) diff --git a/pyproject.toml b/pyproject.toml index 48f520a878c..790ee03d76b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta" [project] name = "homeassistant" -version = "2024.4.0b1" +version = "2024.4.0b2" license = {text = "Apache-2.0"} description = "Open-source home automation platform running on Python 3." readme = "README.rst" From 953ceb0d8d275b13ce3df6466f1e7f673aaecbac Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Fri, 29 Mar 2024 11:18:21 -1000 Subject: [PATCH 042/426] Avoid tracking import executor jobs (#114453) --- homeassistant/core.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/homeassistant/core.py b/homeassistant/core.py index 3b52b020957..2ed4de35925 100644 --- a/homeassistant/core.py +++ b/homeassistant/core.py @@ -783,11 +783,11 @@ class HomeAssistant: def async_add_import_executor_job( self, target: Callable[[*_Ts], _T], *args: *_Ts ) -> asyncio.Future[_T]: - """Add an import executor job from within the event loop.""" - task = self.loop.run_in_executor(self.import_executor, target, *args) - self._tasks.add(task) - task.add_done_callback(self._tasks.remove) - return task + """Add an import executor job from within the event loop. + + The future returned from this method must be awaited in the event loop. 
+ """ + return self.loop.run_in_executor(self.import_executor, target, *args) @overload @callback From 4f761c25d83cf6a575835b3e6aac9b7b2b28a20a Mon Sep 17 00:00:00 2001 From: Paul Bottein Date: Fri, 29 Mar 2024 22:13:31 +0100 Subject: [PATCH 043/426] Update frontend to 20240329.1 (#114459) --- homeassistant/components/frontend/manifest.json | 2 +- homeassistant/package_constraints.txt | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/frontend/manifest.json b/homeassistant/components/frontend/manifest.json index a8f14187d48..7864801a986 100644 --- a/homeassistant/components/frontend/manifest.json +++ b/homeassistant/components/frontend/manifest.json @@ -20,5 +20,5 @@ "documentation": "https://www.home-assistant.io/integrations/frontend", "integration_type": "system", "quality_scale": "internal", - "requirements": ["home-assistant-frontend==20240329.0"] + "requirements": ["home-assistant-frontend==20240329.1"] } diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index 2ebb82d2c75..1d60b74f18f 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -30,7 +30,7 @@ habluetooth==2.4.2 hass-nabucasa==0.79.0 hassil==1.6.1 home-assistant-bluetooth==1.12.0 -home-assistant-frontend==20240329.0 +home-assistant-frontend==20240329.1 home-assistant-intents==2024.3.27 httpx==0.27.0 ifaddr==0.2.0 diff --git a/requirements_all.txt b/requirements_all.txt index dd59129a7e3..0a94eb110e1 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1077,7 +1077,7 @@ hole==0.8.0 holidays==0.45 # homeassistant.components.frontend -home-assistant-frontend==20240329.0 +home-assistant-frontend==20240329.1 # homeassistant.components.conversation home-assistant-intents==2024.3.27 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index c46bf2e5a59..3a3ebbbb077 100644 --- 
a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -876,7 +876,7 @@ hole==0.8.0 holidays==0.45 # homeassistant.components.frontend -home-assistant-frontend==20240329.0 +home-assistant-frontend==20240329.1 # homeassistant.components.conversation home-assistant-intents==2024.3.27 From 11b8b01cde0424c3592dd22bd6f6ace14827bc6a Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Fri, 29 Mar 2024 22:22:45 +0100 Subject: [PATCH 044/426] Bump version to 2024.4.0b3 --- homeassistant/const.py | 2 +- pyproject.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/homeassistant/const.py b/homeassistant/const.py index 4fca9fa50c3..f56ce656157 100644 --- a/homeassistant/const.py +++ b/homeassistant/const.py @@ -18,7 +18,7 @@ from .util.signal_type import SignalType APPLICATION_NAME: Final = "HomeAssistant" MAJOR_VERSION: Final = 2024 MINOR_VERSION: Final = 4 -PATCH_VERSION: Final = "0b2" +PATCH_VERSION: Final = "0b3" __short_version__: Final = f"{MAJOR_VERSION}.{MINOR_VERSION}" __version__: Final = f"{__short_version__}.{PATCH_VERSION}" REQUIRED_PYTHON_VER: Final[tuple[int, int, int]] = (3, 12, 0) diff --git a/pyproject.toml b/pyproject.toml index 790ee03d76b..73bfdd6d5d7 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta" [project] name = "homeassistant" -version = "2024.4.0b2" +version = "2024.4.0b3" license = {text = "Apache-2.0"} description = "Open-source home automation platform running on Python 3." 
readme = "README.rst" From e8ee2fd25cd705369f913db98259f7b44ed46584 Mon Sep 17 00:00:00 2001 From: Shay Levy Date: Sat, 30 Mar 2024 18:48:57 +0300 Subject: [PATCH 045/426] Cleanup Shelly RGBW light entities (#114410) --- homeassistant/components/shelly/const.py | 2 + homeassistant/components/shelly/light.py | 17 +++++ homeassistant/components/shelly/utils.py | 12 ++++ tests/components/shelly/__init__.py | 12 ++++ tests/components/shelly/conftest.py | 6 ++ tests/components/shelly/test_light.py | 88 +++++++++++++++++++++++- 6 files changed, 134 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/shelly/const.py b/homeassistant/components/shelly/const.py index 3580bcf9b38..2ac0416bb6c 100644 --- a/homeassistant/components/shelly/const.py +++ b/homeassistant/components/shelly/const.py @@ -234,3 +234,5 @@ DEVICES_WITHOUT_FIRMWARE_CHANGELOG = ( ) CONF_GEN = "gen" + +SHELLY_PLUS_RGBW_CHANNELS = 4 diff --git a/homeassistant/components/shelly/light.py b/homeassistant/components/shelly/light.py index 6c28023a5e3..d0590fc7c20 100644 --- a/homeassistant/components/shelly/light.py +++ b/homeassistant/components/shelly/light.py @@ -14,6 +14,7 @@ from homeassistant.components.light import ( ATTR_RGB_COLOR, ATTR_RGBW_COLOR, ATTR_TRANSITION, + DOMAIN as LIGHT_DOMAIN, ColorMode, LightEntity, LightEntityFeature, @@ -34,12 +35,14 @@ from .const import ( RGBW_MODELS, RPC_MIN_TRANSITION_TIME_SEC, SHBLB_1_RGB_EFFECTS, + SHELLY_PLUS_RGBW_CHANNELS, STANDARD_RGB_EFFECTS, ) from .coordinator import ShellyBlockCoordinator, ShellyRpcCoordinator, get_entry_data from .entity import ShellyBlockEntity, ShellyRpcEntity from .utils import ( async_remove_shelly_entity, + async_remove_shelly_rpc_entities, brightness_to_percentage, get_device_entry_gen, get_rpc_key_ids, @@ -118,14 +121,28 @@ def async_setup_rpc_entry( return if light_key_ids := get_rpc_key_ids(coordinator.device.status, "light"): + # Light mode remove RGB & RGBW entities, add light entities + 
async_remove_shelly_rpc_entities( + hass, LIGHT_DOMAIN, coordinator.mac, ["rgb:0", "rgbw:0"] + ) async_add_entities(RpcShellyLight(coordinator, id_) for id_ in light_key_ids) return + light_keys = [f"light:{i}" for i in range(SHELLY_PLUS_RGBW_CHANNELS)] + if rgb_key_ids := get_rpc_key_ids(coordinator.device.status, "rgb"): + # RGB mode remove light & RGBW entities, add RGB entity + async_remove_shelly_rpc_entities( + hass, LIGHT_DOMAIN, coordinator.mac, [*light_keys, "rgbw:0"] + ) async_add_entities(RpcShellyRgbLight(coordinator, id_) for id_ in rgb_key_ids) return if rgbw_key_ids := get_rpc_key_ids(coordinator.device.status, "rgbw"): + # RGBW mode remove light & RGB entities, add RGBW entity + async_remove_shelly_rpc_entities( + hass, LIGHT_DOMAIN, coordinator.mac, [*light_keys, "rgb:0"] + ) async_add_entities(RpcShellyRgbwLight(coordinator, id_) for id_ in rgbw_key_ids) diff --git a/homeassistant/components/shelly/utils.py b/homeassistant/components/shelly/utils.py index d26e3dc11f3..ce98e0d5c12 100644 --- a/homeassistant/components/shelly/utils.py +++ b/homeassistant/components/shelly/utils.py @@ -488,3 +488,15 @@ async def async_shutdown_device(device: BlockDevice | RpcDevice) -> None: await device.shutdown() if isinstance(device, BlockDevice): device.shutdown() + + +@callback +def async_remove_shelly_rpc_entities( + hass: HomeAssistant, domain: str, mac: str, keys: list[str] +) -> None: + """Remove RPC based Shelly entity.""" + entity_reg = er_async_get(hass) + for key in keys: + if entity_id := entity_reg.async_get_entity_id(domain, DOMAIN, f"{mac}-{key}"): + LOGGER.debug("Removing entity: %s", entity_id) + entity_reg.async_remove(entity_id) diff --git a/tests/components/shelly/__init__.py b/tests/components/shelly/__init__.py index 2dc9012d863..348b1115a6f 100644 --- a/tests/components/shelly/__init__.py +++ b/tests/components/shelly/__init__.py @@ -126,6 +126,18 @@ def register_entity( return f"{domain}.{object_id}" +def get_entity( + hass: HomeAssistant, + 
domain: str, + unique_id: str, +) -> str | None: + """Get Shelly entity.""" + entity_registry = async_get(hass) + return entity_registry.async_get_entity_id( + domain, DOMAIN, f"{MOCK_MAC}-{unique_id}" + ) + + def get_entity_state(hass: HomeAssistant, entity_id: str) -> str: """Return entity state.""" entity = hass.states.get(entity_id) diff --git a/tests/components/shelly/conftest.py b/tests/components/shelly/conftest.py index 9a73252ca6c..3cd27101f76 100644 --- a/tests/components/shelly/conftest.py +++ b/tests/components/shelly/conftest.py @@ -169,6 +169,9 @@ MOCK_CONFIG = { "input:1": {"id": 1, "type": "analog", "enable": True}, "input:2": {"id": 2, "name": "Gas", "type": "count", "enable": True}, "light:0": {"name": "test light_0"}, + "light:1": {"name": "test light_1"}, + "light:2": {"name": "test light_2"}, + "light:3": {"name": "test light_3"}, "rgb:0": {"name": "test rgb_0"}, "rgbw:0": {"name": "test rgbw_0"}, "switch:0": {"name": "test switch_0"}, @@ -225,6 +228,9 @@ MOCK_STATUS_RPC = { "input:1": {"id": 1, "percent": 89, "xpercent": 8.9}, "input:2": {"id": 2, "counts": {"total": 56174, "xtotal": 561.74}}, "light:0": {"output": True, "brightness": 53.0}, + "light:1": {"output": True, "brightness": 53.0}, + "light:2": {"output": True, "brightness": 53.0}, + "light:3": {"output": True, "brightness": 53.0}, "rgb:0": {"output": True, "brightness": 53.0, "rgb": [45, 55, 65]}, "rgbw:0": {"output": True, "brightness": 53.0, "rgb": [21, 22, 23], "white": 120}, "cloud": {"connected": False}, diff --git a/tests/components/shelly/test_light.py b/tests/components/shelly/test_light.py index cca318c364d..2c464a8c39c 100644 --- a/tests/components/shelly/test_light.py +++ b/tests/components/shelly/test_light.py @@ -29,6 +29,7 @@ from homeassistant.components.light import ( ColorMode, LightEntityFeature, ) +from homeassistant.components.shelly.const import SHELLY_PLUS_RGBW_CHANNELS from homeassistant.const import ( ATTR_ENTITY_ID, ATTR_SUPPORTED_FEATURES, @@ -38,7 +39,7 @@ 
from homeassistant.const import ( from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_registry import EntityRegistry -from . import init_integration, mutate_rpc_device_status +from . import get_entity, init_integration, mutate_rpc_device_status, register_entity from .conftest import mock_white_light_set_state RELAY_BLOCK_ID = 0 @@ -587,7 +588,8 @@ async def test_rpc_device_rgb_profile( monkeypatch: pytest.MonkeyPatch, ) -> None: """Test RPC device in RGB profile.""" - monkeypatch.delitem(mock_rpc_device.status, "light:0") + for i in range(SHELLY_PLUS_RGBW_CHANNELS): + monkeypatch.delitem(mock_rpc_device.status, f"light:{i}") monkeypatch.delitem(mock_rpc_device.status, "rgbw:0") entity_id = "light.test_rgb_0" await init_integration(hass, 2) @@ -633,7 +635,8 @@ async def test_rpc_device_rgbw_profile( monkeypatch: pytest.MonkeyPatch, ) -> None: """Test RPC device in RGBW profile.""" - monkeypatch.delitem(mock_rpc_device.status, "light:0") + for i in range(SHELLY_PLUS_RGBW_CHANNELS): + monkeypatch.delitem(mock_rpc_device.status, f"light:{i}") monkeypatch.delitem(mock_rpc_device.status, "rgb:0") entity_id = "light.test_rgbw_0" await init_integration(hass, 2) @@ -673,3 +676,82 @@ async def test_rpc_device_rgbw_profile( entry = entity_registry.async_get(entity_id) assert entry assert entry.unique_id == "123456789ABC-rgbw:0" + + +async def test_rpc_rgbw_device_light_mode_remove_others( + hass: HomeAssistant, + mock_rpc_device: Mock, + entity_registry: EntityRegistry, + monkeypatch: pytest.MonkeyPatch, +) -> None: + """Test Shelly RPC RGBW device in light mode removes RGB/RGBW entities.""" + # register lights + monkeypatch.delitem(mock_rpc_device.status, "rgb:0") + monkeypatch.delitem(mock_rpc_device.status, "rgbw:0") + register_entity(hass, LIGHT_DOMAIN, "test_rgb_0", "rgb:0") + register_entity(hass, LIGHT_DOMAIN, "test_rgbw_0", "rgbw:0") + + # verify RGB & RGBW entities created + assert get_entity(hass, LIGHT_DOMAIN, "rgb:0") is not None + 
assert get_entity(hass, LIGHT_DOMAIN, "rgbw:0") is not None + + # init to remove RGB & RGBW + await init_integration(hass, 2) + + # verify we have 4 lights + for i in range(SHELLY_PLUS_RGBW_CHANNELS): + entity_id = f"light.test_light_{i}" + assert hass.states.get(entity_id).state == STATE_ON + entry = entity_registry.async_get(entity_id) + assert entry + assert entry.unique_id == f"123456789ABC-light:{i}" + + # verify RGB & RGBW entities removed + assert get_entity(hass, LIGHT_DOMAIN, "rgb:0") is None + assert get_entity(hass, LIGHT_DOMAIN, "rgbw:0") is None + + +@pytest.mark.parametrize( + ("active_mode", "removed_mode"), + [ + ("rgb", "rgbw"), + ("rgbw", "rgb"), + ], +) +async def test_rpc_rgbw_device_rgb_w_modes_remove_others( + hass: HomeAssistant, + mock_rpc_device: Mock, + entity_registry: EntityRegistry, + monkeypatch: pytest.MonkeyPatch, + active_mode: str, + removed_mode: str, +) -> None: + """Test Shelly RPC RGBW device in RGB/W modes other lights.""" + removed_key = f"{removed_mode}:0" + + # register lights + for i in range(SHELLY_PLUS_RGBW_CHANNELS): + monkeypatch.delitem(mock_rpc_device.status, f"light:{i}") + entity_id = f"light.test_light_{i}" + register_entity(hass, LIGHT_DOMAIN, entity_id, f"light:{i}") + monkeypatch.delitem(mock_rpc_device.status, f"{removed_mode}:0") + register_entity(hass, LIGHT_DOMAIN, f"test_{removed_key}", removed_key) + + # verify lights entities created + for i in range(SHELLY_PLUS_RGBW_CHANNELS): + assert get_entity(hass, LIGHT_DOMAIN, f"light:{i}") is not None + assert get_entity(hass, LIGHT_DOMAIN, removed_key) is not None + + await init_integration(hass, 2) + + # verify we have RGB/w light + entity_id = f"light.test_{active_mode}_0" + assert hass.states.get(entity_id).state == STATE_ON + entry = entity_registry.async_get(entity_id) + assert entry + assert entry.unique_id == f"123456789ABC-{active_mode}:0" + + # verify light & RGB/W entities removed + for i in range(SHELLY_PLUS_RGBW_CHANNELS): + assert get_entity(hass, 
LIGHT_DOMAIN, f"light:{i}") is None + assert get_entity(hass, LIGHT_DOMAIN, removed_key) is None From 286a09d737ef3caacf7c9d9c8a317fb719968d0e Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Fri, 29 Mar 2024 18:16:53 -1000 Subject: [PATCH 046/426] Mark executor jobs as background unless created from a tracked task (#114450) * Mark executor jobs as background unless created from a tracked task If the current task is not tracked the executor job should not be a background task to avoid delaying startup and shutdown. Currently any executor job created in a untracked task or background task would end up being tracked and delaying startup/shutdown * import exec has the same issue * Avoid tracking import executor jobs There is no reason to track these jobs as they are always awaited and we do not want to support fire and forget import executor jobs * fix xiaomi_miio * lots of fire time changed without background await * revert changes moved to other PR * more * more * more * m * m * p * fix fire and forget tests * scrape * sonos * system * more * capture callback before block * coverage * more * more races * more races * more * missed some * more fixes * missed some more * fix * remove unneeded * one more race * two --- homeassistant/core.py | 7 +++- .../aurora_abb_powerone/test_sensor.py | 8 ++-- tests/components/cast/test_config_flow.py | 4 +- tests/components/cast/test_media_player.py | 8 ++-- tests/components/fritz/test_image.py | 4 +- tests/components/fritz/test_sensor.py | 2 +- .../components/fritzbox/test_binary_sensor.py | 6 +-- tests/components/fritzbox/test_button.py | 2 +- tests/components/fritzbox/test_climate.py | 12 +++--- tests/components/fritzbox/test_cover.py | 2 +- tests/components/fritzbox/test_light.py | 6 +-- tests/components/fritzbox/test_sensor.py | 6 +-- tests/components/fritzbox/test_switch.py | 6 +-- .../components/geo_rss_events/test_sensor.py | 4 +- tests/components/google_mail/test_sensor.py | 4 +- 
.../maxcube/test_maxcube_binary_sensor.py | 6 +-- .../maxcube/test_maxcube_climate.py | 22 +++++----- tests/components/metoffice/test_weather.py | 10 ++--- .../mikrotik/test_device_tracker.py | 8 ++-- .../components/monoprice/test_media_player.py | 18 ++++----- .../panasonic_viera/test_media_player.py | 4 +- tests/components/pjlink/test_media_player.py | 4 +- tests/components/profiler/test_init.py | 10 ++--- tests/components/ps4/test_media_player.py | 2 + tests/components/python_script/test_init.py | 40 +++++++++---------- .../components/samsungtv/test_media_player.py | 10 ++--- .../components/schlage/test_binary_sensor.py | 4 +- tests/components/schlage/test_lock.py | 2 +- tests/components/scrape/test_sensor.py | 12 +++--- .../components/solaredge/test_coordinator.py | 10 ++--- tests/components/sonos/conftest.py | 3 +- tests/components/sonos/test_repairs.py | 5 ++- tests/components/sonos/test_sensor.py | 29 +++++++++----- tests/components/sonos/test_speaker.py | 16 +++++++- .../soundtouch/test_media_player.py | 2 +- tests/components/speedtestdotnet/test_init.py | 2 +- .../systemmonitor/test_binary_sensor.py | 2 +- tests/components/systemmonitor/test_sensor.py | 22 +++++----- tests/components/tcp/test_binary_sensor.py | 2 +- tests/components/temper/test_sensor.py | 2 +- .../totalconnect/test_alarm_control_panel.py | 10 ++--- tests/components/uvc/test_camera.py | 12 +++--- tests/components/ws66i/test_media_player.py | 20 +++++----- tests/components/xiaomi_miio/test_vacuum.py | 4 +- .../yale_smart_alarm/test_coordinator.py | 12 +++--- tests/test_core.py | 40 +++++++++++++++++++ 46 files changed, 246 insertions(+), 180 deletions(-) diff --git a/homeassistant/core.py b/homeassistant/core.py index 2ed4de35925..4794b284fd2 100644 --- a/homeassistant/core.py +++ b/homeassistant/core.py @@ -774,8 +774,11 @@ class HomeAssistant: ) -> asyncio.Future[_T]: """Add an executor job from within the event loop.""" task = self.loop.run_in_executor(None, target, *args) - 
self._tasks.add(task) - task.add_done_callback(self._tasks.remove) + + tracked = asyncio.current_task() in self._tasks + task_bucket = self._tasks if tracked else self._background_tasks + task_bucket.add(task) + task.add_done_callback(task_bucket.remove) return task diff --git a/tests/components/aurora_abb_powerone/test_sensor.py b/tests/components/aurora_abb_powerone/test_sensor.py index 178cf165f67..4bc5a5d3086 100644 --- a/tests/components/aurora_abb_powerone/test_sensor.py +++ b/tests/components/aurora_abb_powerone/test_sensor.py @@ -201,7 +201,7 @@ async def test_sensor_dark(hass: HomeAssistant, freezer: FrozenDateTimeFactory) ): freezer.tick(SCAN_INTERVAL * 2) async_fire_time_changed(hass) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) power = hass.states.get("sensor.mydevicename_total_energy") assert power.state == "unknown" # sun rose again @@ -218,7 +218,7 @@ async def test_sensor_dark(hass: HomeAssistant, freezer: FrozenDateTimeFactory) ): freezer.tick(SCAN_INTERVAL * 4) async_fire_time_changed(hass) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) power = hass.states.get("sensor.mydevicename_power_output") assert power is not None assert power.state == "45.7" @@ -237,7 +237,7 @@ async def test_sensor_dark(hass: HomeAssistant, freezer: FrozenDateTimeFactory) ): freezer.tick(SCAN_INTERVAL * 6) async_fire_time_changed(hass) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) power = hass.states.get("sensor.mydevicename_power_output") assert power.state == "unknown" # should this be 'available'? 
@@ -277,7 +277,7 @@ async def test_sensor_unknown_error( ): freezer.tick(SCAN_INTERVAL * 2) async_fire_time_changed(hass) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) assert ( "Exception: AuroraError('another error') occurred, 2 retries remaining" in caplog.text diff --git a/tests/components/cast/test_config_flow.py b/tests/components/cast/test_config_flow.py index 62c21fc95ee..a7b9311e88b 100644 --- a/tests/components/cast/test_config_flow.py +++ b/tests/components/cast/test_config_flow.py @@ -278,7 +278,7 @@ async def test_known_hosts(hass: HomeAssistant, castbrowser_mock) -> None: result["flow_id"], {"known_hosts": "192.168.0.1, 192.168.0.2"} ) assert result["type"] == "create_entry" - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) config_entry = hass.config_entries.async_entries("cast")[0] assert castbrowser_mock.return_value.start_discovery.call_count == 1 @@ -291,7 +291,7 @@ async def test_known_hosts(hass: HomeAssistant, castbrowser_mock) -> None: user_input={"known_hosts": "192.168.0.11, 192.168.0.12"}, ) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) castbrowser_mock.return_value.start_discovery.assert_not_called() castbrowser_mock.assert_not_called() diff --git a/tests/components/cast/test_media_player.py b/tests/components/cast/test_media_player.py index 9ef31457d5c..8381f27398a 100644 --- a/tests/components/cast/test_media_player.py +++ b/tests/components/cast/test_media_player.py @@ -137,8 +137,8 @@ async def async_setup_cast_internal_discovery(hass, config=None): return_value=browser, ) as cast_browser: add_entities = await async_setup_cast(hass, config) - await hass.async_block_till_done() - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) + await hass.async_block_till_done(wait_background_tasks=True) assert browser.start_discovery.call_count == 
1 @@ -209,8 +209,8 @@ async def async_setup_media_player_cast(hass: HomeAssistant, info: ChromecastInf entry = MockConfigEntry(data=data, domain="cast") entry.add_to_hass(hass) assert await hass.config_entries.async_setup(entry.entry_id) - await hass.async_block_till_done() - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) + await hass.async_block_till_done(wait_background_tasks=True) discovery_callback = cast_browser.call_args[0][0].add_cast diff --git a/tests/components/fritz/test_image.py b/tests/components/fritz/test_image.py index 85d02eff153..5d6b9265760 100644 --- a/tests/components/fritz/test_image.py +++ b/tests/components/fritz/test_image.py @@ -199,7 +199,7 @@ async def test_image_update_unavailable( # fritzbox becomes unavailable fc_class_mock().call_action_side_effect(ReadTimeout) async_fire_time_changed(hass, utcnow() + timedelta(seconds=60)) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) state = hass.states.get("image.mock_title_guestwifi") assert state.state == STATE_UNKNOWN @@ -207,7 +207,7 @@ async def test_image_update_unavailable( # fritzbox is available again fc_class_mock().call_action_side_effect(None) async_fire_time_changed(hass, utcnow() + timedelta(seconds=60)) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) state = hass.states.get("image.mock_title_guestwifi") assert state.state != STATE_UNKNOWN diff --git a/tests/components/fritz/test_sensor.py b/tests/components/fritz/test_sensor.py index 4427fc6961e..37116e66719 100644 --- a/tests/components/fritz/test_sensor.py +++ b/tests/components/fritz/test_sensor.py @@ -134,7 +134,7 @@ async def test_sensor_update_fail( fc_class_mock().call_action_side_effect(FritzConnectionException) async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=300)) - await hass.async_block_till_done() + await 
hass.async_block_till_done(wait_background_tasks=True) sensors = hass.states.async_all(SENSOR_DOMAIN) for sensor in sensors: diff --git a/tests/components/fritzbox/test_binary_sensor.py b/tests/components/fritzbox/test_binary_sensor.py index 3828cedc67f..3e1a2691f67 100644 --- a/tests/components/fritzbox/test_binary_sensor.py +++ b/tests/components/fritzbox/test_binary_sensor.py @@ -104,7 +104,7 @@ async def test_update(hass: HomeAssistant, fritz: Mock) -> None: next_update = dt_util.utcnow() + timedelta(seconds=200) async_fire_time_changed(hass, next_update) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) assert fritz().update_devices.call_count == 2 assert fritz().login.call_count == 1 @@ -123,7 +123,7 @@ async def test_update_error(hass: HomeAssistant, fritz: Mock) -> None: next_update = dt_util.utcnow() + timedelta(seconds=200) async_fire_time_changed(hass, next_update) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) assert fritz().update_devices.call_count == 2 assert fritz().login.call_count == 1 @@ -146,7 +146,7 @@ async def test_discover_new_device(hass: HomeAssistant, fritz: Mock) -> None: next_update = dt_util.utcnow() + timedelta(seconds=200) async_fire_time_changed(hass, next_update) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) state = hass.states.get(f"{DOMAIN}.new_device_alarm") assert state diff --git a/tests/components/fritzbox/test_button.py b/tests/components/fritzbox/test_button.py index f254b2e0710..89e8d8357dd 100644 --- a/tests/components/fritzbox/test_button.py +++ b/tests/components/fritzbox/test_button.py @@ -65,7 +65,7 @@ async def test_discover_new_device(hass: HomeAssistant, fritz: Mock) -> None: next_update = dt_util.utcnow() + timedelta(seconds=200) async_fire_time_changed(hass, next_update) - await hass.async_block_till_done() + await 
hass.async_block_till_done(wait_background_tasks=True) state = hass.states.get(f"{DOMAIN}.new_template") assert state diff --git a/tests/components/fritzbox/test_climate.py b/tests/components/fritzbox/test_climate.py index a201eab3665..073a67f22c1 100644 --- a/tests/components/fritzbox/test_climate.py +++ b/tests/components/fritzbox/test_climate.py @@ -145,7 +145,7 @@ async def test_setup(hass: HomeAssistant, fritz: Mock) -> None: next_update = dt_util.utcnow() + timedelta(seconds=200) async_fire_time_changed(hass, next_update) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) state = hass.states.get(f"{SENSOR_DOMAIN}.{CONF_FAKE_NAME}_next_scheduled_preset") assert state @@ -203,7 +203,7 @@ async def test_update(hass: HomeAssistant, fritz: Mock) -> None: next_update = dt_util.utcnow() + timedelta(seconds=200) async_fire_time_changed(hass, next_update) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) state = hass.states.get(ENTITY_ID) assert fritz().update_devices.call_count == 2 @@ -243,7 +243,7 @@ async def test_update_error(hass: HomeAssistant, fritz: Mock) -> None: next_update = dt_util.utcnow() + timedelta(seconds=200) async_fire_time_changed(hass, next_update) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) assert fritz().update_devices.call_count == 4 assert fritz().login.call_count == 4 @@ -386,7 +386,7 @@ async def test_preset_mode_update(hass: HomeAssistant, fritz: Mock) -> None: next_update = dt_util.utcnow() + timedelta(seconds=200) async_fire_time_changed(hass, next_update) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) state = hass.states.get(ENTITY_ID) assert fritz().update_devices.call_count == 2 @@ -397,7 +397,7 @@ async def test_preset_mode_update(hass: HomeAssistant, fritz: Mock) -> None: next_update = dt_util.utcnow() + timedelta(seconds=200) 
async_fire_time_changed(hass, next_update) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) state = hass.states.get(ENTITY_ID) assert fritz().update_devices.call_count == 3 @@ -422,7 +422,7 @@ async def test_discover_new_device(hass: HomeAssistant, fritz: Mock) -> None: next_update = dt_util.utcnow() + timedelta(seconds=200) async_fire_time_changed(hass, next_update) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) state = hass.states.get(f"{DOMAIN}.new_climate") assert state diff --git a/tests/components/fritzbox/test_cover.py b/tests/components/fritzbox/test_cover.py index b723ac97d06..6c301fc8f46 100644 --- a/tests/components/fritzbox/test_cover.py +++ b/tests/components/fritzbox/test_cover.py @@ -108,7 +108,7 @@ async def test_discover_new_device(hass: HomeAssistant, fritz: Mock) -> None: next_update = dt_util.utcnow() + timedelta(seconds=200) async_fire_time_changed(hass, next_update) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) state = hass.states.get(f"{DOMAIN}.new_climate") assert state diff --git a/tests/components/fritzbox/test_light.py b/tests/components/fritzbox/test_light.py index b750a2e9275..45920c7c3ee 100644 --- a/tests/components/fritzbox/test_light.py +++ b/tests/components/fritzbox/test_light.py @@ -237,7 +237,7 @@ async def test_update(hass: HomeAssistant, fritz: Mock) -> None: next_update = dt_util.utcnow() + timedelta(seconds=200) async_fire_time_changed(hass, next_update) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) assert fritz().update_devices.call_count == 2 assert fritz().login.call_count == 1 @@ -259,7 +259,7 @@ async def test_update_error(hass: HomeAssistant, fritz: Mock) -> None: next_update = dt_util.utcnow() + timedelta(seconds=200) async_fire_time_changed(hass, next_update) - await hass.async_block_till_done() + await 
hass.async_block_till_done(wait_background_tasks=True) assert fritz().update_devices.call_count == 4 assert fritz().login.call_count == 4 @@ -294,7 +294,7 @@ async def test_discover_new_device(hass: HomeAssistant, fritz: Mock) -> None: next_update = dt_util.utcnow() + timedelta(seconds=200) async_fire_time_changed(hass, next_update) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) state = hass.states.get(f"{DOMAIN}.new_light") assert state diff --git a/tests/components/fritzbox/test_sensor.py b/tests/components/fritzbox/test_sensor.py index 48b769eaac2..63d0b67d7f4 100644 --- a/tests/components/fritzbox/test_sensor.py +++ b/tests/components/fritzbox/test_sensor.py @@ -87,7 +87,7 @@ async def test_update(hass: HomeAssistant, fritz: Mock) -> None: next_update = dt_util.utcnow() + timedelta(seconds=200) async_fire_time_changed(hass, next_update) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) assert fritz().update_devices.call_count == 2 assert fritz().login.call_count == 1 @@ -105,7 +105,7 @@ async def test_update_error(hass: HomeAssistant, fritz: Mock) -> None: next_update = dt_util.utcnow() + timedelta(seconds=200) async_fire_time_changed(hass, next_update) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) assert fritz().update_devices.call_count == 4 assert fritz().login.call_count == 4 @@ -128,7 +128,7 @@ async def test_discover_new_device(hass: HomeAssistant, fritz: Mock) -> None: next_update = dt_util.utcnow() + timedelta(seconds=200) async_fire_time_changed(hass, next_update) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) state = hass.states.get(f"{DOMAIN}.new_device_temperature") assert state diff --git a/tests/components/fritzbox/test_switch.py b/tests/components/fritzbox/test_switch.py index 67393bc09a5..417b355b396 100644 --- 
a/tests/components/fritzbox/test_switch.py +++ b/tests/components/fritzbox/test_switch.py @@ -151,7 +151,7 @@ async def test_update(hass: HomeAssistant, fritz: Mock) -> None: next_update = dt_util.utcnow() + timedelta(seconds=200) async_fire_time_changed(hass, next_update) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) assert fritz().update_devices.call_count == 2 assert fritz().login.call_count == 1 @@ -169,7 +169,7 @@ async def test_update_error(hass: HomeAssistant, fritz: Mock) -> None: next_update = dt_util.utcnow() + timedelta(seconds=200) async_fire_time_changed(hass, next_update) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) assert fritz().update_devices.call_count == 4 assert fritz().login.call_count == 4 @@ -207,7 +207,7 @@ async def test_discover_new_device(hass: HomeAssistant, fritz: Mock) -> None: next_update = dt_util.utcnow() + timedelta(seconds=200) async_fire_time_changed(hass, next_update) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) state = hass.states.get(f"{DOMAIN}.new_switch") assert state diff --git a/tests/components/geo_rss_events/test_sensor.py b/tests/components/geo_rss_events/test_sensor.py index 76f1709bd75..d19262c3339 100644 --- a/tests/components/geo_rss_events/test_sensor.py +++ b/tests/components/geo_rss_events/test_sensor.py @@ -99,7 +99,7 @@ async def test_setup( # so no changes to entities. 
mock_feed.return_value.update.return_value = "OK_NO_DATA", None async_fire_time_changed(hass, utcnow + geo_rss_events.SCAN_INTERVAL) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) all_states = hass.states.async_all() assert len(all_states) == 1 @@ -109,7 +109,7 @@ async def test_setup( # Simulate an update - empty data, removes all entities mock_feed.return_value.update.return_value = "ERROR", None async_fire_time_changed(hass, utcnow + 2 * geo_rss_events.SCAN_INTERVAL) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) all_states = hass.states.async_all() assert len(all_states) == 1 diff --git a/tests/components/google_mail/test_sensor.py b/tests/components/google_mail/test_sensor.py index e0b072d4b7d..6f2f1a4ec32 100644 --- a/tests/components/google_mail/test_sensor.py +++ b/tests/components/google_mail/test_sensor.py @@ -46,7 +46,7 @@ async def test_sensors( ): next_update = dt_util.utcnow() + timedelta(minutes=15) async_fire_time_changed(hass, next_update) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) state = hass.states.get(SENSOR) assert state.state == result @@ -61,7 +61,7 @@ async def test_sensor_reauth_trigger( with patch(TOKEN, side_effect=RefreshError): next_update = dt_util.utcnow() + timedelta(minutes=15) async_fire_time_changed(hass, next_update) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) flows = hass.config_entries.flow.async_progress() diff --git a/tests/components/maxcube/test_maxcube_binary_sensor.py b/tests/components/maxcube/test_maxcube_binary_sensor.py index cc86f389884..32ec4e92ee1 100644 --- a/tests/components/maxcube/test_maxcube_binary_sensor.py +++ b/tests/components/maxcube/test_maxcube_binary_sensor.py @@ -43,7 +43,7 @@ async def test_window_shuttler( windowshutter.is_open = False async_fire_time_changed(hass, utcnow() + 
timedelta(minutes=5)) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) state = hass.states.get(ENTITY_ID) assert state.state == STATE_OFF @@ -68,12 +68,12 @@ async def test_window_shuttler_battery( windowshutter.battery = 1 # maxcube-api MAX_DEVICE_BATTERY_LOW async_fire_time_changed(hass, utcnow() + timedelta(minutes=5)) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) state = hass.states.get(BATTERY_ENTITY_ID) assert state.state == STATE_ON # on means low windowshutter.battery = 0 # maxcube-api MAX_DEVICE_BATTERY_OK async_fire_time_changed(hass, utcnow() + timedelta(minutes=5)) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) state = hass.states.get(BATTERY_ENTITY_ID) assert state.state == STATE_OFF # off means normal diff --git a/tests/components/maxcube/test_maxcube_climate.py b/tests/components/maxcube/test_maxcube_climate.py index cb4dc510605..e1e7dc57c47 100644 --- a/tests/components/maxcube/test_maxcube_climate.py +++ b/tests/components/maxcube/test_maxcube_climate.py @@ -140,7 +140,7 @@ async def test_thermostat_set_hvac_mode_off( thermostat.valve_position = 0 async_fire_time_changed(hass, utcnow() + timedelta(minutes=5)) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) state = hass.states.get(ENTITY_ID) assert state.state == HVACMode.OFF @@ -168,8 +168,8 @@ async def test_thermostat_set_hvac_mode_heat( thermostat.mode = MAX_DEVICE_MODE_MANUAL async_fire_time_changed(hass, utcnow() + timedelta(minutes=5)) - await hass.async_block_till_done() - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) + await hass.async_block_till_done(wait_background_tasks=True) state = hass.states.get(ENTITY_ID) assert state.state == HVACMode.HEAT @@ -204,7 +204,7 @@ async def test_thermostat_set_temperature( 
thermostat.valve_position = 0 async_fire_time_changed(hass, utcnow() + timedelta(minutes=5)) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) state = hass.states.get(ENTITY_ID) assert state.state == HVACMode.AUTO @@ -248,7 +248,7 @@ async def test_thermostat_set_preset_on( thermostat.target_temperature = ON_TEMPERATURE async_fire_time_changed(hass, utcnow() + timedelta(minutes=5)) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) state = hass.states.get(ENTITY_ID) assert state.state == HVACMode.HEAT @@ -273,7 +273,7 @@ async def test_thermostat_set_preset_comfort( thermostat.target_temperature = thermostat.comfort_temperature async_fire_time_changed(hass, utcnow() + timedelta(minutes=5)) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) state = hass.states.get(ENTITY_ID) assert state.state == HVACMode.HEAT @@ -298,7 +298,7 @@ async def test_thermostat_set_preset_eco( thermostat.target_temperature = thermostat.eco_temperature async_fire_time_changed(hass, utcnow() + timedelta(minutes=5)) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) state = hass.states.get(ENTITY_ID) assert state.state == HVACMode.HEAT @@ -323,7 +323,7 @@ async def test_thermostat_set_preset_away( thermostat.target_temperature = thermostat.eco_temperature async_fire_time_changed(hass, utcnow() + timedelta(minutes=5)) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) state = hass.states.get(ENTITY_ID) assert state.state == HVACMode.HEAT @@ -348,7 +348,7 @@ async def test_thermostat_set_preset_boost( thermostat.target_temperature = thermostat.eco_temperature async_fire_time_changed(hass, utcnow() + timedelta(minutes=5)) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) state = 
hass.states.get(ENTITY_ID) assert state.state == HVACMode.AUTO @@ -401,7 +401,7 @@ async def test_wallthermostat_set_hvac_mode_heat( wallthermostat.target_temperature = MIN_TEMPERATURE async_fire_time_changed(hass, utcnow() + timedelta(minutes=5)) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) state = hass.states.get(WALL_ENTITY_ID) assert state.state == HVACMode.HEAT @@ -425,7 +425,7 @@ async def test_wallthermostat_set_hvac_mode_auto( wallthermostat.target_temperature = 23.0 async_fire_time_changed(hass, utcnow() + timedelta(minutes=5)) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) state = hass.states.get(WALL_ENTITY_ID) assert state.state == HVACMode.AUTO diff --git a/tests/components/metoffice/test_weather.py b/tests/components/metoffice/test_weather.py index 2aa673d4010..64a85897738 100644 --- a/tests/components/metoffice/test_weather.py +++ b/tests/components/metoffice/test_weather.py @@ -125,7 +125,7 @@ async def test_site_cannot_update( future_time = utcnow() + timedelta(minutes=20) async_fire_time_changed(hass, future_time) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) weather = hass.states.get("weather.met_office_wavertree_daily") assert weather.state == STATE_UNAVAILABLE @@ -297,7 +297,7 @@ async def test_forecast_service( # Trigger data refetch freezer.tick(DEFAULT_SCAN_INTERVAL + timedelta(seconds=1)) async_fire_time_changed(hass) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) assert wavertree_data["wavertree_daily_mock"].call_count == 2 assert wavertree_data["wavertree_hourly_mock"].call_count == 1 @@ -324,7 +324,7 @@ async def test_forecast_service( freezer.tick(DEFAULT_SCAN_INTERVAL + timedelta(seconds=1)) async_fire_time_changed(hass) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) 
response = await hass.services.async_call( WEATHER_DOMAIN, @@ -412,7 +412,7 @@ async def test_forecast_subscription( freezer.tick(DEFAULT_SCAN_INTERVAL + timedelta(seconds=1)) async_fire_time_changed(hass) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) msg = await client.receive_json() assert msg["id"] == subscription_id @@ -430,6 +430,6 @@ async def test_forecast_subscription( ) freezer.tick(timedelta(seconds=1)) async_fire_time_changed(hass) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) msg = await client.receive_json() assert msg["success"] diff --git a/tests/components/mikrotik/test_device_tracker.py b/tests/components/mikrotik/test_device_tracker.py index 47ddc038f69..89dc37fd781 100644 --- a/tests/components/mikrotik/test_device_tracker.py +++ b/tests/components/mikrotik/test_device_tracker.py @@ -88,7 +88,7 @@ async def test_device_trackers( WIRELESS_DATA.append(DEVICE_2_WIRELESS) async_fire_time_changed(hass, utcnow() + timedelta(seconds=10)) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) device_2 = hass.states.get("device_tracker.device_2") assert device_2 @@ -101,7 +101,7 @@ async def test_device_trackers( del WIRELESS_DATA[1] # device 2 is removed from wireless list with freeze_time(utcnow() + timedelta(minutes=4)): async_fire_time_changed(hass, utcnow() + timedelta(minutes=4)) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) device_2 = hass.states.get("device_tracker.device_2") assert device_2 @@ -110,7 +110,7 @@ async def test_device_trackers( # test state changes to away if last_seen past consider_home_interval with freeze_time(utcnow() + timedelta(minutes=6)): async_fire_time_changed(hass, utcnow() + timedelta(minutes=6)) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) device_2 = 
hass.states.get("device_tracker.device_2") assert device_2 @@ -266,7 +266,7 @@ async def test_update_failed(hass: HomeAssistant, mock_device_registry_devices) mikrotik.hub.MikrotikData, "command", side_effect=mikrotik.errors.CannotConnect ): async_fire_time_changed(hass, utcnow() + timedelta(seconds=10)) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) device_1 = hass.states.get("device_tracker.device_1") assert device_1 diff --git a/tests/components/monoprice/test_media_player.py b/tests/components/monoprice/test_media_player.py index a0afd37f3b2..f7d88692cf5 100644 --- a/tests/components/monoprice/test_media_player.py +++ b/tests/components/monoprice/test_media_player.py @@ -183,7 +183,7 @@ async def test_service_calls_with_entity_id(hass: HomeAssistant) -> None: # Restoring other media player to its previous state # The zone should not be restored await _call_monoprice_service(hass, SERVICE_RESTORE, {"entity_id": ZONE_2_ID}) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) # Checking that values were not (!) 
restored state = hass.states.get(ZONE_1_ID) @@ -193,7 +193,7 @@ async def test_service_calls_with_entity_id(hass: HomeAssistant) -> None: # Restoring media player to its previous state await _call_monoprice_service(hass, SERVICE_RESTORE, {"entity_id": ZONE_1_ID}) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) state = hass.states.get(ZONE_1_ID) @@ -226,7 +226,7 @@ async def test_service_calls_with_all_entities(hass: HomeAssistant) -> None: # Restoring media player to its previous state await _call_monoprice_service(hass, SERVICE_RESTORE, {"entity_id": "all"}) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) state = hass.states.get(ZONE_1_ID) @@ -259,7 +259,7 @@ async def test_service_calls_without_relevant_entities(hass: HomeAssistant) -> N # Restoring media player to its previous state await _call_monoprice_service(hass, SERVICE_RESTORE, {"entity_id": "light.demo"}) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) state = hass.states.get(ZONE_1_ID) @@ -273,7 +273,7 @@ async def test_restore_without_snapshort(hass: HomeAssistant) -> None: with patch.object(MockMonoprice, "restore_zone") as method_call: await _call_monoprice_service(hass, SERVICE_RESTORE, {"entity_id": ZONE_1_ID}) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) assert not method_call.called @@ -295,7 +295,7 @@ async def test_update(hass: HomeAssistant) -> None: monoprice.set_volume(11, 38) await async_update_entity(hass, ZONE_1_ID) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) state = hass.states.get(ZONE_1_ID) @@ -321,7 +321,7 @@ async def test_failed_update(hass: HomeAssistant) -> None: with patch.object(MockMonoprice, "zone_status", side_effect=SerialException): await async_update_entity(hass, ZONE_1_ID) - await hass.async_block_till_done() 
+ await hass.async_block_till_done(wait_background_tasks=True) state = hass.states.get(ZONE_1_ID) @@ -347,7 +347,7 @@ async def test_empty_update(hass: HomeAssistant) -> None: with patch.object(MockMonoprice, "zone_status", return_value=None): await async_update_entity(hass, ZONE_1_ID) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) state = hass.states.get(ZONE_1_ID) @@ -418,7 +418,7 @@ async def test_unknown_source(hass: HomeAssistant) -> None: monoprice.set_source(11, 5) await async_update_entity(hass, ZONE_1_ID) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) state = hass.states.get(ZONE_1_ID) diff --git a/tests/components/panasonic_viera/test_media_player.py b/tests/components/panasonic_viera/test_media_player.py index 1203bf1ed51..dab56542e6a 100644 --- a/tests/components/panasonic_viera/test_media_player.py +++ b/tests/components/panasonic_viera/test_media_player.py @@ -23,7 +23,7 @@ async def test_media_player_handle_URLerror( mock_remote.get_mute = Mock(side_effect=URLError(None, None)) async_fire_time_changed(hass, utcnow() + timedelta(minutes=2)) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) state_tv = hass.states.get("media_player.panasonic_viera_tv") assert state_tv.state == STATE_UNAVAILABLE @@ -41,7 +41,7 @@ async def test_media_player_handle_HTTPError( mock_remote.get_mute = Mock(side_effect=HTTPError(None, 400, None, None, None)) async_fire_time_changed(hass, utcnow() + timedelta(minutes=2)) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) state_tv = hass.states.get("media_player.panasonic_viera_tv") assert state_tv.state == STATE_OFF diff --git a/tests/components/pjlink/test_media_player.py b/tests/components/pjlink/test_media_player.py index a6d17233450..d44bc942290 100644 --- a/tests/components/pjlink/test_media_player.py +++ 
b/tests/components/pjlink/test_media_player.py @@ -208,7 +208,7 @@ async def test_update_unavailable(projector_from_address, hass: HomeAssistant) - projector_from_address.side_effect = socket.timeout async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=10)) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) state = hass.states.get("media_player.test") assert state.state == "unavailable" @@ -237,7 +237,7 @@ async def test_unavailable_time(mocked_projector, hass: HomeAssistant) -> None: mocked_projector.get_power.side_effect = ProjectorError("unavailable time") async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=10)) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) state = hass.states.get("media_player.test") assert state.state == "off" diff --git a/tests/components/profiler/test_init.py b/tests/components/profiler/test_init.py index 1140dc74849..3cade465347 100644 --- a/tests/components/profiler/test_init.py +++ b/tests/components/profiler/test_init.py @@ -332,7 +332,7 @@ async def test_log_object_sources( caplog.clear() async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=11)) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) assert "No new object growth found" in caplog.text fake_object2 = FakeObject() @@ -344,7 +344,7 @@ async def test_log_object_sources( caplog.clear() async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=21)) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) assert "New object FakeObject (1/2)" in caplog.text many_objects = [FakeObject() for _ in range(30)] @@ -352,7 +352,7 @@ async def test_log_object_sources( caplog.clear() async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=31)) - await hass.async_block_till_done() + await 
hass.async_block_till_done(wait_background_tasks=True) assert "New object FakeObject (2/30)" in caplog.text assert "New objects overflowed by {'FakeObject': 25}" in caplog.text @@ -362,7 +362,7 @@ async def test_log_object_sources( caplog.clear() async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=41)) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) assert "FakeObject" not in caplog.text assert "No new object growth found" not in caplog.text @@ -370,7 +370,7 @@ async def test_log_object_sources( await hass.async_block_till_done() async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=51)) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) assert "FakeObject" not in caplog.text assert "No new object growth found" not in caplog.text diff --git a/tests/components/ps4/test_media_player.py b/tests/components/ps4/test_media_player.py index 875b049d8c3..6adcad03016 100644 --- a/tests/components/ps4/test_media_player.py +++ b/tests/components/ps4/test_media_player.py @@ -234,6 +234,7 @@ async def test_media_attributes_are_fetched(hass: HomeAssistant) -> None: with patch(mock_func, return_value=mock_result) as mock_fetch: await mock_ddp_response(hass, MOCK_STATUS_PLAYING) + await hass.async_block_till_done(wait_background_tasks=True) mock_state = hass.states.get(mock_entity_id) mock_attrs = dict(mock_state.attributes) @@ -255,6 +256,7 @@ async def test_media_attributes_are_fetched(hass: HomeAssistant) -> None: with patch(mock_func, return_value=mock_result) as mock_fetch_app: await mock_ddp_response(hass, MOCK_STATUS_PLAYING) + await hass.async_block_till_done(wait_background_tasks=True) mock_state = hass.states.get(mock_entity_id) mock_attrs = dict(mock_state.attributes) diff --git a/tests/components/python_script/test_init.py b/tests/components/python_script/test_init.py index bec94db71f9..1c6fead6c4a 100644 --- 
a/tests/components/python_script/test_init.py +++ b/tests/components/python_script/test_init.py @@ -78,7 +78,7 @@ hass.states.set('test.entity', data.get('name', 'not set')) """ hass.async_add_executor_job(execute, hass, "test.py", source, {"name": "paulus"}) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) assert hass.states.is_state("test.entity", "paulus") @@ -96,7 +96,7 @@ print("This triggers warning.") """ hass.async_add_executor_job(execute, hass, "test.py", source, {}) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) assert "Don't use print() inside scripts." in caplog.text @@ -111,7 +111,7 @@ logger.info('Logging from inside script') """ hass.async_add_executor_job(execute, hass, "test.py", source, {}) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) assert "Logging from inside script" in caplog.text @@ -126,7 +126,7 @@ this is not valid Python """ hass.async_add_executor_job(execute, hass, "test.py", source, {}) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) assert "Error loading script test.py" in caplog.text @@ -140,8 +140,8 @@ async def test_execute_runtime_error( raise Exception('boom') """ - hass.async_add_executor_job(execute, hass, "test.py", source, {}) - await hass.async_block_till_done() + await hass.async_add_executor_job(execute, hass, "test.py", source, {}) + await hass.async_block_till_done(wait_background_tasks=True) assert "Error executing script: boom" in caplog.text @@ -153,7 +153,7 @@ raise Exception('boom') """ task = hass.async_add_executor_job(execute, hass, "test.py", source, {}, True) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) assert type(task.exception()) == HomeAssistantError assert "Error executing script (Exception): boom" in str(task.exception()) @@ -168,7 +168,7 @@ 
async def test_accessing_async_methods( hass.async_stop() """ - hass.async_add_executor_job(execute, hass, "test.py", source, {}) + await hass.async_add_executor_job(execute, hass, "test.py", source, {}) await hass.async_block_till_done() assert "Not allowed to access async methods" in caplog.text @@ -181,7 +181,7 @@ hass.async_stop() """ task = hass.async_add_executor_job(execute, hass, "test.py", source, {}, True) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) assert type(task.exception()) == ServiceValidationError assert "Not allowed to access async methods" in str(task.exception()) @@ -198,7 +198,7 @@ mylist = [1, 2, 3, 4] logger.info('Logging from inside script: %s %s' % (mydict["a"], mylist[2])) """ - hass.async_add_executor_job(execute, hass, "test.py", source, {}) + await hass.async_add_executor_job(execute, hass, "test.py", source, {}) await hass.async_block_till_done() assert "Logging from inside script: 1 3" in caplog.text @@ -217,7 +217,7 @@ async def test_accessing_forbidden_methods( "time.tzset()": "TimeWrapper.tzset", }.items(): caplog.records.clear() - hass.async_add_executor_job(execute, hass, "test.py", source, {}) + await hass.async_add_executor_job(execute, hass, "test.py", source, {}) await hass.async_block_till_done() assert f"Not allowed to access {name}" in caplog.text @@ -231,7 +231,7 @@ async def test_accessing_forbidden_methods_with_response(hass: HomeAssistant) -> "time.tzset()": "TimeWrapper.tzset", }.items(): task = hass.async_add_executor_job(execute, hass, "test.py", source, {}, True) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) assert type(task.exception()) == ServiceValidationError assert f"Not allowed to access {name}" in str(task.exception()) @@ -244,7 +244,7 @@ for i in [1, 2]: hass.states.set('hello.{}'.format(i), 'world') """ - hass.async_add_executor_job(execute, hass, "test.py", source, {}) + await 
hass.async_add_executor_job(execute, hass, "test.py", source, {}) await hass.async_block_till_done() assert hass.states.is_state("hello.1", "world") @@ -279,7 +279,7 @@ hass.states.set('hello.ab_list', '{}'.format(ab_list)) """ hass.async_add_executor_job(execute, hass, "test.py", source, {}) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) assert hass.states.is_state("hello.a", "1") assert hass.states.is_state("hello.b", "2") @@ -302,7 +302,7 @@ hass.states.set('hello.b', a[1]) hass.states.set('hello.c', a[2]) """ hass.async_add_executor_job(execute, hass, "test.py", source, {}) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) assert hass.states.is_state("hello.a", "1") assert hass.states.is_state("hello.b", "2") @@ -325,7 +325,7 @@ hass.states.set('module.datetime', """ hass.async_add_executor_job(execute, hass, "test.py", source, {}) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) assert hass.states.is_state("module.time", "1986") assert hass.states.is_state("module.time_strptime", "12:34") @@ -351,7 +351,7 @@ def b(): b() """ hass.async_add_executor_job(execute, hass, "test.py", source, {}) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) assert hass.states.is_state("hello.a", "one") assert hass.states.is_state("hello.b", "two") @@ -517,7 +517,7 @@ time.sleep(5) with patch("homeassistant.components.python_script.time.sleep"): hass.async_add_executor_job(execute, hass, "test.py", source, {}) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) assert caplog.text.count("time.sleep") == 1 @@ -664,7 +664,7 @@ hass.states.set('hello.c', c) """ hass.async_add_executor_job(execute, hass, "aug_assign.py", source, {}) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) assert 
hass.states.get("hello.a").state == str(((10 + 20) * 5) - 8) assert hass.states.get("hello.b").state == ("foo" + "bar") * 2 @@ -686,5 +686,5 @@ async def test_prohibited_augmented_assignment_operations( ) -> None: """Test that prohibited augmented assignment operations raise an error.""" hass.async_add_executor_job(execute, hass, "aug_assign_prohibited.py", case, {}) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) assert error in caplog.text diff --git a/tests/components/samsungtv/test_media_player.py b/tests/components/samsungtv/test_media_player.py index f874b92305b..db4f3f0e41f 100644 --- a/tests/components/samsungtv/test_media_player.py +++ b/tests/components/samsungtv/test_media_player.py @@ -200,7 +200,7 @@ async def test_setup_websocket_2( next_update = mock_now + timedelta(minutes=5) freezer.move_to(next_update) async_fire_time_changed(hass, next_update) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) state = hass.states.get(entity_id) assert state @@ -225,7 +225,7 @@ async def test_setup_encrypted_websocket( next_update = mock_now + timedelta(minutes=5) freezer.move_to(next_update) async_fire_time_changed(hass, next_update) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) state = hass.states.get(ENTITY_ID) assert state @@ -242,7 +242,7 @@ async def test_update_on( next_update = mock_now + timedelta(minutes=5) freezer.move_to(next_update) async_fire_time_changed(hass, next_update) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) state = hass.states.get(ENTITY_ID) assert state.state == STATE_ON @@ -262,7 +262,7 @@ async def test_update_off( next_update = mock_now + timedelta(minutes=5) freezer.move_to(next_update) async_fire_time_changed(hass, next_update) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) 
state = hass.states.get(ENTITY_ID) assert state.state == STATE_UNAVAILABLE @@ -290,7 +290,7 @@ async def test_update_off_ws_no_power_state( next_update = mock_now + timedelta(minutes=5) freezer.move_to(next_update) async_fire_time_changed(hass, next_update) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) state = hass.states.get(ENTITY_ID) assert state.state == STATE_OFF diff --git a/tests/components/schlage/test_binary_sensor.py b/tests/components/schlage/test_binary_sensor.py index 4673f263c8c..97f11577b86 100644 --- a/tests/components/schlage/test_binary_sensor.py +++ b/tests/components/schlage/test_binary_sensor.py @@ -22,7 +22,7 @@ async def test_keypad_disabled_binary_sensor( # Make the coordinator refresh data. async_fire_time_changed(hass, utcnow() + timedelta(seconds=31)) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) keypad = hass.states.get("binary_sensor.vault_door_keypad_disabled") assert keypad is not None @@ -43,7 +43,7 @@ async def test_keypad_disabled_binary_sensor_use_previous_logs_on_failure( # Make the coordinator refresh data. async_fire_time_changed(hass, utcnow() + timedelta(seconds=31)) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) keypad = hass.states.get("binary_sensor.vault_door_keypad_disabled") assert keypad is not None diff --git a/tests/components/schlage/test_lock.py b/tests/components/schlage/test_lock.py index 0972aa97033..5b26da7b27e 100644 --- a/tests/components/schlage/test_lock.py +++ b/tests/components/schlage/test_lock.py @@ -59,7 +59,7 @@ async def test_changed_by( # Make the coordinator refresh data. 
async_fire_time_changed(hass, utcnow() + timedelta(seconds=31)) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) mock_lock.last_changed_by.assert_called_once_with() lock_device = hass.states.get("lock.vault_door") diff --git a/tests/components/scrape/test_sensor.py b/tests/components/scrape/test_sensor.py index 41da2eb9a79..4d9c2b732dc 100644 --- a/tests/components/scrape/test_sensor.py +++ b/tests/components/scrape/test_sensor.py @@ -261,7 +261,7 @@ async def test_scrape_sensor_no_data_refresh(hass: HomeAssistant) -> None: mocker.payload = "test_scrape_sensor_no_data" async_fire_time_changed(hass, dt_util.utcnow() + DEFAULT_SCAN_INTERVAL) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) state = hass.states.get("sensor.ha_version") assert state is not None @@ -541,7 +541,7 @@ async def test_templates_with_yaml(hass: HomeAssistant) -> None: hass, dt_util.utcnow() + timedelta(minutes=10), ) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) state = hass.states.get("sensor.get_values_with_template") assert state.state == "Current Version: 2021.12.10" @@ -555,7 +555,7 @@ async def test_templates_with_yaml(hass: HomeAssistant) -> None: hass, dt_util.utcnow() + timedelta(minutes=20), ) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) state = hass.states.get("sensor.get_values_with_template") assert state.state == STATE_UNAVAILABLE @@ -568,7 +568,7 @@ async def test_templates_with_yaml(hass: HomeAssistant) -> None: hass, dt_util.utcnow() + timedelta(minutes=30), ) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) state = hass.states.get("sensor.get_values_with_template") assert state.state == "Current Version: 2021.12.10" @@ -608,7 +608,7 @@ async def test_availability( hass.states.async_set("sensor.input1", "on") 
freezer.tick(timedelta(minutes=10)) async_fire_time_changed(hass) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) state = hass.states.get("sensor.current_version") assert state.state == "2021.12.10" @@ -618,7 +618,7 @@ async def test_availability( freezer.tick(timedelta(minutes=10)) async_fire_time_changed(hass) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) state = hass.states.get("sensor.current_version") assert state.state == STATE_UNAVAILABLE diff --git a/tests/components/solaredge/test_coordinator.py b/tests/components/solaredge/test_coordinator.py index 4bd9dee930c..b1496d18d93 100644 --- a/tests/components/solaredge/test_coordinator.py +++ b/tests/components/solaredge/test_coordinator.py @@ -53,7 +53,7 @@ async def test_solaredgeoverviewdataservice_energy_values_validity( mock_solaredge().get_overview.return_value = mock_overview_data freezer.tick(OVERVIEW_UPDATE_DELAY) async_fire_time_changed(hass) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) state = hass.states.get("sensor.solaredge_lifetime_energy") assert state assert state.state == str(mock_overview_data["overview"]["lifeTimeData"]["energy"]) @@ -63,7 +63,7 @@ async def test_solaredgeoverviewdataservice_energy_values_validity( mock_solaredge().get_overview.return_value = mock_overview_data freezer.tick(OVERVIEW_UPDATE_DELAY) async_fire_time_changed(hass) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) state = hass.states.get("sensor.solaredge_lifetime_energy") assert state @@ -74,7 +74,7 @@ async def test_solaredgeoverviewdataservice_energy_values_validity( mock_solaredge().get_overview.return_value = mock_overview_data freezer.tick(OVERVIEW_UPDATE_DELAY) async_fire_time_changed(hass) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) state = 
hass.states.get("sensor.solaredge_lifetime_energy") assert state @@ -85,7 +85,7 @@ async def test_solaredgeoverviewdataservice_energy_values_validity( mock_solaredge().get_overview.return_value = mock_overview_data freezer.tick(OVERVIEW_UPDATE_DELAY) async_fire_time_changed(hass) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) state = hass.states.get("sensor.solaredge_energy_this_year") assert state @@ -103,7 +103,7 @@ async def test_solaredgeoverviewdataservice_energy_values_validity( mock_solaredge().get_overview.return_value = mock_overview_data freezer.tick(OVERVIEW_UPDATE_DELAY) async_fire_time_changed(hass) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) state = hass.states.get("sensor.solaredge_lifetime_energy") assert state diff --git a/tests/components/sonos/conftest.py b/tests/components/sonos/conftest.py index 0b3834992d8..00858a180a3 100644 --- a/tests/components/sonos/conftest.py +++ b/tests/components/sonos/conftest.py @@ -94,8 +94,9 @@ def async_setup_sonos(hass, config_entry, fire_zgs_event): async def _wrapper(): config_entry.add_to_hass(hass) assert await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) await fire_zgs_event() + await hass.async_block_till_done(wait_background_tasks=True) return _wrapper diff --git a/tests/components/sonos/test_repairs.py b/tests/components/sonos/test_repairs.py index cc1f59c5cd0..cf64912e498 100644 --- a/tests/components/sonos/test_repairs.py +++ b/tests/components/sonos/test_repairs.py @@ -28,10 +28,12 @@ async def test_subscription_repair_issues( config_entry.add_to_hass(hass) assert await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() + await hass.async_block_till_done() # Ensure an issue is registered on subscription failure + sub_callback = 
subscription.callback async_fire_time_changed(hass, dt_util.utcnow() + SCAN_INTERVAL) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) assert issue_registry.async_get_issue(DOMAIN, SUB_FAIL_ISSUE_ID) # Ensure the issue still exists after reload @@ -42,7 +44,6 @@ async def test_subscription_repair_issues( # Ensure the issue has been removed after a successful subscription callback variables = {"ZoneGroupState": zgs_discovery} event = SonosMockEvent(soco, soco.zoneGroupTopology, variables) - sub_callback = subscription.callback sub_callback(event) await hass.async_block_till_done() assert not issue_registry.async_get_issue(DOMAIN, SUB_FAIL_ISSUE_ID) diff --git a/tests/components/sonos/test_sensor.py b/tests/components/sonos/test_sensor.py index 6e4461e5397..1f4ba8d22cd 100644 --- a/tests/components/sonos/test_sensor.py +++ b/tests/components/sonos/test_sensor.py @@ -26,6 +26,7 @@ async def test_entity_registry_unsupported( soco.get_battery_info.side_effect = NotSupportedException await async_setup_sonos() + await hass.async_block_till_done(wait_background_tasks=True) assert "media_player.zone_a" in entity_registry.entities assert "sensor.zone_a_battery" not in entity_registry.entities @@ -36,6 +37,8 @@ async def test_entity_registry_supported( hass: HomeAssistant, async_autosetup_sonos, soco, entity_registry: er.EntityRegistry ) -> None: """Test sonos device with battery registered in the device registry.""" + await hass.async_block_till_done(wait_background_tasks=True) + assert "media_player.zone_a" in entity_registry.entities assert "sensor.zone_a_battery" in entity_registry.entities assert "binary_sensor.zone_a_charging" in entity_registry.entities @@ -69,6 +72,7 @@ async def test_battery_on_s1( soco.get_battery_info.return_value = {} await async_setup_sonos() + await hass.async_block_till_done(wait_background_tasks=True) subscription = soco.deviceProperties.subscribe.return_value sub_callback = 
subscription.callback @@ -78,7 +82,7 @@ async def test_battery_on_s1( # Update the speaker with a callback event sub_callback(device_properties_event) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) battery = entity_registry.entities["sensor.zone_a_battery"] battery_state = hass.states.get(battery.entity_id) @@ -101,6 +105,7 @@ async def test_device_payload_without_battery( soco.get_battery_info.return_value = None await async_setup_sonos() + await hass.async_block_till_done(wait_background_tasks=True) subscription = soco.deviceProperties.subscribe.return_value sub_callback = subscription.callback @@ -109,7 +114,7 @@ async def test_device_payload_without_battery( device_properties_event.variables["more_info"] = bad_payload sub_callback(device_properties_event) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) assert bad_payload in caplog.text @@ -125,6 +130,7 @@ async def test_device_payload_without_battery_and_ignored_keys( soco.get_battery_info.return_value = None await async_setup_sonos() + await hass.async_block_till_done(wait_background_tasks=True) subscription = soco.deviceProperties.subscribe.return_value sub_callback = subscription.callback @@ -133,7 +139,7 @@ async def test_device_payload_without_battery_and_ignored_keys( device_properties_event.variables["more_info"] = ignored_payload sub_callback(device_properties_event) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) assert ignored_payload not in caplog.text @@ -150,7 +156,7 @@ async def test_audio_input_sensor( subscription = soco.avTransport.subscribe.return_value sub_callback = subscription.callback sub_callback(tv_event) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) audio_input_sensor = entity_registry.entities["sensor.zone_a_audio_input_format"] audio_input_state = 
hass.states.get(audio_input_sensor.entity_id) @@ -161,7 +167,7 @@ async def test_audio_input_sensor( type(soco).soundbar_audio_input_format = no_input_mock async_fire_time_changed(hass, dt_util.utcnow() + SCAN_INTERVAL) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) no_input_mock.assert_called_once() audio_input_state = hass.states.get(audio_input_sensor.entity_id) @@ -169,13 +175,13 @@ async def test_audio_input_sensor( # Ensure state is not polled when source is not TV and state is already "No input" sub_callback(no_media_event) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) unpolled_mock = PropertyMock(return_value="Will not be polled") type(soco).soundbar_audio_input_format = unpolled_mock async_fire_time_changed(hass, dt_util.utcnow() + SCAN_INTERVAL) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) unpolled_mock.assert_not_called() audio_input_state = hass.states.get(audio_input_sensor.entity_id) @@ -199,7 +205,7 @@ async def test_microphone_binary_sensor( # Update the speaker with a callback event subscription = soco.deviceProperties.subscribe.return_value subscription.callback(device_properties_event) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) mic_binary_sensor_state = hass.states.get(mic_binary_sensor.entity_id) assert mic_binary_sensor_state.state == STATE_ON @@ -225,17 +231,18 @@ async def test_favorites_sensor( empty_event = SonosMockEvent(soco, service, {}) subscription = service.subscribe.return_value subscription.callback(event=empty_event) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) # Reload the integration to enable the sensor async_fire_time_changed( hass, dt_util.utcnow() + timedelta(seconds=RELOAD_AFTER_UPDATE_DELAY + 1), ) - await hass.async_block_till_done() + await 
hass.async_block_till_done(wait_background_tasks=True) # Trigger subscription callback for speaker discovery await fire_zgs_event() + await hass.async_block_till_done(wait_background_tasks=True) favorites_updated_event = SonosMockEvent( soco, service, {"favorites_update_id": "2", "container_update_i_ds": "FV:2,2"} @@ -245,4 +252,4 @@ async def test_favorites_sensor( return_value=True, ): subscription.callback(event=favorites_updated_event) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) diff --git a/tests/components/sonos/test_speaker.py b/tests/components/sonos/test_speaker.py index e0fc4c3baf9..2c4357060be 100644 --- a/tests/components/sonos/test_speaker.py +++ b/tests/components/sonos/test_speaker.py @@ -12,9 +12,20 @@ from tests.common import async_fire_time_changed async def test_fallback_to_polling( - hass: HomeAssistant, async_autosetup_sonos, soco, caplog: pytest.LogCaptureFixture + hass: HomeAssistant, + config_entry, + soco, + fire_zgs_event, + caplog: pytest.LogCaptureFixture, ) -> None: """Test that polling fallback works.""" + config_entry.add_to_hass(hass) + assert await hass.config_entries.async_setup(config_entry.entry_id) + # Do not wait on background tasks here because the + # subscription callback will fire an unsub the polling check + await hass.async_block_till_done() + await fire_zgs_event() + speaker = list(hass.data[DATA_SONOS].discovered.values())[0] assert speaker.soco is soco assert speaker._subscriptions @@ -30,7 +41,7 @@ async def test_fallback_to_polling( ), ): async_fire_time_changed(hass, dt_util.utcnow() + SCAN_INTERVAL) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) assert not speaker._subscriptions assert speaker.subscriptions_failed @@ -46,6 +57,7 @@ async def test_subscription_creation_fails( side_effect=ConnectionError("Took too long"), ): await async_setup_sonos() + await 
hass.async_block_till_done(wait_background_tasks=True) speaker = list(hass.data[DATA_SONOS].discovered.values())[0] assert not speaker._subscriptions diff --git a/tests/components/soundtouch/test_media_player.py b/tests/components/soundtouch/test_media_player.py index 94e6965a571..61d0c7b4ea5 100644 --- a/tests/components/soundtouch/test_media_player.py +++ b/tests/components/soundtouch/test_media_player.py @@ -665,7 +665,7 @@ async def test_zone_attributes( hass, dt_util.utcnow() + timedelta(seconds=RELOAD_AFTER_UPDATE_DELAY + 1), ) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) entity_1_state = hass.states.get(DEVICE_1_ENTITY_ID) assert entity_1_state.attributes[ATTR_SOUNDTOUCH_ZONE]["is_master"] diff --git a/tests/components/speedtestdotnet/test_init.py b/tests/components/speedtestdotnet/test_init.py index 5083f56a8e2..2b0f803eb6f 100644 --- a/tests/components/speedtestdotnet/test_init.py +++ b/tests/components/speedtestdotnet/test_init.py @@ -74,7 +74,7 @@ async def test_server_not_found(hass: HomeAssistant, mock_api: MagicMock) -> Non hass, dt_util.utcnow() + timedelta(minutes=61), ) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) state = hass.states.get("sensor.speedtest_ping") assert state is not None assert state.state == STATE_UNAVAILABLE diff --git a/tests/components/systemmonitor/test_binary_sensor.py b/tests/components/systemmonitor/test_binary_sensor.py index 51c8fc87a3a..e3fbdedc081 100644 --- a/tests/components/systemmonitor/test_binary_sensor.py +++ b/tests/components/systemmonitor/test_binary_sensor.py @@ -97,7 +97,7 @@ async def test_sensor_process_fails( freezer.tick(timedelta(minutes=1)) async_fire_time_changed(hass) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) process_sensor = hass.states.get("binary_sensor.system_monitor_process_python3") assert process_sensor is not None diff --git 
a/tests/components/systemmonitor/test_sensor.py b/tests/components/systemmonitor/test_sensor.py index 11dd002c2f7..a11112d8f86 100644 --- a/tests/components/systemmonitor/test_sensor.py +++ b/tests/components/systemmonitor/test_sensor.py @@ -232,7 +232,7 @@ async def test_sensor_updating( mock_psutil.virtual_memory.side_effect = Exception("Failed to update") freezer.tick(timedelta(minutes=1)) async_fire_time_changed(hass) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) memory_sensor = hass.states.get("sensor.system_monitor_memory_free") assert memory_sensor is not None @@ -248,7 +248,7 @@ async def test_sensor_updating( ) freezer.tick(timedelta(minutes=1)) async_fire_time_changed(hass) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) memory_sensor = hass.states.get("sensor.system_monitor_memory_free") assert memory_sensor is not None @@ -293,7 +293,7 @@ async def test_sensor_process_fails( freezer.tick(timedelta(minutes=1)) async_fire_time_changed(hass) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) process_sensor = hass.states.get("sensor.system_monitor_process_python3") assert process_sensor is not None @@ -330,7 +330,7 @@ async def test_sensor_network_sensors( freezer.tick(timedelta(minutes=1)) async_fire_time_changed(hass) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) network_out_sensor = hass.states.get("sensor.system_monitor_network_out_eth1") packets_out_sensor = hass.states.get("sensor.system_monitor_packets_out_eth1") @@ -362,7 +362,7 @@ async def test_sensor_network_sensors( freezer.tick(timedelta(minutes=1)) async_fire_time_changed(hass) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) network_out_sensor = hass.states.get("sensor.system_monitor_network_out_eth1") packets_out_sensor = 
hass.states.get("sensor.system_monitor_packets_out_eth1") @@ -470,7 +470,7 @@ async def test_exception_handling_disk_sensor( freezer.tick(timedelta(minutes=1)) async_fire_time_changed(hass) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) assert "OS error for /" in caplog.text @@ -483,7 +483,7 @@ async def test_exception_handling_disk_sensor( freezer.tick(timedelta(minutes=1)) async_fire_time_changed(hass) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) assert "OS error for /" in caplog.text @@ -498,7 +498,7 @@ async def test_exception_handling_disk_sensor( freezer.tick(timedelta(minutes=1)) async_fire_time_changed(hass) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) disk_sensor = hass.states.get("sensor.system_monitor_disk_free") assert disk_sensor is not None @@ -528,7 +528,7 @@ async def test_cpu_percentage_is_zero_returns_unknown( freezer.tick(timedelta(minutes=1)) async_fire_time_changed(hass) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) cpu_sensor = hass.states.get("sensor.system_monitor_processor_use") assert cpu_sensor is not None @@ -538,7 +538,7 @@ async def test_cpu_percentage_is_zero_returns_unknown( freezer.tick(timedelta(minutes=1)) async_fire_time_changed(hass) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) cpu_sensor = hass.states.get("sensor.system_monitor_processor_use") assert cpu_sensor is not None @@ -573,7 +573,7 @@ async def test_remove_obsolete_entities( ) freezer.tick(timedelta(minutes=5)) async_fire_time_changed(hass) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) # Fake an entity which should be removed as not supported and disabled entity_registry.async_get_or_create( diff --git a/tests/components/tcp/test_binary_sensor.py 
b/tests/components/tcp/test_binary_sensor.py index 959c1f050fd..05aa2a471db 100644 --- a/tests/components/tcp/test_binary_sensor.py +++ b/tests/components/tcp/test_binary_sensor.py @@ -79,7 +79,7 @@ async def test_state(hass: HomeAssistant, mock_socket, now) -> None: mock_socket.recv.return_value = b"on" async_fire_time_changed(hass, now + timedelta(seconds=45)) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) state = hass.states.get(TEST_ENTITY) diff --git a/tests/components/temper/test_sensor.py b/tests/components/temper/test_sensor.py index 94c44cc4296..d1e74f1ab0f 100644 --- a/tests/components/temper/test_sensor.py +++ b/tests/components/temper/test_sensor.py @@ -29,7 +29,7 @@ async def test_temperature_readback(hass: HomeAssistant) -> None: await hass.async_block_till_done() async_fire_time_changed(hass, utcnow + timedelta(seconds=70)) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) temperature = hass.states.get("sensor.mydevicename") assert temperature diff --git a/tests/components/totalconnect/test_alarm_control_panel.py b/tests/components/totalconnect/test_alarm_control_panel.py index 7ac6540f1ff..fa2e997756d 100644 --- a/tests/components/totalconnect/test_alarm_control_panel.py +++ b/tests/components/totalconnect/test_alarm_control_panel.py @@ -548,30 +548,30 @@ async def test_other_update_failures(hass: HomeAssistant) -> None: # then an error: ServiceUnavailable --> UpdateFailed async_fire_time_changed(hass, dt_util.utcnow() + SCAN_INTERVAL) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) assert hass.states.get(ENTITY_ID).state == STATE_UNAVAILABLE assert mock_request.call_count == 2 # works again async_fire_time_changed(hass, dt_util.utcnow() + SCAN_INTERVAL * 2) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) assert hass.states.get(ENTITY_ID).state 
== STATE_ALARM_DISARMED assert mock_request.call_count == 3 # then an error: TotalConnectError --> UpdateFailed async_fire_time_changed(hass, dt_util.utcnow() + SCAN_INTERVAL * 3) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) assert hass.states.get(ENTITY_ID).state == STATE_UNAVAILABLE assert mock_request.call_count == 4 # works again async_fire_time_changed(hass, dt_util.utcnow() + SCAN_INTERVAL * 4) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) assert hass.states.get(ENTITY_ID).state == STATE_ALARM_DISARMED assert mock_request.call_count == 5 # unknown TotalConnect status via ValueError async_fire_time_changed(hass, dt_util.utcnow() + SCAN_INTERVAL * 5) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) assert hass.states.get(ENTITY_ID).state == STATE_UNAVAILABLE assert mock_request.call_count == 6 diff --git a/tests/components/uvc/test_camera.py b/tests/components/uvc/test_camera.py index 12203a3e222..522448ecfc4 100644 --- a/tests/components/uvc/test_camera.py +++ b/tests/components/uvc/test_camera.py @@ -278,7 +278,7 @@ async def test_setup_nvr_errors_during_indexing( mock_remote.return_value.index.side_effect = None async_fire_time_changed(hass, now + timedelta(seconds=31)) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) camera_states = hass.states.async_all("camera") @@ -313,7 +313,7 @@ async def test_setup_nvr_errors_during_initialization( mock_remote.side_effect = None async_fire_time_changed(hass, now + timedelta(seconds=31)) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) camera_states = hass.states.async_all("camera") @@ -362,7 +362,7 @@ async def test_motion_recording_mode_properties( ] = True async_fire_time_changed(hass, now + timedelta(seconds=31)) - await hass.async_block_till_done() + await 
hass.async_block_till_done(wait_background_tasks=True) state = hass.states.get("camera.front") @@ -375,7 +375,7 @@ async def test_motion_recording_mode_properties( mock_remote.return_value.get_camera.return_value["recordingIndicator"] = "DISABLED" async_fire_time_changed(hass, now + timedelta(seconds=61)) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) state = hass.states.get("camera.front") @@ -387,7 +387,7 @@ async def test_motion_recording_mode_properties( ) async_fire_time_changed(hass, now + timedelta(seconds=91)) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) state = hass.states.get("camera.front") @@ -399,7 +399,7 @@ async def test_motion_recording_mode_properties( ) async_fire_time_changed(hass, now + timedelta(seconds=121)) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) state = hass.states.get("camera.front") diff --git a/tests/components/ws66i/test_media_player.py b/tests/components/ws66i/test_media_player.py index eec6bf191f7..c13f6cbd738 100644 --- a/tests/components/ws66i/test_media_player.py +++ b/tests/components/ws66i/test_media_player.py @@ -195,7 +195,7 @@ async def test_update(hass: HomeAssistant, freezer: FrozenDateTimeFactory) -> No with patch.object(MockWs66i, "open") as method_call: freezer.tick(POLL_INTERVAL) async_fire_time_changed(hass) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) assert not method_call.called @@ -226,13 +226,13 @@ async def test_failed_update( freezer.tick(POLL_INTERVAL) async_fire_time_changed(hass) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) # Failed update, close called with patch.object(MockWs66i, "zone_status", return_value=None): freezer.tick(POLL_INTERVAL) async_fire_time_changed(hass) - await hass.async_block_till_done() + await 
hass.async_block_till_done(wait_background_tasks=True) assert hass.states.is_state(ZONE_1_ID, STATE_UNAVAILABLE) @@ -240,12 +240,12 @@ async def test_failed_update( with patch.object(MockWs66i, "zone_status", return_value=None): freezer.tick(POLL_INTERVAL) async_fire_time_changed(hass) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) # A connection re-attempt succeeds freezer.tick(POLL_INTERVAL) async_fire_time_changed(hass) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) # confirm entity is back on state = hass.states.get(ZONE_1_ID) @@ -315,7 +315,7 @@ async def test_source_select( freezer.tick(POLL_INTERVAL) async_fire_time_changed(hass) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) state = hass.states.get(ZONE_1_ID) @@ -370,14 +370,14 @@ async def test_volume_up_down( ) freezer.tick(POLL_INTERVAL) async_fire_time_changed(hass) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) # should not go below zero assert ws66i.zones[11].volume == 0 await _call_media_player_service(hass, SERVICE_VOLUME_UP, {"entity_id": ZONE_1_ID}) freezer.tick(POLL_INTERVAL) async_fire_time_changed(hass) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) assert ws66i.zones[11].volume == 1 await _call_media_player_service( @@ -385,14 +385,14 @@ async def test_volume_up_down( ) freezer.tick(POLL_INTERVAL) async_fire_time_changed(hass) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) assert ws66i.zones[11].volume == MAX_VOL await _call_media_player_service(hass, SERVICE_VOLUME_UP, {"entity_id": ZONE_1_ID}) freezer.tick(POLL_INTERVAL) async_fire_time_changed(hass) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) # should not go above 38 
(MAX_VOL) assert ws66i.zones[11].volume == MAX_VOL diff --git a/tests/components/xiaomi_miio/test_vacuum.py b/tests/components/xiaomi_miio/test_vacuum.py index c5345386777..2cfc3a4f294 100644 --- a/tests/components/xiaomi_miio/test_vacuum.py +++ b/tests/components/xiaomi_miio/test_vacuum.py @@ -238,7 +238,7 @@ async def test_xiaomi_exceptions(hass: HomeAssistant, mock_mirobo_is_on) -> None mock_mirobo_is_on.status.side_effect = DeviceException("dummy exception") future = dt_util.utcnow() + timedelta(seconds=60) async_fire_time_changed(hass, future) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) assert not is_available() @@ -247,7 +247,7 @@ async def test_xiaomi_exceptions(hass: HomeAssistant, mock_mirobo_is_on) -> None mock_mirobo_is_on.status.reset_mock() future += timedelta(seconds=60) async_fire_time_changed(hass, future) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) assert not is_available() assert mock_mirobo_is_on.status.call_count == 1 diff --git a/tests/components/yale_smart_alarm/test_coordinator.py b/tests/components/yale_smart_alarm/test_coordinator.py index 5125c817567..6f1125fcf65 100644 --- a/tests/components/yale_smart_alarm/test_coordinator.py +++ b/tests/components/yale_smart_alarm/test_coordinator.py @@ -76,7 +76,7 @@ async def test_coordinator_setup_and_update_errors( client.get_all.side_effect = ConnectionError("Could not connect") async_fire_time_changed(hass, dt_util.utcnow() + timedelta(minutes=1)) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) client.get_all.assert_called_once() state = hass.states.get("alarm_control_panel.yale_smart_alarm") assert state.state == STATE_UNAVAILABLE @@ -84,7 +84,7 @@ async def test_coordinator_setup_and_update_errors( client.get_all.side_effect = ConnectionError("Could not connect") async_fire_time_changed(hass, dt_util.utcnow() + timedelta(minutes=2)) 
- await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) client.get_all.assert_called_once() state = hass.states.get("alarm_control_panel.yale_smart_alarm") assert state.state == STATE_UNAVAILABLE @@ -92,7 +92,7 @@ async def test_coordinator_setup_and_update_errors( client.get_all.side_effect = TimeoutError("Could not connect") async_fire_time_changed(hass, dt_util.utcnow() + timedelta(minutes=3)) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) client.get_all.assert_called_once() state = hass.states.get("alarm_control_panel.yale_smart_alarm") assert state.state == STATE_UNAVAILABLE @@ -100,7 +100,7 @@ async def test_coordinator_setup_and_update_errors( client.get_all.side_effect = UnknownError("info") async_fire_time_changed(hass, dt_util.utcnow() + timedelta(minutes=4)) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) client.get_all.assert_called_once() state = hass.states.get("alarm_control_panel.yale_smart_alarm") assert state.state == STATE_UNAVAILABLE @@ -110,7 +110,7 @@ async def test_coordinator_setup_and_update_errors( client.get_all.return_value = load_json client.get_armed_status.return_value = YALE_STATE_ARM_FULL async_fire_time_changed(hass, dt_util.utcnow() + timedelta(minutes=5)) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) client.get_all.assert_called_once() state = hass.states.get("alarm_control_panel.yale_smart_alarm") assert state.state == STATE_ALARM_ARMED_AWAY @@ -118,7 +118,7 @@ async def test_coordinator_setup_and_update_errors( client.get_all.side_effect = AuthenticationError("Can not authenticate") async_fire_time_changed(hass, dt_util.utcnow() + timedelta(minutes=6)) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) client.get_all.assert_called_once() state = 
hass.states.get("alarm_control_panel.yale_smart_alarm") assert state.state == STATE_UNAVAILABLE diff --git a/tests/test_core.py b/tests/test_core.py index 11fda50a180..a0a197096cd 100644 --- a/tests/test_core.py +++ b/tests/test_core.py @@ -588,6 +588,46 @@ async def test_async_get_hass_can_be_called(hass: HomeAssistant) -> None: my_job_create_task.join() +async def test_async_add_executor_job_background(hass: HomeAssistant) -> None: + """Test running an executor job in the background.""" + calls = [] + + def job(): + time.sleep(0.01) + calls.append(1) + + async def _async_add_executor_job(): + await hass.async_add_executor_job(job) + + task = hass.async_create_background_task( + _async_add_executor_job(), "background", eager_start=True + ) + await hass.async_block_till_done() + assert len(calls) == 0 + await hass.async_block_till_done(wait_background_tasks=True) + assert len(calls) == 1 + await task + + +async def test_async_add_executor_job(hass: HomeAssistant) -> None: + """Test running an executor job.""" + calls = [] + + def job(): + time.sleep(0.01) + calls.append(1) + + async def _async_add_executor_job(): + await hass.async_add_executor_job(job) + + task = hass.async_create_task( + _async_add_executor_job(), "background", eager_start=True + ) + await hass.async_block_till_done() + assert len(calls) == 1 + await task + + async def test_stage_shutdown(hass: HomeAssistant) -> None: """Simulate a shutdown, test calling stuff.""" test_stop = async_capture_events(hass, EVENT_HOMEASSISTANT_STOP) From f2edc156874511b8a921043a65249519c0bb74e9 Mon Sep 17 00:00:00 2001 From: Michael Hansen Date: Sat, 30 Mar 2024 15:59:20 -0500 Subject: [PATCH 047/426] Add initial support for floors to intents (#114456) * Add initial support for floors to intents * Fix climate intent * More tests * No return value * Add requested changes * Reuse event handler --- homeassistant/components/climate/intent.py | 2 + .../components/conversation/default_agent.py | 46 +++++++- 
.../components/conversation/manifest.json | 2 +- homeassistant/helpers/intent.py | 110 ++++++++++++++---- homeassistant/package_constraints.txt | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- .../conversation/test_default_agent.py | 73 +++++++++++- .../test_default_agent_intents.py | 105 ++++++++++++++++- tests/helpers/test_intent.py | 76 +++++++++++- 10 files changed, 384 insertions(+), 36 deletions(-) diff --git a/homeassistant/components/climate/intent.py b/homeassistant/components/climate/intent.py index db263451f0b..3073d3e3c26 100644 --- a/homeassistant/components/climate/intent.py +++ b/homeassistant/components/climate/intent.py @@ -58,6 +58,7 @@ class GetTemperatureIntent(intent.IntentHandler): raise intent.NoStatesMatchedError( name=entity_text or entity_name, area=area_name or area_id, + floor=None, domains={DOMAIN}, device_classes=None, ) @@ -75,6 +76,7 @@ class GetTemperatureIntent(intent.IntentHandler): raise intent.NoStatesMatchedError( name=entity_name, area=None, + floor=None, domains={DOMAIN}, device_classes=None, ) diff --git a/homeassistant/components/conversation/default_agent.py b/homeassistant/components/conversation/default_agent.py index 96b0565ebd3..c0307c68908 100644 --- a/homeassistant/components/conversation/default_agent.py +++ b/homeassistant/components/conversation/default_agent.py @@ -34,6 +34,7 @@ from homeassistant.helpers import ( area_registry as ar, device_registry as dr, entity_registry as er, + floor_registry as fr, intent, start, template, @@ -163,7 +164,12 @@ class DefaultAgent(AbstractConversationAgent): self.hass.bus.async_listen( ar.EVENT_AREA_REGISTRY_UPDATED, - self._async_handle_area_registry_changed, + self._async_handle_area_floor_registry_changed, + run_immediately=True, + ) + self.hass.bus.async_listen( + fr.EVENT_FLOOR_REGISTRY_UPDATED, + self._async_handle_area_floor_registry_changed, run_immediately=True, ) self.hass.bus.async_listen( @@ -696,10 +702,13 @@ class 
DefaultAgent(AbstractConversationAgent): return lang_intents @core.callback - def _async_handle_area_registry_changed( - self, event: core.Event[ar.EventAreaRegistryUpdatedData] + def _async_handle_area_floor_registry_changed( + self, + event: core.Event[ + ar.EventAreaRegistryUpdatedData | fr.EventFloorRegistryUpdatedData + ], ) -> None: - """Clear area area cache when the area registry has changed.""" + """Clear area/floor list cache when the area registry has changed.""" self._slot_lists = None @core.callback @@ -773,6 +782,8 @@ class DefaultAgent(AbstractConversationAgent): # Default name entity_names.append((state.name, state.name, context)) + _LOGGER.debug("Exposed entities: %s", entity_names) + # Expose all areas. # # We pass in area id here with the expectation that no two areas will @@ -788,11 +799,25 @@ class DefaultAgent(AbstractConversationAgent): area_names.append((alias, area.id)) - _LOGGER.debug("Exposed entities: %s", entity_names) + # Expose all floors. + # + # We pass in floor id here with the expectation that no two floors will + # share the same name or alias. 
+ floors = fr.async_get(self.hass) + floor_names = [] + for floor in floors.async_list_floors(): + floor_names.append((floor.name, floor.floor_id)) + if floor.aliases: + for alias in floor.aliases: + if not alias.strip(): + continue + + floor_names.append((alias, floor.floor_id)) self._slot_lists = { "area": TextSlotList.from_tuples(area_names, allow_template=False), "name": TextSlotList.from_tuples(entity_names, allow_template=False), + "floor": TextSlotList.from_tuples(floor_names, allow_template=False), } return self._slot_lists @@ -953,6 +978,10 @@ def _get_unmatched_response(result: RecognizeResult) -> tuple[ErrorKey, dict[str # area only return ErrorKey.NO_AREA, {"area": unmatched_area} + if unmatched_floor := unmatched_text.get("floor"): + # floor only + return ErrorKey.NO_FLOOR, {"floor": unmatched_floor} + # Area may still have matched matched_area: str | None = None if matched_area_entity := result.entities.get("area"): @@ -1000,6 +1029,13 @@ def _get_no_states_matched_response( "area": no_states_error.area, } + if no_states_error.floor: + # domain in floor + return ErrorKey.NO_DOMAIN_IN_FLOOR, { + "domain": domain, + "floor": no_states_error.floor, + } + # domain only return ErrorKey.NO_DOMAIN, {"domain": domain} diff --git a/homeassistant/components/conversation/manifest.json b/homeassistant/components/conversation/manifest.json index 7f3c4f5894e..7f463483bf9 100644 --- a/homeassistant/components/conversation/manifest.json +++ b/homeassistant/components/conversation/manifest.json @@ -7,5 +7,5 @@ "integration_type": "system", "iot_class": "local_push", "quality_scale": "internal", - "requirements": ["hassil==1.6.1", "home-assistant-intents==2024.3.27"] + "requirements": ["hassil==1.6.1", "home-assistant-intents==2024.3.29"] } diff --git a/homeassistant/helpers/intent.py b/homeassistant/helpers/intent.py index 63214cb135b..fcebf91b854 100644 --- a/homeassistant/helpers/intent.py +++ b/homeassistant/helpers/intent.py @@ -24,7 +24,13 @@ from 
homeassistant.core import Context, HomeAssistant, State, callback from homeassistant.exceptions import HomeAssistantError from homeassistant.loader import bind_hass -from . import area_registry, config_validation as cv, device_registry, entity_registry +from . import ( + area_registry, + config_validation as cv, + device_registry, + entity_registry, + floor_registry, +) _LOGGER = logging.getLogger(__name__) _SlotsType = dict[str, Any] @@ -144,16 +150,18 @@ class NoStatesMatchedError(IntentError): def __init__( self, - name: str | None, - area: str | None, - domains: set[str] | None, - device_classes: set[str] | None, + name: str | None = None, + area: str | None = None, + floor: str | None = None, + domains: set[str] | None = None, + device_classes: set[str] | None = None, ) -> None: """Initialize error.""" super().__init__() self.name = name self.area = area + self.floor = floor self.domains = domains self.device_classes = device_classes @@ -220,12 +228,35 @@ def _find_area( return None -def _filter_by_area( +def _find_floor( + id_or_name: str, floors: floor_registry.FloorRegistry +) -> floor_registry.FloorEntry | None: + """Find an floor by id or name, checking aliases too.""" + floor = floors.async_get_floor(id_or_name) or floors.async_get_floor_by_name( + id_or_name + ) + if floor is not None: + return floor + + # Check floor aliases + for maybe_floor in floors.floors.values(): + if not maybe_floor.aliases: + continue + + for floor_alias in maybe_floor.aliases: + if id_or_name == floor_alias.casefold(): + return maybe_floor + + return None + + +def _filter_by_areas( states_and_entities: list[tuple[State, entity_registry.RegistryEntry | None]], - area: area_registry.AreaEntry, + areas: Iterable[area_registry.AreaEntry], devices: device_registry.DeviceRegistry, ) -> Iterable[tuple[State, entity_registry.RegistryEntry | None]]: """Filter state/entity pairs by an area.""" + filter_area_ids: set[str | None] = {a.id for a in areas} entity_area_ids: dict[str, str | 
None] = {} for _state, entity in states_and_entities: if entity is None: @@ -241,7 +272,7 @@ def _filter_by_area( entity_area_ids[entity.id] = device.area_id for state, entity in states_and_entities: - if (entity is not None) and (entity_area_ids.get(entity.id) == area.id): + if (entity is not None) and (entity_area_ids.get(entity.id) in filter_area_ids): yield (state, entity) @@ -252,11 +283,14 @@ def async_match_states( name: str | None = None, area_name: str | None = None, area: area_registry.AreaEntry | None = None, + floor_name: str | None = None, + floor: floor_registry.FloorEntry | None = None, domains: Collection[str] | None = None, device_classes: Collection[str] | None = None, states: Iterable[State] | None = None, entities: entity_registry.EntityRegistry | None = None, areas: area_registry.AreaRegistry | None = None, + floors: floor_registry.FloorRegistry | None = None, devices: device_registry.DeviceRegistry | None = None, assistant: str | None = None, ) -> Iterable[State]: @@ -268,6 +302,15 @@ def async_match_states( if entities is None: entities = entity_registry.async_get(hass) + if devices is None: + devices = device_registry.async_get(hass) + + if areas is None: + areas = area_registry.async_get(hass) + + if floors is None: + floors = floor_registry.async_get(hass) + # Gather entities states_and_entities: list[tuple[State, entity_registry.RegistryEntry | None]] = [] for state in states: @@ -294,20 +337,35 @@ def async_match_states( if _is_device_class(state, entity, device_classes) ] + filter_areas: list[area_registry.AreaEntry] = [] + + if (floor is None) and (floor_name is not None): + # Look up floor by name + floor = _find_floor(floor_name, floors) + if floor is None: + _LOGGER.warning("Floor not found: %s", floor_name) + return + + if floor is not None: + filter_areas = [ + a for a in areas.async_list_areas() if a.floor_id == floor.floor_id + ] + if (area is None) and (area_name is not None): # Look up area by name - if areas is None: - areas 
= area_registry.async_get(hass) - area = _find_area(area_name, areas) - assert area is not None, f"No area named {area_name}" + if area is None: + _LOGGER.warning("Area not found: %s", area_name) + return if area is not None: - # Filter by states/entities by area - if devices is None: - devices = device_registry.async_get(hass) + filter_areas = [area] - states_and_entities = list(_filter_by_area(states_and_entities, area, devices)) + if filter_areas: + # Filter by states/entities by area + states_and_entities = list( + _filter_by_areas(states_and_entities, filter_areas, devices) + ) if assistant is not None: # Filter by exposure @@ -318,9 +376,6 @@ def async_match_states( ] if name is not None: - if devices is None: - devices = device_registry.async_get(hass) - # Filter by name name = name.casefold() @@ -389,7 +444,7 @@ class DynamicServiceIntentHandler(IntentHandler): """ slot_schema = { - vol.Any("name", "area"): cv.string, + vol.Any("name", "area", "floor"): cv.string, vol.Optional("domain"): vol.All(cv.ensure_list, [cv.string]), vol.Optional("device_class"): vol.All(cv.ensure_list, [cv.string]), } @@ -453,7 +508,7 @@ class DynamicServiceIntentHandler(IntentHandler): # Don't match on name if targeting all entities entity_name = None - # Look up area first to fail early + # Look up area to fail early area_slot = slots.get("area", {}) area_id = area_slot.get("value") area_name = area_slot.get("text") @@ -464,6 +519,17 @@ class DynamicServiceIntentHandler(IntentHandler): if area is None: raise IntentHandleError(f"No area named {area_name}") + # Look up floor to fail early + floor_slot = slots.get("floor", {}) + floor_id = floor_slot.get("value") + floor_name = floor_slot.get("text") + floor: floor_registry.FloorEntry | None = None + if floor_id is not None: + floors = floor_registry.async_get(hass) + floor = floors.async_get_floor(floor_id) + if floor is None: + raise IntentHandleError(f"No floor named {floor_name}") + # Optional domain/device class filters. 
# Convert to sets for speed. domains: set[str] | None = None @@ -480,6 +546,7 @@ class DynamicServiceIntentHandler(IntentHandler): hass, name=entity_name, area=area, + floor=floor, domains=domains, device_classes=device_classes, assistant=intent_obj.assistant, @@ -491,6 +558,7 @@ class DynamicServiceIntentHandler(IntentHandler): raise NoStatesMatchedError( name=entity_text or entity_name, area=area_name or area_id, + floor=floor_name or floor_id, domains=domains, device_classes=device_classes, ) diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index 1d60b74f18f..bdfaa8fcf45 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -31,7 +31,7 @@ hass-nabucasa==0.79.0 hassil==1.6.1 home-assistant-bluetooth==1.12.0 home-assistant-frontend==20240329.1 -home-assistant-intents==2024.3.27 +home-assistant-intents==2024.3.29 httpx==0.27.0 ifaddr==0.2.0 Jinja2==3.1.3 diff --git a/requirements_all.txt b/requirements_all.txt index 0a94eb110e1..da752f00279 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1080,7 +1080,7 @@ holidays==0.45 home-assistant-frontend==20240329.1 # homeassistant.components.conversation -home-assistant-intents==2024.3.27 +home-assistant-intents==2024.3.29 # homeassistant.components.home_connect homeconnect==0.7.2 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 3a3ebbbb077..4889e9de781 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -879,7 +879,7 @@ holidays==0.45 home-assistant-frontend==20240329.1 # homeassistant.components.conversation -home-assistant-intents==2024.3.27 +home-assistant-intents==2024.3.29 # homeassistant.components.home_connect homeconnect==0.7.2 diff --git a/tests/components/conversation/test_default_agent.py b/tests/components/conversation/test_default_agent.py index aefb37f427e..8f38459a8da 100644 --- a/tests/components/conversation/test_default_agent.py +++ 
b/tests/components/conversation/test_default_agent.py @@ -17,6 +17,7 @@ from homeassistant.helpers import ( device_registry as dr, entity, entity_registry as er, + floor_registry as fr, intent, ) from homeassistant.setup import async_setup_component @@ -480,6 +481,20 @@ async def test_error_no_area(hass: HomeAssistant, init_components) -> None: ) +async def test_error_no_floor(hass: HomeAssistant, init_components) -> None: + """Test error message when floor is missing.""" + result = await conversation.async_converse( + hass, "turn on all the lights on missing floor", None, Context(), None + ) + + assert result.response.response_type == intent.IntentResponseType.ERROR + assert result.response.error_code == intent.IntentResponseErrorCode.NO_VALID_TARGETS + assert ( + result.response.speech["plain"]["speech"] + == "Sorry, I am not aware of any floor called missing" + ) + + async def test_error_no_device_in_area( hass: HomeAssistant, init_components, area_registry: ar.AreaRegistry ) -> None: @@ -549,6 +564,48 @@ async def test_error_no_domain_in_area( ) +async def test_error_no_domain_in_floor( + hass: HomeAssistant, + init_components, + area_registry: ar.AreaRegistry, + floor_registry: fr.FloorRegistry, +) -> None: + """Test error message when no devices/entities for a domain exist on a floor.""" + floor_ground = floor_registry.async_create("ground") + area_kitchen = area_registry.async_get_or_create("kitchen_id") + area_kitchen = area_registry.async_update( + area_kitchen.id, name="kitchen", floor_id=floor_ground.floor_id + ) + result = await conversation.async_converse( + hass, "turn on all lights on the ground floor", None, Context(), None + ) + + assert result.response.response_type == intent.IntentResponseType.ERROR + assert result.response.error_code == intent.IntentResponseErrorCode.NO_VALID_TARGETS + assert ( + result.response.speech["plain"]["speech"] + == "Sorry, I am not aware of any light on the ground floor" + ) + + # Add a new floor/area to trigger 
registry event handlers + floor_upstairs = floor_registry.async_create("upstairs") + area_bedroom = area_registry.async_get_or_create("bedroom_id") + area_bedroom = area_registry.async_update( + area_bedroom.id, name="bedroom", floor_id=floor_upstairs.floor_id + ) + + result = await conversation.async_converse( + hass, "turn on all lights upstairs", None, Context(), None + ) + + assert result.response.response_type == intent.IntentResponseType.ERROR + assert result.response.error_code == intent.IntentResponseErrorCode.NO_VALID_TARGETS + assert ( + result.response.speech["plain"]["speech"] + == "Sorry, I am not aware of any light on the upstairs floor" + ) + + async def test_error_no_device_class(hass: HomeAssistant, init_components) -> None: """Test error message when no entities of a device class exist.""" @@ -736,7 +793,7 @@ async def test_no_states_matched_default_error( with patch( "homeassistant.components.conversation.default_agent.intent.async_handle", - side_effect=intent.NoStatesMatchedError(None, None, None, None), + side_effect=intent.NoStatesMatchedError(), ): result = await conversation.async_converse( hass, "turn on lights in the kitchen", None, Context(), None @@ -759,11 +816,16 @@ async def test_empty_aliases( area_registry: ar.AreaRegistry, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, + floor_registry: fr.FloorRegistry, ) -> None: """Test that empty aliases are not added to slot lists.""" + floor_1 = floor_registry.async_create("first floor", aliases={" "}) + area_kitchen = area_registry.async_get_or_create("kitchen_id") area_kitchen = area_registry.async_update(area_kitchen.id, name="kitchen") - area_kitchen = area_registry.async_update(area_kitchen.id, aliases={" "}) + area_kitchen = area_registry.async_update( + area_kitchen.id, aliases={" "}, floor_id=floor_1 + ) entry = MockConfigEntry() entry.add_to_hass(hass) @@ -799,7 +861,7 @@ async def test_empty_aliases( slot_lists = mock_recognize_all.call_args[0][2] # Slot 
lists should only contain non-empty text - assert slot_lists.keys() == {"area", "name"} + assert slot_lists.keys() == {"area", "name", "floor"} areas = slot_lists["area"] assert len(areas.values) == 1 assert areas.values[0].value_out == area_kitchen.id @@ -810,6 +872,11 @@ async def test_empty_aliases( assert names.values[0].value_out == kitchen_light.name assert names.values[0].text_in.text == kitchen_light.name + floors = slot_lists["floor"] + assert len(floors.values) == 1 + assert floors.values[0].value_out == floor_1.floor_id + assert floors.values[0].text_in.text == floor_1.name + async def test_all_domains_loaded(hass: HomeAssistant, init_components) -> None: """Test that sentences for all domains are always loaded.""" diff --git a/tests/components/conversation/test_default_agent_intents.py b/tests/components/conversation/test_default_agent_intents.py index c57d93d8cef..9636ac07f63 100644 --- a/tests/components/conversation/test_default_agent_intents.py +++ b/tests/components/conversation/test_default_agent_intents.py @@ -2,14 +2,26 @@ import pytest -from homeassistant.components import conversation, cover, media_player, vacuum, valve +from homeassistant.components import ( + conversation, + cover, + light, + media_player, + vacuum, + valve, +) from homeassistant.components.cover import intent as cover_intent from homeassistant.components.homeassistant.exposed_entities import async_expose_entity from homeassistant.components.media_player import intent as media_player_intent from homeassistant.components.vacuum import intent as vaccum_intent from homeassistant.const import STATE_CLOSED from homeassistant.core import Context, HomeAssistant -from homeassistant.helpers import intent +from homeassistant.helpers import ( + area_registry as ar, + entity_registry as er, + floor_registry as fr, + intent, +) from homeassistant.setup import async_setup_component from tests.common import async_mock_service @@ -244,3 +256,92 @@ async def test_media_player_intents( 
"entity_id": entity_id, media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.75, } + + +async def test_turn_floor_lights_on_off( + hass: HomeAssistant, + init_components, + entity_registry: er.EntityRegistry, + area_registry: ar.AreaRegistry, + floor_registry: fr.FloorRegistry, +) -> None: + """Test that we can turn lights on/off for an entire floor.""" + floor_ground = floor_registry.async_create("ground", aliases={"downstairs"}) + floor_upstairs = floor_registry.async_create("upstairs") + + # Kitchen and living room are on the ground floor + area_kitchen = area_registry.async_get_or_create("kitchen_id") + area_kitchen = area_registry.async_update( + area_kitchen.id, name="kitchen", floor_id=floor_ground.floor_id + ) + + area_living_room = area_registry.async_get_or_create("living_room_id") + area_living_room = area_registry.async_update( + area_living_room.id, name="living_room", floor_id=floor_ground.floor_id + ) + + # Bedroom is upstairs + area_bedroom = area_registry.async_get_or_create("bedroom_id") + area_bedroom = area_registry.async_update( + area_bedroom.id, name="bedroom", floor_id=floor_upstairs.floor_id + ) + + # One light per area + kitchen_light = entity_registry.async_get_or_create( + "light", "demo", "kitchen_light" + ) + kitchen_light = entity_registry.async_update_entity( + kitchen_light.entity_id, area_id=area_kitchen.id + ) + hass.states.async_set(kitchen_light.entity_id, "off") + + living_room_light = entity_registry.async_get_or_create( + "light", "demo", "living_room_light" + ) + living_room_light = entity_registry.async_update_entity( + living_room_light.entity_id, area_id=area_living_room.id + ) + hass.states.async_set(living_room_light.entity_id, "off") + + bedroom_light = entity_registry.async_get_or_create( + "light", "demo", "bedroom_light" + ) + bedroom_light = entity_registry.async_update_entity( + bedroom_light.entity_id, area_id=area_bedroom.id + ) + hass.states.async_set(bedroom_light.entity_id, "off") + + # Target by floor + on_calls = 
async_mock_service(hass, light.DOMAIN, light.SERVICE_TURN_ON) + result = await conversation.async_converse( + hass, "turn on all lights downstairs", None, Context(), None + ) + + assert len(on_calls) == 2 + assert result.response.response_type == intent.IntentResponseType.ACTION_DONE + assert {s.entity_id for s in result.response.matched_states} == { + kitchen_light.entity_id, + living_room_light.entity_id, + } + + on_calls.clear() + result = await conversation.async_converse( + hass, "upstairs lights on", None, Context(), None + ) + + assert len(on_calls) == 1 + assert result.response.response_type == intent.IntentResponseType.ACTION_DONE + assert {s.entity_id for s in result.response.matched_states} == { + bedroom_light.entity_id + } + + off_calls = async_mock_service(hass, light.DOMAIN, light.SERVICE_TURN_OFF) + result = await conversation.async_converse( + hass, "turn upstairs lights off", None, Context(), None + ) + + assert len(off_calls) == 1 + assert result.response.response_type == intent.IntentResponseType.ACTION_DONE + assert {s.entity_id for s in result.response.matched_states} == { + bedroom_light.entity_id + } diff --git a/tests/helpers/test_intent.py b/tests/helpers/test_intent.py index 1bc01c28cf2..d77eb698205 100644 --- a/tests/helpers/test_intent.py +++ b/tests/helpers/test_intent.py @@ -15,6 +15,7 @@ from homeassistant.helpers import ( config_validation as cv, device_registry as dr, entity_registry as er, + floor_registry as fr, intent, ) from homeassistant.setup import async_setup_component @@ -34,12 +35,25 @@ async def test_async_match_states( hass: HomeAssistant, area_registry: ar.AreaRegistry, entity_registry: er.EntityRegistry, + floor_registry: fr.FloorRegistry, ) -> None: """Test async_match_state helper.""" area_kitchen = area_registry.async_get_or_create("kitchen") - area_registry.async_update(area_kitchen.id, aliases={"food room"}) + area_kitchen = area_registry.async_update(area_kitchen.id, aliases={"food room"}) area_bedroom = 
area_registry.async_get_or_create("bedroom") + # Kitchen is on the first floor + floor_1 = floor_registry.async_create("first floor", aliases={"ground floor"}) + area_kitchen = area_registry.async_update( + area_kitchen.id, floor_id=floor_1.floor_id + ) + + # Bedroom is on the second floor + floor_2 = floor_registry.async_create("second floor") + area_bedroom = area_registry.async_update( + area_bedroom.id, floor_id=floor_2.floor_id + ) + state1 = State( "light.kitchen", "on", attributes={ATTR_FRIENDLY_NAME: "kitchen light"} ) @@ -94,6 +108,13 @@ async def test_async_match_states( ) ) + # Invalid area + assert not list( + intent.async_match_states( + hass, area_name="invalid area", states=[state1, state2] + ) + ) + # Domain + area assert list( intent.async_match_states( @@ -111,6 +132,35 @@ async def test_async_match_states( ) ) == [state2] + # Floor + assert list( + intent.async_match_states( + hass, floor_name="first floor", states=[state1, state2] + ) + ) == [state1] + + assert list( + intent.async_match_states( + # Check alias + hass, + floor_name="ground floor", + states=[state1, state2], + ) + ) == [state1] + + assert list( + intent.async_match_states( + hass, floor_name="second floor", states=[state1, state2] + ) + ) == [state2] + + # Invalid floor + assert not list( + intent.async_match_states( + hass, floor_name="invalid floor", states=[state1, state2] + ) + ) + async def test_match_device_area( hass: HomeAssistant, @@ -300,3 +350,27 @@ async def test_validate_then_run_in_background(hass: HomeAssistant) -> None: assert len(calls) == 1 assert calls[0].data == {"entity_id": "light.kitchen"} + + +async def test_invalid_area_floor_names(hass: HomeAssistant) -> None: + """Test that we throw an intent handle error with invalid area/floor names.""" + handler = intent.ServiceIntentHandler( + "TestType", "light", "turn_on", "Turned {} on" + ) + intent.async_register(hass, handler) + + with pytest.raises(intent.IntentHandleError): + await intent.async_handle( + 
hass, + "test", + "TestType", + slots={"area": {"value": "invalid area"}}, + ) + + with pytest.raises(intent.IntentHandleError): + await intent.async_handle( + hass, + "test", + "TestType", + slots={"floor": {"value": "invalid floor"}}, + ) From bdf51553eff11b42eb2d06121b320471f9531d68 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Sat, 30 Mar 2024 06:34:47 -1000 Subject: [PATCH 048/426] Improve sonos test synchronization (#114468) --- tests/components/sonos/conftest.py | 35 +++++++++++++++++++++++--- tests/components/sonos/test_repairs.py | 12 ++++----- 2 files changed, 38 insertions(+), 9 deletions(-) diff --git a/tests/components/sonos/conftest.py b/tests/components/sonos/conftest.py index 00858a180a3..576c9a80799 100644 --- a/tests/components/sonos/conftest.py +++ b/tests/components/sonos/conftest.py @@ -1,16 +1,20 @@ """Configuration for Sonos tests.""" +import asyncio +from collections.abc import Callable from copy import copy from ipaddress import ip_address from unittest.mock import AsyncMock, MagicMock, Mock, patch import pytest from soco import SoCo +from soco.events_base import Event as SonosEvent from homeassistant.components import ssdp, zeroconf from homeassistant.components.media_player import DOMAIN as MP_DOMAIN from homeassistant.components.sonos import DOMAIN from homeassistant.const import CONF_HOSTS +from homeassistant.core import HomeAssistant from tests.common import MockConfigEntry, load_fixture @@ -30,6 +34,31 @@ class SonosMockSubscribe: """Initialize the mock subscriber.""" self.event_listener = SonosMockEventListener(ip_address) self.service = Mock() + self.callback_future: asyncio.Future[Callable[[SonosEvent], None]] = None + self._callback: Callable[[SonosEvent], None] | None = None + + @property + def callback(self) -> Callable[[SonosEvent], None] | None: + """Return the callback.""" + return self._callback + + @callback.setter + def callback(self, callback: Callable[[SonosEvent], None]) -> None: + """Set the callback.""" + 
self._callback = callback + future = self._get_callback_future() + if not future.done(): + future.set_result(callback) + + def _get_callback_future(self) -> asyncio.Future[Callable[[SonosEvent], None]]: + """Get the callback future.""" + if not self.callback_future: + self.callback_future = asyncio.get_running_loop().create_future() + return self.callback_future + + async def wait_for_callback_to_be_set(self) -> Callable[[SonosEvent], None]: + """Wait for the callback to be set.""" + return await self._get_callback_future() async def unsubscribe(self) -> None: """Unsubscribe mock.""" @@ -456,14 +485,14 @@ def zgs_discovery_fixture(): @pytest.fixture(name="fire_zgs_event") -def zgs_event_fixture(hass, soco, zgs_discovery): +def zgs_event_fixture(hass: HomeAssistant, soco: SoCo, zgs_discovery: str): """Create alarm_event fixture.""" variables = {"ZoneGroupState": zgs_discovery} async def _wrapper(): event = SonosMockEvent(soco, soco.zoneGroupTopology, variables) - subscription = soco.zoneGroupTopology.subscribe.return_value - sub_callback = subscription.callback + subscription: SonosMockSubscribe = soco.zoneGroupTopology.subscribe.return_value + sub_callback = await subscription.wait_for_callback_to_be_set() sub_callback(event) await hass.async_block_till_done() diff --git a/tests/components/sonos/test_repairs.py b/tests/components/sonos/test_repairs.py index cf64912e498..49b87b272d6 100644 --- a/tests/components/sonos/test_repairs.py +++ b/tests/components/sonos/test_repairs.py @@ -2,6 +2,8 @@ from unittest.mock import Mock +from soco import SoCo + from homeassistant.components.sonos.const import ( DOMAIN, SCAN_INTERVAL, @@ -11,27 +13,25 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers.issue_registry import async_get as async_get_issue_registry from homeassistant.util import dt as dt_util -from .conftest import SonosMockEvent +from .conftest import SonosMockEvent, SonosMockSubscribe from tests.common import MockConfigEntry, 
async_fire_time_changed async def test_subscription_repair_issues( - hass: HomeAssistant, config_entry: MockConfigEntry, soco, zgs_discovery + hass: HomeAssistant, config_entry: MockConfigEntry, soco: SoCo, zgs_discovery ) -> None: """Test repair issues handling for failed subscriptions.""" issue_registry = async_get_issue_registry(hass) - subscription = soco.zoneGroupTopology.subscribe.return_value + subscription: SonosMockSubscribe = soco.zoneGroupTopology.subscribe.return_value subscription.event_listener = Mock(address=("192.168.4.2", 1400)) config_entry.add_to_hass(hass) assert await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - await hass.async_block_till_done() # Ensure an issue is registered on subscription failure - sub_callback = subscription.callback + sub_callback = await subscription.wait_for_callback_to_be_set() async_fire_time_changed(hass, dt_util.utcnow() + SCAN_INTERVAL) await hass.async_block_till_done(wait_background_tasks=True) assert issue_registry.async_get_issue(DOMAIN, SUB_FAIL_ISSUE_ID) From c373d40e34af020a9ef74b1949bee93f6a9ef173 Mon Sep 17 00:00:00 2001 From: dotvav Date: Sun, 31 Mar 2024 11:38:59 +0200 Subject: [PATCH 049/426] Fix Overkiz Hitachi OVP air-to-air heat pump (#114487) Unpack command parameters instead of passing a list --- .../climate_entities/hitachi_air_to_air_heat_pump_ovp.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/homeassistant/components/overkiz/climate_entities/hitachi_air_to_air_heat_pump_ovp.py b/homeassistant/components/overkiz/climate_entities/hitachi_air_to_air_heat_pump_ovp.py index 86cde4fc4db..b4d6ab788a1 100644 --- a/homeassistant/components/overkiz/climate_entities/hitachi_air_to_air_heat_pump_ovp.py +++ b/homeassistant/components/overkiz/climate_entities/hitachi_air_to_air_heat_pump_ovp.py @@ -357,5 +357,5 @@ class HitachiAirToAirHeatPumpOVP(OverkizEntity, ClimateEntity): ] await self.executor.async_execute_command( - 
OverkizCommand.GLOBAL_CONTROL, command_data + OverkizCommand.GLOBAL_CONTROL, *command_data ) From 008e4413b5e579c426efb3cb2adcc8b400010049 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Sat, 30 Mar 2024 08:56:01 -1000 Subject: [PATCH 050/426] Fix late load of anyio doing blocking I/O in the event loop (#114491) * Fix late load of anyio doing blocking I/O in the event loop httpx loads anyio which loads the asyncio backend in the event loop as soon as httpx makes the first request * tweak --- homeassistant/bootstrap.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/homeassistant/bootstrap.py b/homeassistant/bootstrap.py index 03c0de1ff62..5b805b6138e 100644 --- a/homeassistant/bootstrap.py +++ b/homeassistant/bootstrap.py @@ -93,6 +93,11 @@ from .util.async_ import create_eager_task from .util.logging import async_activate_log_queue_handler from .util.package import async_get_user_site, is_virtual_env +with contextlib.suppress(ImportError): + # Ensure anyio backend is imported to avoid it being imported in the event loop + from anyio._backends import _asyncio # noqa: F401 + + if TYPE_CHECKING: from .runner import RuntimeConfig From e8afdd67d0efff4d0d0752cf2baead5545036045 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Sat, 30 Mar 2024 08:05:14 -1000 Subject: [PATCH 051/426] Fix workday doing blocking I/O in the event loop (#114492) --- homeassistant/components/workday/__init__.py | 16 +++++++++++----- 1 file changed, 11 insertions(+), 5 deletions(-) diff --git a/homeassistant/components/workday/__init__.py b/homeassistant/components/workday/__init__.py index 195221ef088..077a6710b8d 100644 --- a/homeassistant/components/workday/__init__.py +++ b/homeassistant/components/workday/__init__.py @@ -2,6 +2,8 @@ from __future__ import annotations +from functools import partial + from holidays import HolidayBase, country_holidays from homeassistant.config_entries import ConfigEntry @@ -13,7 +15,7 @@ from homeassistant.helpers.issue_registry import IssueSeverity, async_create_iss from .const import CONF_PROVINCE, DOMAIN, PLATFORMS -def _validate_country_and_province( +async def _async_validate_country_and_province( hass: HomeAssistant, entry: ConfigEntry, country: str | None, province: str | None ) -> None: """Validate country and province.""" @@ -21,7 +23,7 @@ def _validate_country_and_province( if not country: return try: - country_holidays(country) + await hass.async_add_executor_job(country_holidays, country) except NotImplementedError as ex: async_create_issue( hass, @@ -39,7 +41,9 @@ def _validate_country_and_province( if not province: return try: - country_holidays(country, subdiv=province) + await hass.async_add_executor_job( + partial(country_holidays, country, subdiv=province) + ) except NotImplementedError as ex: async_create_issue( hass, @@ -66,10 +70,12 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: country: str | None = entry.options.get(CONF_COUNTRY) province: str | None = entry.options.get(CONF_PROVINCE) - _validate_country_and_province(hass, entry, country, province) + await _async_validate_country_and_province(hass, entry, country, province) if country and CONF_LANGUAGE not in entry.options: - cls: HolidayBase = 
country_holidays(country, subdiv=province) + cls: HolidayBase = await hass.async_add_executor_job( + partial(country_holidays, country, subdiv=province) + ) default_language = cls.default_language new_options = entry.options.copy() new_options[CONF_LANGUAGE] = default_language From ef97255d9c0f67f4b79d02095f7eca7d44660705 Mon Sep 17 00:00:00 2001 From: G Johansson Date: Sun, 31 Mar 2024 20:08:43 +0200 Subject: [PATCH 052/426] Fix server update from breaking setup in Speedtest.NET (#114524) --- homeassistant/components/speedtestdotnet/__init__.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/speedtestdotnet/__init__.py b/homeassistant/components/speedtestdotnet/__init__.py index 831e66d1c4e..3c15f2fb820 100644 --- a/homeassistant/components/speedtestdotnet/__init__.py +++ b/homeassistant/components/speedtestdotnet/__init__.py @@ -25,10 +25,11 @@ async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> b partial(speedtest.Speedtest, secure=True) ) coordinator = SpeedTestDataCoordinator(hass, config_entry, api) - await hass.async_add_executor_job(coordinator.update_servers) except speedtest.SpeedtestException as err: raise ConfigEntryNotReady from err + hass.data[DOMAIN] = coordinator + async def _async_finish_startup(hass: HomeAssistant) -> None: """Run this only when HA has finished its startup.""" await coordinator.async_config_entry_first_refresh() @@ -36,8 +37,6 @@ async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> b # Don't start a speedtest during startup async_at_started(hass, _async_finish_startup) - hass.data[DOMAIN] = coordinator - await hass.config_entries.async_forward_entry_setups(config_entry, PLATFORMS) config_entry.async_on_unload(config_entry.add_update_listener(update_listener)) From 14dfb6a2552587e2ad2c6059616b5d4c5750f0b5 Mon Sep 17 00:00:00 2001 From: Robert Svensson Date: Mon, 1 Apr 2024 00:24:41 +0200 Subject: [PATCH 053/426] Bump axis to v60 
(#114544) * Improve Axis MQTT support * Bump axis to v60 --- homeassistant/components/axis/hub/hub.py | 5 +++-- homeassistant/components/axis/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- tests/components/axis/const.py | 1 + tests/components/axis/test_hub.py | 4 ++-- 6 files changed, 9 insertions(+), 7 deletions(-) diff --git a/homeassistant/components/axis/hub/hub.py b/homeassistant/components/axis/hub/hub.py index 08eb816f6ab..4abd1358417 100644 --- a/homeassistant/components/axis/hub/hub.py +++ b/homeassistant/components/axis/hub/hub.py @@ -116,7 +116,7 @@ class AxisHub: if status.status.state == ClientState.ACTIVE: self.config.entry.async_on_unload( await mqtt.async_subscribe( - hass, f"{self.api.vapix.serial_number}/#", self.mqtt_message + hass, f"{status.config.device_topic_prefix}/#", self.mqtt_message ) ) @@ -124,7 +124,8 @@ class AxisHub: def mqtt_message(self, message: ReceiveMessage) -> None: """Receive Axis MQTT message.""" self.disconnect_from_stream() - + if message.topic.endswith("event/connection"): + return event = mqtt_json_to_event(message.payload) self.api.event.handler(event) diff --git a/homeassistant/components/axis/manifest.json b/homeassistant/components/axis/manifest.json index f47d10df484..1065783d957 100644 --- a/homeassistant/components/axis/manifest.json +++ b/homeassistant/components/axis/manifest.json @@ -26,7 +26,7 @@ "iot_class": "local_push", "loggers": ["axis"], "quality_scale": "platinum", - "requirements": ["axis==59"], + "requirements": ["axis==60"], "ssdp": [ { "manufacturer": "AXIS" diff --git a/requirements_all.txt b/requirements_all.txt index da752f00279..1ef594ea50b 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -514,7 +514,7 @@ aurorapy==0.2.7 # avion==0.10 # homeassistant.components.axis -axis==59 +axis==60 # homeassistant.components.azure_event_hub azure-eventhub==5.11.1 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 4889e9de781..fc19ead8e95 
100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -454,7 +454,7 @@ auroranoaa==0.0.3 aurorapy==0.2.7 # homeassistant.components.axis -axis==59 +axis==60 # homeassistant.components.azure_event_hub azure-eventhub==5.11.1 diff --git a/tests/components/axis/const.py b/tests/components/axis/const.py index 7b881ea55e5..16b9d17f99e 100644 --- a/tests/components/axis/const.py +++ b/tests/components/axis/const.py @@ -74,6 +74,7 @@ MQTT_CLIENT_RESPONSE = { "status": {"state": "active", "connectionStatus": "Connected"}, "config": { "server": {"protocol": "tcp", "host": "192.168.0.90", "port": 1883}, + "deviceTopicPrefix": f"axis/{MAC}", }, }, } diff --git a/tests/components/axis/test_hub.py b/tests/components/axis/test_hub.py index 3291f88d90a..1ae6db05427 100644 --- a/tests/components/axis/test_hub.py +++ b/tests/components/axis/test_hub.py @@ -91,9 +91,9 @@ async def test_device_support_mqtt( hass: HomeAssistant, mqtt_mock: MqttMockHAClient, setup_config_entry ) -> None: """Successful setup.""" - mqtt_mock.async_subscribe.assert_called_with(f"{MAC}/#", mock.ANY, 0, "utf-8") + mqtt_mock.async_subscribe.assert_called_with(f"axis/{MAC}/#", mock.ANY, 0, "utf-8") - topic = f"{MAC}/event/tns:onvif/Device/tns:axis/Sensor/PIR/$source/sensor/0" + topic = f"axis/{MAC}/event/tns:onvif/Device/tns:axis/Sensor/PIR/$source/sensor/0" message = ( b'{"timestamp": 1590258472044, "topic": "onvif:Device/axis:Sensor/PIR",' b' "message": {"source": {"sensor": "0"}, "key": {}, "data": {"state": "1"}}}' From 71588b5c22a9f3bd2191cb680fb57b4a0fa61c3c Mon Sep 17 00:00:00 2001 From: Joost Lekkerkerker Date: Mon, 1 Apr 2024 15:53:14 +0200 Subject: [PATCH 054/426] Fix wrong icons (#114567) * Fix wrong icons * Fix wrong icons --- homeassistant/components/ffmpeg/icons.json | 2 +- homeassistant/components/input_select/icons.json | 2 +- homeassistant/components/media_player/icons.json | 2 +- homeassistant/components/synology_dsm/icons.json | 2 +- 
homeassistant/components/timer/icons.json | 2 +- homeassistant/components/xiaomi_miio/icons.json | 2 +- 6 files changed, 6 insertions(+), 6 deletions(-) diff --git a/homeassistant/components/ffmpeg/icons.json b/homeassistant/components/ffmpeg/icons.json index 3017b7dc0da..a23f024599c 100644 --- a/homeassistant/components/ffmpeg/icons.json +++ b/homeassistant/components/ffmpeg/icons.json @@ -1,7 +1,7 @@ { "services": { "restart": "mdi:restart", - "start": "mdi:start", + "start": "mdi:play", "stop": "mdi:stop" } } diff --git a/homeassistant/components/input_select/icons.json b/homeassistant/components/input_select/icons.json index 894b6be60dd..03b477ddb36 100644 --- a/homeassistant/components/input_select/icons.json +++ b/homeassistant/components/input_select/icons.json @@ -1,6 +1,6 @@ { "services": { - "select_next": "mdi:skip", + "select_next": "mdi:skip-next", "select_option": "mdi:check", "select_previous": "mdi:skip-previous", "select_first": "mdi:skip-backward", diff --git a/homeassistant/components/media_player/icons.json b/homeassistant/components/media_player/icons.json index e2769085833..847ce5989d6 100644 --- a/homeassistant/components/media_player/icons.json +++ b/homeassistant/components/media_player/icons.json @@ -52,7 +52,7 @@ "unjoin": "mdi:ungroup", "volume_down": "mdi:volume-minus", "volume_mute": "mdi:volume-mute", - "volume_set": "mdi:volume", + "volume_set": "mdi:volume-medium", "volume_up": "mdi:volume-plus" } } diff --git a/homeassistant/components/synology_dsm/icons.json b/homeassistant/components/synology_dsm/icons.json index bbdbc9d2c96..8b4fad457d5 100644 --- a/homeassistant/components/synology_dsm/icons.json +++ b/homeassistant/components/synology_dsm/icons.json @@ -75,7 +75,7 @@ } }, "services": { - "reboot": "mdi:reboot", + "reboot": "mdi:restart", "shutdown": "mdi:power" } } diff --git a/homeassistant/components/timer/icons.json b/homeassistant/components/timer/icons.json index 4cad5c119bd..1e352f7280b 100644 --- 
a/homeassistant/components/timer/icons.json +++ b/homeassistant/components/timer/icons.json @@ -1,6 +1,6 @@ { "services": { - "start": "mdi:start", + "start": "mdi:play", "pause": "mdi:pause", "cancel": "mdi:cancel", "finish": "mdi:check", diff --git a/homeassistant/components/xiaomi_miio/icons.json b/homeassistant/components/xiaomi_miio/icons.json index a9daaf9a61c..bbd3f6607d7 100644 --- a/homeassistant/components/xiaomi_miio/icons.json +++ b/homeassistant/components/xiaomi_miio/icons.json @@ -17,7 +17,7 @@ "switch_set_wifi_led_off": "mdi:wifi-off", "switch_set_power_price": "mdi:currency-usd", "switch_set_power_mode": "mdi:power", - "vacuum_remote_control_start": "mdi:start", + "vacuum_remote_control_start": "mdi:play", "vacuum_remote_control_stop": "mdi:stop", "vacuum_remote_control_move": "mdi:remote", "vacuum_remote_control_move_step": "mdi:remote", From fc24b61859cc6c1caa97e630364dfd1ff2c55d88 Mon Sep 17 00:00:00 2001 From: Maikel Punie Date: Mon, 1 Apr 2024 14:49:14 +0200 Subject: [PATCH 055/426] Bump velbusaio to 2024.4.0 (#114569) Bump valbusaio to 2024.4.0 --- homeassistant/components/velbus/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/velbus/manifest.json b/homeassistant/components/velbus/manifest.json index c5f9ccd3563..1c51c58d238 100644 --- a/homeassistant/components/velbus/manifest.json +++ b/homeassistant/components/velbus/manifest.json @@ -13,7 +13,7 @@ "velbus-packet", "velbus-protocol" ], - "requirements": ["velbus-aio==2023.12.0"], + "requirements": ["velbus-aio==2024.4.0"], "usb": [ { "vid": "10CF", diff --git a/requirements_all.txt b/requirements_all.txt index 1ef594ea50b..3222dc2460d 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2795,7 +2795,7 @@ vallox-websocket-api==5.1.1 vehicle==2.2.1 # homeassistant.components.velbus -velbus-aio==2023.12.0 +velbus-aio==2024.4.0 # homeassistant.components.venstar 
venstarcolortouch==0.19 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index fc19ead8e95..27e6f21027e 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2151,7 +2151,7 @@ vallox-websocket-api==5.1.1 vehicle==2.2.1 # homeassistant.components.velbus -velbus-aio==2023.12.0 +velbus-aio==2024.4.0 # homeassistant.components.venstar venstarcolortouch==0.19 From 25c611ffc4e397cf5259eb81e6240d9d0f6c831b Mon Sep 17 00:00:00 2001 From: Michael <35783820+mib1185@users.noreply.github.com> Date: Mon, 1 Apr 2024 21:59:06 +0200 Subject: [PATCH 056/426] Reduce usage of executer threads in AVM Fritz!Tools (#114570) * call entity state update calls in one executer task * remove not needed wrapping * mark as "non-public" method * add guard against changes on _entity_update_functions --- homeassistant/components/fritz/common.py | 23 ++++++++++++++--------- 1 file changed, 14 insertions(+), 9 deletions(-) diff --git a/homeassistant/components/fritz/common.py b/homeassistant/components/fritz/common.py index 8e773e74c75..5815f9abfc1 100644 --- a/homeassistant/components/fritz/common.py +++ b/homeassistant/components/fritz/common.py @@ -311,6 +311,17 @@ class FritzBoxTools( ) return unregister_entity_updates + def _entity_states_update(self) -> dict: + """Run registered entity update calls.""" + entity_states = {} + for key in list(self._entity_update_functions): + if (update_fn := self._entity_update_functions.get(key)) is not None: + _LOGGER.debug("update entity %s", key) + entity_states[key] = update_fn( + self.fritz_status, self.data["entity_states"].get(key) + ) + return entity_states + async def _async_update_data(self) -> UpdateCoordinatorDataType: """Update FritzboxTools data.""" entity_data: UpdateCoordinatorDataType = { @@ -319,15 +330,9 @@ class FritzBoxTools( } try: await self.async_scan_devices() - for key in list(self._entity_update_functions): - _LOGGER.debug("update entity %s", key) - entity_data["entity_states"][ - key - ] = 
await self.hass.async_add_executor_job( - self._entity_update_functions[key], - self.fritz_status, - self.data["entity_states"].get(key), - ) + entity_data["entity_states"] = await self.hass.async_add_executor_job( + self._entity_states_update + ) if self.has_call_deflections: entity_data[ "call_deflections" From bd6890ab83392f845b8b30ba2d26d3778b3c1f17 Mon Sep 17 00:00:00 2001 From: Michael <35783820+mib1185@users.noreply.github.com> Date: Tue, 2 Apr 2024 03:48:26 +0200 Subject: [PATCH 057/426] Filter out ignored entries in ssdp step of AVM Fritz!SmartHome (#114574) filter out ignored entries in ssdp step --- homeassistant/components/fritzbox/config_flow.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/homeassistant/components/fritzbox/config_flow.py b/homeassistant/components/fritzbox/config_flow.py index 377d46eceff..e32f27969a1 100644 --- a/homeassistant/components/fritzbox/config_flow.py +++ b/homeassistant/components/fritzbox/config_flow.py @@ -141,7 +141,7 @@ class FritzboxConfigFlow(ConfigFlow, domain=DOMAIN): return self.async_abort(reason="already_in_progress") # update old and user-configured config entries - for entry in self._async_current_entries(): + for entry in self._async_current_entries(include_ignore=False): if entry.data[CONF_HOST] == host: if uuid and not entry.unique_id: self.hass.config_entries.async_update_entry(entry, unique_id=uuid) From bb33725e7f966f63eb6595d825d2e028f55b8712 Mon Sep 17 00:00:00 2001 From: jjlawren Date: Mon, 1 Apr 2024 13:28:39 -0400 Subject: [PATCH 058/426] Bump plexapi to 4.15.11 (#114581) --- homeassistant/components/plex/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/plex/manifest.json b/homeassistant/components/plex/manifest.json index e33cbc2e0c1..85362371715 100644 --- a/homeassistant/components/plex/manifest.json +++ b/homeassistant/components/plex/manifest.json @@ -8,7 
+8,7 @@ "iot_class": "local_push", "loggers": ["plexapi", "plexwebsocket"], "requirements": [ - "PlexAPI==4.15.10", + "PlexAPI==4.15.11", "plexauth==0.0.6", "plexwebsocket==0.0.14" ], diff --git a/requirements_all.txt b/requirements_all.txt index 3222dc2460d..ced8c6dfec5 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -45,7 +45,7 @@ Mastodon.py==1.8.1 Pillow==10.2.0 # homeassistant.components.plex -PlexAPI==4.15.10 +PlexAPI==4.15.11 # homeassistant.components.progettihwsw ProgettiHWSW==0.1.3 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 27e6f21027e..b94616bd07b 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -39,7 +39,7 @@ HATasmota==0.8.0 Pillow==10.2.0 # homeassistant.components.plex -PlexAPI==4.15.10 +PlexAPI==4.15.11 # homeassistant.components.progettihwsw ProgettiHWSW==0.1.3 From ea13f102e01f35a662b0a03f207cf8d0d6f0f659 Mon Sep 17 00:00:00 2001 From: Martin Hjelmare Date: Tue, 2 Apr 2024 00:14:13 +0200 Subject: [PATCH 059/426] Fix reolink media source data access (#114593) * Add test * Fix reolink media source data access --- homeassistant/components/reolink/media_source.py | 16 ++++++++++------ tests/components/reolink/test_media_source.py | 11 +++++++++++ 2 files changed, 21 insertions(+), 6 deletions(-) diff --git a/homeassistant/components/reolink/media_source.py b/homeassistant/components/reolink/media_source.py index 84c844a0f92..c22a0fc28e7 100644 --- a/homeassistant/components/reolink/media_source.py +++ b/homeassistant/components/reolink/media_source.py @@ -46,7 +46,6 @@ class ReolinkVODMediaSource(MediaSource): """Initialize ReolinkVODMediaSource.""" super().__init__(DOMAIN) self.hass = hass - self.data: dict[str, ReolinkData] = hass.data[DOMAIN] async def async_resolve_media(self, item: MediaSourceItem) -> PlayMedia: """Resolve media to a url.""" @@ -57,7 +56,8 @@ class ReolinkVODMediaSource(MediaSource): _, config_entry_id, channel_str, stream_res, filename = identifier channel = 
int(channel_str) - host = self.data[config_entry_id].host + data: dict[str, ReolinkData] = self.hass.data[DOMAIN] + host = data[config_entry_id].host vod_type = VodRequestType.RTMP if host.api.is_nvr: @@ -130,7 +130,8 @@ class ReolinkVODMediaSource(MediaSource): if config_entry.state != ConfigEntryState.LOADED: continue channels: list[str] = [] - host = self.data[config_entry.entry_id].host + data: dict[str, ReolinkData] = self.hass.data[DOMAIN] + host = data[config_entry.entry_id].host entities = er.async_entries_for_config_entry( entity_reg, config_entry.entry_id ) @@ -187,7 +188,8 @@ class ReolinkVODMediaSource(MediaSource): self, config_entry_id: str, channel: int ) -> BrowseMediaSource: """Allow the user to select the high or low playback resolution, (low loads faster).""" - host = self.data[config_entry_id].host + data: dict[str, ReolinkData] = self.hass.data[DOMAIN] + host = data[config_entry_id].host main_enc = await host.api.get_encoding(channel, "main") if main_enc == "h265": @@ -236,7 +238,8 @@ class ReolinkVODMediaSource(MediaSource): self, config_entry_id: str, channel: int, stream: str ) -> BrowseMediaSource: """Return all days on which recordings are available for a reolink camera.""" - host = self.data[config_entry_id].host + data: dict[str, ReolinkData] = self.hass.data[DOMAIN] + host = data[config_entry_id].host # We want today of the camera, not necessarily today of the server now = host.api.time() or await host.api.async_get_time() @@ -288,7 +291,8 @@ class ReolinkVODMediaSource(MediaSource): day: int, ) -> BrowseMediaSource: """Return all recording files on a specific day of a Reolink camera.""" - host = self.data[config_entry_id].host + data: dict[str, ReolinkData] = self.hass.data[DOMAIN] + host = data[config_entry_id].host start = dt.datetime(year, month, day, hour=0, minute=0, second=0) end = dt.datetime(year, month, day, hour=23, minute=59, second=59) diff --git a/tests/components/reolink/test_media_source.py 
b/tests/components/reolink/test_media_source.py index 9c5aebed222..1eb45945eee 100644 --- a/tests/components/reolink/test_media_source.py +++ b/tests/components/reolink/test_media_source.py @@ -65,6 +65,17 @@ async def setup_component(hass: HomeAssistant) -> None: assert await async_setup_component(hass, MEDIA_STREAM_DOMAIN, {}) +async def test_platform_loads_before_config_entry( + hass: HomeAssistant, + mock_setup_entry: AsyncMock, +) -> None: + """Test that the platform can be loaded before the config entry.""" + # Fake that the config entry is not loaded before the media_source platform + assert await async_setup_component(hass, DOMAIN, {}) + await hass.async_block_till_done() + assert mock_setup_entry.call_count == 0 + + async def test_resolve( hass: HomeAssistant, reolink_connect: MagicMock, From 112aab47fb4b0657fee8c388631f4652aa67a864 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Mon, 1 Apr 2024 11:18:26 -1000 Subject: [PATCH 060/426] Bump zeroconf to 0.132.0 (#114596) changelog: https://github.com/python-zeroconf/python-zeroconf/compare/0.131.0...0.132.0 --- homeassistant/components/zeroconf/manifest.json | 2 +- homeassistant/package_constraints.txt | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/zeroconf/manifest.json b/homeassistant/components/zeroconf/manifest.json index aecc88968f3..7c489517dd7 100644 --- a/homeassistant/components/zeroconf/manifest.json +++ b/homeassistant/components/zeroconf/manifest.json @@ -8,5 +8,5 @@ "iot_class": "local_push", "loggers": ["zeroconf"], "quality_scale": "internal", - "requirements": ["zeroconf==0.131.0"] + "requirements": ["zeroconf==0.132.0"] } diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index bdfaa8fcf45..9621137d855 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -60,7 +60,7 @@ voluptuous-serialize==2.6.0 
voluptuous==0.13.1 webrtc-noise-gain==1.2.3 yarl==1.9.4 -zeroconf==0.131.0 +zeroconf==0.132.0 # Constrain pycryptodome to avoid vulnerability # see https://github.com/home-assistant/core/pull/16238 diff --git a/requirements_all.txt b/requirements_all.txt index ced8c6dfec5..77c6247e856 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2925,7 +2925,7 @@ zamg==0.3.6 zengge==0.2 # homeassistant.components.zeroconf -zeroconf==0.131.0 +zeroconf==0.132.0 # homeassistant.components.zeversolar zeversolar==0.3.1 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index b94616bd07b..648fd38f1cb 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2260,7 +2260,7 @@ yt-dlp==2024.03.10 zamg==0.3.6 # homeassistant.components.zeroconf -zeroconf==0.131.0 +zeroconf==0.132.0 # homeassistant.components.zeversolar zeversolar==0.3.1 From 43631d5944ea13125eab8ec69710307adae7d2de Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Mon, 1 Apr 2024 15:37:30 -1000 Subject: [PATCH 061/426] Add missing platforms_exist guard to check_config (#114600) * Add missing platforms_exist guard to check_config related issue #112811 When the exception hits, the config will end up being saved in the traceback so the memory is never released. This matches the check_config code to homeassistant.config to avoid having the exception thrown. 
* patch * merge branch --- homeassistant/helpers/check_config.py | 19 ++++++++++--------- tests/helpers/test_check_config.py | 2 ++ 2 files changed, 12 insertions(+), 9 deletions(-) diff --git a/homeassistant/helpers/check_config.py b/homeassistant/helpers/check_config.py index 8537f442595..78dddb12381 100644 --- a/homeassistant/helpers/check_config.py +++ b/homeassistant/helpers/check_config.py @@ -198,15 +198,16 @@ async def async_check_ha_config_file( # noqa: C901 # Check if the integration has a custom config validator config_validator = None - try: - config_validator = await integration.async_get_platform("config") - except ImportError as err: - # Filter out import error of the config platform. - # If the config platform contains bad imports, make sure - # that still fails. - if err.name != f"{integration.pkg_path}.config": - result.add_error(f"Error importing config platform {domain}: {err}") - continue + if integration.platforms_exists(("config",)): + try: + config_validator = await integration.async_get_platform("config") + except ImportError as err: + # Filter out import error of the config platform. + # If the config platform contains bad imports, make sure + # that still fails. 
+ if err.name != f"{integration.pkg_path}.config": + result.add_error(f"Error importing config platform {domain}: {err}") + continue if config_validator is not None and hasattr( config_validator, "async_validate_config" diff --git a/tests/helpers/test_check_config.py b/tests/helpers/test_check_config.py index fd94c453e51..de7edf42dc2 100644 --- a/tests/helpers/test_check_config.py +++ b/tests/helpers/test_check_config.py @@ -350,6 +350,7 @@ async def test_config_platform_import_error(hass: HomeAssistant) -> None: side_effect=ImportError("blablabla"), ), patch("os.path.isfile", return_value=True), + patch("homeassistant.loader.Integration.platforms_exists", return_value=True), patch_yaml_files(files), ): res = await async_check_ha_config_file(hass) @@ -373,6 +374,7 @@ async def test_platform_import_error(hass: HomeAssistant) -> None: "homeassistant.loader.Integration.async_get_platform", side_effect=[None, ImportError("blablabla")], ), + patch("homeassistant.loader.Integration.platforms_exists", return_value=True), patch("os.path.isfile", return_value=True), patch_yaml_files(files), ): From 623d85ecaac3e5a8955c9719d101d5c92931a164 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Mon, 1 Apr 2024 15:36:48 -1000 Subject: [PATCH 062/426] Fix memory leak when importing a platform fails (#114602) * Fix memory leak when importing a platform fails re-raising ImportError would trigger a memory leak * fixes, coverage * Apply suggestions from code review --- homeassistant/loader.py | 31 ++++++------ tests/test_loader.py | 107 +++++++++++++++++++++++++++++++--------- 2 files changed, 98 insertions(+), 40 deletions(-) diff --git a/homeassistant/loader.py b/homeassistant/loader.py index f462ea16886..48fd3cd54c2 100644 --- a/homeassistant/loader.py +++ b/homeassistant/loader.py @@ -750,9 +750,7 @@ class Integration: self._import_futures: dict[str, asyncio.Future[ModuleType]] = {} cache: dict[str, ModuleType | ComponentProtocol] = hass.data[DATA_COMPONENTS] self._cache = cache - missing_platforms_cache: dict[str, ImportError] = hass.data[ - DATA_MISSING_PLATFORMS - ] + missing_platforms_cache: dict[str, bool] = hass.data[DATA_MISSING_PLATFORMS] self._missing_platforms_cache = missing_platforms_cache self._top_level_files = top_level_files or set() _LOGGER.info("Loaded %s from %s", self.domain, pkg_path) @@ -1085,8 +1083,7 @@ class Integration: import_futures: list[tuple[str, asyncio.Future[ModuleType]]] = [] for platform_name in platform_names: - full_name = f"{domain}.{platform_name}" - if platform := self._get_platform_cached_or_raise(full_name): + if platform := self._get_platform_cached_or_raise(platform_name): platforms[platform_name] = platform continue @@ -1095,6 +1092,7 @@ class Integration: in_progress_imports[platform_name] = future continue + full_name = f"{domain}.{platform_name}" if ( self.import_executor and full_name not in self.hass.config.components @@ -1166,14 +1164,18 @@ class Integration: return platforms - def _get_platform_cached_or_raise(self, full_name: str) -> ModuleType | None: + def _get_platform_cached_or_raise(self, platform_name: str) -> ModuleType | None: """Return a platform for an integration from 
cache.""" + full_name = f"{self.domain}.{platform_name}" if full_name in self._cache: # the cache is either a ModuleType or a ComponentProtocol # but we only care about the ModuleType here return self._cache[full_name] # type: ignore[return-value] if full_name in self._missing_platforms_cache: - raise self._missing_platforms_cache[full_name] + raise ModuleNotFoundError( + f"Platform {full_name} not found", + name=f"{self.pkg_path}.{platform_name}", + ) return None def platforms_are_loaded(self, platform_names: Iterable[str]) -> bool: @@ -1189,9 +1191,7 @@ class Integration: def get_platform(self, platform_name: str) -> ModuleType: """Return a platform for an integration.""" - if platform := self._get_platform_cached_or_raise( - f"{self.domain}.{platform_name}" - ): + if platform := self._get_platform_cached_or_raise(platform_name): return platform return self._load_platform(platform_name) @@ -1212,10 +1212,7 @@ class Integration: ): existing_platforms.append(platform_name) continue - missing_platforms[full_name] = ModuleNotFoundError( - f"Platform {full_name} not found", - name=f"{self.pkg_path}.{platform_name}", - ) + missing_platforms[full_name] = True return existing_platforms @@ -1233,11 +1230,13 @@ class Integration: cache: dict[str, ModuleType] = self.hass.data[DATA_COMPONENTS] try: cache[full_name] = self._import_platform(platform_name) - except ImportError as ex: + except ModuleNotFoundError: if self.domain in cache: # If the domain is loaded, cache that the platform # does not exist so we do not try to load it again - self._missing_platforms_cache[full_name] = ex + self._missing_platforms_cache[full_name] = True + raise + except ImportError: raise except RuntimeError as err: # _DeadlockError inherits from RuntimeError diff --git a/tests/test_loader.py b/tests/test_loader.py index 9e191ee9e00..6685bb4f2ac 100644 --- a/tests/test_loader.py +++ b/tests/test_loader.py @@ -274,7 +274,61 @@ async def test_get_integration_exceptions(hass: HomeAssistant) -> None: 
async def test_get_platform_caches_failures_when_component_loaded( hass: HomeAssistant, ) -> None: - """Test get_platform cache failures only when the component is loaded.""" + """Test get_platform caches failures only when the component is loaded. + + Only ModuleNotFoundError is cached, ImportError is not cached. + """ + integration = await loader.async_get_integration(hass, "hue") + + with ( + pytest.raises(ModuleNotFoundError), + patch( + "homeassistant.loader.importlib.import_module", + side_effect=ModuleNotFoundError("Boom"), + ), + ): + assert integration.get_component() == hue + + with ( + pytest.raises(ModuleNotFoundError), + patch( + "homeassistant.loader.importlib.import_module", + side_effect=ModuleNotFoundError("Boom"), + ), + ): + assert integration.get_platform("light") == hue_light + + # Hue is not loaded so we should still hit the import_module path + with ( + pytest.raises(ModuleNotFoundError), + patch( + "homeassistant.loader.importlib.import_module", + side_effect=ModuleNotFoundError("Boom"), + ), + ): + assert integration.get_platform("light") == hue_light + + assert integration.get_component() == hue + + # Hue is loaded so we should cache the import_module failure now + with ( + pytest.raises(ModuleNotFoundError), + patch( + "homeassistant.loader.importlib.import_module", + side_effect=ModuleNotFoundError("Boom"), + ), + ): + assert integration.get_platform("light") == hue_light + + # Hue is loaded and the last call should have cached the import_module failure + with pytest.raises(ModuleNotFoundError): + assert integration.get_platform("light") == hue_light + + +async def test_get_platform_only_cached_module_not_found_when_component_loaded( + hass: HomeAssistant, +) -> None: + """Test get_platform cache only cache module not found when the component is loaded.""" integration = await loader.async_get_integration(hass, "hue") with ( @@ -317,41 +371,43 @@ async def test_get_platform_caches_failures_when_component_loaded( ): assert 
integration.get_platform("light") == hue_light - # Hue is loaded and the last call should have cached the import_module failure - with pytest.raises(ImportError): - assert integration.get_platform("light") == hue_light + # ImportError is not cached because we only cache ModuleNotFoundError + assert integration.get_platform("light") == hue_light async def test_async_get_platform_caches_failures_when_component_loaded( hass: HomeAssistant, ) -> None: - """Test async_get_platform cache failures only when the component is loaded.""" + """Test async_get_platform caches failures only when the component is loaded. + + Only ModuleNotFoundError is cached, ImportError is not cached. + """ integration = await loader.async_get_integration(hass, "hue") with ( - pytest.raises(ImportError), + pytest.raises(ModuleNotFoundError), patch( "homeassistant.loader.importlib.import_module", - side_effect=ImportError("Boom"), + side_effect=ModuleNotFoundError("Boom"), ), ): assert integration.get_component() == hue with ( - pytest.raises(ImportError), + pytest.raises(ModuleNotFoundError), patch( "homeassistant.loader.importlib.import_module", - side_effect=ImportError("Boom"), + side_effect=ModuleNotFoundError("Boom"), ), ): assert await integration.async_get_platform("light") == hue_light # Hue is not loaded so we should still hit the import_module path with ( - pytest.raises(ImportError), + pytest.raises(ModuleNotFoundError), patch( "homeassistant.loader.importlib.import_module", - side_effect=ImportError("Boom"), + side_effect=ModuleNotFoundError("Boom"), ), ): assert await integration.async_get_platform("light") == hue_light @@ -360,16 +416,16 @@ async def test_async_get_platform_caches_failures_when_component_loaded( # Hue is loaded so we should cache the import_module failure now with ( - pytest.raises(ImportError), + pytest.raises(ModuleNotFoundError), patch( "homeassistant.loader.importlib.import_module", - side_effect=ImportError("Boom"), + side_effect=ModuleNotFoundError("Boom"), 
), ): assert await integration.async_get_platform("light") == hue_light # Hue is loaded and the last call should have cached the import_module failure - with pytest.raises(ImportError): + with pytest.raises(ModuleNotFoundError): assert await integration.async_get_platform("light") == hue_light # The cache should never be filled because the import error is remembered @@ -379,33 +435,36 @@ async def test_async_get_platform_caches_failures_when_component_loaded( async def test_async_get_platforms_caches_failures_when_component_loaded( hass: HomeAssistant, ) -> None: - """Test async_get_platforms cache failures only when the component is loaded.""" + """Test async_get_platforms cache failures only when the component is loaded. + + Only ModuleNotFoundError is cached, ImportError is not cached. + """ integration = await loader.async_get_integration(hass, "hue") with ( - pytest.raises(ImportError), + pytest.raises(ModuleNotFoundError), patch( "homeassistant.loader.importlib.import_module", - side_effect=ImportError("Boom"), + side_effect=ModuleNotFoundError("Boom"), ), ): assert integration.get_component() == hue with ( - pytest.raises(ImportError), + pytest.raises(ModuleNotFoundError), patch( "homeassistant.loader.importlib.import_module", - side_effect=ImportError("Boom"), + side_effect=ModuleNotFoundError("Boom"), ), ): assert await integration.async_get_platforms(["light"]) == {"light": hue_light} # Hue is not loaded so we should still hit the import_module path with ( - pytest.raises(ImportError), + pytest.raises(ModuleNotFoundError), patch( "homeassistant.loader.importlib.import_module", - side_effect=ImportError("Boom"), + side_effect=ModuleNotFoundError("Boom"), ), ): assert await integration.async_get_platforms(["light"]) == {"light": hue_light} @@ -414,16 +473,16 @@ async def test_async_get_platforms_caches_failures_when_component_loaded( # Hue is loaded so we should cache the import_module failure now with ( - pytest.raises(ImportError), + 
pytest.raises(ModuleNotFoundError), patch( "homeassistant.loader.importlib.import_module", - side_effect=ImportError("Boom"), + side_effect=ModuleNotFoundError("Boom"), ), ): assert await integration.async_get_platforms(["light"]) == {"light": hue_light} # Hue is loaded and the last call should have cached the import_module failure - with pytest.raises(ImportError): + with pytest.raises(ModuleNotFoundError): assert await integration.async_get_platforms(["light"]) == {"light": hue_light} # The cache should never be filled because the import error is remembered From 52612b10fdd6cf32e7f06a04ce5dd1fbfc66512d Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Mon, 1 Apr 2024 15:35:38 -1000 Subject: [PATCH 063/426] Avoid storing raw extracted traceback in system_log (#114603) This is never actually used and takes up quite a bit of ram --- homeassistant/components/system_log/__init__.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/homeassistant/components/system_log/__init__.py b/homeassistant/components/system_log/__init__.py index 77f0b095a30..423f5c6f5d8 100644 --- a/homeassistant/components/system_log/__init__.py +++ b/homeassistant/components/system_log/__init__.py @@ -166,7 +166,6 @@ class LogEntry: "level", "message", "exception", - "extracted_tb", "root_cause", "source", "count", @@ -200,7 +199,6 @@ class LogEntry: else: self.source = (record.pathname, record.lineno) self.count = 1 - self.extracted_tb = extracted_tb self.key = (self.name, self.source, self.root_cause) def to_dict(self) -> dict[str, Any]: From bc21836e7eeadcb593cb3b37107e60912cf16b7a Mon Sep 17 00:00:00 2001 From: mkmer Date: Mon, 1 Apr 2024 21:47:30 -0400 Subject: [PATCH 064/426] Bump whirlpool-sixth-sense to 0.18.7 (#114606) Bump sixth-sense to 0.18.7 --- homeassistant/components/whirlpool/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/whirlpool/manifest.json 
b/homeassistant/components/whirlpool/manifest.json index 0c46580ceeb..ee7861588ed 100644 --- a/homeassistant/components/whirlpool/manifest.json +++ b/homeassistant/components/whirlpool/manifest.json @@ -7,5 +7,5 @@ "integration_type": "hub", "iot_class": "cloud_push", "loggers": ["whirlpool"], - "requirements": ["whirlpool-sixth-sense==0.18.6"] + "requirements": ["whirlpool-sixth-sense==0.18.7"] } diff --git a/requirements_all.txt b/requirements_all.txt index 77c6247e856..8fb57f0834a 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2847,7 +2847,7 @@ webmin-xmlrpc==0.0.2 webrtc-noise-gain==1.2.3 # homeassistant.components.whirlpool -whirlpool-sixth-sense==0.18.6 +whirlpool-sixth-sense==0.18.7 # homeassistant.components.whois whois==0.9.27 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 648fd38f1cb..6eb3c89e09b 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2194,7 +2194,7 @@ webmin-xmlrpc==0.0.2 webrtc-noise-gain==1.2.3 # homeassistant.components.whirlpool -whirlpool-sixth-sense==0.18.6 +whirlpool-sixth-sense==0.18.7 # homeassistant.components.whois whois==0.9.27 From 7164993562d0f4bd008da234bc9954915a7af158 Mon Sep 17 00:00:00 2001 From: Paulus Schoutsen Date: Tue, 2 Apr 2024 01:51:42 +0000 Subject: [PATCH 065/426] Bump version to 2024.4.0b4 --- homeassistant/const.py | 2 +- pyproject.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/homeassistant/const.py b/homeassistant/const.py index f56ce656157..a69c4c84e27 100644 --- a/homeassistant/const.py +++ b/homeassistant/const.py @@ -18,7 +18,7 @@ from .util.signal_type import SignalType APPLICATION_NAME: Final = "HomeAssistant" MAJOR_VERSION: Final = 2024 MINOR_VERSION: Final = 4 -PATCH_VERSION: Final = "0b3" +PATCH_VERSION: Final = "0b4" __short_version__: Final = f"{MAJOR_VERSION}.{MINOR_VERSION}" __version__: Final = f"{__short_version__}.{PATCH_VERSION}" REQUIRED_PYTHON_VER: Final[tuple[int, int, int]] = (3, 12, 0) diff 
--git a/pyproject.toml b/pyproject.toml index 73bfdd6d5d7..11007e624dc 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta" [project] name = "homeassistant" -version = "2024.4.0b3" +version = "2024.4.0b4" license = {text = "Apache-2.0"} description = "Open-source home automation platform running on Python 3." readme = "README.rst" From a6076a0d33326f467f59c017a8f4d9d0d452ff7e Mon Sep 17 00:00:00 2001 From: Pete Sage <76050312+PeteRager@users.noreply.github.com> Date: Tue, 2 Apr 2024 04:11:45 -0400 Subject: [PATCH 066/426] Display sonos album title with URL encoding (#113693) * unescape the title When extracting the title from the item_id, it needs to be unescaped. * sort imports --- .../components/sonos/media_browser.py | 2 +- tests/components/sonos/test_media_browser.py | 96 +++++++++++++++++++ 2 files changed, 97 insertions(+), 1 deletion(-) create mode 100644 tests/components/sonos/test_media_browser.py diff --git a/homeassistant/components/sonos/media_browser.py b/homeassistant/components/sonos/media_browser.py index 17327bf4be1..9d3ef5d353b 100644 --- a/homeassistant/components/sonos/media_browser.py +++ b/homeassistant/components/sonos/media_browser.py @@ -201,7 +201,7 @@ def build_item_response( if not title: try: - title = payload["idstring"].split("/")[1] + title = urllib.parse.unquote(payload["idstring"].split("/")[1]) except IndexError: title = LIBRARY_TITLES_MAPPING[payload["idstring"]] diff --git a/tests/components/sonos/test_media_browser.py b/tests/components/sonos/test_media_browser.py new file mode 100644 index 00000000000..cb6303c800d --- /dev/null +++ b/tests/components/sonos/test_media_browser.py @@ -0,0 +1,96 @@ +"""Tests for the Sonos Media Browser.""" + +from functools import partial + +from homeassistant.components.media_player.browse_media import BrowseMedia +from homeassistant.components.media_player.const import MediaClass, MediaType +from homeassistant.components.sonos.media_browser 
import ( + build_item_response, + get_thumbnail_url_full, +) +from homeassistant.core import HomeAssistant + +from .conftest import SoCoMockFactory + + +class MockMusicServiceItem: + """Mocks a Soco MusicServiceItem.""" + + def __init__( + self, + title: str, + item_id: str, + parent_id: str, + item_class: str, + ) -> None: + """Initialize the mock item.""" + self.title = title + self.item_id = item_id + self.item_class = item_class + self.parent_id = parent_id + + def get_uri(self) -> str: + """Return URI.""" + return self.item_id.replace("S://", "x-file-cifs://") + + +def mock_browse_by_idstring( + search_type: str, idstring: str, start=0, max_items=100, full_album_art_uri=False +) -> list[MockMusicServiceItem]: + """Mock the call to browse_by_id_string.""" + if search_type == "albums" and ( + idstring == "A:ALBUM/Abbey%20Road" or idstring == "A:ALBUM/Abbey Road" + ): + return [ + MockMusicServiceItem( + "Come Together", + "S://192.168.42.10/music/The%20Beatles/Abbey%20Road/01%20Come%20Together.mp3", + "A:ALBUM/Abbey%20Road", + "object.item.audioItem.musicTrack", + ), + MockMusicServiceItem( + "Something", + "S://192.168.42.10/music/The%20Beatles/Abbey%20Road/03%20Something.mp3", + "A:ALBUM/Abbey%20Road", + "object.item.audioItem.musicTrack", + ), + ] + return None + + +async def test_build_item_response( + hass: HomeAssistant, + soco_factory: SoCoMockFactory, + async_autosetup_sonos, + soco, + discover, +) -> None: + """Test building a browse item response.""" + soco_mock = soco_factory.mock_list.get("192.168.42.2") + soco_mock.music_library.browse_by_idstring = mock_browse_by_idstring + browse_item: BrowseMedia = build_item_response( + soco_mock.music_library, + {"search_type": MediaType.ALBUM, "idstring": "A:ALBUM/Abbey%20Road"}, + partial( + get_thumbnail_url_full, + soco_mock.music_library, + True, + None, + ), + ) + assert browse_item.title == "Abbey Road" + assert browse_item.media_class == MediaClass.ALBUM + assert browse_item.media_content_id == 
"A:ALBUM/Abbey%20Road" + assert len(browse_item.children) == 2 + assert browse_item.children[0].media_class == MediaClass.TRACK + assert browse_item.children[0].title == "Come Together" + assert ( + browse_item.children[0].media_content_id + == "x-file-cifs://192.168.42.10/music/The%20Beatles/Abbey%20Road/01%20Come%20Together.mp3" + ) + assert browse_item.children[1].media_class == MediaClass.TRACK + assert browse_item.children[1].title == "Something" + assert ( + browse_item.children[1].media_content_id + == "x-file-cifs://192.168.42.10/music/The%20Beatles/Abbey%20Road/03%20Something.mp3" + ) From 18b6de567d4f481c65368452c49e483117eec721 Mon Sep 17 00:00:00 2001 From: Joost Lekkerkerker Date: Tue, 2 Apr 2024 11:15:52 +0200 Subject: [PATCH 067/426] Bump roombapy to 1.8.1 (#114478) * Bump roombapy to 1.7.0 * Bump * Bump * Fix --- homeassistant/components/roomba/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- tests/components/roomba/test_config_flow.py | 6 +++--- 4 files changed, 6 insertions(+), 6 deletions(-) diff --git a/homeassistant/components/roomba/manifest.json b/homeassistant/components/roomba/manifest.json index ae08d8f6a1f..a697680b379 100644 --- a/homeassistant/components/roomba/manifest.json +++ b/homeassistant/components/roomba/manifest.json @@ -24,7 +24,7 @@ "documentation": "https://www.home-assistant.io/integrations/roomba", "iot_class": "local_push", "loggers": ["paho_mqtt", "roombapy"], - "requirements": ["roombapy==1.6.13"], + "requirements": ["roombapy==1.8.1"], "zeroconf": [ { "type": "_amzn-alexa._tcp.local.", diff --git a/requirements_all.txt b/requirements_all.txt index 8fb57f0834a..a98a76af9b0 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2462,7 +2462,7 @@ rokuecp==0.19.2 romy==0.0.7 # homeassistant.components.roomba -roombapy==1.6.13 +roombapy==1.8.1 # homeassistant.components.roon roonapi==0.1.6 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 
6eb3c89e09b..3937eae1e53 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1896,7 +1896,7 @@ rokuecp==0.19.2 romy==0.0.7 # homeassistant.components.roomba -roombapy==1.6.13 +roombapy==1.8.1 # homeassistant.components.roon roonapi==0.1.6 diff --git a/tests/components/roomba/test_config_flow.py b/tests/components/roomba/test_config_flow.py index 2eaf3b14e38..282884c0be3 100644 --- a/tests/components/roomba/test_config_flow.py +++ b/tests/components/roomba/test_config_flow.py @@ -99,12 +99,12 @@ def _mocked_discovery(*_): roomba = RoombaInfo( hostname="irobot-BLID", - robotname="robot_name", + robot_name="robot_name", ip=MOCK_IP, mac="mac", - sw="firmware", + firmware="firmware", sku="sku", - cap={"cap": 1}, + capabilities={"cap": 1}, ) roomba_discovery.get_all = MagicMock(return_value=[roomba]) From 7b84e86f898efa14dca4e74323d3aecff1f0e703 Mon Sep 17 00:00:00 2001 From: Maciej Bieniek Date: Tue, 2 Apr 2024 10:15:58 +0200 Subject: [PATCH 068/426] Improve Shelly RPC device update progress (#114566) Co-authored-by: Shay Levy Co-authored-by: Maciej Bieniek <478555+bieniu@users.noreply.github.com> --- homeassistant/components/shelly/update.py | 16 ++++++++++------ tests/components/shelly/test_update.py | 19 +++++++++++-------- 2 files changed, 21 insertions(+), 14 deletions(-) diff --git a/homeassistant/components/shelly/update.py b/homeassistant/components/shelly/update.py index f6a89c5381b..56ad1f2ef67 100644 --- a/homeassistant/components/shelly/update.py +++ b/homeassistant/components/shelly/update.py @@ -222,7 +222,7 @@ class RpcUpdateEntity(ShellyRpcAttributeEntity, UpdateEntity): ) -> None: """Initialize update entity.""" super().__init__(coordinator, key, attribute, description) - self._ota_in_progress: bool = False + self._ota_in_progress: bool | int = False self._attr_release_url = get_release_url( coordinator.device.gen, coordinator.model, description.beta ) @@ -237,14 +237,13 @@ class RpcUpdateEntity(ShellyRpcAttributeEntity, 
UpdateEntity): @callback def _ota_progress_callback(self, event: dict[str, Any]) -> None: """Handle device OTA progress.""" - if self._ota_in_progress: + if self.in_progress is not False: event_type = event["event"] if event_type == OTA_BEGIN: - self._attr_in_progress = 0 + self._ota_in_progress = 0 elif event_type == OTA_PROGRESS: - self._attr_in_progress = event["progress_percent"] + self._ota_in_progress = event["progress_percent"] elif event_type in (OTA_ERROR, OTA_SUCCESS): - self._attr_in_progress = False self._ota_in_progress = False self.async_write_ha_state() @@ -262,6 +261,11 @@ class RpcUpdateEntity(ShellyRpcAttributeEntity, UpdateEntity): return self.installed_version + @property + def in_progress(self) -> bool | int: + """Update installation in progress.""" + return self._ota_in_progress + async def async_install( self, version: str | None, backup: bool, **kwargs: Any ) -> None: @@ -292,7 +296,7 @@ class RpcUpdateEntity(ShellyRpcAttributeEntity, UpdateEntity): await self.coordinator.async_shutdown_device_and_start_reauth() else: self._ota_in_progress = True - LOGGER.debug("OTA update call successful") + LOGGER.info("OTA update call for %s successful", self.coordinator.name) class RpcSleepingUpdateEntity( diff --git a/tests/components/shelly/test_update.py b/tests/components/shelly/test_update.py index 387dc93e33e..f3960620a21 100644 --- a/tests/components/shelly/test_update.py +++ b/tests/components/shelly/test_update.py @@ -255,6 +255,16 @@ async def test_rpc_update( {ATTR_ENTITY_ID: entity_id}, blocking=True, ) + + assert mock_rpc_device.trigger_ota_update.call_count == 1 + + state = hass.states.get(entity_id) + assert state.state == STATE_ON + assert state.attributes[ATTR_INSTALLED_VERSION] == "1" + assert state.attributes[ATTR_LATEST_VERSION] == "2" + assert state.attributes[ATTR_IN_PROGRESS] is True + assert state.attributes[ATTR_RELEASE_URL] == GEN2_RELEASE_URL + inject_rpc_device_event( monkeypatch, mock_rpc_device, @@ -270,14 +280,7 @@ async 
def test_rpc_update( }, ) - assert mock_rpc_device.trigger_ota_update.call_count == 1 - - state = hass.states.get(entity_id) - assert state.state == STATE_ON - assert state.attributes[ATTR_INSTALLED_VERSION] == "1" - assert state.attributes[ATTR_LATEST_VERSION] == "2" - assert state.attributes[ATTR_IN_PROGRESS] == 0 - assert state.attributes[ATTR_RELEASE_URL] == GEN2_RELEASE_URL + assert hass.states.get(entity_id).attributes[ATTR_IN_PROGRESS] == 0 inject_rpc_device_event( monkeypatch, From e5a620545c49a9181627f606a0a431dc4df9dd46 Mon Sep 17 00:00:00 2001 From: Brett Adams Date: Tue, 2 Apr 2024 20:23:08 +1000 Subject: [PATCH 069/426] Fix battery heater in Tessie (#114568) --- homeassistant/components/tessie/binary_sensor.py | 2 +- homeassistant/components/tessie/strings.json | 2 +- tests/components/tessie/snapshots/test_binary_sensors.ambr | 4 ++-- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/tessie/binary_sensor.py b/homeassistant/components/tessie/binary_sensor.py index 015fa63736f..9b7d6861dfb 100644 --- a/homeassistant/components/tessie/binary_sensor.py +++ b/homeassistant/components/tessie/binary_sensor.py @@ -34,7 +34,7 @@ DESCRIPTIONS: tuple[TessieBinarySensorEntityDescription, ...] 
= ( is_on=lambda x: x == TessieState.ONLINE, ), TessieBinarySensorEntityDescription( - key="charge_state_battery_heater_on", + key="climate_state_battery_heater", device_class=BinarySensorDeviceClass.HEAT, entity_category=EntityCategory.DIAGNOSTIC, ), diff --git a/homeassistant/components/tessie/strings.json b/homeassistant/components/tessie/strings.json index 62de4f276f4..8e1e47f934f 100644 --- a/homeassistant/components/tessie/strings.json +++ b/homeassistant/components/tessie/strings.json @@ -252,7 +252,7 @@ "state": { "name": "Status" }, - "charge_state_battery_heater_on": { + "climate_state_battery_heater": { "name": "Battery heater" }, "charge_state_charge_enable_request": { diff --git a/tests/components/tessie/snapshots/test_binary_sensors.ambr b/tests/components/tessie/snapshots/test_binary_sensors.ambr index 854e1350234..7bc191de6ed 100644 --- a/tests/components/tessie/snapshots/test_binary_sensors.ambr +++ b/tests/components/tessie/snapshots/test_binary_sensors.ambr @@ -165,8 +165,8 @@ 'platform': 'tessie', 'previous_unique_id': None, 'supported_features': 0, - 'translation_key': 'charge_state_battery_heater_on', - 'unique_id': 'VINVINVIN-charge_state_battery_heater_on', + 'translation_key': 'climate_state_battery_heater', + 'unique_id': 'VINVINVIN-climate_state_battery_heater', 'unit_of_measurement': None, }) # --- From 384d10a51d298f7bddbd6c16dfc07bc3c4bef165 Mon Sep 17 00:00:00 2001 From: mkmer Date: Tue, 2 Apr 2024 03:41:40 -0400 Subject: [PATCH 070/426] Add diagnostic platform to Whirlpool (#114578) * Add diagnostic platform and tests * lowercase variable * Correc doc string --- .../components/whirlpool/diagnostics.py | 49 +++++++++++++++++++ .../whirlpool/snapshots/test_diagnostics.ambr | 44 +++++++++++++++++ .../components/whirlpool/test_diagnostics.py | 32 ++++++++++++ 3 files changed, 125 insertions(+) create mode 100644 homeassistant/components/whirlpool/diagnostics.py create mode 100644 tests/components/whirlpool/snapshots/test_diagnostics.ambr 
create mode 100644 tests/components/whirlpool/test_diagnostics.py diff --git a/homeassistant/components/whirlpool/diagnostics.py b/homeassistant/components/whirlpool/diagnostics.py new file mode 100644 index 00000000000..9b1dd00e7bd --- /dev/null +++ b/homeassistant/components/whirlpool/diagnostics.py @@ -0,0 +1,49 @@ +"""Diagnostics support for Whirlpool.""" + +from __future__ import annotations + +from typing import Any + +from homeassistant.components.diagnostics import async_redact_data +from homeassistant.config_entries import ConfigEntry +from homeassistant.core import HomeAssistant + +from . import WhirlpoolData +from .const import DOMAIN + +TO_REDACT = { + "SERIAL_NUMBER", + "macaddress", + "username", + "password", + "token", + "unique_id", + "SAID", +} + + +async def async_get_config_entry_diagnostics( + hass: HomeAssistant, + config_entry: ConfigEntry, +) -> dict[str, Any]: + """Return diagnostics for a config entry.""" + + whirlpool: WhirlpoolData = hass.data[DOMAIN][config_entry.entry_id] + diagnostics_data = { + "Washer_dryers": { + wd["NAME"]: dict(wd.items()) + for wd in whirlpool.appliances_manager.washer_dryers + }, + "aircons": { + ac["NAME"]: dict(ac.items()) for ac in whirlpool.appliances_manager.aircons + }, + "ovens": { + oven["NAME"]: dict(oven.items()) + for oven in whirlpool.appliances_manager.ovens + }, + } + + return { + "config_entry": async_redact_data(config_entry.as_dict(), TO_REDACT), + "appliances": async_redact_data(diagnostics_data, TO_REDACT), + } diff --git a/tests/components/whirlpool/snapshots/test_diagnostics.ambr b/tests/components/whirlpool/snapshots/test_diagnostics.ambr new file mode 100644 index 00000000000..5a0beb112e6 --- /dev/null +++ b/tests/components/whirlpool/snapshots/test_diagnostics.ambr @@ -0,0 +1,44 @@ +# serializer version: 1 +# name: test_entry_diagnostics + dict({ + 'appliances': dict({ + 'Washer_dryers': dict({ + 'dryer': dict({ + 'NAME': 'dryer', + 'SAID': '**REDACTED**', + }), + 'washer': dict({ + 
'NAME': 'washer', + 'SAID': '**REDACTED**', + }), + }), + 'aircons': dict({ + 'TestZone': dict({ + 'NAME': 'TestZone', + 'SAID': '**REDACTED**', + }), + }), + 'ovens': dict({ + }), + }), + 'config_entry': dict({ + 'data': dict({ + 'brand': 'Whirlpool', + 'password': '**REDACTED**', + 'region': 'EU', + 'username': '**REDACTED**', + }), + 'disabled_by': None, + 'domain': 'whirlpool', + 'minor_version': 1, + 'options': dict({ + }), + 'pref_disable_new_entities': False, + 'pref_disable_polling': False, + 'source': 'user', + 'title': 'Mock Title', + 'unique_id': None, + 'version': 1, + }), + }) +# --- diff --git a/tests/components/whirlpool/test_diagnostics.py b/tests/components/whirlpool/test_diagnostics.py new file mode 100644 index 00000000000..6cfc1b76e38 --- /dev/null +++ b/tests/components/whirlpool/test_diagnostics.py @@ -0,0 +1,32 @@ +"""Test Blink diagnostics.""" + +from unittest.mock import MagicMock + +from syrupy import SnapshotAssertion +from syrupy.filters import props + +from homeassistant.core import HomeAssistant + +from . 
import init_integration + +from tests.components.diagnostics import get_diagnostics_for_config_entry +from tests.typing import ClientSessionGenerator + +YAML_CONFIG = {"username": "test-user", "password": "test-password"} + + +async def test_entry_diagnostics( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + snapshot: SnapshotAssertion, + mock_appliances_manager_api: MagicMock, + mock_aircon1_api: MagicMock, + mock_aircon_api_instances: MagicMock, +) -> None: + """Test config entry diagnostics.""" + + mock_entry = await init_integration(hass) + + result = await get_diagnostics_for_config_entry(hass, hass_client, mock_entry) + + assert result == snapshot(exclude=props("entry_id")) From 559fe65471fa29339b005f610c1133fa50c3259a Mon Sep 17 00:00:00 2001 From: Jack Boswell Date: Tue, 2 Apr 2024 21:45:46 +1300 Subject: [PATCH 071/426] Catch potential ValueError when getting or setting Starlink sleep values (#114607) --- homeassistant/components/starlink/time.py | 17 +++++++++++++---- 1 file changed, 13 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/starlink/time.py b/homeassistant/components/starlink/time.py index 4d9e2d06675..6475610564d 100644 --- a/homeassistant/components/starlink/time.py +++ b/homeassistant/components/starlink/time.py @@ -10,6 +10,7 @@ import math from homeassistant.components.time import TimeEntity, TimeEntityDescription from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddEntitiesCallback from .const import DOMAIN @@ -62,14 +63,22 @@ class StarlinkTimeEntity(StarlinkEntity, TimeEntity): def _utc_minutes_to_time(utc_minutes: int, timezone: tzinfo) -> time: hour = math.floor(utc_minutes / 60) minute = utc_minutes % 60 - utc = datetime.now(UTC).replace(hour=hour, minute=minute, second=0, microsecond=0) + try: + utc = datetime.now(UTC).replace( + hour=hour, 
minute=minute, second=0, microsecond=0 + ) + except ValueError as exc: + raise HomeAssistantError from exc return utc.astimezone(timezone).time() def _time_to_utc_minutes(t: time, timezone: tzinfo) -> int: - zoned_time = datetime.now(timezone).replace( - hour=t.hour, minute=t.minute, second=0, microsecond=0 - ) + try: + zoned_time = datetime.now(timezone).replace( + hour=t.hour, minute=t.minute, second=0, microsecond=0 + ) + except ValueError as exc: + raise HomeAssistantError from exc utc_time = zoned_time.astimezone(UTC).time() return (utc_time.hour * 60) + utc_time.minute From 230c29edbed885a39ba7fd38d098b40e84c74ded Mon Sep 17 00:00:00 2001 From: max2697 <143563471+max2697@users.noreply.github.com> Date: Tue, 2 Apr 2024 02:04:28 -0500 Subject: [PATCH 072/426] Bump opower to 0.4.2 (#114608) --- homeassistant/components/opower/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/opower/manifest.json b/homeassistant/components/opower/manifest.json index bc6f8796d50..879aeb0327b 100644 --- a/homeassistant/components/opower/manifest.json +++ b/homeassistant/components/opower/manifest.json @@ -7,5 +7,5 @@ "documentation": "https://www.home-assistant.io/integrations/opower", "iot_class": "cloud_polling", "loggers": ["opower"], - "requirements": ["opower==0.4.1"] + "requirements": ["opower==0.4.2"] } diff --git a/requirements_all.txt b/requirements_all.txt index a98a76af9b0..a6f4d51d89b 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1482,7 +1482,7 @@ openwrt-luci-rpc==1.1.17 openwrt-ubus-rpc==0.0.2 # homeassistant.components.opower -opower==0.4.1 +opower==0.4.2 # homeassistant.components.oralb oralb-ble==0.17.6 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 3937eae1e53..fdb4c92e442 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1176,7 +1176,7 @@ openerz-api==0.3.0 
openhomedevice==2.2.0 # homeassistant.components.opower -opower==0.4.1 +opower==0.4.2 # homeassistant.components.oralb oralb-ble==0.17.6 From 92dfec3c98359d1ecabc7d42871812c56eebb537 Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Tue, 2 Apr 2024 10:43:14 +0200 Subject: [PATCH 073/426] Add floor selector (#114614) --- homeassistant/helpers/selector.py | 42 +++++++++++++++++++ tests/helpers/test_selector.py | 67 +++++++++++++++++++++++++++++++ 2 files changed, 109 insertions(+) diff --git a/homeassistant/helpers/selector.py b/homeassistant/helpers/selector.py index 938cc6a9246..c4db601fac6 100644 --- a/homeassistant/helpers/selector.py +++ b/homeassistant/helpers/selector.py @@ -844,6 +844,48 @@ class EntitySelector(Selector[EntitySelectorConfig]): return cast(list, vol.Schema([validate])(data)) # Output is a list +class FloorSelectorConfig(TypedDict, total=False): + """Class to represent an floor selector config.""" + + entity: EntityFilterSelectorConfig | list[EntityFilterSelectorConfig] + device: DeviceFilterSelectorConfig | list[DeviceFilterSelectorConfig] + multiple: bool + + +@SELECTORS.register("floor") +class FloorSelector(Selector[AreaSelectorConfig]): + """Selector of a single or list of floors.""" + + selector_type = "floor" + + CONFIG_SCHEMA = vol.Schema( + { + vol.Optional("entity"): vol.All( + cv.ensure_list, + [ENTITY_FILTER_SELECTOR_CONFIG_SCHEMA], + ), + vol.Optional("device"): vol.All( + cv.ensure_list, + [DEVICE_FILTER_SELECTOR_CONFIG_SCHEMA], + ), + vol.Optional("multiple", default=False): cv.boolean, + } + ) + + def __init__(self, config: FloorSelectorConfig | None = None) -> None: + """Instantiate a selector.""" + super().__init__(config) + + def __call__(self, data: Any) -> str | list[str]: + """Validate the passed selection.""" + if not self.config["multiple"]: + floor_id: str = vol.Schema(str)(data) + return floor_id + if not isinstance(data, list): + raise vol.Invalid("Value should be a list") + return [vol.Schema(str)(val) for val in 
data] + + class IconSelectorConfig(TypedDict, total=False): """Class to represent an icon selector config.""" diff --git a/tests/helpers/test_selector.py b/tests/helpers/test_selector.py index 0dc7e570fc5..8864edc7386 100644 --- a/tests/helpers/test_selector.py +++ b/tests/helpers/test_selector.py @@ -1158,3 +1158,70 @@ def test_qr_code_selector_schema(schema, valid_selections, invalid_selections) - def test_label_selector_schema(schema, valid_selections, invalid_selections) -> None: """Test label selector.""" _test_selector("label", schema, valid_selections, invalid_selections) + + +@pytest.mark.parametrize( + ("schema", "valid_selections", "invalid_selections"), + [ + ({}, ("abc123",), (None,)), + ({"entity": {}}, ("abc123",), (None,)), + ({"entity": {"domain": "light"}}, ("abc123",), (None,)), + ( + {"entity": {"domain": "binary_sensor", "device_class": "motion"}}, + ("abc123",), + (None,), + ), + ( + { + "entity": { + "domain": "binary_sensor", + "device_class": "motion", + "integration": "demo", + } + }, + ("abc123",), + (None,), + ), + ( + { + "entity": [ + {"domain": "light"}, + {"domain": "binary_sensor", "device_class": "motion"}, + ] + }, + ("abc123",), + (None,), + ), + ( + {"device": {"integration": "demo", "model": "mock-model"}}, + ("abc123",), + (None,), + ), + ( + { + "device": [ + {"integration": "demo", "model": "mock-model"}, + {"integration": "other-demo", "model": "other-mock-model"}, + ] + }, + ("abc123",), + (None,), + ), + ( + { + "entity": {"domain": "binary_sensor", "device_class": "motion"}, + "device": {"integration": "demo", "model": "mock-model"}, + }, + ("abc123",), + (None,), + ), + ( + {"multiple": True}, + ((["abc123", "def456"],)), + (None, "abc123", ["abc123", None]), + ), + ], +) +def test_floor_selector_schema(schema, valid_selections, invalid_selections) -> None: + """Test floor selector.""" + _test_selector("floor", schema, valid_selections, invalid_selections) From ca31479d298490d9a19e55dace29648e3e92dba4 Mon Sep 17 00:00:00 
2001 From: Fexiven <48439988+Fexiven@users.noreply.github.com> Date: Tue, 2 Apr 2024 12:04:07 +0200 Subject: [PATCH 074/426] Fix Starlink integration startup issue (#114615) --- homeassistant/components/starlink/coordinator.py | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/homeassistant/components/starlink/coordinator.py b/homeassistant/components/starlink/coordinator.py index 9c597fbb033..ff33b3ecc41 100644 --- a/homeassistant/components/starlink/coordinator.py +++ b/homeassistant/components/starlink/coordinator.py @@ -58,14 +58,14 @@ class StarlinkUpdateCoordinator(DataUpdateCoordinator[StarlinkData]): async def _async_update_data(self) -> StarlinkData: async with asyncio.timeout(4): try: - status, location, sleep = await asyncio.gather( - self.hass.async_add_executor_job(status_data, self.channel_context), - self.hass.async_add_executor_job( - location_data, self.channel_context - ), - self.hass.async_add_executor_job( - get_sleep_config, self.channel_context - ), + status = await self.hass.async_add_executor_job( + status_data, self.channel_context + ) + location = await self.hass.async_add_executor_job( + location_data, self.channel_context + ) + sleep = await self.hass.async_add_executor_job( + get_sleep_config, self.channel_context ) return StarlinkData(location, sleep, *status) except GrpcError as exc: From b539b25682754f9d4abb7ef4361184a7054c52ac Mon Sep 17 00:00:00 2001 From: Bram Kragten Date: Tue, 2 Apr 2024 12:17:52 +0200 Subject: [PATCH 075/426] Update frontend to 20240402.0 (#114627) --- homeassistant/components/frontend/manifest.json | 2 +- homeassistant/package_constraints.txt | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/frontend/manifest.json b/homeassistant/components/frontend/manifest.json index 7864801a986..5eaa6e94769 100644 --- a/homeassistant/components/frontend/manifest.json +++ 
b/homeassistant/components/frontend/manifest.json @@ -20,5 +20,5 @@ "documentation": "https://www.home-assistant.io/integrations/frontend", "integration_type": "system", "quality_scale": "internal", - "requirements": ["home-assistant-frontend==20240329.1"] + "requirements": ["home-assistant-frontend==20240402.0"] } diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index 9621137d855..eb6d347a479 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -30,7 +30,7 @@ habluetooth==2.4.2 hass-nabucasa==0.79.0 hassil==1.6.1 home-assistant-bluetooth==1.12.0 -home-assistant-frontend==20240329.1 +home-assistant-frontend==20240402.0 home-assistant-intents==2024.3.29 httpx==0.27.0 ifaddr==0.2.0 diff --git a/requirements_all.txt b/requirements_all.txt index a6f4d51d89b..adfcc1322e3 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1077,7 +1077,7 @@ hole==0.8.0 holidays==0.45 # homeassistant.components.frontend -home-assistant-frontend==20240329.1 +home-assistant-frontend==20240402.0 # homeassistant.components.conversation home-assistant-intents==2024.3.29 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index fdb4c92e442..85fb1c70471 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -876,7 +876,7 @@ hole==0.8.0 holidays==0.45 # homeassistant.components.frontend -home-assistant-frontend==20240329.1 +home-assistant-frontend==20240402.0 # homeassistant.components.conversation home-assistant-intents==2024.3.29 From 5af5f3694ef2816e34c585f04ff9862f929b7278 Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Tue, 2 Apr 2024 12:28:20 +0200 Subject: [PATCH 076/426] Bump version to 2024.4.0b5 --- homeassistant/const.py | 2 +- pyproject.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/homeassistant/const.py b/homeassistant/const.py index a69c4c84e27..0cac1ae45a3 100644 --- a/homeassistant/const.py +++ b/homeassistant/const.py 
@@ -18,7 +18,7 @@ from .util.signal_type import SignalType APPLICATION_NAME: Final = "HomeAssistant" MAJOR_VERSION: Final = 2024 MINOR_VERSION: Final = 4 -PATCH_VERSION: Final = "0b4" +PATCH_VERSION: Final = "0b5" __short_version__: Final = f"{MAJOR_VERSION}.{MINOR_VERSION}" __version__: Final = f"{__short_version__}.{PATCH_VERSION}" REQUIRED_PYTHON_VER: Final[tuple[int, int, int]] = (3, 12, 0) diff --git a/pyproject.toml b/pyproject.toml index 11007e624dc..62c9166cd22 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta" [project] name = "homeassistant" -version = "2024.4.0b4" +version = "2024.4.0b5" license = {text = "Apache-2.0"} description = "Open-source home automation platform running on Python 3." readme = "README.rst" From 4e0d6f287ed716edcf9a39267ce08dce4b4cd1da Mon Sep 17 00:00:00 2001 From: puddly <32534428+puddly@users.noreply.github.com> Date: Tue, 2 Apr 2024 08:16:59 -0700 Subject: [PATCH 077/426] Reduce ZHA OTA logbook entries and extraneous updates (#114591) --- .../components/zha/core/cluster_handlers/general.py | 7 +++++++ homeassistant/components/zha/update.py | 5 ----- 2 files changed, 7 insertions(+), 5 deletions(-) diff --git a/homeassistant/components/zha/core/cluster_handlers/general.py b/homeassistant/components/zha/core/cluster_handlers/general.py index 478f41da3b7..438fc6b1723 100644 --- a/homeassistant/components/zha/core/cluster_handlers/general.py +++ b/homeassistant/components/zha/core/cluster_handlers/general.py @@ -553,6 +553,13 @@ class OtaClientClusterHandler(ClientClusterHandler): Ota.AttributeDefs.current_file_version.name: True, } + @callback + def attribute_updated(self, attrid: int, value: Any, timestamp: Any) -> None: + """Handle an attribute updated on this cluster.""" + # We intentionally avoid the `ClientClusterHandler` attribute update handler: + # it emits a logbook event on every update, which pollutes the logbook + ClusterHandler.attribute_updated(self, attrid, 
value, timestamp) + @property def current_file_version(self) -> int | None: """Return cached value of current_file_version attribute.""" diff --git a/homeassistant/components/zha/update.py b/homeassistant/components/zha/update.py index 7ceba4fc924..0cb80d13119 100644 --- a/homeassistant/components/zha/update.py +++ b/homeassistant/components/zha/update.py @@ -130,14 +130,9 @@ class ZHAFirmwareUpdateEntity( def _get_cluster_version(self) -> str | None: """Synchronize current file version with the cluster.""" - device = self._ota_cluster_handler._endpoint.device # pylint: disable=protected-access - if self._ota_cluster_handler.current_file_version is not None: return f"0x{self._ota_cluster_handler.current_file_version:08x}" - if device.sw_version is not None: - return device.sw_version - return None @callback From d53848aae44d4c25849fdaea29ad22b408a77400 Mon Sep 17 00:00:00 2001 From: dotvav Date: Tue, 2 Apr 2024 13:08:53 +0200 Subject: [PATCH 078/426] Fix Overkiz Hitachi OVP air-to-air heat pump (#114611) --- .../climate_entities/hitachi_air_to_air_heat_pump_ovp.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/homeassistant/components/overkiz/climate_entities/hitachi_air_to_air_heat_pump_ovp.py b/homeassistant/components/overkiz/climate_entities/hitachi_air_to_air_heat_pump_ovp.py index b4d6ab788a1..b31ecf91ec0 100644 --- a/homeassistant/components/overkiz/climate_entities/hitachi_air_to_air_heat_pump_ovp.py +++ b/homeassistant/components/overkiz/climate_entities/hitachi_air_to_air_heat_pump_ovp.py @@ -298,6 +298,11 @@ class HitachiAirToAirHeatPumpOVP(OverkizEntity, ClimateEntity): OverkizState.OVP_FAN_SPEED, OverkizCommandParam.AUTO, ) + # Sanitize fan mode: Overkiz is sometimes providing a state that + # cannot be used as a command. 
Convert it to HA space and back to Overkiz + if fan_mode not in FAN_MODES_TO_OVERKIZ.values(): + fan_mode = FAN_MODES_TO_OVERKIZ[OVERKIZ_TO_FAN_MODES[fan_mode]] + hvac_mode = self._control_backfill( hvac_mode, OverkizState.OVP_MODE_CHANGE, From 5bd52da13a595f9bb41690bfe2096a01d427afce Mon Sep 17 00:00:00 2001 From: Steven B <51370195+sdb9696@users.noreply.github.com> Date: Tue, 2 Apr 2024 12:17:47 +0100 Subject: [PATCH 079/426] Bump ring_doorbell integration to 0.8.9 (#114631) --- homeassistant/components/ring/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/ring/manifest.json b/homeassistant/components/ring/manifest.json index 764557a3a1d..67e2cfcdc78 100644 --- a/homeassistant/components/ring/manifest.json +++ b/homeassistant/components/ring/manifest.json @@ -13,5 +13,5 @@ "documentation": "https://www.home-assistant.io/integrations/ring", "iot_class": "cloud_polling", "loggers": ["ring_doorbell"], - "requirements": ["ring-doorbell[listen]==0.8.8"] + "requirements": ["ring-doorbell[listen]==0.8.9"] } diff --git a/requirements_all.txt b/requirements_all.txt index adfcc1322e3..fd44076ef24 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2444,7 +2444,7 @@ rfk101py==0.0.1 rflink==0.0.66 # homeassistant.components.ring -ring-doorbell[listen]==0.8.8 +ring-doorbell[listen]==0.8.9 # homeassistant.components.fleetgo ritassist==0.9.2 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 85fb1c70471..70c27940562 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1887,7 +1887,7 @@ reolink-aio==0.8.9 rflink==0.0.66 # homeassistant.components.ring -ring-doorbell[listen]==0.8.8 +ring-doorbell[listen]==0.8.9 # homeassistant.components.roku rokuecp==0.19.2 From 8cbedbe26b6e588f652c566073716ef76d081aa6 Mon Sep 17 00:00:00 2001 From: Bram Kragten Date: Tue, 2 Apr 2024 17:15:24 +0200 Subject: [PATCH 
080/426] Update frontend to 20240402.1 (#114646) --- homeassistant/components/frontend/manifest.json | 2 +- homeassistant/package_constraints.txt | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/frontend/manifest.json b/homeassistant/components/frontend/manifest.json index 5eaa6e94769..2010a9985b3 100644 --- a/homeassistant/components/frontend/manifest.json +++ b/homeassistant/components/frontend/manifest.json @@ -20,5 +20,5 @@ "documentation": "https://www.home-assistant.io/integrations/frontend", "integration_type": "system", "quality_scale": "internal", - "requirements": ["home-assistant-frontend==20240402.0"] + "requirements": ["home-assistant-frontend==20240402.1"] } diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index eb6d347a479..cf0aa0848af 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -30,7 +30,7 @@ habluetooth==2.4.2 hass-nabucasa==0.79.0 hassil==1.6.1 home-assistant-bluetooth==1.12.0 -home-assistant-frontend==20240402.0 +home-assistant-frontend==20240402.1 home-assistant-intents==2024.3.29 httpx==0.27.0 ifaddr==0.2.0 diff --git a/requirements_all.txt b/requirements_all.txt index fd44076ef24..5bb02a079fd 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1077,7 +1077,7 @@ hole==0.8.0 holidays==0.45 # homeassistant.components.frontend -home-assistant-frontend==20240402.0 +home-assistant-frontend==20240402.1 # homeassistant.components.conversation home-assistant-intents==2024.3.29 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 70c27940562..e25784596a3 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -876,7 +876,7 @@ hole==0.8.0 holidays==0.45 # homeassistant.components.frontend -home-assistant-frontend==20240402.0 +home-assistant-frontend==20240402.1 # homeassistant.components.conversation 
home-assistant-intents==2024.3.29 From 85fb4a27a3a728d1bf178b300ed4055f956dd28c Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Tue, 2 Apr 2024 17:35:01 +0200 Subject: [PATCH 081/426] Bump version to 2024.4.0b6 --- homeassistant/const.py | 2 +- pyproject.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/homeassistant/const.py b/homeassistant/const.py index 0cac1ae45a3..fd3b1257df2 100644 --- a/homeassistant/const.py +++ b/homeassistant/const.py @@ -18,7 +18,7 @@ from .util.signal_type import SignalType APPLICATION_NAME: Final = "HomeAssistant" MAJOR_VERSION: Final = 2024 MINOR_VERSION: Final = 4 -PATCH_VERSION: Final = "0b5" +PATCH_VERSION: Final = "0b6" __short_version__: Final = f"{MAJOR_VERSION}.{MINOR_VERSION}" __version__: Final = f"{__short_version__}.{PATCH_VERSION}" REQUIRED_PYTHON_VER: Final[tuple[int, int, int]] = (3, 12, 0) diff --git a/pyproject.toml b/pyproject.toml index 62c9166cd22..73d03f0f92c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta" [project] name = "homeassistant" -version = "2024.4.0b5" +version = "2024.4.0b6" license = {text = "Apache-2.0"} description = "Open-source home automation platform running on Python 3." 
readme = "README.rst" From 2ce784105d40890e36eceb6a12d2eb28df663668 Mon Sep 17 00:00:00 2001 From: Pete Sage <76050312+PeteRager@users.noreply.github.com> Date: Tue, 2 Apr 2024 18:10:15 -0400 Subject: [PATCH 082/426] Fix Sonos play imported playlists (#113934) --- .../components/sonos/media_browser.py | 14 +++ .../components/sonos/media_player.py | 12 +- tests/components/sonos/test_media_player.py | 117 ++++++++++++++++++ 3 files changed, 137 insertions(+), 6 deletions(-) diff --git a/homeassistant/components/sonos/media_browser.py b/homeassistant/components/sonos/media_browser.py index 9d3ef5d353b..87ee3ed3b4d 100644 --- a/homeassistant/components/sonos/media_browser.py +++ b/homeassistant/components/sonos/media_browser.py @@ -493,6 +493,20 @@ def get_media( """Fetch media/album.""" search_type = MEDIA_TYPES_TO_SONOS.get(search_type, search_type) + if search_type == "playlists": + # Format is S:TITLE or S:ITEM_ID + splits = item_id.split(":") + title = splits[1] if len(splits) > 1 else None + playlist = next( + ( + p + for p in media_library.get_playlists() + if (item_id == p.item_id or title == p.title) + ), + None, + ) + return playlist + if not item_id.startswith("A:ALBUM") and search_type == SONOS_ALBUM: item_id = "A:ALBUMARTIST/" + "/".join(item_id.split("/")[2:]) diff --git a/homeassistant/components/sonos/media_player.py b/homeassistant/components/sonos/media_player.py index 12e8b44652a..581bdaad37d 100644 --- a/homeassistant/components/sonos/media_player.py +++ b/homeassistant/components/sonos/media_player.py @@ -626,13 +626,13 @@ class SonosMediaPlayerEntity(SonosEntity, MediaPlayerEntity): soco.play_uri(media_id, force_radio=is_radio) elif media_type == MediaType.PLAYLIST: if media_id.startswith("S:"): - item = media_browser.get_media(self.media.library, media_id, media_type) - soco.play_uri(item.get_uri()) - return - try: + playlist = media_browser.get_media( + self.media.library, media_id, media_type + ) + else: playlists = 
soco.get_sonos_playlists(complete_result=True) - playlist = next(p for p in playlists if p.title == media_id) - except StopIteration: + playlist = next((p for p in playlists if p.title == media_id), None) + if not playlist: _LOGGER.error('Could not find a Sonos playlist named "%s"', media_id) else: soco.clear_queue() diff --git a/tests/components/sonos/test_media_player.py b/tests/components/sonos/test_media_player.py index d89a1076db3..c181520b85d 100644 --- a/tests/components/sonos/test_media_player.py +++ b/tests/components/sonos/test_media_player.py @@ -1,5 +1,13 @@ """Tests for the Sonos Media Player platform.""" +import logging + +import pytest + +from homeassistant.components.media_player import ( + DOMAIN as MP_DOMAIN, + SERVICE_PLAY_MEDIA, +) from homeassistant.const import STATE_IDLE from homeassistant.core import HomeAssistant from homeassistant.helpers.device_registry import ( @@ -8,6 +16,8 @@ from homeassistant.helpers.device_registry import ( DeviceRegistry, ) +from .conftest import SoCoMockFactory + async def test_device_registry( hass: HomeAssistant, device_registry: DeviceRegistry, async_autosetup_sonos, soco @@ -53,3 +63,110 @@ async def test_entity_basic( assert attributes["friendly_name"] == "Zone A" assert attributes["is_volume_muted"] is False assert attributes["volume_level"] == 0.19 + + +class _MockMusicServiceItem: + """Mocks a Soco MusicServiceItem.""" + + def __init__( + self, + title: str, + item_id: str, + parent_id: str, + item_class: str, + ) -> None: + """Initialize the mock item.""" + self.title = title + self.item_id = item_id + self.item_class = item_class + self.parent_id = parent_id + + def get_uri(self) -> str: + """Return URI.""" + return self.item_id.replace("S://", "x-file-cifs://") + + +_mock_playlists = [ + _MockMusicServiceItem( + "playlist1", + "S://192.168.1.68/music/iTunes/iTunes%20Music%20Library.xml#GUID_1", + "A:PLAYLISTS", + "object.container.playlistContainer", + ), + _MockMusicServiceItem( + "playlist2", + 
"S://192.168.1.68/music/iTunes/iTunes%20Music%20Library.xml#GUID_2", + "A:PLAYLISTS", + "object.container.playlistContainer", + ), +] + + +@pytest.mark.parametrize( + ("media_content_id", "expected_item_id"), + [ + ( + _mock_playlists[0].item_id, + _mock_playlists[0].item_id, + ), + ( + f"S:{_mock_playlists[1].title}", + _mock_playlists[1].item_id, + ), + ], +) +async def test_play_media_music_library_playlist( + hass: HomeAssistant, + soco_factory: SoCoMockFactory, + async_autosetup_sonos, + discover, + media_content_id, + expected_item_id, +) -> None: + """Test that playlists can be found by id or title.""" + soco_mock = soco_factory.mock_list.get("192.168.42.2") + soco_mock.music_library.get_playlists.return_value = _mock_playlists + + await hass.services.async_call( + MP_DOMAIN, + SERVICE_PLAY_MEDIA, + { + "entity_id": "media_player.zone_a", + "media_content_type": "playlist", + "media_content_id": media_content_id, + }, + blocking=True, + ) + + assert soco_mock.clear_queue.call_count == 1 + assert soco_mock.add_to_queue.call_count == 1 + assert soco_mock.add_to_queue.call_args_list[0].args[0].item_id == expected_item_id + assert soco_mock.play_from_queue.call_count == 1 + + +async def test_play_media_music_library_playlist_dne( + hass: HomeAssistant, + soco_factory: SoCoMockFactory, + async_autosetup_sonos, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test error handling when attempting to play a non-existent playlist .""" + media_content_id = "S:nonexistent" + soco_mock = soco_factory.mock_list.get("192.168.42.2") + soco_mock.music_library.get_playlists.return_value = _mock_playlists + + with caplog.at_level(logging.ERROR): + caplog.clear() + await hass.services.async_call( + MP_DOMAIN, + SERVICE_PLAY_MEDIA, + { + "entity_id": "media_player.zone_a", + "media_content_type": "playlist", + "media_content_id": media_content_id, + }, + blocking=True, + ) + assert soco_mock.play_uri.call_count == 0 + assert media_content_id in caplog.text + assert 
"playlist" in caplog.text From fa2f49693c206e2cec938bfb8763cfda21ddef01 Mon Sep 17 00:00:00 2001 From: Robert Svensson Date: Tue, 2 Apr 2024 18:33:12 +0200 Subject: [PATCH 083/426] Bump aiounifi to v74 (#114649) --- homeassistant/components/unifi/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/unifi/manifest.json b/homeassistant/components/unifi/manifest.json index 63f9f67605e..05dc2189908 100644 --- a/homeassistant/components/unifi/manifest.json +++ b/homeassistant/components/unifi/manifest.json @@ -8,7 +8,7 @@ "iot_class": "local_push", "loggers": ["aiounifi"], "quality_scale": "platinum", - "requirements": ["aiounifi==73"], + "requirements": ["aiounifi==74"], "ssdp": [ { "manufacturer": "Ubiquiti Networks", diff --git a/requirements_all.txt b/requirements_all.txt index 5bb02a079fd..4bc0c15d06d 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -392,7 +392,7 @@ aiotankerkoenig==0.4.1 aiotractive==0.5.6 # homeassistant.components.unifi -aiounifi==73 +aiounifi==74 # homeassistant.components.vlc_telnet aiovlc==0.1.0 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index e25784596a3..0c363f73167 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -365,7 +365,7 @@ aiotankerkoenig==0.4.1 aiotractive==0.5.6 # homeassistant.components.unifi -aiounifi==73 +aiounifi==74 # homeassistant.components.vlc_telnet aiovlc==0.1.0 From 4e0290ce0e91286c54769b02eada309ce30017dd Mon Sep 17 00:00:00 2001 From: Maciej Bieniek Date: Tue, 2 Apr 2024 18:28:52 +0200 Subject: [PATCH 084/426] Add missing state to the Tractive tracker state sensor (#114654) Co-authored-by: Maciej Bieniek <478555+bieniu@users.noreply.github.com> --- homeassistant/components/tractive/sensor.py | 1 + homeassistant/components/tractive/strings.json | 1 + 2 files changed, 2 insertions(+) diff --git a/homeassistant/components/tractive/sensor.py 
b/homeassistant/components/tractive/sensor.py index 5e2f3288f57..1edee71467b 100644 --- a/homeassistant/components/tractive/sensor.py +++ b/homeassistant/components/tractive/sensor.py @@ -107,6 +107,7 @@ SENSOR_TYPES: tuple[TractiveSensorEntityDescription, ...] = ( entity_category=EntityCategory.DIAGNOSTIC, device_class=SensorDeviceClass.ENUM, options=[ + "inaccurate_position", "not_reporting", "operational", "system_shutdown_user", diff --git a/homeassistant/components/tractive/strings.json b/homeassistant/components/tractive/strings.json index 82b7ecc295c..0690328c99c 100644 --- a/homeassistant/components/tractive/strings.json +++ b/homeassistant/components/tractive/strings.json @@ -70,6 +70,7 @@ "tracker_state": { "name": "Tracker state", "state": { + "inaccurate_position": "Inaccurate position", "not_reporting": "Not reporting", "operational": "Operational", "system_shutdown_user": "System shutdown user", From 02dee343380fe2b78fd10971e3eb28bfd62c621c Mon Sep 17 00:00:00 2001 From: G Johansson Date: Tue, 2 Apr 2024 20:58:18 +0300 Subject: [PATCH 085/426] Bump holidays to 0.46 (#114657) --- homeassistant/components/holiday/manifest.json | 2 +- homeassistant/components/workday/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/holiday/manifest.json b/homeassistant/components/holiday/manifest.json index f1bc60dece4..5a1edcd3c3f 100644 --- a/homeassistant/components/holiday/manifest.json +++ b/homeassistant/components/holiday/manifest.json @@ -5,5 +5,5 @@ "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/holiday", "iot_class": "local_polling", - "requirements": ["holidays==0.45", "babel==2.13.1"] + "requirements": ["holidays==0.46", "babel==2.13.1"] } diff --git a/homeassistant/components/workday/manifest.json b/homeassistant/components/workday/manifest.json index 6b17a980870..314f4c6bcf4 100644 --- 
a/homeassistant/components/workday/manifest.json +++ b/homeassistant/components/workday/manifest.json @@ -7,5 +7,5 @@ "iot_class": "local_polling", "loggers": ["holidays"], "quality_scale": "internal", - "requirements": ["holidays==0.45"] + "requirements": ["holidays==0.46"] } diff --git a/requirements_all.txt b/requirements_all.txt index 4bc0c15d06d..6776b8daa0b 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1074,7 +1074,7 @@ hole==0.8.0 # homeassistant.components.holiday # homeassistant.components.workday -holidays==0.45 +holidays==0.46 # homeassistant.components.frontend home-assistant-frontend==20240402.1 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 0c363f73167..5140cbaa5e4 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -873,7 +873,7 @@ hole==0.8.0 # homeassistant.components.holiday # homeassistant.components.workday -holidays==0.45 +holidays==0.46 # homeassistant.components.frontend home-assistant-frontend==20240402.1 From 639c4a843bd3b4926c05d83e9ebc22b330ad59c6 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Tue, 2 Apr 2024 11:22:40 -1000 Subject: [PATCH 086/426] Avoid trying to load platform that are known to not exist in async_prepare_setup_platform (#114659) --- homeassistant/setup.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/homeassistant/setup.py b/homeassistant/setup.py index 178ee6425e3..2e64fefee77 100644 --- a/homeassistant/setup.py +++ b/homeassistant/setup.py @@ -504,6 +504,12 @@ async def async_prepare_setup_platform( log_error(f"Unable to import the component ({exc}).") return None + if not integration.platforms_exists((domain,)): + log_error( + f"Platform not found (No module named '{integration.pkg_path}.{domain}')" + ) + return None + try: platform = await integration.async_get_platform(domain) except ImportError as exc: From f676448f27141ba21af81e05b747ef94b19e013c Mon Sep 17 00:00:00 2001 From: Bram Kragten Date: Wed, 3 Apr 2024 00:12:31 +0200 Subject: [PATCH 087/426] Update frontend to 20240402.2 (#114683) --- homeassistant/components/frontend/manifest.json | 2 +- homeassistant/package_constraints.txt | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/frontend/manifest.json b/homeassistant/components/frontend/manifest.json index 2010a9985b3..3ac7efa9fab 100644 --- a/homeassistant/components/frontend/manifest.json +++ b/homeassistant/components/frontend/manifest.json @@ -20,5 +20,5 @@ "documentation": "https://www.home-assistant.io/integrations/frontend", "integration_type": "system", "quality_scale": "internal", - "requirements": ["home-assistant-frontend==20240402.1"] + "requirements": ["home-assistant-frontend==20240402.2"] } diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index cf0aa0848af..80cea24b817 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -30,7 +30,7 @@ habluetooth==2.4.2 hass-nabucasa==0.79.0 hassil==1.6.1 
home-assistant-bluetooth==1.12.0 -home-assistant-frontend==20240402.1 +home-assistant-frontend==20240402.2 home-assistant-intents==2024.3.29 httpx==0.27.0 ifaddr==0.2.0 diff --git a/requirements_all.txt b/requirements_all.txt index 6776b8daa0b..2be277f422a 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1077,7 +1077,7 @@ hole==0.8.0 holidays==0.46 # homeassistant.components.frontend -home-assistant-frontend==20240402.1 +home-assistant-frontend==20240402.2 # homeassistant.components.conversation home-assistant-intents==2024.3.29 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 5140cbaa5e4..b742a3fa589 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -876,7 +876,7 @@ hole==0.8.0 holidays==0.46 # homeassistant.components.frontend -home-assistant-frontend==20240402.1 +home-assistant-frontend==20240402.2 # homeassistant.components.conversation home-assistant-intents==2024.3.29 From 8bdb27c88b7e1a837000116e5cb9231ef8f152a2 Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Wed, 3 Apr 2024 00:14:07 +0200 Subject: [PATCH 088/426] Bump version to 2024.4.0b7 --- homeassistant/const.py | 2 +- pyproject.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/homeassistant/const.py b/homeassistant/const.py index fd3b1257df2..7dc1177c62f 100644 --- a/homeassistant/const.py +++ b/homeassistant/const.py @@ -18,7 +18,7 @@ from .util.signal_type import SignalType APPLICATION_NAME: Final = "HomeAssistant" MAJOR_VERSION: Final = 2024 MINOR_VERSION: Final = 4 -PATCH_VERSION: Final = "0b6" +PATCH_VERSION: Final = "0b7" __short_version__: Final = f"{MAJOR_VERSION}.{MINOR_VERSION}" __version__: Final = f"{__short_version__}.{PATCH_VERSION}" REQUIRED_PYTHON_VER: Final[tuple[int, int, int]] = (3, 12, 0) diff --git a/pyproject.toml b/pyproject.toml index 73d03f0f92c..7bf5f806dd2 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta" [project] name = 
"homeassistant" -version = "2024.4.0b6" +version = "2024.4.0b7" license = {text = "Apache-2.0"} description = "Open-source home automation platform running on Python 3." readme = "README.rst" From 79fa7caa41c54162c431ed6ae42f54df5e328b32 Mon Sep 17 00:00:00 2001 From: Lenn <78048721+LennP@users.noreply.github.com> Date: Wed, 3 Apr 2024 12:11:46 +0200 Subject: [PATCH 089/426] Rename Motionblinds BLE integration to Motionblinds Bluetooth (#114584) --- homeassistant/components/motionblinds_ble/__init__.py | 10 +++++----- homeassistant/components/motionblinds_ble/button.py | 2 +- .../components/motionblinds_ble/config_flow.py | 4 ++-- homeassistant/components/motionblinds_ble/const.py | 2 +- homeassistant/components/motionblinds_ble/cover.py | 2 +- homeassistant/components/motionblinds_ble/entity.py | 4 ++-- .../components/motionblinds_ble/manifest.json | 2 +- homeassistant/components/motionblinds_ble/select.py | 2 +- homeassistant/generated/integrations.json | 2 +- tests/components/motionblinds_ble/__init__.py | 2 +- tests/components/motionblinds_ble/conftest.py | 2 +- tests/components/motionblinds_ble/test_config_flow.py | 2 +- 12 files changed, 18 insertions(+), 18 deletions(-) diff --git a/homeassistant/components/motionblinds_ble/__init__.py b/homeassistant/components/motionblinds_ble/__init__.py index f70625cd36d..3c6df12e878 100644 --- a/homeassistant/components/motionblinds_ble/__init__.py +++ b/homeassistant/components/motionblinds_ble/__init__.py @@ -1,4 +1,4 @@ -"""Motionblinds BLE integration.""" +"""Motionblinds Bluetooth integration.""" from __future__ import annotations @@ -34,9 +34,9 @@ CONFIG_SCHEMA = cv.empty_config_schema(DOMAIN) async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: - """Set up Motionblinds BLE integration.""" + """Set up Motionblinds Bluetooth integration.""" - _LOGGER.debug("Setting up Motionblinds BLE integration") + _LOGGER.debug("Setting up Motionblinds Bluetooth integration") # The correct time is needed for 
encryption _LOGGER.debug("Setting timezone for encryption: %s", hass.config.time_zone) @@ -46,7 +46,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: - """Set up Motionblinds BLE device from a config entry.""" + """Set up Motionblinds Bluetooth device from a config entry.""" _LOGGER.debug("(%s) Setting up device", entry.data[CONF_MAC_CODE]) @@ -94,7 +94,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: - """Unload Motionblinds BLE device from a config entry.""" + """Unload Motionblinds Bluetooth device from a config entry.""" if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS): hass.data[DOMAIN].pop(entry.entry_id) diff --git a/homeassistant/components/motionblinds_ble/button.py b/homeassistant/components/motionblinds_ble/button.py index d3bd22e9276..a099276cd85 100644 --- a/homeassistant/components/motionblinds_ble/button.py +++ b/homeassistant/components/motionblinds_ble/button.py @@ -1,4 +1,4 @@ -"""Button entities for the Motionblinds BLE integration.""" +"""Button entities for the Motionblinds Bluetooth integration.""" from __future__ import annotations diff --git a/homeassistant/components/motionblinds_ble/config_flow.py b/homeassistant/components/motionblinds_ble/config_flow.py index 0282c4d5584..23302ae9624 100644 --- a/homeassistant/components/motionblinds_ble/config_flow.py +++ b/homeassistant/components/motionblinds_ble/config_flow.py @@ -1,4 +1,4 @@ -"""Config flow for Motionblinds BLE integration.""" +"""Config flow for Motionblinds Bluetooth integration.""" from __future__ import annotations @@ -38,7 +38,7 @@ CONFIG_SCHEMA = vol.Schema({vol.Required(CONF_MAC_CODE): str}) class FlowHandler(ConfigFlow, domain=DOMAIN): - """Handle a config flow for Motionblinds BLE.""" + """Handle a config flow for Motionblinds 
Bluetooth.""" def __init__(self) -> None: """Initialize a ConfigFlow.""" diff --git a/homeassistant/components/motionblinds_ble/const.py b/homeassistant/components/motionblinds_ble/const.py index d2eb5821b9f..bd88927559e 100644 --- a/homeassistant/components/motionblinds_ble/const.py +++ b/homeassistant/components/motionblinds_ble/const.py @@ -1,4 +1,4 @@ -"""Constants for the Motionblinds BLE integration.""" +"""Constants for the Motionblinds Bluetooth integration.""" ATTR_CONNECT = "connect" ATTR_DISCONNECT = "disconnect" diff --git a/homeassistant/components/motionblinds_ble/cover.py b/homeassistant/components/motionblinds_ble/cover.py index c4f14dc5605..afeeb5b0d70 100644 --- a/homeassistant/components/motionblinds_ble/cover.py +++ b/homeassistant/components/motionblinds_ble/cover.py @@ -1,4 +1,4 @@ -"""Cover entities for the Motionblinds BLE integration.""" +"""Cover entities for the Motionblinds Bluetooth integration.""" from __future__ import annotations diff --git a/homeassistant/components/motionblinds_ble/entity.py b/homeassistant/components/motionblinds_ble/entity.py index 5c2b3ae9afb..0b8171e7acd 100644 --- a/homeassistant/components/motionblinds_ble/entity.py +++ b/homeassistant/components/motionblinds_ble/entity.py @@ -1,4 +1,4 @@ -"""Base entities for the Motionblinds BLE integration.""" +"""Base entities for the Motionblinds Bluetooth integration.""" import logging @@ -16,7 +16,7 @@ _LOGGER = logging.getLogger(__name__) class MotionblindsBLEEntity(Entity): - """Base class for Motionblinds BLE entities.""" + """Base class for Motionblinds Bluetooth entities.""" _attr_has_entity_name = True _attr_should_poll = False diff --git a/homeassistant/components/motionblinds_ble/manifest.json b/homeassistant/components/motionblinds_ble/manifest.json index 2a24dd67483..aa727be13f8 100644 --- a/homeassistant/components/motionblinds_ble/manifest.json +++ b/homeassistant/components/motionblinds_ble/manifest.json @@ -1,6 +1,6 @@ { "domain": "motionblinds_ble", - 
"name": "Motionblinds BLE", + "name": "Motionblinds Bluetooth", "bluetooth": [ { "local_name": "MOTION_*", diff --git a/homeassistant/components/motionblinds_ble/select.py b/homeassistant/components/motionblinds_ble/select.py index 2ba2b8df2d4..c297c887910 100644 --- a/homeassistant/components/motionblinds_ble/select.py +++ b/homeassistant/components/motionblinds_ble/select.py @@ -1,4 +1,4 @@ -"""Select entities for the Motionblinds BLE integration.""" +"""Select entities for the Motionblinds Bluetooth integration.""" from __future__ import annotations diff --git a/homeassistant/generated/integrations.json b/homeassistant/generated/integrations.json index 53b885ea853..b8abac5145b 100644 --- a/homeassistant/generated/integrations.json +++ b/homeassistant/generated/integrations.json @@ -3741,7 +3741,7 @@ "integration_type": "device", "config_flow": true, "iot_class": "assumed_state", - "name": "Motionblinds BLE" + "name": "Motionblinds Bluetooth" } } }, diff --git a/tests/components/motionblinds_ble/__init__.py b/tests/components/motionblinds_ble/__init__.py index 302c3266ea1..c2385555dbf 100644 --- a/tests/components/motionblinds_ble/__init__.py +++ b/tests/components/motionblinds_ble/__init__.py @@ -1 +1 @@ -"""Tests for the Motionblinds BLE integration.""" +"""Tests for the Motionblinds Bluetooth integration.""" diff --git a/tests/components/motionblinds_ble/conftest.py b/tests/components/motionblinds_ble/conftest.py index 8cd1adb1c0e..ae487957302 100644 --- a/tests/components/motionblinds_ble/conftest.py +++ b/tests/components/motionblinds_ble/conftest.py @@ -1,4 +1,4 @@ -"""Setup the MotionBlinds BLE tests.""" +"""Setup the Motionblinds Bluetooth tests.""" from unittest.mock import AsyncMock, Mock, patch diff --git a/tests/components/motionblinds_ble/test_config_flow.py b/tests/components/motionblinds_ble/test_config_flow.py index 9451e04830a..f540fdf421c 100644 --- a/tests/components/motionblinds_ble/test_config_flow.py +++ 
b/tests/components/motionblinds_ble/test_config_flow.py @@ -1,4 +1,4 @@ -"""Test the MotionBlinds BLE config flow.""" +"""Test the Motionblinds Bluetooth config flow.""" from unittest.mock import patch From 43562289e401af6881539a2ae646be4afd3d6f63 Mon Sep 17 00:00:00 2001 From: Jonas Fors Lellky Date: Wed, 3 Apr 2024 09:23:06 +0200 Subject: [PATCH 090/426] Bump flexit_bacnet to 2.2.1 (#114641) --- homeassistant/components/flexit_bacnet/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/flexit_bacnet/manifest.json b/homeassistant/components/flexit_bacnet/manifest.json index d230e4ebb7a..40390162ce6 100644 --- a/homeassistant/components/flexit_bacnet/manifest.json +++ b/homeassistant/components/flexit_bacnet/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/flexit_bacnet", "integration_type": "device", "iot_class": "local_polling", - "requirements": ["flexit_bacnet==2.1.0"] + "requirements": ["flexit_bacnet==2.2.1"] } diff --git a/requirements_all.txt b/requirements_all.txt index 2be277f422a..792e8b53e96 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -867,7 +867,7 @@ fixerio==1.0.0a0 fjaraskupan==2.3.0 # homeassistant.components.flexit_bacnet -flexit_bacnet==2.1.0 +flexit_bacnet==2.2.1 # homeassistant.components.flipr flipr-api==1.5.1 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index b742a3fa589..c771a0e4b54 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -705,7 +705,7 @@ fivem-api==0.1.2 fjaraskupan==2.3.0 # homeassistant.components.flexit_bacnet -flexit_bacnet==2.1.0 +flexit_bacnet==2.2.1 # homeassistant.components.flipr flipr-api==1.5.1 From 7cb603a2268ba66990f91f2cc7f07bed5db4a183 Mon Sep 17 00:00:00 2001 From: "David F. 
Mulcahey" Date: Wed, 3 Apr 2024 03:12:00 -0400 Subject: [PATCH 091/426] Import zha quirks in the executor (#114685) --- homeassistant/components/zha/__init__.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/zha/__init__.py b/homeassistant/components/zha/__init__.py index ef603a4ea71..de761138ce1 100644 --- a/homeassistant/components/zha/__init__.py +++ b/homeassistant/components/zha/__init__.py @@ -124,8 +124,8 @@ async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> b zha_data = get_zha_data(hass) if zha_data.yaml_config.get(CONF_ENABLE_QUIRKS, True): - setup_quirks( - custom_quirks_path=zha_data.yaml_config.get(CONF_CUSTOM_QUIRKS_PATH) + await hass.async_add_import_executor_job( + setup_quirks, zha_data.yaml_config.get(CONF_CUSTOM_QUIRKS_PATH) ) # Load and cache device trigger information early From 7a2f6ce4305f0b26db8619143ec24002de5653de Mon Sep 17 00:00:00 2001 From: Joost Lekkerkerker Date: Wed, 3 Apr 2024 09:56:19 +0200 Subject: [PATCH 092/426] Fix Downloader config flow (#114718) --- homeassistant/components/downloader/config_flow.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/downloader/config_flow.py b/homeassistant/components/downloader/config_flow.py index 69393c04985..635c241edc4 100644 --- a/homeassistant/components/downloader/config_flow.py +++ b/homeassistant/components/downloader/config_flow.py @@ -55,8 +55,9 @@ class DownloaderConfigFlow(ConfigFlow, domain=DOMAIN): async def _validate_input(self, user_input: dict[str, Any]) -> None: """Validate the user input if the directory exists.""" - if not os.path.isabs(user_input[CONF_DOWNLOAD_DIR]): - download_path = self.hass.config.path(user_input[CONF_DOWNLOAD_DIR]) + download_path = user_input[CONF_DOWNLOAD_DIR] + if not os.path.isabs(download_path): + download_path = self.hass.config.path(download_path) if not os.path.isdir(download_path): _LOGGER.error( From 
35ff633d999096ffbbbdf7e4012acc8aac730f0f Mon Sep 17 00:00:00 2001 From: Joost Lekkerkerker Date: Wed, 3 Apr 2024 13:50:34 +0200 Subject: [PATCH 093/426] Avoid blocking IO in downloader config flow (#114741) --- homeassistant/components/downloader/config_flow.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/homeassistant/components/downloader/config_flow.py b/homeassistant/components/downloader/config_flow.py index 635c241edc4..15af8b56163 100644 --- a/homeassistant/components/downloader/config_flow.py +++ b/homeassistant/components/downloader/config_flow.py @@ -59,7 +59,7 @@ class DownloaderConfigFlow(ConfigFlow, domain=DOMAIN): if not os.path.isabs(download_path): download_path = self.hass.config.path(download_path) - if not os.path.isdir(download_path): + if not await self.hass.async_add_executor_job(os.path.isdir, download_path): _LOGGER.error( "Download path %s does not exist. File Downloader not active", download_path, From 0ca3700c16a58502d8a68be4a61d21df3598a2e2 Mon Sep 17 00:00:00 2001 From: Bram Kragten Date: Wed, 3 Apr 2024 15:19:49 +0200 Subject: [PATCH 094/426] Update frontend to 20240403.0 (#114747) --- homeassistant/components/frontend/manifest.json | 2 +- homeassistant/package_constraints.txt | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/frontend/manifest.json b/homeassistant/components/frontend/manifest.json index 3ac7efa9fab..e2826fdb185 100644 --- a/homeassistant/components/frontend/manifest.json +++ b/homeassistant/components/frontend/manifest.json @@ -20,5 +20,5 @@ "documentation": "https://www.home-assistant.io/integrations/frontend", "integration_type": "system", "quality_scale": "internal", - "requirements": ["home-assistant-frontend==20240402.2"] + "requirements": ["home-assistant-frontend==20240403.0"] } diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index 
80cea24b817..07b3aa76ebd 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -30,7 +30,7 @@ habluetooth==2.4.2 hass-nabucasa==0.79.0 hassil==1.6.1 home-assistant-bluetooth==1.12.0 -home-assistant-frontend==20240402.2 +home-assistant-frontend==20240403.0 home-assistant-intents==2024.3.29 httpx==0.27.0 ifaddr==0.2.0 diff --git a/requirements_all.txt b/requirements_all.txt index 792e8b53e96..143e9e1fa53 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1077,7 +1077,7 @@ hole==0.8.0 holidays==0.46 # homeassistant.components.frontend -home-assistant-frontend==20240402.2 +home-assistant-frontend==20240403.0 # homeassistant.components.conversation home-assistant-intents==2024.3.29 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index c771a0e4b54..2ea289f7bd3 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -876,7 +876,7 @@ hole==0.8.0 holidays==0.46 # homeassistant.components.frontend -home-assistant-frontend==20240402.2 +home-assistant-frontend==20240403.0 # homeassistant.components.conversation home-assistant-intents==2024.3.29 From 0aa134459be89790358fc2bd73f00863495d88ab Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Wed, 3 Apr 2024 15:35:53 +0200 Subject: [PATCH 095/426] Bump version to 2024.4.0b8 --- homeassistant/const.py | 2 +- pyproject.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/homeassistant/const.py b/homeassistant/const.py index 7dc1177c62f..95fab13bb5e 100644 --- a/homeassistant/const.py +++ b/homeassistant/const.py @@ -18,7 +18,7 @@ from .util.signal_type import SignalType APPLICATION_NAME: Final = "HomeAssistant" MAJOR_VERSION: Final = 2024 MINOR_VERSION: Final = 4 -PATCH_VERSION: Final = "0b7" +PATCH_VERSION: Final = "0b8" __short_version__: Final = f"{MAJOR_VERSION}.{MINOR_VERSION}" __version__: Final = f"{__short_version__}.{PATCH_VERSION}" REQUIRED_PYTHON_VER: Final[tuple[int, int, int]] = (3, 12, 0) diff --git 
a/pyproject.toml b/pyproject.toml index 7bf5f806dd2..031afa09704 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta" [project] name = "homeassistant" -version = "2024.4.0b7" +version = "2024.4.0b8" license = {text = "Apache-2.0"} description = "Open-source home automation platform running on Python 3." readme = "README.rst" From b2df1b1c03069b6f20a026aae31acf40d737679c Mon Sep 17 00:00:00 2001 From: Robert Resch Date: Wed, 3 Apr 2024 16:33:58 +0200 Subject: [PATCH 096/426] Allow passing area/device/entity IDs to floor_id and floor_name (#114748) --- homeassistant/helpers/template.py | 16 ++++ tests/helpers/test_template.py | 125 +++++++++++++++++++++++------- 2 files changed, 115 insertions(+), 26 deletions(-) diff --git a/homeassistant/helpers/template.py b/homeassistant/helpers/template.py index a48f0133e84..5f692e0de89 100644 --- a/homeassistant/helpers/template.py +++ b/homeassistant/helpers/template.py @@ -1408,6 +1408,12 @@ def floor_id(hass: HomeAssistant, lookup_value: Any) -> str | None: floor_registry = fr.async_get(hass) if floor := floor_registry.async_get_floor_by_name(str(lookup_value)): return floor.floor_id + + if aid := area_id(hass, lookup_value): + area_reg = area_registry.async_get(hass) + if area := area_reg.async_get_area(aid): + return area.floor_id + return None @@ -1416,6 +1422,16 @@ def floor_name(hass: HomeAssistant, lookup_value: str) -> str | None: floor_registry = fr.async_get(hass) if floor := floor_registry.async_get_floor(lookup_value): return floor.name + + if aid := area_id(hass, lookup_value): + area_reg = area_registry.async_get(hass) + if ( + (area := area_reg.async_get_area(aid)) + and area.floor_id + and (floor := floor_registry.async_get_floor(area.floor_id)) + ): + return floor.name + return None diff --git a/tests/helpers/test_template.py b/tests/helpers/test_template.py index 6f455c3dda4..54fdf0368eb 100644 --- a/tests/helpers/test_template.py +++ 
b/tests/helpers/test_template.py @@ -5198,17 +5198,23 @@ async def test_floors( async def test_floor_id( hass: HomeAssistant, floor_registry: fr.FloorRegistry, + area_registry: ar.AreaRegistry, + device_registry: dr.DeviceRegistry, + entity_registry: er.EntityRegistry, ) -> None: """Test floor_id function.""" - # Test non existing floor name - info = render_to_info(hass, "{{ floor_id('Third floor') }}") - assert_result_info(info, None) - assert info.rate_limit is None + def test(value: str, expected: str | None) -> None: + info = render_to_info(hass, f"{{{{ floor_id('{value}') }}}}") + assert_result_info(info, expected) + assert info.rate_limit is None - info = render_to_info(hass, "{{ 'Third floor' | floor_id }}") - assert_result_info(info, None) - assert info.rate_limit is None + info = render_to_info(hass, f"{{{{ '{value}' | floor_id }}}}") + assert_result_info(info, expected) + assert info.rate_limit is None + + # Test non existing floor name + test("Third floor", None) # Test wrong value type info = render_to_info(hass, "{{ floor_id(42) }}") @@ -5221,28 +5227,65 @@ async def test_floor_id( # Test with an actual floor floor = floor_registry.async_create("First floor") - info = render_to_info(hass, "{{ floor_id('First floor') }}") - assert_result_info(info, floor.floor_id) - assert info.rate_limit is None + test("First floor", floor.floor_id) - info = render_to_info(hass, "{{ 'First floor' | floor_id }}") - assert_result_info(info, floor.floor_id) - assert info.rate_limit is None + config_entry = MockConfigEntry(domain="light") + config_entry.add_to_hass(hass) + area_entry_hex = area_registry.async_get_or_create("123abc") + + # Create area, device, entity and assign area to device and entity + device_entry = device_registry.async_get_or_create( + config_entry_id=config_entry.entry_id, + connections={(dr.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")}, + ) + entity_entry = entity_registry.async_get_or_create( + "light", + "hue", + "5678", + 
config_entry=config_entry, + device_id=device_entry.id, + ) + device_entry = device_registry.async_update_device( + device_entry.id, area_id=area_entry_hex.id + ) + entity_entry = entity_registry.async_update_entity( + entity_entry.entity_id, area_id=area_entry_hex.id + ) + + test(area_entry_hex.id, None) + test(device_entry.id, None) + test(entity_entry.entity_id, None) + + # Add floor to area + area_entry_hex = area_registry.async_update( + area_entry_hex.id, floor_id=floor.floor_id + ) + + test(area_entry_hex.id, floor.floor_id) + test(device_entry.id, floor.floor_id) + test(entity_entry.entity_id, floor.floor_id) async def test_floor_name( hass: HomeAssistant, floor_registry: fr.FloorRegistry, + area_registry: ar.AreaRegistry, + device_registry: dr.DeviceRegistry, + entity_registry: er.EntityRegistry, ) -> None: """Test floor_name function.""" - # Test non existing floor ID - info = render_to_info(hass, "{{ floor_name('third_floor') }}") - assert_result_info(info, None) - assert info.rate_limit is None - info = render_to_info(hass, "{{ 'third_floor' | floor_name }}") - assert_result_info(info, None) - assert info.rate_limit is None + def test(value: str, expected: str | None) -> None: + info = render_to_info(hass, f"{{{{ floor_name('{value}') }}}}") + assert_result_info(info, expected) + assert info.rate_limit is None + + info = render_to_info(hass, f"{{{{ '{value}' | floor_name }}}}") + assert_result_info(info, expected) + assert info.rate_limit is None + + # Test non existing floor name + test("Third floor", None) # Test wrong value type info = render_to_info(hass, "{{ floor_name(42) }}") @@ -5255,13 +5298,43 @@ async def test_floor_name( # Test existing floor ID floor = floor_registry.async_create("First floor") - info = render_to_info(hass, f"{{{{ floor_name('{floor.floor_id}') }}}}") - assert_result_info(info, floor.name) - assert info.rate_limit is None + test(floor.floor_id, floor.name) - info = render_to_info(hass, f"{{{{ '{floor.floor_id}' | floor_name 
}}}}") - assert_result_info(info, floor.name) - assert info.rate_limit is None + config_entry = MockConfigEntry(domain="light") + config_entry.add_to_hass(hass) + area_entry_hex = area_registry.async_get_or_create("123abc") + + # Create area, device, entity and assign area to device and entity + device_entry = device_registry.async_get_or_create( + config_entry_id=config_entry.entry_id, + connections={(dr.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")}, + ) + entity_entry = entity_registry.async_get_or_create( + "light", + "hue", + "5678", + config_entry=config_entry, + device_id=device_entry.id, + ) + device_entry = device_registry.async_update_device( + device_entry.id, area_id=area_entry_hex.id + ) + entity_entry = entity_registry.async_update_entity( + entity_entry.entity_id, area_id=area_entry_hex.id + ) + + test(area_entry_hex.id, None) + test(device_entry.id, None) + test(entity_entry.entity_id, None) + + # Add floor to area + area_entry_hex = area_registry.async_update( + area_entry_hex.id, floor_id=floor.floor_id + ) + + test(area_entry_hex.id, floor.name) + test(device_entry.id, floor.name) + test(entity_entry.entity_id, floor.name) async def test_floor_areas( From 4302c5c273e56ee12487c49f1bccfee48a5733d6 Mon Sep 17 00:00:00 2001 From: Michael Hansen Date: Wed, 3 Apr 2024 10:27:26 -0500 Subject: [PATCH 097/426] Bump intents (#114755) --- homeassistant/components/conversation/manifest.json | 2 +- homeassistant/package_constraints.txt | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/conversation/manifest.json b/homeassistant/components/conversation/manifest.json index 7f463483bf9..612e9b25c06 100644 --- a/homeassistant/components/conversation/manifest.json +++ b/homeassistant/components/conversation/manifest.json @@ -7,5 +7,5 @@ "integration_type": "system", "iot_class": "local_push", "quality_scale": "internal", - "requirements": ["hassil==1.6.1", 
"home-assistant-intents==2024.3.29"] + "requirements": ["hassil==1.6.1", "home-assistant-intents==2024.4.3"] } diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index 07b3aa76ebd..4f5bc0f5a63 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -31,7 +31,7 @@ hass-nabucasa==0.79.0 hassil==1.6.1 home-assistant-bluetooth==1.12.0 home-assistant-frontend==20240403.0 -home-assistant-intents==2024.3.29 +home-assistant-intents==2024.4.3 httpx==0.27.0 ifaddr==0.2.0 Jinja2==3.1.3 diff --git a/requirements_all.txt b/requirements_all.txt index 143e9e1fa53..7980e71d5d2 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1080,7 +1080,7 @@ holidays==0.46 home-assistant-frontend==20240403.0 # homeassistant.components.conversation -home-assistant-intents==2024.3.29 +home-assistant-intents==2024.4.3 # homeassistant.components.home_connect homeconnect==0.7.2 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 2ea289f7bd3..2097ed21f55 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -879,7 +879,7 @@ holidays==0.46 home-assistant-frontend==20240403.0 # homeassistant.components.conversation -home-assistant-intents==2024.3.29 +home-assistant-intents==2024.4.3 # homeassistant.components.home_connect homeconnect==0.7.2 From 33f07ce035f01484857181086a7dd39d952966e1 Mon Sep 17 00:00:00 2001 From: Bram Kragten Date: Wed, 3 Apr 2024 17:32:26 +0200 Subject: [PATCH 098/426] Update frontend to 20240403.1 (#114756) --- homeassistant/components/frontend/manifest.json | 2 +- homeassistant/package_constraints.txt | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/frontend/manifest.json b/homeassistant/components/frontend/manifest.json index e2826fdb185..1890572bf5a 100644 --- a/homeassistant/components/frontend/manifest.json +++ 
b/homeassistant/components/frontend/manifest.json @@ -20,5 +20,5 @@ "documentation": "https://www.home-assistant.io/integrations/frontend", "integration_type": "system", "quality_scale": "internal", - "requirements": ["home-assistant-frontend==20240403.0"] + "requirements": ["home-assistant-frontend==20240403.1"] } diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index 4f5bc0f5a63..6bb6bd4d2d3 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -30,7 +30,7 @@ habluetooth==2.4.2 hass-nabucasa==0.79.0 hassil==1.6.1 home-assistant-bluetooth==1.12.0 -home-assistant-frontend==20240403.0 +home-assistant-frontend==20240403.1 home-assistant-intents==2024.4.3 httpx==0.27.0 ifaddr==0.2.0 diff --git a/requirements_all.txt b/requirements_all.txt index 7980e71d5d2..76dc587d6b9 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1077,7 +1077,7 @@ hole==0.8.0 holidays==0.46 # homeassistant.components.frontend -home-assistant-frontend==20240403.0 +home-assistant-frontend==20240403.1 # homeassistant.components.conversation home-assistant-intents==2024.4.3 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 2097ed21f55..6f329b782aa 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -876,7 +876,7 @@ hole==0.8.0 holidays==0.46 # homeassistant.components.frontend -home-assistant-frontend==20240403.0 +home-assistant-frontend==20240403.1 # homeassistant.components.conversation home-assistant-intents==2024.4.3 From 6a7fad0228a94191ad46fad807dfde330fe4ee1c Mon Sep 17 00:00:00 2001 From: Michael <35783820+mib1185@users.noreply.github.com> Date: Wed, 3 Apr 2024 17:53:44 +0200 Subject: [PATCH 099/426] Fix Synology DSM setup in case no Surveillance Station permission (#114757) --- homeassistant/components/synology_dsm/__init__.py | 1 + 1 file changed, 1 insertion(+) diff --git a/homeassistant/components/synology_dsm/__init__.py 
b/homeassistant/components/synology_dsm/__init__.py index a0c3a10774f..ec93c92a698 100644 --- a/homeassistant/components/synology_dsm/__init__.py +++ b/homeassistant/components/synology_dsm/__init__.py @@ -105,6 +105,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: if ( SynoSurveillanceStation.INFO_API_KEY in available_apis and SynoSurveillanceStation.HOME_MODE_API_KEY in available_apis + and api.surveillance_station is not None ): coordinator_switches = SynologyDSMSwitchUpdateCoordinator(hass, entry, api) await coordinator_switches.async_config_entry_first_refresh() From 3845523a27d245ee686aeeaec5c427ba2086bfdf Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Wed, 3 Apr 2024 17:55:24 +0200 Subject: [PATCH 100/426] Bump version to 2024.4.0b9 --- homeassistant/const.py | 2 +- pyproject.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/homeassistant/const.py b/homeassistant/const.py index 95fab13bb5e..514124e8b2e 100644 --- a/homeassistant/const.py +++ b/homeassistant/const.py @@ -18,7 +18,7 @@ from .util.signal_type import SignalType APPLICATION_NAME: Final = "HomeAssistant" MAJOR_VERSION: Final = 2024 MINOR_VERSION: Final = 4 -PATCH_VERSION: Final = "0b8" +PATCH_VERSION: Final = "0b9" __short_version__: Final = f"{MAJOR_VERSION}.{MINOR_VERSION}" __version__: Final = f"{__short_version__}.{PATCH_VERSION}" REQUIRED_PYTHON_VER: Final[tuple[int, int, int]] = (3, 12, 0) diff --git a/pyproject.toml b/pyproject.toml index 031afa09704..ff96331c1e1 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta" [project] name = "homeassistant" -version = "2024.4.0b8" +version = "2024.4.0b9" license = {text = "Apache-2.0"} description = "Open-source home automation platform running on Python 3." 
readme = "README.rst" From aa33da546df167da6157442f73c934fa5f409242 Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Wed, 3 Apr 2024 19:09:39 +0200 Subject: [PATCH 101/426] Bump version to 2024.4.0 --- homeassistant/const.py | 2 +- pyproject.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/homeassistant/const.py b/homeassistant/const.py index 514124e8b2e..6e08c49f970 100644 --- a/homeassistant/const.py +++ b/homeassistant/const.py @@ -18,7 +18,7 @@ from .util.signal_type import SignalType APPLICATION_NAME: Final = "HomeAssistant" MAJOR_VERSION: Final = 2024 MINOR_VERSION: Final = 4 -PATCH_VERSION: Final = "0b9" +PATCH_VERSION: Final = "0" __short_version__: Final = f"{MAJOR_VERSION}.{MINOR_VERSION}" __version__: Final = f"{__short_version__}.{PATCH_VERSION}" REQUIRED_PYTHON_VER: Final[tuple[int, int, int]] = (3, 12, 0) diff --git a/pyproject.toml b/pyproject.toml index ff96331c1e1..e0f07fac6b6 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta" [project] name = "homeassistant" -version = "2024.4.0b9" +version = "2024.4.0" license = {text = "Apache-2.0"} description = "Open-source home automation platform running on Python 3." readme = "README.rst" From 9ba4d26abd7d082ddb55af4a2ac456307ddaf09e Mon Sep 17 00:00:00 2001 From: IngoK1 <45150614+IngoK1@users.noreply.github.com> Date: Tue, 2 Apr 2024 00:07:02 +0200 Subject: [PATCH 102/426] Fix for Sonos URL encoding problem #102557 (#109518) * Fix for URL encoding problem #102557 Fixes the problem "Cannot play media with spaces in folder names to Sonos #102557" removing the encoding of the strings in the music library. 
* Fix type casting problem * Update media_browser.py to fix pr check findings Added required casting for all unquote statements to avoid further casting findings in the pr checks * Update media_browser.py Checked on linting, lets give it another try * Update media_browser.py Updated ruff run * Update media_browser.py - added version run through ruff * Update media_browser.py - ruff changes * Apply ruff formatting * Update homeassistant/components/sonos/media_browser.py Co-authored-by: jjlawren * Update homeassistant/components/sonos/media_browser.py Co-authored-by: jjlawren * Update homeassistant/components/sonos/media_browser.py Co-authored-by: jjlawren * Update homeassistant/components/sonos/media_browser.py Co-authored-by: jjlawren --------- Co-authored-by: computeq-admin <51021172+computeq-admin@users.noreply.github.com> Co-authored-by: Jason Lawrence --- homeassistant/components/sonos/media_browser.py | 16 ++++++++++------ 1 file changed, 10 insertions(+), 6 deletions(-) diff --git a/homeassistant/components/sonos/media_browser.py b/homeassistant/components/sonos/media_browser.py index 87ee3ed3b4d..6e6f388ed50 100644 --- a/homeassistant/components/sonos/media_browser.py +++ b/homeassistant/components/sonos/media_browser.py @@ -7,6 +7,7 @@ from contextlib import suppress from functools import partial import logging from typing import cast +import urllib.parse from soco.data_structures import DidlObject from soco.ms_data_structures import MusicServiceItem @@ -60,12 +61,14 @@ def get_thumbnail_url_full( media_content_id, media_content_type, ) - return getattr(item, "album_art_uri", None) + return urllib.parse.unquote(getattr(item, "album_art_uri", "")) - return get_browse_image_url( - media_content_type, - media_content_id, - media_image_id, + return urllib.parse.unquote( + get_browse_image_url( + media_content_type, + media_content_id, + media_image_id, + ) ) @@ -166,6 +169,7 @@ def build_item_response( payload["idstring"] = "A:ALBUMARTIST/" + "/".join( 
payload["idstring"].split("/")[2:] ) + payload["idstring"] = urllib.parse.unquote(payload["idstring"]) try: search_type = MEDIA_TYPES_TO_SONOS[payload["search_type"]] @@ -510,7 +514,7 @@ def get_media( if not item_id.startswith("A:ALBUM") and search_type == SONOS_ALBUM: item_id = "A:ALBUMARTIST/" + "/".join(item_id.split("/")[2:]) - search_term = item_id.split("/")[-1] + search_term = urllib.parse.unquote(item_id.split("/")[-1]) matches = media_library.get_music_library_information( search_type, search_term=search_term, full_album_art_uri=True ) From 590546a9a56eb817739c789ed19c3ceec6ca3653 Mon Sep 17 00:00:00 2001 From: Jan-Philipp Benecke Date: Thu, 28 Mar 2024 12:07:55 +0100 Subject: [PATCH 103/426] Use `setup_test_component_platform` helper for sensor entity component tests instead of `hass.components` (#114316) * Use `setup_test_component_platform` helper for sensor entity component tests instead of `hass.components` * Missing file * Fix import * Remove invalid device class --- tests/common.py | 14 +- tests/components/conftest.py | 9 + tests/components/mqtt/test_init.py | 14 +- .../sensor.py => components/sensor/common.py} | 49 +-- .../sensor/test_device_condition.py | 32 +- .../components/sensor/test_device_trigger.py | 32 +- tests/components/sensor/test_init.py | 360 +++++++----------- tests/components/sensor/test_recorder.py | 15 +- 8 files changed, 219 insertions(+), 306 deletions(-) rename tests/{testing_config/custom_components/test/sensor.py => components/sensor/common.py} (84%) diff --git a/tests/common.py b/tests/common.py index a7d4cf6b83a..0ac0ee4556b 100644 --- a/tests/common.py +++ b/tests/common.py @@ -1461,7 +1461,10 @@ def mock_integration( def mock_platform( - hass: HomeAssistant, platform_path: str, module: Mock | MockPlatform | None = None + hass: HomeAssistant, + platform_path: str, + module: Mock | MockPlatform | None = None, + built_in=True, ) -> None: """Mock a platform. 
@@ -1472,7 +1475,7 @@ def mock_platform( module_cache = hass.data[loader.DATA_COMPONENTS] if domain not in integration_cache: - mock_integration(hass, MockModule(domain)) + mock_integration(hass, MockModule(domain), built_in=built_in) integration_cache[domain]._top_level_files.add(f"{platform_name}.py") _LOGGER.info("Adding mock integration platform: %s", platform_path) @@ -1665,6 +1668,7 @@ def setup_test_component_platform( domain: str, entities: Sequence[Entity], from_config_entry: bool = False, + built_in: bool = True, ) -> MockPlatform: """Mock a test component platform for tests.""" @@ -1695,9 +1699,5 @@ def setup_test_component_platform( platform.async_setup_entry = _async_setup_entry platform.async_setup_platform = None - mock_platform( - hass, - f"test.{domain}", - platform, - ) + mock_platform(hass, f"test.{domain}", platform, built_in=built_in) return platform diff --git a/tests/components/conftest.py b/tests/components/conftest.py index 4669e17c8e7..d84fb3600ab 100644 --- a/tests/components/conftest.py +++ b/tests/components/conftest.py @@ -10,6 +10,7 @@ from homeassistant.const import STATE_OFF, STATE_ON if TYPE_CHECKING: from tests.components.light.common import MockLight + from tests.components.sensor.common import MockSensor @pytest.fixture(scope="session", autouse=True) @@ -118,3 +119,11 @@ def mock_light_entities() -> list["MockLight"]: MockLight("Ceiling", STATE_OFF), MockLight(None, STATE_OFF), ] + + +@pytest.fixture +def mock_sensor_entities() -> dict[str, "MockSensor"]: + """Return mocked sensor entities.""" + from tests.components.sensor.common import get_mock_sensor_entities + + return get_mock_sensor_entities() diff --git a/tests/components/mqtt/test_init.py b/tests/components/mqtt/test_init.py index 3459e6fc058..a9f2ba4354b 100644 --- a/tests/components/mqtt/test_init.py +++ b/tests/components/mqtt/test_init.py @@ -23,6 +23,7 @@ from homeassistant.components.mqtt.models import ( MqttValueTemplateException, ReceiveMessage, ) +from 
homeassistant.components.sensor import SensorDeviceClass from homeassistant.config_entries import ConfigEntryDisabler, ConfigEntryState from homeassistant.const import ( ATTR_ASSUMED_STATE, @@ -52,10 +53,9 @@ from tests.common import ( async_fire_mqtt_message, async_fire_time_changed, mock_restore_cache, + setup_test_component_platform, ) -from tests.testing_config.custom_components.test.sensor import ( # type: ignore[attr-defined] - DEVICE_CLASSES, -) +from tests.components.sensor.common import MockSensor from tests.typing import ( MqttMockHAClient, MqttMockHAClientGenerator, @@ -3142,12 +3142,12 @@ async def test_debug_info_non_mqtt( device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, mqtt_mock_entry: MqttMockHAClientGenerator, + mock_sensor_entities: dict[str, MockSensor], ) -> None: """Test we get empty debug_info for a device with non MQTT entities.""" await mqtt_mock_entry() domain = "sensor" - platform = getattr(hass.components, f"test.{domain}") - platform.init() + setup_test_component_platform(hass, domain, mock_sensor_entities) config_entry = MockConfigEntry(domain="test", data={}) config_entry.add_to_hass(hass) @@ -3155,11 +3155,11 @@ async def test_debug_info_non_mqtt( config_entry_id=config_entry.entry_id, connections={(dr.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")}, ) - for device_class in DEVICE_CLASSES: + for device_class in SensorDeviceClass: entity_registry.async_get_or_create( domain, "test", - platform.ENTITIES[device_class].unique_id, + mock_sensor_entities[device_class].unique_id, device_id=device_entry.id, ) diff --git a/tests/testing_config/custom_components/test/sensor.py b/tests/components/sensor/common.py similarity index 84% rename from tests/testing_config/custom_components/test/sensor.py rename to tests/components/sensor/common.py index 9ebf16b9dcd..53a93b73da3 100644 --- a/tests/testing_config/custom_components/test/sensor.py +++ b/tests/components/sensor/common.py @@ -1,10 +1,6 @@ -"""Provide a mock sensor 
platform. - -Call init before using it in your tests to ensure clean test data. -""" +"""Common test utilities for sensor entity component tests.""" from homeassistant.components.sensor import ( - DEVICE_CLASSES, RestoreSensor, SensorDeviceClass, SensorEntity, @@ -24,8 +20,6 @@ from homeassistant.const import ( from tests.common import MockEntity -DEVICE_CLASSES.append("none") - UNITS_OF_MEASUREMENT = { SensorDeviceClass.APPARENT_POWER: UnitOfApparentPower.VOLT_AMPERE, # apparent power (VA) SensorDeviceClass.BATTERY: PERCENTAGE, # % of battery that is left @@ -56,34 +50,6 @@ UNITS_OF_MEASUREMENT = { SensorDeviceClass.GAS: UnitOfVolume.CUBIC_METERS, # gas (m³) } -ENTITIES = {} - - -def init(empty=False): - """Initialize the platform with entities.""" - global ENTITIES - - ENTITIES = ( - {} - if empty - else { - device_class: MockSensor( - name=f"{device_class} sensor", - unique_id=f"unique_{device_class}", - device_class=device_class, - native_unit_of_measurement=UNITS_OF_MEASUREMENT.get(device_class), - ) - for device_class in DEVICE_CLASSES - } - ) - - -async def async_setup_platform( - hass, config, async_add_entities_callback, discovery_info=None -): - """Return mock entities.""" - async_add_entities_callback(list(ENTITIES.values())) - class MockSensor(MockEntity, SensorEntity): """Mock Sensor class.""" @@ -141,3 +107,16 @@ class MockRestoreSensor(MockSensor, RestoreSensor): self._values["native_unit_of_measurement"] = ( last_sensor_data.native_unit_of_measurement ) + + +def get_mock_sensor_entities() -> dict[str, MockSensor]: + """Get mock sensor entities.""" + return { + device_class: MockSensor( + name=f"{device_class} sensor", + unique_id=f"unique_{device_class}", + device_class=device_class, + native_unit_of_measurement=UNITS_OF_MEASUREMENT.get(device_class), + ) + for device_class in SensorDeviceClass + } diff --git a/tests/components/sensor/test_device_condition.py b/tests/components/sensor/test_device_condition.py index 7263154c1dc..b633c744205 100644 
--- a/tests/components/sensor/test_device_condition.py +++ b/tests/components/sensor/test_device_condition.py @@ -26,8 +26,9 @@ from tests.common import ( async_get_device_automation_capabilities, async_get_device_automations, async_mock_service, + setup_test_component_platform, ) -from tests.testing_config.custom_components.test.sensor import UNITS_OF_MEASUREMENT +from tests.components.sensor.common import UNITS_OF_MEASUREMENT, MockSensor @pytest.fixture(autouse=True, name="stub_blueprint_populate") @@ -85,11 +86,10 @@ async def test_get_conditions( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - enable_custom_integrations: None, + mock_sensor_entities: dict[str, MockSensor], ) -> None: """Test we get the expected conditions from a sensor.""" - platform = getattr(hass.components, f"test.{DOMAIN}") - platform.init() + setup_test_component_platform(hass, DOMAIN, mock_sensor_entities.values()) assert await async_setup_component(hass, DOMAIN, {DOMAIN: {CONF_PLATFORM: "test"}}) await hass.async_block_till_done() sensor_entries = {} @@ -104,7 +104,7 @@ async def test_get_conditions( sensor_entries[device_class] = entity_registry.async_get_or_create( DOMAIN, "test", - platform.ENTITIES[device_class].unique_id, + mock_sensor_entities[device_class].unique_id, device_id=device_entry.id, ) @@ -284,6 +284,7 @@ async def test_get_condition_capabilities( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, + mock_sensor_entities: dict[str, MockSensor], set_state, device_class_reg, device_class_state, @@ -291,8 +292,7 @@ async def test_get_condition_capabilities( unit_state, ) -> None: """Test we get the expected capabilities from a sensor condition.""" - platform = getattr(hass.components, f"test.{DOMAIN}") - platform.init() + setup_test_component_platform(hass, DOMAIN, mock_sensor_entities.values()) config_entry = MockConfigEntry(domain="test", data={}) config_entry.add_to_hass(hass) @@ 
-303,7 +303,7 @@ async def test_get_condition_capabilities( entity_id = entity_registry.async_get_or_create( DOMAIN, "test", - platform.ENTITIES["battery"].unique_id, + mock_sensor_entities["battery"].unique_id, device_id=device_entry.id, original_device_class=device_class_reg, unit_of_measurement=unit_reg, @@ -353,6 +353,7 @@ async def test_get_condition_capabilities_legacy( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, + mock_sensor_entities: dict[str, MockSensor], set_state, device_class_reg, device_class_state, @@ -360,8 +361,7 @@ async def test_get_condition_capabilities_legacy( unit_state, ) -> None: """Test we get the expected capabilities from a sensor condition.""" - platform = getattr(hass.components, f"test.{DOMAIN}") - platform.init() + setup_test_component_platform(hass, DOMAIN, mock_sensor_entities.values()) config_entry = MockConfigEntry(domain="test", data={}) config_entry.add_to_hass(hass) @@ -372,7 +372,7 @@ async def test_get_condition_capabilities_legacy( entity_id = entity_registry.async_get_or_create( DOMAIN, "test", - platform.ENTITIES["battery"].unique_id, + mock_sensor_entities["battery"].unique_id, device_id=device_entry.id, original_device_class=device_class_reg, unit_of_measurement=unit_reg, @@ -417,11 +417,13 @@ async def test_get_condition_capabilities_legacy( async def test_get_condition_capabilities_none( hass: HomeAssistant, entity_registry: er.EntityRegistry, - enable_custom_integrations: None, ) -> None: """Test we get the expected capabilities from a sensor condition.""" - platform = getattr(hass.components, f"test.{DOMAIN}") - platform.init() + entity = MockSensor( + name="none sensor", + unique_id="unique_none", + ) + setup_test_component_platform(hass, DOMAIN, [entity]) config_entry = MockConfigEntry(domain="test", data={}) config_entry.add_to_hass(hass) @@ -429,7 +431,7 @@ async def test_get_condition_capabilities_none( entry_none = entity_registry.async_get_or_create( DOMAIN, 
"test", - platform.ENTITIES["none"].unique_id, + entity.unique_id, ) assert await async_setup_component(hass, DOMAIN, {DOMAIN: {CONF_PLATFORM: "test"}}) diff --git a/tests/components/sensor/test_device_trigger.py b/tests/components/sensor/test_device_trigger.py index 4193adc9299..98bea960fcc 100644 --- a/tests/components/sensor/test_device_trigger.py +++ b/tests/components/sensor/test_device_trigger.py @@ -30,8 +30,9 @@ from tests.common import ( async_get_device_automation_capabilities, async_get_device_automations, async_mock_service, + setup_test_component_platform, ) -from tests.testing_config.custom_components.test.sensor import UNITS_OF_MEASUREMENT +from tests.components.sensor.common import UNITS_OF_MEASUREMENT, MockSensor @pytest.fixture(autouse=True, name="stub_blueprint_populate") @@ -87,11 +88,10 @@ async def test_get_triggers( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - enable_custom_integrations: None, + mock_sensor_entities: dict[str, MockSensor], ) -> None: """Test we get the expected triggers from a sensor.""" - platform = getattr(hass.components, f"test.{DOMAIN}") - platform.init() + setup_test_component_platform(hass, DOMAIN, mock_sensor_entities.values()) assert await async_setup_component(hass, DOMAIN, {DOMAIN: {CONF_PLATFORM: "test"}}) await hass.async_block_till_done() sensor_entries: dict[SensorDeviceClass, er.RegistryEntry] = {} @@ -106,7 +106,7 @@ async def test_get_triggers( sensor_entries[device_class] = entity_registry.async_get_or_create( DOMAIN, "test", - platform.ENTITIES[device_class].unique_id, + mock_sensor_entities[device_class].unique_id, device_id=device_entry.id, ) @@ -241,6 +241,7 @@ async def test_get_trigger_capabilities( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, + mock_sensor_entities: dict[str, MockSensor], set_state, device_class_reg, device_class_state, @@ -248,8 +249,7 @@ async def test_get_trigger_capabilities( 
unit_state, ) -> None: """Test we get the expected capabilities from a sensor trigger.""" - platform = getattr(hass.components, f"test.{DOMAIN}") - platform.init() + setup_test_component_platform(hass, DOMAIN, mock_sensor_entities) config_entry = MockConfigEntry(domain="test", data={}) config_entry.add_to_hass(hass) @@ -260,7 +260,7 @@ async def test_get_trigger_capabilities( entity_id = entity_registry.async_get_or_create( DOMAIN, "test", - platform.ENTITIES["battery"].unique_id, + mock_sensor_entities["battery"].unique_id, device_id=device_entry.id, original_device_class=device_class_reg, unit_of_measurement=unit_reg, @@ -311,6 +311,7 @@ async def test_get_trigger_capabilities_legacy( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, + mock_sensor_entities: dict[str, MockSensor], set_state, device_class_reg, device_class_state, @@ -318,8 +319,7 @@ async def test_get_trigger_capabilities_legacy( unit_state, ) -> None: """Test we get the expected capabilities from a sensor trigger.""" - platform = getattr(hass.components, f"test.{DOMAIN}") - platform.init() + setup_test_component_platform(hass, DOMAIN, mock_sensor_entities) config_entry = MockConfigEntry(domain="test", data={}) config_entry.add_to_hass(hass) @@ -330,7 +330,7 @@ async def test_get_trigger_capabilities_legacy( entity_id = entity_registry.async_get_or_create( DOMAIN, "test", - platform.ENTITIES["battery"].unique_id, + mock_sensor_entities["battery"].unique_id, device_id=device_entry.id, original_device_class=device_class_reg, unit_of_measurement=unit_reg, @@ -374,11 +374,13 @@ async def test_get_trigger_capabilities_legacy( async def test_get_trigger_capabilities_none( hass: HomeAssistant, entity_registry: er.EntityRegistry, - enable_custom_integrations: None, ) -> None: """Test we get the expected capabilities from a sensor trigger.""" - platform = getattr(hass.components, f"test.{DOMAIN}") - platform.init() + entity = MockSensor( + name="none sensor", + 
unique_id="unique_none", + ) + setup_test_component_platform(hass, DOMAIN, [entity]) config_entry = MockConfigEntry(domain="test", data={}) config_entry.add_to_hass(hass) @@ -386,7 +388,7 @@ async def test_get_trigger_capabilities_none( entry_none = entity_registry.async_get_or_create( DOMAIN, "test", - platform.ENTITIES["none"].unique_id, + entity.unique_id, ) assert await async_setup_component(hass, DOMAIN, {DOMAIN: {CONF_PLATFORM: "test"}}) diff --git a/tests/components/sensor/test_init.py b/tests/components/sensor/test_init.py index 59df07bb0b9..0ecb4b9c60f 100644 --- a/tests/components/sensor/test_init.py +++ b/tests/components/sensor/test_init.py @@ -63,7 +63,9 @@ from tests.common import ( mock_integration, mock_platform, mock_restore_cache_with_extra_data, + setup_test_component_platform, ) +from tests.components.sensor.common import MockRestoreSensor, MockSensor TEST_DOMAIN = "test" @@ -103,7 +105,6 @@ TEST_DOMAIN = "test" ) async def test_temperature_conversion( hass: HomeAssistant, - enable_custom_integrations: None, unit_system, native_unit, state_unit, @@ -112,16 +113,14 @@ async def test_temperature_conversion( ) -> None: """Test temperature conversion.""" hass.config.units = unit_system - platform = getattr(hass.components, "test.sensor") - platform.init(empty=True) - platform.ENTITIES["0"] = platform.MockSensor( + entity0 = MockSensor( name="Test", native_value=str(native_value), native_unit_of_measurement=native_unit, device_class=SensorDeviceClass.TEMPERATURE, ) + setup_test_component_platform(hass, sensor.DOMAIN, [entity0]) - entity0 = platform.ENTITIES["0"] assert await async_setup_component(hass, "sensor", {"sensor": {"platform": "test"}}) await hass.async_block_till_done() @@ -132,19 +131,17 @@ async def test_temperature_conversion( @pytest.mark.parametrize("device_class", [None, SensorDeviceClass.PRESSURE]) async def test_temperature_conversion_wrong_device_class( - hass: HomeAssistant, device_class, enable_custom_integrations: None + hass: 
HomeAssistant, device_class ) -> None: """Test temperatures are not converted if the sensor has wrong device class.""" - platform = getattr(hass.components, "test.sensor") - platform.init(empty=True) - platform.ENTITIES["0"] = platform.MockSensor( + entity0 = MockSensor( name="Test", native_value="0.0", native_unit_of_measurement=UnitOfTemperature.FAHRENHEIT, device_class=device_class, ) + setup_test_component_platform(hass, sensor.DOMAIN, [entity0]) - entity0 = platform.ENTITIES["0"] assert await async_setup_component(hass, "sensor", {"sensor": {"platform": "test"}}) await hass.async_block_till_done() @@ -158,21 +155,19 @@ async def test_temperature_conversion_wrong_device_class( async def test_deprecated_last_reset( hass: HomeAssistant, caplog: pytest.LogCaptureFixture, - enable_custom_integrations: None, state_class, ) -> None: """Test warning on deprecated last reset.""" - platform = getattr(hass.components, "test.sensor") - platform.init(empty=True) - platform.ENTITIES["0"] = platform.MockSensor( + entity0 = MockSensor( name="Test", state_class=state_class, last_reset=dt_util.utc_from_timestamp(0) ) + setup_test_component_platform(hass, sensor.DOMAIN, [entity0]) assert await async_setup_component(hass, "sensor", {"sensor": {"platform": "test"}}) await hass.async_block_till_done() assert ( - "Entity sensor.test () " + "Entity sensor.test () " f"with state_class {state_class} has set last_reset. Setting last_reset for " "entities with state_class other than 'total' is not supported. Please update " "your configuration if state_class is manually configured." 
@@ -185,7 +180,6 @@ async def test_deprecated_last_reset( async def test_datetime_conversion( hass: HomeAssistant, caplog: pytest.LogCaptureFixture, - enable_custom_integrations: None, ) -> None: """Test conversion of datetime.""" test_timestamp = datetime(2017, 12, 19, 18, 29, 42, tzinfo=UTC) @@ -193,51 +187,49 @@ async def test_datetime_conversion( dt_util.get_time_zone("Europe/Amsterdam") ) test_date = date(2017, 12, 19) - platform = getattr(hass.components, "test.sensor") - platform.init(empty=True) - platform.ENTITIES["0"] = platform.MockSensor( - name="Test", - native_value=test_timestamp, - device_class=SensorDeviceClass.TIMESTAMP, - ) - platform.ENTITIES["1"] = platform.MockSensor( - name="Test", native_value=test_date, device_class=SensorDeviceClass.DATE - ) - platform.ENTITIES["2"] = platform.MockSensor( - name="Test", native_value=None, device_class=SensorDeviceClass.TIMESTAMP - ) - platform.ENTITIES["3"] = platform.MockSensor( - name="Test", native_value=None, device_class=SensorDeviceClass.DATE - ) - platform.ENTITIES["4"] = platform.MockSensor( - name="Test", - native_value=test_local_timestamp, - device_class=SensorDeviceClass.TIMESTAMP, - ) + entities = [ + MockSensor( + name="Test", + native_value=test_timestamp, + device_class=SensorDeviceClass.TIMESTAMP, + ), + MockSensor( + name="Test", native_value=test_date, device_class=SensorDeviceClass.DATE + ), + MockSensor( + name="Test", native_value=None, device_class=SensorDeviceClass.TIMESTAMP + ), + MockSensor(name="Test", native_value=None, device_class=SensorDeviceClass.DATE), + MockSensor( + name="Test", + native_value=test_local_timestamp, + device_class=SensorDeviceClass.TIMESTAMP, + ), + ] + setup_test_component_platform(hass, sensor.DOMAIN, entities) assert await async_setup_component(hass, "sensor", {"sensor": {"platform": "test"}}) await hass.async_block_till_done() - state = hass.states.get(platform.ENTITIES["0"].entity_id) + state = hass.states.get(entities[0].entity_id) assert state.state 
== test_timestamp.isoformat() - state = hass.states.get(platform.ENTITIES["1"].entity_id) + state = hass.states.get(entities[1].entity_id) assert state.state == test_date.isoformat() - state = hass.states.get(platform.ENTITIES["2"].entity_id) + state = hass.states.get(entities[2].entity_id) assert state.state == STATE_UNKNOWN - state = hass.states.get(platform.ENTITIES["3"].entity_id) + state = hass.states.get(entities[3].entity_id) assert state.state == STATE_UNKNOWN - state = hass.states.get(platform.ENTITIES["4"].entity_id) + state = hass.states.get(entities[4].entity_id) assert state.state == test_timestamp.isoformat() async def test_a_sensor_with_a_non_numeric_device_class( hass: HomeAssistant, caplog: pytest.LogCaptureFixture, - enable_custom_integrations: None, ) -> None: """Test that a sensor with a non numeric device class will be non numeric. @@ -249,29 +241,29 @@ async def test_a_sensor_with_a_non_numeric_device_class( dt_util.get_time_zone("Europe/Amsterdam") ) - platform = getattr(hass.components, "test.sensor") - platform.init(empty=True) - platform.ENTITIES["0"] = platform.MockSensor( - name="Test", - native_value=test_local_timestamp, - native_unit_of_measurement="", - device_class=SensorDeviceClass.TIMESTAMP, - ) - - platform.ENTITIES["1"] = platform.MockSensor( - name="Test", - native_value=test_local_timestamp, - state_class="", - device_class=SensorDeviceClass.TIMESTAMP, - ) + entities = [ + MockSensor( + name="Test", + native_value=test_local_timestamp, + native_unit_of_measurement="", + device_class=SensorDeviceClass.TIMESTAMP, + ), + MockSensor( + name="Test", + native_value=test_local_timestamp, + state_class="", + device_class=SensorDeviceClass.TIMESTAMP, + ), + ] + setup_test_component_platform(hass, sensor.DOMAIN, entities) assert await async_setup_component(hass, "sensor", {"sensor": {"platform": "test"}}) await hass.async_block_till_done() - state = hass.states.get(platform.ENTITIES["0"].entity_id) + state = 
hass.states.get(entities[0].entity_id) assert state.state == test_timestamp.isoformat() - state = hass.states.get(platform.ENTITIES["1"].entity_id) + state = hass.states.get(entities[1].entity_id) assert state.state == test_timestamp.isoformat() @@ -285,17 +277,15 @@ async def test_a_sensor_with_a_non_numeric_device_class( async def test_deprecated_datetime_str( hass: HomeAssistant, caplog: pytest.LogCaptureFixture, - enable_custom_integrations: None, device_class, state_value, provides, ) -> None: """Test warning on deprecated str for a date(time) value.""" - platform = getattr(hass.components, "test.sensor") - platform.init(empty=True) - platform.ENTITIES["0"] = platform.MockSensor( + entity0 = MockSensor( name="Test", native_value=state_value, device_class=device_class ) + setup_test_component_platform(hass, sensor.DOMAIN, [entity0]) assert await async_setup_component(hass, "sensor", {"sensor": {"platform": "test"}}) await hass.async_block_till_done() @@ -309,17 +299,15 @@ async def test_deprecated_datetime_str( async def test_reject_timezoneless_datetime_str( hass: HomeAssistant, caplog: pytest.LogCaptureFixture, - enable_custom_integrations: None, ) -> None: """Test rejection of timezone-less datetime objects as timestamp.""" test_timestamp = datetime(2017, 12, 19, 18, 29, 42, tzinfo=None) - platform = getattr(hass.components, "test.sensor") - platform.init(empty=True) - platform.ENTITIES["0"] = platform.MockSensor( + entity0 = MockSensor( name="Test", native_value=test_timestamp, device_class=SensorDeviceClass.TIMESTAMP, ) + setup_test_component_platform(hass, sensor.DOMAIN, [entity0]) assert await async_setup_component(hass, "sensor", {"sensor": {"platform": "test"}}) await hass.async_block_till_done() @@ -403,7 +391,6 @@ RESTORE_DATA = { ) async def test_restore_sensor_save_state( hass: HomeAssistant, - enable_custom_integrations: None, hass_storage: dict[str, Any], native_value, native_value_type, @@ -412,16 +399,14 @@ async def 
test_restore_sensor_save_state( uom, ) -> None: """Test RestoreSensor.""" - platform = getattr(hass.components, "test.sensor") - platform.init(empty=True) - platform.ENTITIES["0"] = platform.MockRestoreSensor( + entity0 = MockRestoreSensor( name="Test", native_value=native_value, native_unit_of_measurement=uom, device_class=device_class, ) + setup_test_component_platform(hass, sensor.DOMAIN, [entity0]) - entity0 = platform.ENTITIES["0"] assert await async_setup_component(hass, "sensor", {"sensor": {"platform": "test"}}) await hass.async_block_till_done() @@ -472,7 +457,6 @@ async def test_restore_sensor_save_state( ) async def test_restore_sensor_restore_state( hass: HomeAssistant, - enable_custom_integrations: None, hass_storage: dict[str, Any], native_value, native_value_type, @@ -483,14 +467,12 @@ async def test_restore_sensor_restore_state( """Test RestoreSensor.""" mock_restore_cache_with_extra_data(hass, ((State("sensor.test", ""), extra_data),)) - platform = getattr(hass.components, "test.sensor") - platform.init(empty=True) - platform.ENTITIES["0"] = platform.MockRestoreSensor( + entity0 = MockRestoreSensor( name="Test", device_class=device_class, ) + setup_test_component_platform(hass, sensor.DOMAIN, [entity0]) - entity0 = platform.ENTITIES["0"] assert await async_setup_component(hass, "sensor", {"sensor": {"platform": "test"}}) await hass.async_block_till_done() @@ -621,7 +603,6 @@ async def test_restore_sensor_restore_state( ) async def test_custom_unit( hass: HomeAssistant, - enable_custom_integrations: None, device_class, native_unit, custom_unit, @@ -638,17 +619,15 @@ async def test_custom_unit( ) await hass.async_block_till_done() - platform = getattr(hass.components, "test.sensor") - platform.init(empty=True) - platform.ENTITIES["0"] = platform.MockSensor( + entity0 = MockSensor( name="Test", native_value=str(native_value), native_unit_of_measurement=native_unit, device_class=device_class, unique_id="very_unique", ) + 
setup_test_component_platform(hass, sensor.DOMAIN, [entity0]) - entity0 = platform.ENTITIES["0"] assert await async_setup_component(hass, "sensor", {"sensor": {"platform": "test"}}) await hass.async_block_till_done() @@ -884,7 +863,6 @@ async def test_custom_unit( ) async def test_custom_unit_change( hass: HomeAssistant, - enable_custom_integrations: None, native_unit, custom_unit, state_unit, @@ -895,17 +873,15 @@ async def test_custom_unit_change( ) -> None: """Test custom unit changes are picked up.""" entity_registry = er.async_get(hass) - platform = getattr(hass.components, "test.sensor") - platform.init(empty=True) - platform.ENTITIES["0"] = platform.MockSensor( + entity0 = MockSensor( name="Test", native_value=str(native_value), native_unit_of_measurement=native_unit, device_class=device_class, unique_id="very_unique", ) + setup_test_component_platform(hass, sensor.DOMAIN, [entity0]) - entity0 = platform.ENTITIES["0"] assert await async_setup_component(hass, "sensor", {"sensor": {"platform": "test"}}) await hass.async_block_till_done() @@ -972,7 +948,6 @@ async def test_custom_unit_change( ) async def test_unit_conversion_priority( hass: HomeAssistant, - enable_custom_integrations: None, unit_system, native_unit, automatic_unit, @@ -990,27 +965,21 @@ async def test_unit_conversion_priority( hass.config.units = unit_system entity_registry = er.async_get(hass) - platform = getattr(hass.components, "test.sensor") - platform.init(empty=True) - platform.ENTITIES["0"] = platform.MockSensor( + entity0 = MockSensor( name="Test", device_class=device_class, native_unit_of_measurement=native_unit, native_value=str(native_value), unique_id="very_unique", ) - entity0 = platform.ENTITIES["0"] - - platform.ENTITIES["1"] = platform.MockSensor( + entity1 = MockSensor( name="Test", device_class=device_class, native_unit_of_measurement=native_unit, native_value=str(native_value), ) - entity1 = platform.ENTITIES["1"] - - platform.ENTITIES["2"] = platform.MockSensor( + entity2 = 
MockSensor( name="Test", device_class=device_class, native_unit_of_measurement=native_unit, @@ -1018,16 +987,23 @@ async def test_unit_conversion_priority( suggested_unit_of_measurement=suggested_unit, unique_id="very_unique_2", ) - entity2 = platform.ENTITIES["2"] - - platform.ENTITIES["3"] = platform.MockSensor( + entity3 = MockSensor( name="Test", device_class=device_class, native_unit_of_measurement=native_unit, native_value=str(native_value), suggested_unit_of_measurement=suggested_unit, ) - entity3 = platform.ENTITIES["3"] + setup_test_component_platform( + hass, + sensor.DOMAIN, + [ + entity0, + entity1, + entity2, + entity3, + ], + ) assert await async_setup_component(hass, "sensor", {"sensor": {"platform": "test"}}) await hass.async_block_till_done() @@ -1119,7 +1095,6 @@ async def test_unit_conversion_priority( ) async def test_unit_conversion_priority_precision( hass: HomeAssistant, - enable_custom_integrations: None, unit_system, native_unit, automatic_unit, @@ -1138,10 +1113,8 @@ async def test_unit_conversion_priority_precision( hass.config.units = unit_system entity_registry = er.async_get(hass) - platform = getattr(hass.components, "test.sensor") - platform.init(empty=True) - platform.ENTITIES["0"] = platform.MockSensor( + entity0 = MockSensor( name="Test", device_class=device_class, native_unit_of_measurement=native_unit, @@ -1149,18 +1122,14 @@ async def test_unit_conversion_priority_precision( suggested_display_precision=suggested_precision, unique_id="very_unique", ) - entity0 = platform.ENTITIES["0"] - - platform.ENTITIES["1"] = platform.MockSensor( + entity1 = MockSensor( name="Test", device_class=device_class, native_unit_of_measurement=native_unit, native_value=str(native_value), suggested_display_precision=suggested_precision, ) - entity1 = platform.ENTITIES["1"] - - platform.ENTITIES["2"] = platform.MockSensor( + entity2 = MockSensor( name="Test", device_class=device_class, native_unit_of_measurement=native_unit, @@ -1169,9 +1138,7 @@ 
async def test_unit_conversion_priority_precision( suggested_unit_of_measurement=suggested_unit, unique_id="very_unique_2", ) - entity2 = platform.ENTITIES["2"] - - platform.ENTITIES["3"] = platform.MockSensor( + entity3 = MockSensor( name="Test", device_class=device_class, native_unit_of_measurement=native_unit, @@ -1179,7 +1146,16 @@ async def test_unit_conversion_priority_precision( suggested_display_precision=suggested_precision, suggested_unit_of_measurement=suggested_unit, ) - entity3 = platform.ENTITIES["3"] + setup_test_component_platform( + hass, + sensor.DOMAIN, + [ + entity0, + entity1, + entity2, + entity3, + ], + ) assert await async_setup_component(hass, "sensor", {"sensor": {"platform": "test"}}) await hass.async_block_till_done() @@ -1280,7 +1256,6 @@ async def test_unit_conversion_priority_precision( ) async def test_unit_conversion_priority_suggested_unit_change( hass: HomeAssistant, - enable_custom_integrations: None, unit_system, native_unit, original_unit, @@ -1294,8 +1269,6 @@ async def test_unit_conversion_priority_suggested_unit_change( hass.config.units = unit_system entity_registry = er.async_get(hass) - platform = getattr(hass.components, "test.sensor") - platform.init(empty=True) # Pre-register entities entry = entity_registry.async_get_or_create( @@ -1315,16 +1288,14 @@ async def test_unit_conversion_priority_suggested_unit_change( {"suggested_unit_of_measurement": original_unit}, ) - platform.ENTITIES["0"] = platform.MockSensor( + entity0 = MockSensor( name="Test", device_class=device_class, native_unit_of_measurement=native_unit, native_value=str(native_value), unique_id="very_unique", ) - entity0 = platform.ENTITIES["0"] - - platform.ENTITIES["1"] = platform.MockSensor( + entity1 = MockSensor( name="Test", device_class=device_class, native_unit_of_measurement=native_unit, @@ -1332,7 +1303,7 @@ async def test_unit_conversion_priority_suggested_unit_change( suggested_unit_of_measurement=suggested_unit, unique_id="very_unique_2", ) - 
entity1 = platform.ENTITIES["1"] + setup_test_component_platform(hass, sensor.DOMAIN, [entity0, entity1]) assert await async_setup_component(hass, "sensor", {"sensor": {"platform": "test"}}) await hass.async_block_till_done() @@ -1392,7 +1363,6 @@ async def test_unit_conversion_priority_suggested_unit_change( ) async def test_unit_conversion_priority_suggested_unit_change_2( hass: HomeAssistant, - enable_custom_integrations: None, native_unit_1, native_unit_2, suggested_unit, @@ -1405,8 +1375,6 @@ async def test_unit_conversion_priority_suggested_unit_change_2( hass.config.units = METRIC_SYSTEM entity_registry = er.async_get(hass) - platform = getattr(hass.components, "test.sensor") - platform.init(empty=True) # Pre-register entities entity_registry.async_get_or_create( @@ -1416,16 +1384,14 @@ async def test_unit_conversion_priority_suggested_unit_change_2( "sensor", "test", "very_unique_2", unit_of_measurement=native_unit_1 ) - platform.ENTITIES["0"] = platform.MockSensor( + entity0 = MockSensor( name="Test", device_class=device_class, native_unit_of_measurement=native_unit_2, native_value=str(native_value), unique_id="very_unique", ) - entity0 = platform.ENTITIES["0"] - - platform.ENTITIES["1"] = platform.MockSensor( + entity1 = MockSensor( name="Test", device_class=device_class, native_unit_of_measurement=native_unit_2, @@ -1433,7 +1399,7 @@ async def test_unit_conversion_priority_suggested_unit_change_2( suggested_unit_of_measurement=suggested_unit, unique_id="very_unique_2", ) - entity1 = platform.ENTITIES["1"] + setup_test_component_platform(hass, sensor.DOMAIN, [entity0, entity1]) assert await async_setup_component(hass, "sensor", {"sensor": {"platform": "test"}}) await hass.async_block_till_done() @@ -1496,7 +1462,6 @@ async def test_unit_conversion_priority_suggested_unit_change_2( ) async def test_suggested_precision_option( hass: HomeAssistant, - enable_custom_integrations: None, unit_system, native_unit, integration_suggested_precision, @@ -1510,10 
+1475,7 @@ async def test_suggested_precision_option( hass.config.units = unit_system entity_registry = er.async_get(hass) - platform = getattr(hass.components, "test.sensor") - platform.init(empty=True) - - platform.ENTITIES["0"] = platform.MockSensor( + entity0 = MockSensor( name="Test", device_class=device_class, native_unit_of_measurement=native_unit, @@ -1521,7 +1483,7 @@ async def test_suggested_precision_option( suggested_display_precision=integration_suggested_precision, unique_id="very_unique", ) - entity0 = platform.ENTITIES["0"] + setup_test_component_platform(hass, sensor.DOMAIN, [entity0]) assert await async_setup_component(hass, "sensor", {"sensor": {"platform": "test"}}) await hass.async_block_till_done() @@ -1574,7 +1536,6 @@ async def test_suggested_precision_option( ) async def test_suggested_precision_option_update( hass: HomeAssistant, - enable_custom_integrations: None, unit_system, native_unit, suggested_unit, @@ -1590,8 +1551,6 @@ async def test_suggested_precision_option_update( hass.config.units = unit_system entity_registry = er.async_get(hass) - platform = getattr(hass.components, "test.sensor") - platform.init(empty=True) # Pre-register entities entry = entity_registry.async_get_or_create("sensor", "test", "very_unique") @@ -1610,7 +1569,7 @@ async def test_suggested_precision_option_update( }, ) - platform.ENTITIES["0"] = platform.MockSensor( + entity0 = MockSensor( name="Test", device_class=device_class, native_unit_of_measurement=native_unit, @@ -1618,7 +1577,7 @@ async def test_suggested_precision_option_update( suggested_display_precision=new_precision, unique_id="very_unique", ) - entity0 = platform.ENTITIES["0"] + setup_test_component_platform(hass, sensor.DOMAIN, [entity0]) assert await async_setup_component(hass, "sensor", {"sensor": {"platform": "test"}}) await hass.async_block_till_done() @@ -1666,7 +1625,6 @@ async def test_suggested_precision_option_update( ) async def test_unit_conversion_priority_legacy_conversion_removed( 
hass: HomeAssistant, - enable_custom_integrations: None, unit_system, native_unit, original_unit, @@ -1679,22 +1637,20 @@ async def test_unit_conversion_priority_legacy_conversion_removed( hass.config.units = unit_system entity_registry = er.async_get(hass) - platform = getattr(hass.components, "test.sensor") - platform.init(empty=True) # Pre-register entities entity_registry.async_get_or_create( "sensor", "test", "very_unique", unit_of_measurement=original_unit ) - platform.ENTITIES["0"] = platform.MockSensor( + entity0 = MockSensor( name="Test", device_class=device_class, native_unit_of_measurement=native_unit, native_value=str(native_value), unique_id="very_unique", ) - entity0 = platform.ENTITIES["0"] + setup_test_component_platform(hass, sensor.DOMAIN, [entity0]) assert await async_setup_component(hass, "sensor", {"sensor": {"platform": "test"}}) await hass.async_block_till_done() @@ -1715,17 +1671,15 @@ def test_device_classes_aligned() -> None: async def test_value_unknown_in_enumeration( hass: HomeAssistant, caplog: pytest.LogCaptureFixture, - enable_custom_integrations: None, ) -> None: """Test warning on invalid enum value.""" - platform = getattr(hass.components, "test.sensor") - platform.init(empty=True) - platform.ENTITIES["0"] = platform.MockSensor( + entity0 = MockSensor( name="Test", native_value="invalid_option", device_class=SensorDeviceClass.ENUM, options=["option1", "option2"], ) + setup_test_component_platform(hass, sensor.DOMAIN, [entity0]) assert await async_setup_component(hass, "sensor", {"sensor": {"platform": "test"}}) await hass.async_block_till_done() @@ -1739,17 +1693,15 @@ async def test_value_unknown_in_enumeration( async def test_invalid_enumeration_entity_with_device_class( hass: HomeAssistant, caplog: pytest.LogCaptureFixture, - enable_custom_integrations: None, ) -> None: """Test warning on entities that provide an enum with a device class.""" - platform = getattr(hass.components, "test.sensor") - platform.init(empty=True) - 
platform.ENTITIES["0"] = platform.MockSensor( + entity0 = MockSensor( name="Test", native_value=21, device_class=SensorDeviceClass.POWER, options=["option1", "option2"], ) + setup_test_component_platform(hass, sensor.DOMAIN, [entity0]) assert await async_setup_component(hass, "sensor", {"sensor": {"platform": "test"}}) await hass.async_block_till_done() @@ -1763,16 +1715,14 @@ async def test_invalid_enumeration_entity_with_device_class( async def test_invalid_enumeration_entity_without_device_class( hass: HomeAssistant, caplog: pytest.LogCaptureFixture, - enable_custom_integrations: None, ) -> None: """Test warning on entities that provide an enum without a device class.""" - platform = getattr(hass.components, "test.sensor") - platform.init(empty=True) - platform.ENTITIES["0"] = platform.MockSensor( + entity0 = MockSensor( name="Test", native_value=21, options=["option1", "option2"], ) + setup_test_component_platform(hass, sensor.DOMAIN, [entity0]) assert await async_setup_component(hass, "sensor", {"sensor": {"platform": "test"}}) await hass.async_block_till_done() @@ -1794,19 +1744,17 @@ async def test_invalid_enumeration_entity_without_device_class( async def test_non_numeric_device_class_with_unit_of_measurement( hass: HomeAssistant, caplog: pytest.LogCaptureFixture, - enable_custom_integrations: None, device_class: SensorDeviceClass, ) -> None: """Test error on numeric entities that provide an unit of measurement.""" - platform = getattr(hass.components, "test.sensor") - platform.init(empty=True) - platform.ENTITIES["0"] = platform.MockSensor( + entity0 = MockSensor( name="Test", native_value=None, device_class=device_class, native_unit_of_measurement=UnitOfTemperature.CELSIUS, options=["option1", "option2"], ) + setup_test_component_platform(hass, sensor.DOMAIN, [entity0]) assert await async_setup_component(hass, "sensor", {"sensor": {"platform": "test"}}) await hass.async_block_till_done() @@ -1869,18 +1817,16 @@ async def 
test_non_numeric_device_class_with_unit_of_measurement( async def test_device_classes_with_invalid_unit_of_measurement( hass: HomeAssistant, caplog: pytest.LogCaptureFixture, - enable_custom_integrations: None, device_class: SensorDeviceClass, ) -> None: """Test error when unit of measurement is not valid for used device class.""" - platform = getattr(hass.components, "test.sensor") - platform.init(empty=True) - platform.ENTITIES["0"] = platform.MockSensor( + entity0 = MockSensor( name="Test", native_value="1.0", device_class=device_class, native_unit_of_measurement="INVALID!", ) + setup_test_component_platform(hass, sensor.DOMAIN, [entity0]) units = [ str(unit) if unit else "no unit of measurement" for unit in DEVICE_CLASS_UNITS.get(device_class, set()) @@ -1920,7 +1866,6 @@ async def test_device_classes_with_invalid_unit_of_measurement( async def test_non_numeric_validation_error( hass: HomeAssistant, caplog: pytest.LogCaptureFixture, - enable_custom_integrations: None, native_value: Any, problem: str, device_class: SensorDeviceClass | None, @@ -1928,16 +1873,14 @@ async def test_non_numeric_validation_error( unit: str | None, ) -> None: """Test error on expected numeric entities.""" - platform = getattr(hass.components, "test.sensor") - platform.init(empty=True) - platform.ENTITIES["0"] = platform.MockSensor( + entity0 = MockSensor( name="Test", native_value=native_value, device_class=device_class, native_unit_of_measurement=unit, state_class=state_class, ) - entity0 = platform.ENTITIES["0"] + setup_test_component_platform(hass, sensor.DOMAIN, [entity0]) assert await async_setup_component(hass, "sensor", {"sensor": {"platform": "test"}}) await hass.async_block_till_done() @@ -1966,7 +1909,6 @@ async def test_non_numeric_validation_error( async def test_non_numeric_validation_raise( hass: HomeAssistant, caplog: pytest.LogCaptureFixture, - enable_custom_integrations: None, native_value: Any, expected: str, device_class: SensorDeviceClass | None, @@ -1975,9 +1917,7 
@@ async def test_non_numeric_validation_raise( precision, ) -> None: """Test error on expected numeric entities.""" - platform = getattr(hass.components, "test.sensor") - platform.init(empty=True) - platform.ENTITIES["0"] = platform.MockSensor( + entity0 = MockSensor( name="Test", device_class=device_class, native_unit_of_measurement=unit, @@ -1985,7 +1925,7 @@ async def test_non_numeric_validation_raise( state_class=state_class, suggested_display_precision=precision, ) - entity0 = platform.ENTITIES["0"] + setup_test_component_platform(hass, sensor.DOMAIN, [entity0]) assert await async_setup_component(hass, "sensor", {"sensor": {"platform": "test"}}) await hass.async_block_till_done() @@ -2018,7 +1958,6 @@ async def test_non_numeric_validation_raise( async def test_numeric_validation( hass: HomeAssistant, caplog: pytest.LogCaptureFixture, - enable_custom_integrations: None, native_value: Any, expected: str, device_class: SensorDeviceClass | None, @@ -2026,16 +1965,14 @@ async def test_numeric_validation( unit: str | None, ) -> None: """Test does not error on expected numeric entities.""" - platform = getattr(hass.components, "test.sensor") - platform.init(empty=True) - platform.ENTITIES["0"] = platform.MockSensor( + entity0 = MockSensor( name="Test", native_value=native_value, device_class=device_class, native_unit_of_measurement=unit, state_class=state_class, ) - entity0 = platform.ENTITIES["0"] + setup_test_component_platform(hass, sensor.DOMAIN, [entity0]) assert await async_setup_component(hass, "sensor", {"sensor": {"platform": "test"}}) await hass.async_block_till_done() @@ -2052,18 +1989,15 @@ async def test_numeric_validation( async def test_numeric_validation_ignores_custom_device_class( hass: HomeAssistant, caplog: pytest.LogCaptureFixture, - enable_custom_integrations: None, ) -> None: """Test does not error on expected numeric entities.""" native_value = "Three elephants" - platform = getattr(hass.components, "test.sensor") - platform.init(empty=True) 
- platform.ENTITIES["0"] = platform.MockSensor( + entity0 = MockSensor( name="Test", native_value=native_value, device_class="custom__deviceclass", ) - entity0 = platform.ENTITIES["0"] + setup_test_component_platform(hass, sensor.DOMAIN, [entity0]) assert await async_setup_component(hass, "sensor", {"sensor": {"platform": "test"}}) await hass.async_block_till_done() @@ -2084,18 +2018,16 @@ async def test_numeric_validation_ignores_custom_device_class( async def test_device_classes_with_invalid_state_class( hass: HomeAssistant, caplog: pytest.LogCaptureFixture, - enable_custom_integrations: None, device_class: SensorDeviceClass, ) -> None: """Test error when unit of measurement is not valid for used device class.""" - platform = getattr(hass.components, "test.sensor") - platform.init(empty=True) - platform.ENTITIES["0"] = platform.MockSensor( + entity0 = MockSensor( name="Test", native_value=None, state_class="INVALID!", device_class=device_class, ) + setup_test_component_platform(hass, sensor.DOMAIN, [entity0]) assert await async_setup_component(hass, "sensor", {"sensor": {"platform": "test"}}) await hass.async_block_till_done() @@ -2133,7 +2065,6 @@ async def test_device_classes_with_invalid_state_class( async def test_numeric_state_expected_helper( hass: HomeAssistant, caplog: pytest.LogCaptureFixture, - enable_custom_integrations: None, device_class: SensorDeviceClass | None, state_class: SensorStateClass | None, native_unit_of_measurement: str | None, @@ -2141,9 +2072,7 @@ async def test_numeric_state_expected_helper( is_numeric: bool, ) -> None: """Test numeric_state_expected helper.""" - platform = getattr(hass.components, "test.sensor") - platform.init(empty=True) - platform.ENTITIES["0"] = platform.MockSensor( + entity0 = MockSensor( name="Test", native_value=None, device_class=device_class, @@ -2151,11 +2080,11 @@ async def test_numeric_state_expected_helper( native_unit_of_measurement=native_unit_of_measurement, 
suggested_display_precision=suggested_precision, ) + setup_test_component_platform(hass, sensor.DOMAIN, [entity0]) assert await async_setup_component(hass, "sensor", {"sensor": {"platform": "test"}}) await hass.async_block_till_done() - entity0 = platform.ENTITIES["0"] state = hass.states.get(entity0.entity_id) assert state is not None @@ -2199,7 +2128,6 @@ async def test_numeric_state_expected_helper( ) async def test_unit_conversion_update( hass: HomeAssistant, - enable_custom_integrations: None, unit_system_1, unit_system_2, native_unit, @@ -2219,9 +2147,8 @@ async def test_unit_conversion_update( hass.config.units = unit_system_1 entity_registry = er.async_get(hass) - platform = getattr(hass.components, "test.sensor") - entity0 = platform.MockSensor( + entity0 = MockSensor( name="Test 0", device_class=device_class, native_unit_of_measurement=native_unit, @@ -2229,7 +2156,7 @@ async def test_unit_conversion_update( unique_id="very_unique", ) - entity1 = platform.MockSensor( + entity1 = MockSensor( name="Test 1", device_class=device_class, native_unit_of_measurement=native_unit, @@ -2237,7 +2164,7 @@ async def test_unit_conversion_update( unique_id="very_unique_1", ) - entity2 = platform.MockSensor( + entity2 = MockSensor( name="Test 2", device_class=device_class, native_unit_of_measurement=native_unit, @@ -2246,7 +2173,7 @@ async def test_unit_conversion_update( unique_id="very_unique_2", ) - entity3 = platform.MockSensor( + entity3 = MockSensor( name="Test 3", device_class=device_class, native_unit_of_measurement=native_unit, @@ -2255,7 +2182,7 @@ async def test_unit_conversion_update( unique_id="very_unique_3", ) - entity4 = platform.MockSensor( + entity4 = MockSensor( name="Test 4", device_class=device_class, native_unit_of_measurement=native_unit, @@ -2544,11 +2471,8 @@ async def test_entity_category_config_raises_error( caplog: pytest.LogCaptureFixture, ) -> None: """Test error is raised when entity category is set to config.""" - platform = 
getattr(hass.components, "test.sensor") - platform.init(empty=True) - platform.ENTITIES["0"] = platform.MockSensor( - name="Test", entity_category=EntityCategory.CONFIG - ) + entity0 = MockSensor(name="Test", entity_category=EntityCategory.CONFIG) + setup_test_component_platform(hass, sensor.DOMAIN, [entity0]) assert await async_setup_component(hass, "sensor", {"sensor": {"platform": "test"}}) await hass.async_block_till_done() @@ -2644,13 +2568,11 @@ async def test_suggested_unit_guard_invalid_unit( An invalid suggested unit creates a log entry and the suggested unit will be ignored. """ entity_registry = er.async_get(hass) - platform = getattr(hass.components, "test.sensor") - platform.init(empty=True) state_value = 10 invalid_suggested_unit = "invalid_unit" - entity = platform.ENTITIES["0"] = platform.MockSensor( + entity = MockSensor( name="Invalid", device_class=device_class, native_unit_of_measurement=native_unit, @@ -2658,6 +2580,7 @@ async def test_suggested_unit_guard_invalid_unit( native_value=str(state_value), unique_id="invalid", ) + setup_test_component_platform(hass, sensor.DOMAIN, [entity]) assert await async_setup_component(hass, "sensor", {"sensor": {"platform": "test"}}) await hass.async_block_till_done() @@ -2674,10 +2597,10 @@ async def test_suggested_unit_guard_invalid_unit( "homeassistant.components.sensor", logging.WARNING, ( - " sets an" - " invalid suggested_unit_of_measurement. Please report it to the author" - " of the 'test' custom integration. This warning will become an error in" - " Home Assistant Core 2024.5" + " sets an" + " invalid suggested_unit_of_measurement. Please create a bug report at " + "https://github.com/home-assistant/core/issues?q=is%3Aopen+is%3Aissue+label%3A%22integration%3A+test%22." + " This warning will become an error in Home Assistant Core 2024.5" ), ) in caplog.record_tuples @@ -2715,10 +2638,8 @@ async def test_suggested_unit_guard_valid_unit( in the entity registry. 
""" entity_registry = er.async_get(hass) - platform = getattr(hass.components, "test.sensor") - platform.init(empty=True) - entity = platform.ENTITIES["0"] = platform.MockSensor( + entity = MockSensor( name="Valid", device_class=device_class, native_unit_of_measurement=native_unit, @@ -2726,6 +2647,7 @@ async def test_suggested_unit_guard_valid_unit( suggested_unit_of_measurement=suggested_unit, unique_id="valid", ) + setup_test_component_platform(hass, sensor.DOMAIN, [entity]) assert await async_setup_component(hass, "sensor", {"sensor": {"platform": "test"}}) await hass.async_block_till_done() diff --git a/tests/components/sensor/test_recorder.py b/tests/components/sensor/test_recorder.py index 40b38b2e57a..8084fe69e89 100644 --- a/tests/components/sensor/test_recorder.py +++ b/tests/components/sensor/test_recorder.py @@ -33,13 +33,14 @@ from homeassistant.components.recorder.statistics import ( list_statistic_ids, ) from homeassistant.components.recorder.util import get_instance, session_scope -from homeassistant.components.sensor import ATTR_OPTIONS, SensorDeviceClass +from homeassistant.components.sensor import ATTR_OPTIONS, DOMAIN, SensorDeviceClass from homeassistant.const import ATTR_FRIENDLY_NAME, STATE_UNAVAILABLE from homeassistant.core import HomeAssistant, State from homeassistant.setup import async_setup_component, setup_component import homeassistant.util.dt as dt_util from homeassistant.util.unit_system import METRIC_SYSTEM, US_CUSTOMARY_SYSTEM +from tests.common import setup_test_component_platform from tests.components.recorder.common import ( assert_dict_of_states_equal_without_context_and_last_changed, assert_multiple_states_equal_without_context_and_last_changed, @@ -49,6 +50,7 @@ from tests.components.recorder.common import ( statistics_during_period, wait_recording_done, ) +from tests.components.sensor.common import MockSensor from tests.typing import WebSocketGenerator BATTERY_SENSOR_ATTRIBUTES = { @@ -1363,11 +1365,9 @@ def 
test_compile_hourly_sum_statistics_negative_state( hass = hass_recorder() hass.data.pop(loader.DATA_CUSTOM_COMPONENTS) - platform = getattr(hass.components, "test.sensor") - platform.init(empty=True) - mocksensor = platform.MockSensor(name="custom_sensor") + mocksensor = MockSensor(name="custom_sensor") mocksensor._attr_should_poll = False - platform.ENTITIES["custom_sensor"] = mocksensor + setup_test_component_platform(hass, DOMAIN, [mocksensor], built_in=False) setup_component(hass, "homeassistant", {}) setup_component( @@ -5178,9 +5178,7 @@ async def test_exclude_attributes( recorder_mock: Recorder, hass: HomeAssistant, enable_custom_integrations: None ) -> None: """Test sensor attributes to be excluded.""" - platform = getattr(hass.components, "test.sensor") - platform.init(empty=True) - platform.ENTITIES["0"] = platform.MockSensor( + entity0 = MockSensor( has_entity_name=True, unique_id="test", name="Test", @@ -5188,6 +5186,7 @@ async def test_exclude_attributes( device_class=SensorDeviceClass.ENUM, options=["option1", "option2"], ) + setup_test_component_platform(hass, DOMAIN, [entity0]) assert await async_setup_component(hass, "sensor", {"sensor": {"platform": "test"}}) await hass.async_block_till_done() await async_wait_recording_done(hass) From d983fa6da7fd32aa64cbf633d0f70de0da11202a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=81lvaro=20Fern=C3=A1ndez=20Rojas?= Date: Thu, 4 Apr 2024 09:05:08 +0200 Subject: [PATCH 104/426] Update aioairzone-cloud to v0.4.7 (#114761) --- homeassistant/components/airzone_cloud/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/airzone_cloud/manifest.json b/homeassistant/components/airzone_cloud/manifest.json index 14f02620c91..b4445f6fe45 100644 --- a/homeassistant/components/airzone_cloud/manifest.json +++ b/homeassistant/components/airzone_cloud/manifest.json @@ -6,5 +6,5 @@ "documentation": 
"https://www.home-assistant.io/integrations/airzone_cloud", "iot_class": "cloud_push", "loggers": ["aioairzone_cloud"], - "requirements": ["aioairzone-cloud==0.4.6"] + "requirements": ["aioairzone-cloud==0.4.7"] } diff --git a/requirements_all.txt b/requirements_all.txt index 76dc587d6b9..d7b500abdd3 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -185,7 +185,7 @@ aio-georss-gdacs==0.9 aioairq==0.3.2 # homeassistant.components.airzone_cloud -aioairzone-cloud==0.4.6 +aioairzone-cloud==0.4.7 # homeassistant.components.airzone aioairzone==0.7.6 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 6f329b782aa..82aae896b59 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -164,7 +164,7 @@ aio-georss-gdacs==0.9 aioairq==0.3.2 # homeassistant.components.airzone_cloud -aioairzone-cloud==0.4.6 +aioairzone-cloud==0.4.7 # homeassistant.components.airzone aioairzone==0.7.6 From 25289e0ca19c9a9382dda561854d5978b244e399 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=85ke=20Strandberg?= Date: Fri, 5 Apr 2024 02:55:39 +0200 Subject: [PATCH 105/426] Bump myuplink dependency to 0.6.0 (#114767) --- homeassistant/components/myuplink/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/myuplink/manifest.json b/homeassistant/components/myuplink/manifest.json index a76f596ade3..0e638a72715 100644 --- a/homeassistant/components/myuplink/manifest.json +++ b/homeassistant/components/myuplink/manifest.json @@ -6,5 +6,5 @@ "dependencies": ["application_credentials"], "documentation": "https://www.home-assistant.io/integrations/myuplink", "iot_class": "cloud_polling", - "requirements": ["myuplink==0.5.0"] + "requirements": ["myuplink==0.6.0"] } diff --git a/requirements_all.txt b/requirements_all.txt index d7b500abdd3..257dea984e6 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1349,7 +1349,7 @@ 
mutesync==0.0.1 mypermobil==0.1.8 # homeassistant.components.myuplink -myuplink==0.5.0 +myuplink==0.6.0 # homeassistant.components.nad nad-receiver==0.3.0 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 82aae896b59..4dcea7c582d 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1088,7 +1088,7 @@ mutesync==0.0.1 mypermobil==0.1.8 # homeassistant.components.myuplink -myuplink==0.5.0 +myuplink==0.6.0 # homeassistant.components.keenetic_ndms2 ndms2-client==0.1.2 From 450be674069530904862168ca2078ed78f8c6c29 Mon Sep 17 00:00:00 2001 From: Marc Mueller <30130371+cdce8p@users.noreply.github.com> Date: Thu, 28 Mar 2024 13:56:08 +0100 Subject: [PATCH 106/426] Update romy to 0.0.9 (#114360) --- homeassistant/components/romy/manifest.json | 2 +- pyproject.toml | 2 -- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 4 files changed, 3 insertions(+), 5 deletions(-) diff --git a/homeassistant/components/romy/manifest.json b/homeassistant/components/romy/manifest.json index 1257c2d1d60..7e30c418599 100644 --- a/homeassistant/components/romy/manifest.json +++ b/homeassistant/components/romy/manifest.json @@ -5,6 +5,6 @@ "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/romy", "iot_class": "local_polling", - "requirements": ["romy==0.0.7"], + "requirements": ["romy==0.0.9"], "zeroconf": ["_aicu-http._tcp.local."] } diff --git a/pyproject.toml b/pyproject.toml index e0f07fac6b6..ac890603ac3 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -513,8 +513,6 @@ filterwarnings = [ "ignore:invalid escape sequence:SyntaxWarning:.*stringcase", # https://github.com/pyudev/pyudev/pull/466 - >=0.24.0 "ignore:invalid escape sequence:SyntaxWarning:.*pyudev.monitor", - # https://github.com/xeniter/romy/pull/1 - >=0.0.8 - "ignore:with timeout\\(\\) is deprecated, use async with timeout\\(\\) instead:DeprecationWarning:romy.utils", # https://github.com/grahamwetzler/smart-meter-texas/pull/143 - >0.5.3 
"ignore:ssl.OP_NO_SSL\\*/ssl.OP_NO_TLS\\* options are deprecated:DeprecationWarning:smart_meter_texas", # https://github.com/mvantellingen/python-zeep/pull/1364 - >4.2.1 diff --git a/requirements_all.txt b/requirements_all.txt index 257dea984e6..a8f9e5877d8 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2459,7 +2459,7 @@ rocketchat-API==0.6.1 rokuecp==0.19.2 # homeassistant.components.romy -romy==0.0.7 +romy==0.0.9 # homeassistant.components.roomba roombapy==1.8.1 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 4dcea7c582d..34d4b8e8644 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1893,7 +1893,7 @@ ring-doorbell[listen]==0.8.9 rokuecp==0.19.2 # homeassistant.components.romy -romy==0.0.7 +romy==0.0.9 # homeassistant.components.roomba roombapy==1.8.1 From a39e1a6428a09d331b32d64360f12d423367ed5e Mon Sep 17 00:00:00 2001 From: Manuel Dipolt Date: Thu, 4 Apr 2024 00:48:35 +0200 Subject: [PATCH 107/426] Update romy to 0.0.10 (#114785) --- homeassistant/components/romy/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/romy/manifest.json b/homeassistant/components/romy/manifest.json index 7e30c418599..efb8072ebbc 100644 --- a/homeassistant/components/romy/manifest.json +++ b/homeassistant/components/romy/manifest.json @@ -5,6 +5,6 @@ "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/romy", "iot_class": "local_polling", - "requirements": ["romy==0.0.9"], + "requirements": ["romy==0.0.10"], "zeroconf": ["_aicu-http._tcp.local."] } diff --git a/requirements_all.txt b/requirements_all.txt index a8f9e5877d8..9a28576d836 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2459,7 +2459,7 @@ rocketchat-API==0.6.1 rokuecp==0.19.2 # homeassistant.components.romy -romy==0.0.9 +romy==0.0.10 # homeassistant.components.roomba roombapy==1.8.1 diff --git 
a/requirements_test_all.txt b/requirements_test_all.txt index 34d4b8e8644..673d47d02a5 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1893,7 +1893,7 @@ ring-doorbell[listen]==0.8.9 rokuecp==0.19.2 # homeassistant.components.romy -romy==0.0.9 +romy==0.0.10 # homeassistant.components.roomba roombapy==1.8.1 From ef8e54877fb6283f8104e7c4f917c3fcf5090ba5 Mon Sep 17 00:00:00 2001 From: Aaron Bach Date: Wed, 3 Apr 2024 18:20:20 -0600 Subject: [PATCH 108/426] Fix unhandled `KeyError` during Notion setup (#114787) --- homeassistant/components/notion/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/homeassistant/components/notion/__init__.py b/homeassistant/components/notion/__init__.py index ca45e3a6d16..1793a0cfd47 100644 --- a/homeassistant/components/notion/__init__.py +++ b/homeassistant/components/notion/__init__.py @@ -108,7 +108,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: (CONF_REFRESH_TOKEN, client.refresh_token), (CONF_USER_UUID, client.user_uuid), ): - if entry.data[key] == value: + if entry.data.get(key) == value: continue entry_updates["data"][key] = value From 3d0bafbdc95b2514ffff276b5fc5354a3e54d772 Mon Sep 17 00:00:00 2001 From: cdheiser <10488026+cdheiser@users.noreply.github.com> Date: Thu, 4 Apr 2024 02:24:02 -0700 Subject: [PATCH 109/426] Fix Lutron light brightness values (#114794) Fix brightness values in light.py Bugfix to set the brightness to 0-100 which is what Lutron expects. 
--- homeassistant/components/lutron/light.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/homeassistant/components/lutron/light.py b/homeassistant/components/lutron/light.py index 18b5edd1039..eb003fd431a 100644 --- a/homeassistant/components/lutron/light.py +++ b/homeassistant/components/lutron/light.py @@ -141,7 +141,7 @@ class LutronLight(LutronDevice, LightEntity): else: brightness = self._prev_brightness self._prev_brightness = brightness - args = {"new_level": brightness} + args = {"new_level": to_lutron_level(brightness)} if ATTR_TRANSITION in kwargs: args["fade_time_seconds"] = kwargs[ATTR_TRANSITION] self._lutron_device.set_level(**args) From d8ae7d6955ae199589d57de6fda9e2199e6c7c38 Mon Sep 17 00:00:00 2001 From: Lex Li <425130+lextm@users.noreply.github.com> Date: Fri, 5 Apr 2024 02:41:15 -0400 Subject: [PATCH 110/426] Fix type cast in snmp (#114795) --- homeassistant/components/snmp/sensor.py | 2 +- tests/components/snmp/test_negative_sensor.py | 79 +++++++++++++++++++ 2 files changed, 80 insertions(+), 1 deletion(-) create mode 100644 tests/components/snmp/test_negative_sensor.py diff --git a/homeassistant/components/snmp/sensor.py b/homeassistant/components/snmp/sensor.py index f55cd07effb..972b9131935 100644 --- a/homeassistant/components/snmp/sensor.py +++ b/homeassistant/components/snmp/sensor.py @@ -270,7 +270,7 @@ class SnmpData: "SNMP OID %s received type=%s and data %s", self._baseoid, type(value), - bytes(value), + value, ) if isinstance(value, NoSuchObject): _LOGGER.error( diff --git a/tests/components/snmp/test_negative_sensor.py b/tests/components/snmp/test_negative_sensor.py new file mode 100644 index 00000000000..c5ac6460841 --- /dev/null +++ b/tests/components/snmp/test_negative_sensor.py @@ -0,0 +1,79 @@ +"""SNMP sensor tests.""" + +from unittest.mock import patch + +from pysnmp.hlapi import Integer32 +import pytest + +from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN +from homeassistant.core import 
HomeAssistant +from homeassistant.helpers import entity_registry as er +from homeassistant.setup import async_setup_component + + +@pytest.fixture(autouse=True) +def hlapi_mock(): + """Mock out 3rd party API.""" + mock_data = Integer32(-13) + with patch( + "homeassistant.components.snmp.sensor.getCmd", + return_value=(None, None, None, [[mock_data]]), + ): + yield + + +async def test_basic_config(hass: HomeAssistant) -> None: + """Test basic entity configuration.""" + + config = { + SENSOR_DOMAIN: { + "platform": "snmp", + "host": "192.168.1.32", + "baseoid": "1.3.6.1.4.1.2021.10.1.3.1", + }, + } + + assert await async_setup_component(hass, SENSOR_DOMAIN, config) + await hass.async_block_till_done() + + state = hass.states.get("sensor.snmp") + assert state.state == "-13" + assert state.attributes == {"friendly_name": "SNMP"} + + +async def test_entity_config(hass: HomeAssistant) -> None: + """Test entity configuration.""" + + config = { + SENSOR_DOMAIN: { + # SNMP configuration + "platform": "snmp", + "host": "192.168.1.32", + "baseoid": "1.3.6.1.4.1.2021.10.1.3.1", + # Entity configuration + "icon": "{{'mdi:one_two_three'}}", + "picture": "{{'blabla.png'}}", + "device_class": "temperature", + "name": "{{'SNMP' + ' ' + 'Sensor'}}", + "state_class": "measurement", + "unique_id": "very_unique", + "unit_of_measurement": "°C", + }, + } + + assert await async_setup_component(hass, SENSOR_DOMAIN, config) + await hass.async_block_till_done() + + entity_registry = er.async_get(hass) + assert entity_registry.async_get("sensor.snmp_sensor").unique_id == "very_unique" + + state = hass.states.get("sensor.snmp_sensor") + assert state.state == "-13" + assert state.attributes == { + "device_class": "temperature", + "entity_picture": "blabla.png", + "friendly_name": "SNMP Sensor", + "icon": "mdi:one_two_three", + "state_class": "measurement", + "unit_of_measurement": "°C", + } From 530725bbfa0274a04b2b0aed72e2592843e0da02 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Thu, 4 Apr 2024 20:42:57 -1000 Subject: [PATCH 111/426] Handle ambiguous script actions by using action map order (#114825) --- homeassistant/helpers/config_validation.py | 6 ++++++ tests/helpers/test_config_validation.py | 22 ++++++++++++++++++++++ 2 files changed, 28 insertions(+) diff --git a/homeassistant/helpers/config_validation.py b/homeassistant/helpers/config_validation.py index f7245607be7..70de144d5c8 100644 --- a/homeassistant/helpers/config_validation.py +++ b/homeassistant/helpers/config_validation.py @@ -1855,6 +1855,12 @@ def determine_script_action(action: dict[str, Any]) -> str: """Determine action type.""" if not (actions := ACTIONS_SET.intersection(action)): raise ValueError("Unable to determine action") + if len(actions) > 1: + # Ambiguous action, select the first one in the + # order of the ACTIONS_MAP + for action_key, _script_action in ACTIONS_MAP.items(): + if action_key in actions: + return _script_action return ACTIONS_MAP[actions.pop()] diff --git a/tests/helpers/test_config_validation.py b/tests/helpers/test_config_validation.py index 133e5e80442..9816dc38189 100644 --- a/tests/helpers/test_config_validation.py +++ b/tests/helpers/test_config_validation.py @@ -1672,3 +1672,25 @@ def test_color_hex() -> None: with pytest.raises(vol.Invalid, match=msg): cv.color_hex(123456) + + +def test_determine_script_action_ambiguous(): + """Test determine script action with ambiguous actions.""" + assert ( + cv.determine_script_action( + { + "type": "is_power", + "condition": "device", + "device_id": "9c2bda81bc7997c981f811c32cafdb22", + "entity_id": "2ee287ec70dd0c6db187b539bee429b7", + "domain": "sensor", + "below": "15", + } + ) + == "condition" + ) + + +def test_determine_script_action_non_ambiguous(): + """Test determine script action with a non ambiguous action.""" + assert cv.determine_script_action({"delay": "00:00:05"}) == "delay" From 319f76cdc844cb369778f7e3a55527ba5253aa1a Mon Sep 17 00:00:00 2001 From: tronikos Date: 
Thu, 4 Apr 2024 13:06:15 -0700 Subject: [PATCH 112/426] Bump opower to 0.4.3 (#114826) Co-authored-by: Joost Lekkerkerker --- homeassistant/components/opower/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/opower/manifest.json b/homeassistant/components/opower/manifest.json index 879aeb0327b..51ad669733b 100644 --- a/homeassistant/components/opower/manifest.json +++ b/homeassistant/components/opower/manifest.json @@ -7,5 +7,5 @@ "documentation": "https://www.home-assistant.io/integrations/opower", "iot_class": "cloud_polling", "loggers": ["opower"], - "requirements": ["opower==0.4.2"] + "requirements": ["opower==0.4.3"] } diff --git a/requirements_all.txt b/requirements_all.txt index 9a28576d836..ff3b0a8068e 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1482,7 +1482,7 @@ openwrt-luci-rpc==1.1.17 openwrt-ubus-rpc==0.0.2 # homeassistant.components.opower -opower==0.4.2 +opower==0.4.3 # homeassistant.components.oralb oralb-ble==0.17.6 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 673d47d02a5..56703105b7d 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1176,7 +1176,7 @@ openerz-api==0.3.0 openhomedevice==2.2.0 # homeassistant.components.opower -opower==0.4.2 +opower==0.4.3 # homeassistant.components.oralb oralb-ble==0.17.6 From 0191d3e41b3b655d937aeac92a5c7f9a534f2111 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Thu, 4 Apr 2024 09:30:10 -1000 Subject: [PATCH 113/426] Refactor ConfigStore to avoid needing to pass config_dir (#114827) Co-authored-by: Erik --- homeassistant/core.py | 16 +++++++++++----- homeassistant/helpers/storage.py | 13 +++++-------- tests/test_core.py | 3 +++ 3 files changed, 19 insertions(+), 13 deletions(-) diff --git a/homeassistant/core.py b/homeassistant/core.py index 4794b284fd2..d4510e970f9 100644 --- a/homeassistant/core.py +++ b/homeassistant/core.py @@ -401,6 +401,7 @@ class HomeAssistant: self.services = ServiceRegistry(self) self.states = StateMachine(self.bus, self.loop) self.config = Config(self, config_dir) + self.config.async_initialize() self.components = loader.Components(self) self.helpers = loader.Helpers(self) self.state: CoreState = CoreState.not_running @@ -2589,12 +2590,12 @@ class ServiceRegistry: class Config: """Configuration settings for Home Assistant.""" + _store: Config._ConfigStore + def __init__(self, hass: HomeAssistant, config_dir: str) -> None: """Initialize a new config object.""" self.hass = hass - self._store = self._ConfigStore(self.hass, config_dir) - self.latitude: float = 0 self.longitude: float = 0 @@ -2645,6 +2646,13 @@ class Config: # If Home Assistant is running in safe mode self.safe_mode: bool = False + def async_initialize(self) -> None: + """Finish initializing a config object. + + This must be called before the config object is used. + """ + self._store = self._ConfigStore(self.hass) + def distance(self, lat: float, lon: float) -> float | None: """Calculate distance from Home Assistant. 
@@ -2850,7 +2858,6 @@ class Config: "country": self.country, "language": self.language, } - await self._store.async_save(data) # Circular dependency prevents us from generating the class at top level @@ -2860,7 +2867,7 @@ class Config: class _ConfigStore(Store[dict[str, Any]]): """Class to help storing Config data.""" - def __init__(self, hass: HomeAssistant, config_dir: str) -> None: + def __init__(self, hass: HomeAssistant) -> None: """Initialize storage class.""" super().__init__( hass, @@ -2869,7 +2876,6 @@ class Config: private=True, atomic_writes=True, minor_version=CORE_STORAGE_MINOR_VERSION, - config_dir=config_dir, ) self._original_unit_system: str | None = None # from old store 1.1 diff --git a/homeassistant/helpers/storage.py b/homeassistant/helpers/storage.py index 2413a53e605..92a31ae9345 100644 --- a/homeassistant/helpers/storage.py +++ b/homeassistant/helpers/storage.py @@ -95,9 +95,7 @@ async def async_migrator( return config -def get_internal_store_manager( - hass: HomeAssistant, config_dir: str | None = None -) -> _StoreManager: +def get_internal_store_manager(hass: HomeAssistant) -> _StoreManager: """Get the store manager. This function is not part of the API and should only be @@ -105,7 +103,7 @@ def get_internal_store_manager( guaranteed to be stable. """ if STORAGE_MANAGER not in hass.data: - manager = _StoreManager(hass, config_dir or hass.config.config_dir) + manager = _StoreManager(hass) hass.data[STORAGE_MANAGER] = manager return hass.data[STORAGE_MANAGER] @@ -116,13 +114,13 @@ class _StoreManager: The store manager is used to cache and manage storage files. 
""" - def __init__(self, hass: HomeAssistant, config_dir: str) -> None: + def __init__(self, hass: HomeAssistant) -> None: """Initialize storage manager class.""" self._hass = hass self._invalidated: set[str] = set() self._files: set[str] | None = None self._data_preload: dict[str, json_util.JsonValueType] = {} - self._storage_path: Path = Path(config_dir).joinpath(STORAGE_DIR) + self._storage_path: Path = Path(hass.config.config_dir).joinpath(STORAGE_DIR) self._cancel_cleanup: asyncio.TimerHandle | None = None async def async_initialize(self) -> None: @@ -251,7 +249,6 @@ class Store(Generic[_T]): encoder: type[JSONEncoder] | None = None, minor_version: int = 1, read_only: bool = False, - config_dir: str | None = None, ) -> None: """Initialize storage class.""" self.version = version @@ -268,7 +265,7 @@ class Store(Generic[_T]): self._atomic_writes = atomic_writes self._read_only = read_only self._next_write_time = 0.0 - self._manager = get_internal_store_manager(hass, config_dir) + self._manager = get_internal_store_manager(hass) @cached_property def path(self): diff --git a/tests/test_core.py b/tests/test_core.py index a0a197096cd..905d8efe6de 100644 --- a/tests/test_core.py +++ b/tests/test_core.py @@ -2288,6 +2288,7 @@ async def test_additional_data_in_core_config( ) -> None: """Test that we can handle additional data in core configuration.""" config = ha.Config(hass, "/test/ha-config") + config.async_initialize() hass_storage[ha.CORE_STORAGE_KEY] = { "version": 1, "data": {"location_name": "Test Name", "additional_valid_key": "value"}, @@ -2301,6 +2302,7 @@ async def test_incorrect_internal_external_url( ) -> None: """Test that we warn when detecting invalid internal/external url.""" config = ha.Config(hass, "/test/ha-config") + config.async_initialize() hass_storage[ha.CORE_STORAGE_KEY] = { "version": 1, @@ -2314,6 +2316,7 @@ async def test_incorrect_internal_external_url( assert "Invalid internal_url set" not in caplog.text config = ha.Config(hass, 
"/test/ha-config") + config.async_initialize() hass_storage[ha.CORE_STORAGE_KEY] = { "version": 1, From aa14793479a4f481de3508ddd49189fee1d8cfe9 Mon Sep 17 00:00:00 2001 From: Joost Lekkerkerker Date: Thu, 4 Apr 2024 13:45:44 +0200 Subject: [PATCH 114/426] Avoid blocking IO in downloader initialization (#114841) * Avoid blocking IO in downloader initialization * Avoid blocking IO in downloader initialization --- homeassistant/components/downloader/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/homeassistant/components/downloader/__init__.py b/homeassistant/components/downloader/__init__.py index 94d243e2cf2..3ca503a2167 100644 --- a/homeassistant/components/downloader/__init__.py +++ b/homeassistant/components/downloader/__init__.py @@ -83,7 +83,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: if not os.path.isabs(download_path): download_path = hass.config.path(download_path) - if not os.path.isdir(download_path): + if not await hass.async_add_executor_job(os.path.isdir, download_path): _LOGGER.error( "Download path %s does not exist. 
File Downloader not active", download_path ) From 58533f02af7c612abdc51ce7ebcda20093ffe267 Mon Sep 17 00:00:00 2001 From: Joost Lekkerkerker Date: Fri, 5 Apr 2024 11:32:59 +0200 Subject: [PATCH 115/426] Fix Downloader YAML import (#114844) --- .../components/downloader/__init__.py | 10 +++- tests/components/downloader/test_init.py | 51 +++++++++++++++++++ 2 files changed, 59 insertions(+), 2 deletions(-) create mode 100644 tests/components/downloader/test_init.py diff --git a/homeassistant/components/downloader/__init__.py b/homeassistant/components/downloader/__init__.py index 3ca503a2167..d110c28785a 100644 --- a/homeassistant/components/downloader/__init__.py +++ b/homeassistant/components/downloader/__init__.py @@ -43,6 +43,13 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: if DOMAIN not in config: return True + hass.async_create_task(_async_import_config(hass, config), eager_start=True) + return True + + +async def _async_import_config(hass: HomeAssistant, config: ConfigType) -> None: + """Import the Downloader component from the YAML file.""" + import_result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_IMPORT}, @@ -62,7 +69,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: hass, DOMAIN, f"deprecated_yaml_{DOMAIN}", - breaks_in_ha_version="2024.9.0", + breaks_in_ha_version="2024.10.0", is_fixable=False, issue_domain=DOMAIN, severity=IssueSeverity.WARNING, @@ -72,7 +79,6 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: "integration_title": "Downloader", }, ) - return True async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: diff --git a/tests/components/downloader/test_init.py b/tests/components/downloader/test_init.py new file mode 100644 index 00000000000..8cd0d00b1ab --- /dev/null +++ b/tests/components/downloader/test_init.py @@ -0,0 +1,51 @@ +"""Tests for the downloader component init.""" + +from unittest.mock import 
patch + +from homeassistant.components.downloader import ( + CONF_DOWNLOAD_DIR, + DOMAIN, + SERVICE_DOWNLOAD_FILE, +) +from homeassistant.config_entries import ConfigEntryState +from homeassistant.core import HomeAssistant +from homeassistant.setup import async_setup_component + +from tests.common import MockConfigEntry + + +async def test_initialization(hass: HomeAssistant) -> None: + """Test the initialization of the downloader component.""" + config_entry = MockConfigEntry( + domain=DOMAIN, + data={ + CONF_DOWNLOAD_DIR: "/test_dir", + }, + ) + config_entry.add_to_hass(hass) + with patch("os.path.isdir", return_value=True): + assert await hass.config_entries.async_setup(config_entry.entry_id) + + assert hass.services.has_service(DOMAIN, SERVICE_DOWNLOAD_FILE) + assert config_entry.state is ConfigEntryState.LOADED + + +async def test_import(hass: HomeAssistant) -> None: + """Test the import of the downloader component.""" + with patch("os.path.isdir", return_value=True): + assert await async_setup_component( + hass, + DOMAIN, + { + DOMAIN: { + CONF_DOWNLOAD_DIR: "/test_dir", + }, + }, + ) + await hass.async_block_till_done() + + assert len(hass.config_entries.async_entries(DOMAIN)) == 1 + config_entry = hass.config_entries.async_entries(DOMAIN)[0] + assert config_entry.data == {CONF_DOWNLOAD_DIR: "/test_dir"} + assert config_entry.state is ConfigEntryState.LOADED + assert hass.services.has_service(DOMAIN, SERVICE_DOWNLOAD_FILE) From 411e55d0596dac418215e297fd7ec9365bc5b6ef Mon Sep 17 00:00:00 2001 From: Bram Kragten Date: Thu, 4 Apr 2024 21:01:15 +0200 Subject: [PATCH 116/426] Update frontend to 20240404.0 (#114859) --- homeassistant/components/frontend/manifest.json | 2 +- homeassistant/package_constraints.txt | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/frontend/manifest.json b/homeassistant/components/frontend/manifest.json index 
1890572bf5a..75c630b4471 100644 --- a/homeassistant/components/frontend/manifest.json +++ b/homeassistant/components/frontend/manifest.json @@ -20,5 +20,5 @@ "documentation": "https://www.home-assistant.io/integrations/frontend", "integration_type": "system", "quality_scale": "internal", - "requirements": ["home-assistant-frontend==20240403.1"] + "requirements": ["home-assistant-frontend==20240404.0"] } diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index 6bb6bd4d2d3..d520d7f8f76 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -30,7 +30,7 @@ habluetooth==2.4.2 hass-nabucasa==0.79.0 hassil==1.6.1 home-assistant-bluetooth==1.12.0 -home-assistant-frontend==20240403.1 +home-assistant-frontend==20240404.0 home-assistant-intents==2024.4.3 httpx==0.27.0 ifaddr==0.2.0 diff --git a/requirements_all.txt b/requirements_all.txt index ff3b0a8068e..51de44eaca6 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1077,7 +1077,7 @@ hole==0.8.0 holidays==0.46 # homeassistant.components.frontend -home-assistant-frontend==20240403.1 +home-assistant-frontend==20240404.0 # homeassistant.components.conversation home-assistant-intents==2024.4.3 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 56703105b7d..eb1fb0583cb 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -876,7 +876,7 @@ hole==0.8.0 holidays==0.46 # homeassistant.components.frontend -home-assistant-frontend==20240403.1 +home-assistant-frontend==20240404.0 # homeassistant.components.conversation home-assistant-intents==2024.4.3 From 96003e3562c0cdc91d06556fd1ae06c062bcd237 Mon Sep 17 00:00:00 2001 From: Robert Svensson Date: Fri, 5 Apr 2024 08:28:35 +0200 Subject: [PATCH 117/426] Fix Axis camera platform support HTTPS (#114886) --- homeassistant/components/axis/camera.py | 16 ++++++++-------- homeassistant/components/axis/hub/config.py | 3 +++ 2 files changed, 11 
insertions(+), 8 deletions(-) diff --git a/homeassistant/components/axis/camera.py b/homeassistant/components/axis/camera.py index 769be676a78..025244fb675 100644 --- a/homeassistant/components/axis/camera.py +++ b/homeassistant/components/axis/camera.py @@ -56,6 +56,7 @@ class AxisCamera(AxisEntity, MjpegCamera): mjpeg_url=self.mjpeg_source, still_image_url=self.image_source, authentication=HTTP_DIGEST_AUTHENTICATION, + verify_ssl=False, unique_id=f"{hub.unique_id}-camera", ) @@ -74,16 +75,18 @@ class AxisCamera(AxisEntity, MjpegCamera): Additionally used when device change IP address. """ + proto = self.hub.config.protocol + host = self.hub.config.host + port = self.hub.config.port + image_options = self.generate_options(skip_stream_profile=True) self._still_image_url = ( - f"http://{self.hub.config.host}:{self.hub.config.port}/axis-cgi" - f"/jpg/image.cgi{image_options}" + f"{proto}://{host}:{port}/axis-cgi/jpg/image.cgi{image_options}" ) mjpeg_options = self.generate_options() self._mjpeg_url = ( - f"http://{self.hub.config.host}:{self.hub.config.port}/axis-cgi" - f"/mjpg/video.cgi{mjpeg_options}" + f"{proto}://{host}:{port}/axis-cgi/mjpg/video.cgi{mjpeg_options}" ) stream_options = self.generate_options(add_video_codec_h264=True) @@ -95,10 +98,7 @@ class AxisCamera(AxisEntity, MjpegCamera): self.hub.additional_diagnostics["camera_sources"] = { "Image": self._still_image_url, "MJPEG": self._mjpeg_url, - "Stream": ( - f"rtsp://user:pass@{self.hub.config.host}/axis-media" - f"/media.amp{stream_options}" - ), + "Stream": (f"rtsp://user:pass@{host}/axis-media/media.amp{stream_options}"), } @property diff --git a/homeassistant/components/axis/hub/config.py b/homeassistant/components/axis/hub/config.py index e6d8378b45c..eba706edc83 100644 --- a/homeassistant/components/axis/hub/config.py +++ b/homeassistant/components/axis/hub/config.py @@ -12,6 +12,7 @@ from homeassistant.const import ( CONF_NAME, CONF_PASSWORD, CONF_PORT, + CONF_PROTOCOL, CONF_TRIGGER_TIME, 
CONF_USERNAME, ) @@ -31,6 +32,7 @@ class AxisConfig: entry: ConfigEntry + protocol: str host: str port: int username: str @@ -54,6 +56,7 @@ class AxisConfig: options = config_entry.options return cls( entry=config_entry, + protocol=config.get(CONF_PROTOCOL, "http"), host=config[CONF_HOST], username=config[CONF_USERNAME], password=config[CONF_PASSWORD], From 618fa08ab2db104c768b772d611f517aaa349c3e Mon Sep 17 00:00:00 2001 From: Jeef Date: Thu, 4 Apr 2024 19:37:54 -0600 Subject: [PATCH 118/426] Bump weatherflow4py to 0.2.20 (#114888) --- homeassistant/components/weatherflow_cloud/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/weatherflow_cloud/manifest.json b/homeassistant/components/weatherflow_cloud/manifest.json index 8376bd1b50d..361349dcbe8 100644 --- a/homeassistant/components/weatherflow_cloud/manifest.json +++ b/homeassistant/components/weatherflow_cloud/manifest.json @@ -5,5 +5,5 @@ "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/weatherflow_cloud", "iot_class": "cloud_polling", - "requirements": ["weatherflow4py==0.2.17"] + "requirements": ["weatherflow4py==0.2.20"] } diff --git a/requirements_all.txt b/requirements_all.txt index 51de44eaca6..ca23e4beaf7 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2838,7 +2838,7 @@ watchdog==2.3.1 waterfurnace==1.1.0 # homeassistant.components.weatherflow_cloud -weatherflow4py==0.2.17 +weatherflow4py==0.2.20 # homeassistant.components.webmin webmin-xmlrpc==0.0.2 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index eb1fb0583cb..a3b058f2029 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2185,7 +2185,7 @@ wallbox==0.6.0 watchdog==2.3.1 # homeassistant.components.weatherflow_cloud -weatherflow4py==0.2.17 +weatherflow4py==0.2.20 # homeassistant.components.webmin webmin-xmlrpc==0.0.2 From 
2434a22e4e487c9a654b6f23a365fbc84f83ab36 Mon Sep 17 00:00:00 2001 From: Robert Svensson Date: Fri, 5 Apr 2024 09:47:49 +0200 Subject: [PATCH 119/426] Fix Axis reconfigure step not providing protocols as alternatives but as string (#114889) --- homeassistant/components/axis/config_flow.py | 11 ++++------- 1 file changed, 4 insertions(+), 7 deletions(-) diff --git a/homeassistant/components/axis/config_flow.py b/homeassistant/components/axis/config_flow.py index 30bc653c202..80872fc9be4 100644 --- a/homeassistant/components/axis/config_flow.py +++ b/homeassistant/components/axis/config_flow.py @@ -168,16 +168,13 @@ class AxisFlowHandler(ConfigFlow, domain=AXIS_DOMAIN): self, entry_data: Mapping[str, Any], keep_password: bool ) -> ConfigFlowResult: """Re-run configuration step.""" + protocol = entry_data.get(CONF_PROTOCOL, "http") + password = entry_data[CONF_PASSWORD] if keep_password else "" self.discovery_schema = { - vol.Required( - CONF_PROTOCOL, default=entry_data.get(CONF_PROTOCOL, "http") - ): str, + vol.Required(CONF_PROTOCOL, default=protocol): vol.In(PROTOCOL_CHOICES), vol.Required(CONF_HOST, default=entry_data[CONF_HOST]): str, vol.Required(CONF_USERNAME, default=entry_data[CONF_USERNAME]): str, - vol.Required( - CONF_PASSWORD, - default=entry_data[CONF_PASSWORD] if keep_password else "", - ): str, + vol.Required(CONF_PASSWORD, default=password): str, vol.Required(CONF_PORT, default=entry_data[CONF_PORT]): int, } From 71877fdeda060229db38b40a27cad375144d6c0c Mon Sep 17 00:00:00 2001 From: Bram Kragten Date: Fri, 5 Apr 2024 00:26:07 +0200 Subject: [PATCH 120/426] Update frontend to 20240404.1 (#114890) --- homeassistant/components/frontend/manifest.json | 2 +- homeassistant/package_constraints.txt | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/frontend/manifest.json b/homeassistant/components/frontend/manifest.json index 75c630b4471..028fb28f01b 
100644 --- a/homeassistant/components/frontend/manifest.json +++ b/homeassistant/components/frontend/manifest.json @@ -20,5 +20,5 @@ "documentation": "https://www.home-assistant.io/integrations/frontend", "integration_type": "system", "quality_scale": "internal", - "requirements": ["home-assistant-frontend==20240404.0"] + "requirements": ["home-assistant-frontend==20240404.1"] } diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index d520d7f8f76..bd35403340f 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -30,7 +30,7 @@ habluetooth==2.4.2 hass-nabucasa==0.79.0 hassil==1.6.1 home-assistant-bluetooth==1.12.0 -home-assistant-frontend==20240404.0 +home-assistant-frontend==20240404.1 home-assistant-intents==2024.4.3 httpx==0.27.0 ifaddr==0.2.0 diff --git a/requirements_all.txt b/requirements_all.txt index ca23e4beaf7..9bbb5293660 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1077,7 +1077,7 @@ hole==0.8.0 holidays==0.46 # homeassistant.components.frontend -home-assistant-frontend==20240404.0 +home-assistant-frontend==20240404.1 # homeassistant.components.conversation home-assistant-intents==2024.4.3 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index a3b058f2029..2125f7e13d0 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -876,7 +876,7 @@ hole==0.8.0 holidays==0.46 # homeassistant.components.frontend -home-assistant-frontend==20240404.0 +home-assistant-frontend==20240404.1 # homeassistant.components.conversation home-assistant-intents==2024.4.3 From 87ffd5ac56750798d222b06c3122f4099e7ffced Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Thu, 4 Apr 2024 20:22:00 -1000 Subject: [PATCH 121/426] Ensure all tables have the default table args in the db_schema (#114895) --- homeassistant/components/recorder/db_schema.py | 12 +++++++++++- tests/components/recorder/test_init.py | 7 +++++++ 2 files changed, 18 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/recorder/db_schema.py b/homeassistant/components/recorder/db_schema.py index 5b24448211d..eac743c3d75 100644 --- a/homeassistant/components/recorder/db_schema.py +++ b/homeassistant/components/recorder/db_schema.py @@ -715,6 +715,7 @@ class Statistics(Base, StatisticsBase): "start_ts", unique=True, ), + _DEFAULT_TABLE_ARGS, ) __tablename__ = TABLE_STATISTICS @@ -732,6 +733,7 @@ class StatisticsShortTerm(Base, StatisticsBase): "start_ts", unique=True, ), + _DEFAULT_TABLE_ARGS, ) __tablename__ = TABLE_STATISTICS_SHORT_TERM @@ -760,7 +762,10 @@ class StatisticsMeta(Base): class RecorderRuns(Base): """Representation of recorder run.""" - __table_args__ = (Index("ix_recorder_runs_start_end", "start", "end"),) + __table_args__ = ( + Index("ix_recorder_runs_start_end", "start", "end"), + _DEFAULT_TABLE_ARGS, + ) __tablename__ = TABLE_RECORDER_RUNS run_id: Mapped[int] = mapped_column(Integer, Identity(), primary_key=True) start: Mapped[datetime] = mapped_column(DATETIME_TYPE, default=dt_util.utcnow) @@ -789,6 +794,7 @@ class MigrationChanges(Base): """Representation of migration changes.""" __tablename__ = TABLE_MIGRATION_CHANGES + __table_args__ = (_DEFAULT_TABLE_ARGS,) migration_id: Mapped[str] = mapped_column(String(255), primary_key=True) version: Mapped[int] = mapped_column(SmallInteger) @@ -798,6 +804,8 @@ class SchemaChanges(Base): """Representation of schema version changes.""" __tablename__ = TABLE_SCHEMA_CHANGES + __table_args__ = (_DEFAULT_TABLE_ARGS,) + change_id: Mapped[int] = mapped_column(Integer, Identity(), primary_key=True) schema_version: Mapped[int | None] = mapped_column(Integer) changed: 
Mapped[datetime] = mapped_column(DATETIME_TYPE, default=dt_util.utcnow) @@ -816,6 +824,8 @@ class StatisticsRuns(Base): """Representation of statistics run.""" __tablename__ = TABLE_STATISTICS_RUNS + __table_args__ = (_DEFAULT_TABLE_ARGS,) + run_id: Mapped[int] = mapped_column(Integer, Identity(), primary_key=True) start: Mapped[datetime] = mapped_column(DATETIME_TYPE, index=True) diff --git a/tests/components/recorder/test_init.py b/tests/components/recorder/test_init.py index cde2da3cc83..206c356bad8 100644 --- a/tests/components/recorder/test_init.py +++ b/tests/components/recorder/test_init.py @@ -27,6 +27,7 @@ from homeassistant.components.recorder import ( DOMAIN, SQLITE_URL_PREFIX, Recorder, + db_schema, get_instance, migration, pool, @@ -2598,3 +2599,9 @@ async def test_commit_before_commits_pending_writes( await verify_states_in_queue_future await verify_session_commit_future + + +def test_all_tables_use_default_table_args(hass: HomeAssistant) -> None: + """Test that all tables use the default table args.""" + for table in db_schema.Base.metadata.tables.values(): + assert table.kwargs.items() >= db_schema._DEFAULT_TABLE_ARGS.items() From c39d6f07300d3def56be05cc8a8239c71a9b5a14 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Thu, 4 Apr 2024 15:28:36 -1000 Subject: [PATCH 122/426] Reduce august polling frequency (#114904) Co-authored-by: TheJulianJES --- homeassistant/components/august/activity.py | 21 +++++++++++- homeassistant/components/august/const.py | 2 +- homeassistant/components/august/subscriber.py | 33 +++++++++---------- tests/components/august/test_lock.py | 23 ++++++++++++- 4 files changed, 58 insertions(+), 21 deletions(-) diff --git a/homeassistant/components/august/activity.py b/homeassistant/components/august/activity.py index ae920383e40..ee180ab5480 100644 --- a/homeassistant/components/august/activity.py +++ b/homeassistant/components/august/activity.py @@ -5,6 +5,7 @@ from __future__ import annotations from datetime import datetime from functools import partial import logging +from time import monotonic from aiohttp import ClientError from yalexs.activity import Activity, ActivityType @@ -26,9 +27,11 @@ _LOGGER = logging.getLogger(__name__) ACTIVITY_STREAM_FETCH_LIMIT = 10 ACTIVITY_CATCH_UP_FETCH_LIMIT = 2500 +INITIAL_LOCK_RESYNC_TIME = 60 + # If there is a storm of activity (ie lock, unlock, door open, door close, etc) # we want to debounce the updates so we don't hammer the activity api too much. 
-ACTIVITY_DEBOUNCE_COOLDOWN = 3 +ACTIVITY_DEBOUNCE_COOLDOWN = 4 @callback @@ -62,6 +65,7 @@ class ActivityStream(AugustSubscriberMixin): self.pubnub = pubnub self._update_debounce: dict[str, Debouncer] = {} self._update_debounce_jobs: dict[str, HassJob] = {} + self._start_time: float | None = None @callback def _async_update_house_id_later(self, debouncer: Debouncer, _: datetime) -> None: @@ -70,6 +74,7 @@ class ActivityStream(AugustSubscriberMixin): async def async_setup(self) -> None: """Token refresh check and catch up the activity stream.""" + self._start_time = monotonic() update_debounce = self._update_debounce update_debounce_jobs = self._update_debounce_jobs for house_id in self._house_ids: @@ -140,11 +145,25 @@ class ActivityStream(AugustSubscriberMixin): debouncer = self._update_debounce[house_id] debouncer.async_schedule_call() + # Schedule two updates past the debounce time # to ensure we catch the case where the activity # api does not update right away and we need to poll # it again. Sometimes the lock operator or a doorbell # will not show up in the activity stream right away. + # Only do additional polls if we are past + # the initial lock resync time to avoid a storm + # of activity at setup. 
+ if ( + not self._start_time + or monotonic() - self._start_time < INITIAL_LOCK_RESYNC_TIME + ): + _LOGGER.debug( + "Skipping additional updates due to ongoing initial lock resync time" + ) + return + + _LOGGER.debug("Scheduling additional updates for house id %s", house_id) job = self._update_debounce_jobs[house_id] for step in (1, 2): future_updates.append( diff --git a/homeassistant/components/august/const.py b/homeassistant/components/august/const.py index 0cbd21f397e..6aa033c62b2 100644 --- a/homeassistant/components/august/const.py +++ b/homeassistant/components/august/const.py @@ -40,7 +40,7 @@ ATTR_OPERATION_TAG = "tag" # Limit battery, online, and hardware updates to hourly # in order to reduce the number of api requests and # avoid hitting rate limits -MIN_TIME_BETWEEN_DETAIL_UPDATES = timedelta(hours=1) +MIN_TIME_BETWEEN_DETAIL_UPDATES = timedelta(hours=24) # Activity needs to be checked more frequently as the # doorbell motion and rings are included here diff --git a/homeassistant/components/august/subscriber.py b/homeassistant/components/august/subscriber.py index e800b5cb604..9332080d9ad 100644 --- a/homeassistant/components/august/subscriber.py +++ b/homeassistant/components/august/subscriber.py @@ -49,9 +49,17 @@ class AugustSubscriberMixin: """Call the refresh method.""" self._hass.async_create_task(self._async_refresh(now), eager_start=True) + @callback + def _async_cancel_update_interval(self, _: Event | None = None) -> None: + """Cancel the scheduled update.""" + if self._unsub_interval: + self._unsub_interval() + self._unsub_interval = None + @callback def _async_setup_listeners(self) -> None: """Create interval and stop listeners.""" + self._async_cancel_update_interval() self._unsub_interval = async_track_time_interval( self._hass, self._async_scheduled_refresh, @@ -59,17 +67,12 @@ class AugustSubscriberMixin: name="august refresh", ) - @callback - def _async_cancel_update_interval(_: Event) -> None: - self._stop_interval = None - if 
self._unsub_interval: - self._unsub_interval() - - self._stop_interval = self._hass.bus.async_listen( - EVENT_HOMEASSISTANT_STOP, - _async_cancel_update_interval, - run_immediately=True, - ) + if not self._stop_interval: + self._stop_interval = self._hass.bus.async_listen( + EVENT_HOMEASSISTANT_STOP, + self._async_cancel_update_interval, + run_immediately=True, + ) @callback def async_unsubscribe_device_id( @@ -82,13 +85,7 @@ class AugustSubscriberMixin: if self._subscriptions: return - - if self._unsub_interval: - self._unsub_interval() - self._unsub_interval = None - if self._stop_interval: - self._stop_interval() - self._stop_interval = None + self._async_cancel_update_interval() @callback def async_signal_device_id_update(self, device_id: str) -> None: diff --git a/tests/components/august/test_lock.py b/tests/components/august/test_lock.py index 39c1745d967..4de931e6979 100644 --- a/tests/components/august/test_lock.py +++ b/tests/components/august/test_lock.py @@ -4,9 +4,11 @@ import datetime from unittest.mock import Mock from aiohttp import ClientResponseError +from freezegun.api import FrozenDateTimeFactory import pytest from yalexs.pubnub_async import AugustPubNub +from homeassistant.components.august.activity import INITIAL_LOCK_RESYNC_TIME from homeassistant.components.lock import ( DOMAIN as LOCK_DOMAIN, STATE_JAMMED, @@ -155,7 +157,9 @@ async def test_one_lock_operation( async def test_one_lock_operation_pubnub_connected( - hass: HomeAssistant, entity_registry: er.EntityRegistry + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + freezer: FrozenDateTimeFactory, ) -> None: """Test lock and unlock operations are async when pubnub is connected.""" lock_one = await _mock_doorsense_enabled_august_lock_detail(hass) @@ -230,6 +234,23 @@ async def test_one_lock_operation_pubnub_connected( == STATE_UNKNOWN ) + freezer.tick(INITIAL_LOCK_RESYNC_TIME) + + pubnub.message( + pubnub, + Mock( + channel=lock_one.pubsub_channel, + 
timetoken=(dt_util.utcnow().timestamp() + 2) * 10000000, + message={ + "status": "kAugLockState_Unlocked", + }, + ), + ) + await hass.async_block_till_done() + + lock_online_with_doorsense_name = hass.states.get("lock.online_with_doorsense_name") + assert lock_online_with_doorsense_name.state == STATE_UNLOCKED + async def test_lock_jammed(hass: HomeAssistant) -> None: """Test lock gets jammed on unlock.""" From 5d5dc24b3339c9d45f958521ceaf8c8e4def526e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=85ke=20Strandberg?= Date: Fri, 5 Apr 2024 10:01:51 +0200 Subject: [PATCH 123/426] Show correct model string in myuplink (#114921) --- homeassistant/components/myuplink/__init__.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/myuplink/__init__.py b/homeassistant/components/myuplink/__init__.py index 5dee46b24cf..42bb9007789 100644 --- a/homeassistant/components/myuplink/__init__.py +++ b/homeassistant/components/myuplink/__init__.py @@ -5,7 +5,7 @@ from __future__ import annotations from http import HTTPStatus from aiohttp import ClientError, ClientResponseError -from myuplink import MyUplinkAPI, get_manufacturer, get_system_name +from myuplink import MyUplinkAPI, get_manufacturer, get_model, get_system_name from homeassistant.config_entries import ConfigEntry from homeassistant.const import Platform @@ -92,7 +92,7 @@ def create_devices( identifiers={(DOMAIN, device_id)}, name=get_system_name(system), manufacturer=get_manufacturer(device), - model=device.productName, + model=get_model(device), sw_version=device.firmwareCurrent, serial_number=device.product_serial_number, ) From ed3daed86935b6aaf2fb625788fd6599c0273471 Mon Sep 17 00:00:00 2001 From: Joost Lekkerkerker Date: Fri, 5 Apr 2024 12:32:09 +0200 Subject: [PATCH 124/426] Create right import issues in Downloader (#114922) * Create right import issues in Downloader * Create right import issues in Downloader * Create right import issues in Downloader * Create right import 
issues in Downloader * Fix * Fix * Fix * Fix * Apply suggestions from code review Co-authored-by: Martin Hjelmare * Fix --------- Co-authored-by: Martin Hjelmare --- .../components/downloader/__init__.py | 57 +++++++++++------ .../components/downloader/config_flow.py | 12 ++-- .../components/downloader/strings.json | 8 +-- .../components/downloader/test_config_flow.py | 16 +++++ tests/components/downloader/test_init.py | 64 ++++++++++++++++++- 5 files changed, 125 insertions(+), 32 deletions(-) diff --git a/homeassistant/components/downloader/__init__.py b/homeassistant/components/downloader/__init__.py index d110c28785a..3fded1215c4 100644 --- a/homeassistant/components/downloader/__init__.py +++ b/homeassistant/components/downloader/__init__.py @@ -11,7 +11,11 @@ import requests import voluptuous as vol from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry -from homeassistant.core import HomeAssistant, ServiceCall +from homeassistant.core import ( + DOMAIN as HOMEASSISTANT_DOMAIN, + HomeAssistant, + ServiceCall, +) from homeassistant.data_entry_flow import FlowResultType import homeassistant.helpers.config_validation as cv from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue @@ -43,7 +47,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: if DOMAIN not in config: return True - hass.async_create_task(_async_import_config(hass, config), eager_start=True) + hass.async_create_task(_async_import_config(hass, config)) return True @@ -58,27 +62,40 @@ async def _async_import_config(hass: HomeAssistant, config: ConfigType) -> None: }, ) - translation_key = "deprecated_yaml" if ( import_result["type"] == FlowResultType.ABORT - and import_result["reason"] == "import_failed" + and import_result["reason"] != "single_instance_allowed" ): - translation_key = "import_failed" - - async_create_issue( - hass, - DOMAIN, - f"deprecated_yaml_{DOMAIN}", - breaks_in_ha_version="2024.10.0", - is_fixable=False, - 
issue_domain=DOMAIN, - severity=IssueSeverity.WARNING, - translation_key=translation_key, - translation_placeholders={ - "domain": DOMAIN, - "integration_title": "Downloader", - }, - ) + async_create_issue( + hass, + DOMAIN, + f"deprecated_yaml_{DOMAIN}", + breaks_in_ha_version="2024.10.0", + is_fixable=False, + issue_domain=DOMAIN, + severity=IssueSeverity.WARNING, + translation_key="directory_does_not_exist", + translation_placeholders={ + "domain": DOMAIN, + "integration_title": "Downloader", + "url": "/config/integrations/dashboard/add?domain=downloader", + }, + ) + else: + async_create_issue( + hass, + HOMEASSISTANT_DOMAIN, + f"deprecated_yaml_{DOMAIN}", + breaks_in_ha_version="2024.10.0", + is_fixable=False, + issue_domain=DOMAIN, + severity=IssueSeverity.WARNING, + translation_key="deprecated_yaml", + translation_placeholders={ + "domain": DOMAIN, + "integration_title": "Downloader", + }, + ) async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: diff --git a/homeassistant/components/downloader/config_flow.py b/homeassistant/components/downloader/config_flow.py index 15af8b56163..94b33f4e93f 100644 --- a/homeassistant/components/downloader/config_flow.py +++ b/homeassistant/components/downloader/config_flow.py @@ -46,12 +46,16 @@ class DownloaderConfigFlow(ConfigFlow, domain=DOMAIN): errors=errors, ) - async def async_step_import( - self, user_input: dict[str, Any] | None = None - ) -> ConfigFlowResult: + async def async_step_import(self, user_input: dict[str, Any]) -> ConfigFlowResult: """Handle a flow initiated by configuration file.""" + if self._async_current_entries(): + return self.async_abort(reason="single_instance_allowed") - return await self.async_step_user(user_input) + try: + await self._validate_input(user_input) + except DirectoryDoesNotExist: + return self.async_abort(reason="directory_does_not_exist") + return self.async_create_entry(title=DEFAULT_NAME, data=user_input) async def _validate_input(self, user_input: 
dict[str, Any]) -> None: """Validate the user input if the directory exists.""" diff --git a/homeassistant/components/downloader/strings.json b/homeassistant/components/downloader/strings.json index 77dd0abd9d3..4cadabf96c6 100644 --- a/homeassistant/components/downloader/strings.json +++ b/homeassistant/components/downloader/strings.json @@ -37,13 +37,9 @@ } }, "issues": { - "deprecated_yaml": { - "title": "The {integration_title} YAML configuration is being removed", - "description": "Configuring {integration_title} using YAML is being removed.\n\nYour configuration is already imported.\n\nRemove the `{domain}` configuration from your configuration.yaml file and restart Home Assistant to fix this issue." - }, - "import_failed": { + "directory_does_not_exist": { "title": "The {integration_title} failed to import", - "description": "The {integration_title} integration failed to import.\n\nPlease check the logs for more details." + "description": "The {integration_title} integration failed to import because the configured directory does not exist.\n\nEnsure the directory exists and restart Home Assistant to try again or remove the {integration_title} configuration from your configuration.yaml file and continue to [set up the integration]({url}) manually." 
} } } diff --git a/tests/components/downloader/test_config_flow.py b/tests/components/downloader/test_config_flow.py index 5e75a9b33ba..897fbba0c59 100644 --- a/tests/components/downloader/test_config_flow.py +++ b/tests/components/downloader/test_config_flow.py @@ -99,3 +99,19 @@ async def test_import_flow_success(hass: HomeAssistant) -> None: assert result["title"] == "Downloader" assert result["data"] == {} assert result["options"] == {} + + +async def test_import_flow_directory_not_found(hass: HomeAssistant) -> None: + """Test import flow.""" + with patch("os.path.isdir", return_value=False): + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_IMPORT}, + data={ + CONF_DOWNLOAD_DIR: "download_dir", + }, + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "directory_does_not_exist" diff --git a/tests/components/downloader/test_init.py b/tests/components/downloader/test_init.py index 8cd0d00b1ab..5832c0402b4 100644 --- a/tests/components/downloader/test_init.py +++ b/tests/components/downloader/test_init.py @@ -8,7 +8,8 @@ from homeassistant.components.downloader import ( SERVICE_DOWNLOAD_FILE, ) from homeassistant.config_entries import ConfigEntryState -from homeassistant.core import HomeAssistant +from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant +from homeassistant.helpers import issue_registry as ir from homeassistant.setup import async_setup_component from tests.common import MockConfigEntry @@ -30,7 +31,7 @@ async def test_initialization(hass: HomeAssistant) -> None: assert config_entry.state is ConfigEntryState.LOADED -async def test_import(hass: HomeAssistant) -> None: +async def test_import(hass: HomeAssistant, issue_registry: ir.IssueRegistry) -> None: """Test the import of the downloader component.""" with patch("os.path.isdir", return_value=True): assert await async_setup_component( @@ -49,3 +50,62 @@ 
async def test_import(hass: HomeAssistant) -> None: assert config_entry.data == {CONF_DOWNLOAD_DIR: "/test_dir"} assert config_entry.state is ConfigEntryState.LOADED assert hass.services.has_service(DOMAIN, SERVICE_DOWNLOAD_FILE) + assert len(issue_registry.issues) == 1 + issue = issue_registry.async_get_issue( + issue_id="deprecated_yaml_downloader", domain=HOMEASSISTANT_DOMAIN + ) + assert issue + + +async def test_import_directory_missing( + hass: HomeAssistant, issue_registry: ir.IssueRegistry +) -> None: + """Test the import of the downloader component.""" + with patch("os.path.isdir", return_value=False): + assert await async_setup_component( + hass, + DOMAIN, + { + DOMAIN: { + CONF_DOWNLOAD_DIR: "/test_dir", + }, + }, + ) + await hass.async_block_till_done() + + assert len(hass.config_entries.async_entries(DOMAIN)) == 0 + assert len(issue_registry.issues) == 1 + issue = issue_registry.async_get_issue( + issue_id="deprecated_yaml_downloader", domain=DOMAIN + ) + assert issue + + +async def test_import_already_exists( + hass: HomeAssistant, issue_registry: ir.IssueRegistry +) -> None: + """Test the import of the downloader component.""" + config_entry = MockConfigEntry( + domain=DOMAIN, + data={ + CONF_DOWNLOAD_DIR: "/test_dir", + }, + ) + config_entry.add_to_hass(hass) + with patch("os.path.isdir", return_value=True): + assert await async_setup_component( + hass, + DOMAIN, + { + DOMAIN: { + CONF_DOWNLOAD_DIR: "/test_dir", + }, + }, + ) + await hass.async_block_till_done() + + assert len(issue_registry.issues) == 1 + issue = issue_registry.async_get_issue( + issue_id="deprecated_yaml_downloader", domain=HOMEASSISTANT_DOMAIN + ) + assert issue From 9937743863c9135b0865f6548aed4d1c88698567 Mon Sep 17 00:00:00 2001 From: Joost Lekkerkerker Date: Fri, 5 Apr 2024 11:44:51 +0200 Subject: [PATCH 125/426] Fix cast dashboard in media browser (#114924) --- homeassistant/components/lovelace/cast.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git 
a/homeassistant/components/lovelace/cast.py b/homeassistant/components/lovelace/cast.py index 02f5d0c0478..82a92b94ae5 100644 --- a/homeassistant/components/lovelace/cast.py +++ b/homeassistant/components/lovelace/cast.py @@ -179,7 +179,7 @@ async def _get_dashboard_info(hass, url_path): "views": views, } - if config is None: + if config is None or "views" not in config: return data for idx, view in enumerate(config["views"]): From e3c111b1dd0fd41e3d15a6c4fe300bdadfa60885 Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Fri, 5 Apr 2024 12:34:07 +0200 Subject: [PATCH 126/426] Bump version to 2024.4.1 --- homeassistant/const.py | 2 +- pyproject.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/homeassistant/const.py b/homeassistant/const.py index 6e08c49f970..b642ce6ce8c 100644 --- a/homeassistant/const.py +++ b/homeassistant/const.py @@ -18,7 +18,7 @@ from .util.signal_type import SignalType APPLICATION_NAME: Final = "HomeAssistant" MAJOR_VERSION: Final = 2024 MINOR_VERSION: Final = 4 -PATCH_VERSION: Final = "0" +PATCH_VERSION: Final = "1" __short_version__: Final = f"{MAJOR_VERSION}.{MINOR_VERSION}" __version__: Final = f"{__short_version__}.{PATCH_VERSION}" REQUIRED_PYTHON_VER: Final[tuple[int, int, int]] = (3, 12, 0) diff --git a/pyproject.toml b/pyproject.toml index ac890603ac3..2dd3a9632c6 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta" [project] name = "homeassistant" -version = "2024.4.0" +version = "2024.4.1" license = {text = "Apache-2.0"} description = "Open-source home automation platform running on Python 3." 
readme = "README.rst" From 47d9879c0c1da274d9939d530441aff9661d2fb5 Mon Sep 17 00:00:00 2001 From: Aidan Timson Date: Thu, 4 Apr 2024 13:25:35 +0100 Subject: [PATCH 127/426] Pin systembridgemodels to 4.0.4 (#114842) --- homeassistant/components/system_bridge/manifest.json | 2 +- requirements_all.txt | 3 +++ requirements_test_all.txt | 3 +++ 3 files changed, 7 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/system_bridge/manifest.json b/homeassistant/components/system_bridge/manifest.json index b4365fda778..aea66d22f62 100644 --- a/homeassistant/components/system_bridge/manifest.json +++ b/homeassistant/components/system_bridge/manifest.json @@ -10,6 +10,6 @@ "iot_class": "local_push", "loggers": ["systembridgeconnector"], "quality_scale": "silver", - "requirements": ["systembridgeconnector==4.0.3"], + "requirements": ["systembridgeconnector==4.0.3", "systembridgemodels==4.0.4"], "zeroconf": ["_system-bridge._tcp.local."] } diff --git a/requirements_all.txt b/requirements_all.txt index 9bbb5293660..a87df9614d1 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2654,6 +2654,9 @@ synology-srm==0.2.0 # homeassistant.components.system_bridge systembridgeconnector==4.0.3 +# homeassistant.components.system_bridge +systembridgemodels==4.0.4 + # homeassistant.components.tailscale tailscale==0.6.0 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 2125f7e13d0..1f5d01eb46c 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2049,6 +2049,9 @@ switchbot-api==2.0.0 # homeassistant.components.system_bridge systembridgeconnector==4.0.3 +# homeassistant.components.system_bridge +systembridgemodels==4.0.4 + # homeassistant.components.tailscale tailscale==0.6.0 From 95606135a629fd698a26266354a5246bb4713c54 Mon Sep 17 00:00:00 2001 From: Joost Lekkerkerker Date: Fri, 5 Apr 2024 14:21:24 +0200 Subject: [PATCH 128/426] Fix ROVA validation (#114938) * Fix ROVA validation * Fix ROVA validation --- 
homeassistant/components/rova/sensor.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/homeassistant/components/rova/sensor.py b/homeassistant/components/rova/sensor.py index e510bcf0caf..f63b9893c02 100644 --- a/homeassistant/components/rova/sensor.py +++ b/homeassistant/components/rova/sensor.py @@ -54,7 +54,7 @@ PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( vol.Optional(CONF_HOUSE_NUMBER_SUFFIX, default=""): cv.string, vol.Optional(CONF_NAME, default="Rova"): cv.string, vol.Optional(CONF_MONITORED_CONDITIONS, default=["bio"]): vol.All( - cv.ensure_list, [vol.In(SENSOR_TYPES)] + cv.ensure_list, [vol.In(["bio", "paper", "plastic", "residual"])] ), } ) From 61a359e4d21140b7147abd006e07ee70689110b4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Niccol=C3=B2=20Maggioni?= Date: Mon, 8 Apr 2024 10:04:59 +0200 Subject: [PATCH 129/426] Fix hang in SNMP device_tracker implementation (#112815) Co-authored-by: J. Nick Koston --- CODEOWNERS | 2 + .../components/snmp/device_tracker.py | 154 ++++++++++++------ homeassistant/components/snmp/manifest.json | 2 +- 3 files changed, 110 insertions(+), 48 deletions(-) diff --git a/CODEOWNERS b/CODEOWNERS index 85603250b7c..c6cee80ea80 100644 --- a/CODEOWNERS +++ b/CODEOWNERS @@ -1249,6 +1249,8 @@ build.json @home-assistant/supervisor /homeassistant/components/sms/ @ocalvo /homeassistant/components/snapcast/ @luar123 /tests/components/snapcast/ @luar123 +/homeassistant/components/snmp/ @nmaggioni +/tests/components/snmp/ @nmaggioni /homeassistant/components/snooz/ @AustinBrunkhorst /tests/components/snooz/ @AustinBrunkhorst /homeassistant/components/solaredge/ @frenck diff --git a/homeassistant/components/snmp/device_tracker.py b/homeassistant/components/snmp/device_tracker.py index 4b8ab073b9c..a1a91116f0f 100644 --- a/homeassistant/components/snmp/device_tracker.py +++ b/homeassistant/components/snmp/device_tracker.py @@ -5,8 +5,19 @@ from __future__ import annotations import binascii import logging -from pysnmp.entity 
import config as cfg -from pysnmp.entity.rfc3413.oneliner import cmdgen +from pysnmp.error import PySnmpError +from pysnmp.hlapi.asyncio import ( + CommunityData, + ContextData, + ObjectIdentity, + ObjectType, + SnmpEngine, + Udp6TransportTarget, + UdpTransportTarget, + UsmUserData, + bulkWalkCmd, + isEndOfMib, +) import voluptuous as vol from homeassistant.components.device_tracker import ( @@ -24,7 +35,13 @@ from .const import ( CONF_BASEOID, CONF_COMMUNITY, CONF_PRIV_KEY, + DEFAULT_AUTH_PROTOCOL, DEFAULT_COMMUNITY, + DEFAULT_PORT, + DEFAULT_PRIV_PROTOCOL, + DEFAULT_TIMEOUT, + DEFAULT_VERSION, + SNMP_VERSIONS, ) _LOGGER = logging.getLogger(__name__) @@ -40,9 +57,12 @@ PLATFORM_SCHEMA = PARENT_PLATFORM_SCHEMA.extend( ) -def get_scanner(hass: HomeAssistant, config: ConfigType) -> SnmpScanner | None: +async def async_get_scanner( + hass: HomeAssistant, config: ConfigType +) -> SnmpScanner | None: """Validate the configuration and return an SNMP scanner.""" scanner = SnmpScanner(config[DOMAIN]) + await scanner.async_init() return scanner if scanner.success_init else None @@ -51,39 +71,75 @@ class SnmpScanner(DeviceScanner): """Queries any SNMP capable Access Point for connected devices.""" def __init__(self, config): - """Initialize the scanner.""" + """Initialize the scanner and test the target device.""" + host = config[CONF_HOST] + community = config[CONF_COMMUNITY] + baseoid = config[CONF_BASEOID] + authkey = config.get(CONF_AUTH_KEY) + authproto = DEFAULT_AUTH_PROTOCOL + privkey = config.get(CONF_PRIV_KEY) + privproto = DEFAULT_PRIV_PROTOCOL - self.snmp = cmdgen.CommandGenerator() + try: + # Try IPv4 first. + target = UdpTransportTarget((host, DEFAULT_PORT), timeout=DEFAULT_TIMEOUT) + except PySnmpError: + # Then try IPv6. 
+ try: + target = Udp6TransportTarget( + (host, DEFAULT_PORT), timeout=DEFAULT_TIMEOUT + ) + except PySnmpError as err: + _LOGGER.error("Invalid SNMP host: %s", err) + return - self.host = cmdgen.UdpTransportTarget((config[CONF_HOST], 161)) - if CONF_AUTH_KEY not in config or CONF_PRIV_KEY not in config: - self.auth = cmdgen.CommunityData(config[CONF_COMMUNITY]) + if authkey is not None or privkey is not None: + if not authkey: + authproto = "none" + if not privkey: + privproto = "none" + + request_args = [ + SnmpEngine(), + UsmUserData( + community, + authKey=authkey or None, + privKey=privkey or None, + authProtocol=authproto, + privProtocol=privproto, + ), + target, + ContextData(), + ] else: - self.auth = cmdgen.UsmUserData( - config[CONF_COMMUNITY], - config[CONF_AUTH_KEY], - config[CONF_PRIV_KEY], - authProtocol=cfg.usmHMACSHAAuthProtocol, - privProtocol=cfg.usmAesCfb128Protocol, - ) - self.baseoid = cmdgen.MibVariable(config[CONF_BASEOID]) - self.last_results = [] + request_args = [ + SnmpEngine(), + CommunityData(community, mpModel=SNMP_VERSIONS[DEFAULT_VERSION]), + target, + ContextData(), + ] - # Test the router is accessible - data = self.get_snmp_data() + self.request_args = request_args + self.baseoid = baseoid + self.last_results = [] + self.success_init = False + + async def async_init(self): + """Make a one-off read to check if the target device is reachable and readable.""" + data = await self.async_get_snmp_data() self.success_init = data is not None - def scan_devices(self): + async def async_scan_devices(self): """Scan for new devices and return a list with found device IDs.""" - self._update_info() + await self._async_update_info() return [client["mac"] for client in self.last_results if client.get("mac")] - def get_device_name(self, device): + async def async_get_device_name(self, device): """Return the name of the given device or None if we don't know.""" # We have no names return None - def _update_info(self): + async def 
_async_update_info(self): """Ensure the information from the device is up to date. Return boolean if scanning successful. @@ -91,38 +147,42 @@ class SnmpScanner(DeviceScanner): if not self.success_init: return False - if not (data := self.get_snmp_data()): + if not (data := await self.async_get_snmp_data()): return False self.last_results = data return True - def get_snmp_data(self): + async def async_get_snmp_data(self): """Fetch MAC addresses from access point via SNMP.""" devices = [] - errindication, errstatus, errindex, restable = self.snmp.nextCmd( - self.auth, self.host, self.baseoid + walker = bulkWalkCmd( + *self.request_args, + 0, + 50, + ObjectType(ObjectIdentity(self.baseoid)), + lexicographicMode=False, ) + async for errindication, errstatus, errindex, res in walker: + if errindication: + _LOGGER.error("SNMPLIB error: %s", errindication) + return + if errstatus: + _LOGGER.error( + "SNMP error: %s at %s", + errstatus.prettyPrint(), + errindex and res[int(errindex) - 1][0] or "?", + ) + return - if errindication: - _LOGGER.error("SNMPLIB error: %s", errindication) - return - if errstatus: - _LOGGER.error( - "SNMP error: %s at %s", - errstatus.prettyPrint(), - errindex and restable[int(errindex) - 1][0] or "?", - ) - return - - for resrow in restable: - for _, val in resrow: - try: - mac = binascii.hexlify(val.asOctets()).decode("utf-8") - except AttributeError: - continue - _LOGGER.debug("Found MAC address: %s", mac) - mac = ":".join([mac[i : i + 2] for i in range(0, len(mac), 2)]) - devices.append({"mac": mac}) + for _oid, value in res: + if not isEndOfMib(res): + try: + mac = binascii.hexlify(value.asOctets()).decode("utf-8") + except AttributeError: + continue + _LOGGER.debug("Found MAC address: %s", mac) + mac = ":".join([mac[i : i + 2] for i in range(0, len(mac), 2)]) + devices.append({"mac": mac}) return devices diff --git a/homeassistant/components/snmp/manifest.json b/homeassistant/components/snmp/manifest.json index c4aa82f2a74..d79910c44cd 
100644 --- a/homeassistant/components/snmp/manifest.json +++ b/homeassistant/components/snmp/manifest.json @@ -1,7 +1,7 @@ { "domain": "snmp", "name": "SNMP", - "codeowners": [], + "codeowners": ["@nmaggioni"], "documentation": "https://www.home-assistant.io/integrations/snmp", "iot_class": "local_polling", "loggers": ["pyasn1", "pysmi", "pysnmp"], From 93569e38278ddfc46ee55545e8b9c24de54b0f53 Mon Sep 17 00:00:00 2001 From: Benjamin <46243805+bbr111@users.noreply.github.com> Date: Thu, 4 Apr 2024 11:45:01 +0200 Subject: [PATCH 130/426] Fix missing if statement in homematic (#114832) * homematic fix issue #114807 Update climate.py * Update homeassistant/components/homematic/climate.py --------- Co-authored-by: Joost Lekkerkerker --- homeassistant/components/homematic/climate.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/homematic/climate.py b/homeassistant/components/homematic/climate.py index efdb9324f76..16c345c5635 100644 --- a/homeassistant/components/homematic/climate.py +++ b/homeassistant/components/homematic/climate.py @@ -113,7 +113,11 @@ class HMThermostat(HMDevice, ClimateEntity): @property def preset_modes(self): """Return a list of available preset modes.""" - return [HM_PRESET_MAP[mode] for mode in self._hmdevice.ACTIONNODE] + return [ + HM_PRESET_MAP[mode] + for mode in self._hmdevice.ACTIONNODE + if mode in HM_PRESET_MAP + ] @property def current_humidity(self): From d1b1d6388f99419667f0a7b6898a993618c9c439 Mon Sep 17 00:00:00 2001 From: Nathan Spencer Date: Sat, 6 Apr 2024 03:16:00 -0700 Subject: [PATCH 131/426] Bump pylitterbot to 2023.4.11 (#114918) --- .../components/litterrobot/manifest.json | 2 +- .../components/litterrobot/vacuum.py | 1 + requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- tests/components/litterrobot/common.py | 3 ++- tests/components/litterrobot/test_sensor.py | 2 +- tests/components/litterrobot/test_vacuum.py | 26 +++++++++++++++++++ 7 files changed, 33 
insertions(+), 5 deletions(-) diff --git a/homeassistant/components/litterrobot/manifest.json b/homeassistant/components/litterrobot/manifest.json index ea096a908fc..66ade5f356c 100644 --- a/homeassistant/components/litterrobot/manifest.json +++ b/homeassistant/components/litterrobot/manifest.json @@ -12,5 +12,5 @@ "integration_type": "hub", "iot_class": "cloud_push", "loggers": ["pylitterbot"], - "requirements": ["pylitterbot==2023.4.9"] + "requirements": ["pylitterbot==2023.4.11"] } diff --git a/homeassistant/components/litterrobot/vacuum.py b/homeassistant/components/litterrobot/vacuum.py index 4f9efa2dff7..d752609d7de 100644 --- a/homeassistant/components/litterrobot/vacuum.py +++ b/homeassistant/components/litterrobot/vacuum.py @@ -35,6 +35,7 @@ LITTER_BOX_STATUS_STATE_MAP = { LitterBoxStatus.CLEAN_CYCLE: STATE_CLEANING, LitterBoxStatus.EMPTY_CYCLE: STATE_CLEANING, LitterBoxStatus.CLEAN_CYCLE_COMPLETE: STATE_DOCKED, + LitterBoxStatus.CAT_DETECTED: STATE_DOCKED, LitterBoxStatus.CAT_SENSOR_TIMING: STATE_DOCKED, LitterBoxStatus.DRAWER_FULL_1: STATE_DOCKED, LitterBoxStatus.DRAWER_FULL_2: STATE_DOCKED, diff --git a/requirements_all.txt b/requirements_all.txt index a87df9614d1..61e91226c78 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1943,7 +1943,7 @@ pylibrespot-java==0.1.1 pylitejet==0.6.2 # homeassistant.components.litterrobot -pylitterbot==2023.4.9 +pylitterbot==2023.4.11 # homeassistant.components.lutron_caseta pylutron-caseta==0.20.0 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 1f5d01eb46c..ac619efc85a 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1509,7 +1509,7 @@ pylibrespot-java==0.1.1 pylitejet==0.6.2 # homeassistant.components.litterrobot -pylitterbot==2023.4.9 +pylitterbot==2023.4.11 # homeassistant.components.lutron_caseta pylutron-caseta==0.20.0 diff --git a/tests/components/litterrobot/common.py b/tests/components/litterrobot/common.py index fe6202edc47..cac81aad4ef 100644 
--- a/tests/components/litterrobot/common.py +++ b/tests/components/litterrobot/common.py @@ -33,6 +33,7 @@ ROBOT_4_DATA = { "wifiRssi": -53.0, "unitPowerType": "AC", "catWeight": 12.0, + "displayCode": "DC_MODE_IDLE", "unitTimezone": "America/New_York", "unitTime": None, "cleanCycleWaitTime": 15, @@ -66,7 +67,7 @@ ROBOT_4_DATA = { "isDFIResetPending": False, "DFINumberOfCycles": 104, "DFILevelPercent": 76, - "isDFIFull": True, + "isDFIFull": False, "DFIFullCounter": 3, "DFITriggerCount": 42, "litterLevel": 460, diff --git a/tests/components/litterrobot/test_sensor.py b/tests/components/litterrobot/test_sensor.py index 9002894d0ab..8d1f2b68e05 100644 --- a/tests/components/litterrobot/test_sensor.py +++ b/tests/components/litterrobot/test_sensor.py @@ -86,7 +86,7 @@ async def test_litter_robot_sensor( assert sensor.state == "2022-09-17T12:06:37+00:00" assert sensor.attributes["device_class"] == SensorDeviceClass.TIMESTAMP sensor = hass.states.get("sensor.test_status_code") - assert sensor.state == "dfs" + assert sensor.state == "rdy" assert sensor.attributes["device_class"] == SensorDeviceClass.ENUM sensor = hass.states.get("sensor.test_litter_level") assert sensor.state == "70.0" diff --git a/tests/components/litterrobot/test_vacuum.py b/tests/components/litterrobot/test_vacuum.py index 9013d6e83eb..68ebae1e239 100644 --- a/tests/components/litterrobot/test_vacuum.py +++ b/tests/components/litterrobot/test_vacuum.py @@ -5,6 +5,7 @@ from __future__ import annotations from typing import Any from unittest.mock import MagicMock +from pylitterbot import Robot import pytest from homeassistant.components.litterrobot import DOMAIN @@ -16,6 +17,7 @@ from homeassistant.components.vacuum import ( SERVICE_STOP, STATE_DOCKED, STATE_ERROR, + STATE_PAUSED, ) from homeassistant.const import ATTR_ENTITY_ID from homeassistant.core import HomeAssistant @@ -96,6 +98,30 @@ async def test_vacuum_with_error( assert vacuum.state == STATE_ERROR +@pytest.mark.parametrize( + ("robot_data", 
"expected_state"), + [ + ({"displayCode": "DC_CAT_DETECT"}, STATE_DOCKED), + ({"isDFIFull": True}, STATE_ERROR), + ({"robotCycleState": "CYCLE_STATE_CAT_DETECT"}, STATE_PAUSED), + ], +) +async def test_vacuum_states( + hass: HomeAssistant, + mock_account_with_litterrobot_4: MagicMock, + robot_data: dict[str, str | bool], + expected_state: str, +) -> None: + """Test sending commands to the switch.""" + await setup_integration(hass, mock_account_with_litterrobot_4, PLATFORM_DOMAIN) + robot: Robot = mock_account_with_litterrobot_4.robots[0] + robot._update_data(robot_data, partial=True) + + vacuum = hass.states.get(VACUUM_ENTITY_ID) + assert vacuum + assert vacuum.state == expected_state + + @pytest.mark.parametrize( ("service", "command", "extra"), [ From 0a2d79f63ef16c645af7a208497bff704424b897 Mon Sep 17 00:00:00 2001 From: Joost Lekkerkerker Date: Fri, 5 Apr 2024 19:45:24 +0200 Subject: [PATCH 132/426] Fix Snapcast Config flow (#114952) --- homeassistant/components/snapcast/config_flow.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/homeassistant/components/snapcast/config_flow.py b/homeassistant/components/snapcast/config_flow.py index c9f69c48ab5..b37921fd374 100644 --- a/homeassistant/components/snapcast/config_flow.py +++ b/homeassistant/components/snapcast/config_flow.py @@ -45,7 +45,7 @@ class SnapcastConfigFlow(ConfigFlow, domain=DOMAIN): except OSError: errors["base"] = "cannot_connect" else: - await client.stop() + client.stop() return self.async_create_entry(title=DEFAULT_TITLE, data=user_input) return self.async_show_form( step_id="user", data_schema=SNAPCAST_SCHEMA, errors=errors From e26ea405705f659b283a3c04c1f9917f43750dbf Mon Sep 17 00:00:00 2001 From: Robert Svensson Date: Sat, 6 Apr 2024 09:01:55 +0200 Subject: [PATCH 133/426] Bump axis to v61 (#114964) --- homeassistant/components/axis/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git 
a/homeassistant/components/axis/manifest.json b/homeassistant/components/axis/manifest.json index 1065783d957..b3898b7aab8 100644 --- a/homeassistant/components/axis/manifest.json +++ b/homeassistant/components/axis/manifest.json @@ -26,7 +26,7 @@ "iot_class": "local_push", "loggers": ["axis"], "quality_scale": "platinum", - "requirements": ["axis==60"], + "requirements": ["axis==61"], "ssdp": [ { "manufacturer": "AXIS" diff --git a/requirements_all.txt b/requirements_all.txt index 61e91226c78..3df6e7b6b28 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -514,7 +514,7 @@ aurorapy==0.2.7 # avion==0.10 # homeassistant.components.axis -axis==60 +axis==61 # homeassistant.components.azure_event_hub azure-eventhub==5.11.1 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index ac619efc85a..77faf484a1c 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -454,7 +454,7 @@ auroranoaa==0.0.3 aurorapy==0.2.7 # homeassistant.components.axis -axis==60 +axis==61 # homeassistant.components.azure_event_hub azure-eventhub==5.11.1 From dcd9d987a7e9897cd3111e4d6175d2f0f225f4b1 Mon Sep 17 00:00:00 2001 From: Joost Lekkerkerker Date: Mon, 8 Apr 2024 10:04:16 +0200 Subject: [PATCH 134/426] Filter out fuzzy translations from Lokalise (#114968) --- script/translations/download.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/script/translations/download.py b/script/translations/download.py index 958a4b35a7b..8f7327c07ec 100755 --- a/script/translations/download.py +++ b/script/translations/download.py @@ -39,6 +39,8 @@ def run_download_docker(): CORE_PROJECT_ID, "--original-filenames=false", "--replace-breaks=false", + "--filter-data", + "nonfuzzy", "--export-empty-as", "skip", "--format", From b29eb317bd7e3382d8b5194310ef0c379494d58b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=98yvind=20Matheson=20Wergeland?= Date: Sat, 6 Apr 2024 09:38:14 +0200 Subject: [PATCH 135/426] Upgrade to pynobo 1.8.1 (#114982) pynobo 1.8.1 --- 
homeassistant/components/nobo_hub/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/nobo_hub/manifest.json b/homeassistant/components/nobo_hub/manifest.json index 4741eb39e29..ce32244e1ce 100644 --- a/homeassistant/components/nobo_hub/manifest.json +++ b/homeassistant/components/nobo_hub/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/nobo_hub", "integration_type": "hub", "iot_class": "local_push", - "requirements": ["pynobo==1.8.0"] + "requirements": ["pynobo==1.8.1"] } diff --git a/requirements_all.txt b/requirements_all.txt index 3df6e7b6b28..d65a9a592c0 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1991,7 +1991,7 @@ pynetgear==0.10.10 pynetio==0.1.9.1 # homeassistant.components.nobo_hub -pynobo==1.8.0 +pynobo==1.8.1 # homeassistant.components.nuki pynuki==1.6.3 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 77faf484a1c..93ebf7a8857 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1545,7 +1545,7 @@ pymysensors==0.24.0 pynetgear==0.10.10 # homeassistant.components.nobo_hub -pynobo==1.8.0 +pynobo==1.8.1 # homeassistant.components.nuki pynuki==1.6.3 From 90bc21b7f652feca610b4a8ead1fd2cfeea7449c Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Fri, 5 Apr 2024 23:50:29 -1000 Subject: [PATCH 136/426] Fix dictionary changed size during iteration in prometheus (#115005) Fixes #104803 --- homeassistant/components/prometheus/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/homeassistant/components/prometheus/__init__.py b/homeassistant/components/prometheus/__init__.py index d3a307a6616..0ec931ceade 100644 --- a/homeassistant/components/prometheus/__init__.py +++ b/homeassistant/components/prometheus/__init__.py @@ -258,7 +258,7 @@ class PrometheusMetrics: self, entity_id: str, friendly_name: str | None = None ) -> None: """Remove labelsets matching the given entity id from all metrics.""" - for metric in self._metrics.values(): + for metric in list(self._metrics.values()): for sample in cast(list[prometheus_client.Metric], metric.collect())[ 0 ].samples: From 2f9f1008a55afaeabbf6dedcc638859e41d5146e Mon Sep 17 00:00:00 2001 From: Maciej Bieniek Date: Sat, 6 Apr 2024 23:09:46 +0200 Subject: [PATCH 137/426] Bump `brother` to version 4.1.0 (#115021) Co-authored-by: Maciej Bieniek <478555+bieniu@users.noreply.github.com> --- homeassistant/components/brother/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/brother/manifest.json b/homeassistant/components/brother/manifest.json index 9ca18a95a1e..3bbaf40f686 100644 --- a/homeassistant/components/brother/manifest.json +++ b/homeassistant/components/brother/manifest.json @@ -8,7 +8,7 @@ "iot_class": "local_polling", "loggers": ["brother", "pyasn1", "pysmi", "pysnmp"], "quality_scale": "platinum", - "requirements": ["brother==4.0.2"], + "requirements": ["brother==4.1.0"], "zeroconf": [ { "type": "_printer._tcp.local.", diff --git a/requirements_all.txt b/requirements_all.txt index d65a9a592c0..5c06b9828e9 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -609,7 +609,7 @@ bring-api==0.5.7 
broadlink==0.18.3 # homeassistant.components.brother -brother==4.0.2 +brother==4.1.0 # homeassistant.components.brottsplatskartan brottsplatskartan==1.0.5 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 93ebf7a8857..8744119c082 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -520,7 +520,7 @@ bring-api==0.5.7 broadlink==0.18.3 # homeassistant.components.brother -brother==4.0.2 +brother==4.1.0 # homeassistant.components.brottsplatskartan brottsplatskartan==1.0.5 From 20e88255dfb9ad447b4e6ada480da869641e8164 Mon Sep 17 00:00:00 2001 From: Matrix Date: Mon, 8 Apr 2024 05:31:52 +0800 Subject: [PATCH 138/426] Bump yolink-api to 0.4.2 (#115026) --- homeassistant/components/yolink/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/yolink/manifest.json b/homeassistant/components/yolink/manifest.json index 8b3b071161c..cd6759b5864 100644 --- a/homeassistant/components/yolink/manifest.json +++ b/homeassistant/components/yolink/manifest.json @@ -6,5 +6,5 @@ "dependencies": ["auth", "application_credentials"], "documentation": "https://www.home-assistant.io/integrations/yolink", "iot_class": "cloud_push", - "requirements": ["yolink-api==0.4.1"] + "requirements": ["yolink-api==0.4.2"] } diff --git a/requirements_all.txt b/requirements_all.txt index 5c06b9828e9..ff6662ca0f6 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2910,7 +2910,7 @@ yeelight==0.7.14 yeelightsunflower==0.0.10 # homeassistant.components.yolink -yolink-api==0.4.1 +yolink-api==0.4.2 # homeassistant.components.youless youless-api==1.0.1 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 8744119c082..8ebdb549cb7 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2248,7 +2248,7 @@ yalexs==2.0.0 yeelight==0.7.14 # homeassistant.components.yolink -yolink-api==0.4.1 +yolink-api==0.4.2 # 
homeassistant.components.youless youless-api==1.0.1 From d6793a756f6288fb980504ef105a870ff1df6571 Mon Sep 17 00:00:00 2001 From: Matthias Alphart Date: Sat, 6 Apr 2024 23:18:52 +0200 Subject: [PATCH 139/426] Update xknxproject to 3.7.1 (#115053) --- homeassistant/components/knx/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/knx/manifest.json b/homeassistant/components/knx/manifest.json index 99c150a8346..af0c6b8d01c 100644 --- a/homeassistant/components/knx/manifest.json +++ b/homeassistant/components/knx/manifest.json @@ -12,7 +12,7 @@ "quality_scale": "platinum", "requirements": [ "xknx==2.12.2", - "xknxproject==3.7.0", + "xknxproject==3.7.1", "knx-frontend==2024.1.20.105944" ], "single_config_entry": true diff --git a/requirements_all.txt b/requirements_all.txt index ff6662ca0f6..d48ee5bdaab 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2880,7 +2880,7 @@ xiaomi-ble==0.28.0 xknx==2.12.2 # homeassistant.components.knx -xknxproject==3.7.0 +xknxproject==3.7.1 # homeassistant.components.bluesound # homeassistant.components.fritz diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 8ebdb549cb7..8ee44387a02 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2224,7 +2224,7 @@ xiaomi-ble==0.28.0 xknx==2.12.2 # homeassistant.components.knx -xknxproject==3.7.0 +xknxproject==3.7.1 # homeassistant.components.bluesound # homeassistant.components.fritz From 75127105b906d2d993bd4ae72a3b81d61e7db54c Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Sat, 6 Apr 2024 22:54:40 -1000 Subject: [PATCH 140/426] Fix synology_dsm availablity (#115073) * Remove reload on update failure from synology_dsm fixes #115062 The coordinator will retry on its own later, there is no reason to reload here. 
This was added in #42697 * fix available checks --- .../components/synology_dsm/binary_sensor.py | 2 +- homeassistant/components/synology_dsm/camera.py | 2 +- homeassistant/components/synology_dsm/common.py | 13 +------------ homeassistant/components/synology_dsm/sensor.py | 2 +- homeassistant/components/synology_dsm/switch.py | 2 +- homeassistant/components/synology_dsm/update.py | 2 +- 6 files changed, 6 insertions(+), 17 deletions(-) diff --git a/homeassistant/components/synology_dsm/binary_sensor.py b/homeassistant/components/synology_dsm/binary_sensor.py index 7579f350774..28dc750bc91 100644 --- a/homeassistant/components/synology_dsm/binary_sensor.py +++ b/homeassistant/components/synology_dsm/binary_sensor.py @@ -116,7 +116,7 @@ class SynoDSMSecurityBinarySensor(SynoDSMBinarySensor): @property def available(self) -> bool: """Return True if entity is available.""" - return bool(self._api.security) + return bool(self._api.security) and super().available @property def extra_state_attributes(self) -> dict[str, str]: diff --git a/homeassistant/components/synology_dsm/camera.py b/homeassistant/components/synology_dsm/camera.py index 19f95c710d0..82d15138f05 100644 --- a/homeassistant/components/synology_dsm/camera.py +++ b/homeassistant/components/synology_dsm/camera.py @@ -108,7 +108,7 @@ class SynoDSMCamera(SynologyDSMBaseEntity[SynologyDSMCameraUpdateCoordinator], C @property def available(self) -> bool: """Return the availability of the camera.""" - return self.camera_data.is_enabled and self.coordinator.last_update_success + return self.camera_data.is_enabled and super().available @property def is_recording(self) -> bool: diff --git a/homeassistant/components/synology_dsm/common.py b/homeassistant/components/synology_dsm/common.py index 4bb52383148..4a7018119be 100644 --- a/homeassistant/components/synology_dsm/common.py +++ b/homeassistant/components/synology_dsm/common.py @@ -286,18 +286,7 @@ class SynoApi: async def async_update(self) -> None: """Update 
function for updating API information.""" - try: - await self._update() - except SYNOLOGY_CONNECTION_EXCEPTIONS as err: - LOGGER.debug( - "Connection error during update of '%s' with exception: %s", - self._entry.unique_id, - err, - ) - LOGGER.warning( - "Connection error during update, fallback by reloading the entry" - ) - await self._hass.config_entries.async_reload(self._entry.entry_id) + await self._update() async def _update(self) -> None: """Update function for updating API information.""" diff --git a/homeassistant/components/synology_dsm/sensor.py b/homeassistant/components/synology_dsm/sensor.py index 47483ee4a63..4f20a6233f3 100644 --- a/homeassistant/components/synology_dsm/sensor.py +++ b/homeassistant/components/synology_dsm/sensor.py @@ -366,7 +366,7 @@ class SynoDSMUtilSensor(SynoDSMSensor): @property def available(self) -> bool: """Return True if entity is available.""" - return bool(self._api.utilisation) + return bool(self._api.utilisation) and super().available class SynoDSMStorageSensor(SynologyDSMDeviceEntity, SynoDSMSensor): diff --git a/homeassistant/components/synology_dsm/switch.py b/homeassistant/components/synology_dsm/switch.py index 6e1e38675a0..c19cdb8c815 100644 --- a/homeassistant/components/synology_dsm/switch.py +++ b/homeassistant/components/synology_dsm/switch.py @@ -98,7 +98,7 @@ class SynoDSMSurveillanceHomeModeToggle( @property def available(self) -> bool: """Return True if entity is available.""" - return bool(self._api.surveillance_station) + return bool(self._api.surveillance_station) and super().available @property def device_info(self) -> DeviceInfo: diff --git a/homeassistant/components/synology_dsm/update.py b/homeassistant/components/synology_dsm/update.py index 7b1a36c57b3..c7bcff48cea 100644 --- a/homeassistant/components/synology_dsm/update.py +++ b/homeassistant/components/synology_dsm/update.py @@ -59,7 +59,7 @@ class SynoDSMUpdateEntity( @property def available(self) -> bool: """Return True if entity is 
available.""" - return bool(self._api.upgrade) + return bool(self._api.upgrade) and super().available @property def installed_version(self) -> str | None: From 1322f3891122cdef335a35e9d0871caf82e242f5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Joakim=20S=C3=B8rensen?= Date: Sun, 7 Apr 2024 14:20:58 +0200 Subject: [PATCH 141/426] Downgrade hass-nabucasa from 0.80.0 to 0.78.0 (#115078) --- homeassistant/components/cloud/manifest.json | 2 +- homeassistant/package_constraints.txt | 2 +- pyproject.toml | 2 +- requirements.txt | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 6 files changed, 6 insertions(+), 6 deletions(-) diff --git a/homeassistant/components/cloud/manifest.json b/homeassistant/components/cloud/manifest.json index eed2bda421b..49a3fc0bf5c 100644 --- a/homeassistant/components/cloud/manifest.json +++ b/homeassistant/components/cloud/manifest.json @@ -8,5 +8,5 @@ "integration_type": "system", "iot_class": "cloud_push", "loggers": ["hass_nabucasa"], - "requirements": ["hass-nabucasa==0.79.0"] + "requirements": ["hass-nabucasa==0.78.0"] } diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index bd35403340f..4ba42672c4d 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -27,7 +27,7 @@ fnv-hash-fast==0.5.0 ha-av==10.1.1 ha-ffmpeg==3.2.0 habluetooth==2.4.2 -hass-nabucasa==0.79.0 +hass-nabucasa==0.78.0 hassil==1.6.1 home-assistant-bluetooth==1.12.0 home-assistant-frontend==20240404.1 diff --git a/pyproject.toml b/pyproject.toml index 2dd3a9632c6..ff848d37b8a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -38,7 +38,7 @@ dependencies = [ "fnv-hash-fast==0.5.0", # hass-nabucasa is imported by helpers which don't depend on the cloud # integration - "hass-nabucasa==0.79.0", + "hass-nabucasa==0.78.0", # When bumping httpx, please check the version pins of # httpcore, anyio, and h11 in gen_requirements_all "httpx==0.27.0", diff --git a/requirements.txt 
b/requirements.txt index 1dd9b1811d3..05d66a79873 100644 --- a/requirements.txt +++ b/requirements.txt @@ -16,7 +16,7 @@ bcrypt==4.1.2 certifi>=2021.5.30 ciso8601==2.3.1 fnv-hash-fast==0.5.0 -hass-nabucasa==0.79.0 +hass-nabucasa==0.78.0 httpx==0.27.0 home-assistant-bluetooth==1.12.0 ifaddr==0.2.0 diff --git a/requirements_all.txt b/requirements_all.txt index d48ee5bdaab..0741ee85dab 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1037,7 +1037,7 @@ habitipy==0.2.0 habluetooth==2.4.2 # homeassistant.components.cloud -hass-nabucasa==0.79.0 +hass-nabucasa==0.78.0 # homeassistant.components.splunk hass-splunk==0.1.1 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 8ee44387a02..b14195e88e1 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -848,7 +848,7 @@ habitipy==0.2.0 habluetooth==2.4.2 # homeassistant.components.cloud -hass-nabucasa==0.79.0 +hass-nabucasa==0.78.0 # homeassistant.components.conversation hassil==1.6.1 From fa88975055590304ccebffdb7b9af34fffbb6398 Mon Sep 17 00:00:00 2001 From: Maikel Punie Date: Sun, 7 Apr 2024 19:32:15 +0200 Subject: [PATCH 142/426] Bump velbus-aio to 2024.4.1 (#115109) bump velbusaio to 2024.4.1 --- homeassistant/components/velbus/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/velbus/manifest.json b/homeassistant/components/velbus/manifest.json index 1c51c58d238..6f817a23325 100644 --- a/homeassistant/components/velbus/manifest.json +++ b/homeassistant/components/velbus/manifest.json @@ -13,7 +13,7 @@ "velbus-packet", "velbus-protocol" ], - "requirements": ["velbus-aio==2024.4.0"], + "requirements": ["velbus-aio==2024.4.1"], "usb": [ { "vid": "10CF", diff --git a/requirements_all.txt b/requirements_all.txt index 0741ee85dab..e64621e3726 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2798,7 +2798,7 @@ vallox-websocket-api==5.1.1 
vehicle==2.2.1 # homeassistant.components.velbus -velbus-aio==2024.4.0 +velbus-aio==2024.4.1 # homeassistant.components.venstar venstarcolortouch==0.19 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index b14195e88e1..40386b1be2e 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2154,7 +2154,7 @@ vallox-websocket-api==5.1.1 vehicle==2.2.1 # homeassistant.components.velbus -velbus-aio==2024.4.0 +velbus-aio==2024.4.1 # homeassistant.components.venstar venstarcolortouch==0.19 From 19f3ef763dd9a035ef7ff9da54013aec7cd0a6d0 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Sun, 7 Apr 2024 11:02:53 -1000 Subject: [PATCH 143/426] Terminate scripts with until and while conditions that execute more than 10000 times (#115110) --- homeassistant/helpers/script.py | 63 +++++++++++++++++++++++++++++++++ tests/helpers/test_script.py | 52 +++++++++++++++++++++++++++ 2 files changed, 115 insertions(+) diff --git a/homeassistant/helpers/script.py b/homeassistant/helpers/script.py index a86df259f11..b4e02e0e4ad 100644 --- a/homeassistant/helpers/script.py +++ b/homeassistant/helpers/script.py @@ -286,6 +286,9 @@ STATIC_VALIDATION_ACTION_TYPES = ( cv.SCRIPT_ACTION_WAIT_TEMPLATE, ) +REPEAT_WARN_ITERATIONS = 5000 +REPEAT_TERMINATE_ITERATIONS = 10000 + async def async_validate_actions_config( hass: HomeAssistant, actions: list[ConfigType] @@ -846,6 +849,7 @@ class _ScriptRun: # pylint: disable-next=protected-access script = self._script._get_repeat_script(self._step) + warned_too_many_loops = False async def async_run_sequence(iteration, extra_msg=""): self._log("Repeating %s: Iteration %i%s", description, iteration, extra_msg) @@ -916,6 +920,36 @@ class _ScriptRun: _LOGGER.warning("Error in 'while' evaluation:\n%s", ex) break + if iteration > 1: + if iteration > REPEAT_WARN_ITERATIONS: + if not warned_too_many_loops: + warned_too_many_loops = True + _LOGGER.warning( + "While condition %s in script `%s` looped %s times", + 
repeat[CONF_WHILE], + self._script.name, + REPEAT_WARN_ITERATIONS, + ) + + if iteration > REPEAT_TERMINATE_ITERATIONS: + _LOGGER.critical( + "While condition %s in script `%s` " + "terminated because it looped %s times", + repeat[CONF_WHILE], + self._script.name, + REPEAT_TERMINATE_ITERATIONS, + ) + raise _AbortScript( + f"While condition {repeat[CONF_WHILE]} " + "terminated because it looped " + f" {REPEAT_TERMINATE_ITERATIONS} times" + ) + + # If the user creates a script with a tight loop, + # yield to the event loop so the system stays + # responsive while all the cpu time is consumed. + await asyncio.sleep(0) + await async_run_sequence(iteration) elif CONF_UNTIL in repeat: @@ -934,6 +968,35 @@ class _ScriptRun: _LOGGER.warning("Error in 'until' evaluation:\n%s", ex) break + if iteration >= REPEAT_WARN_ITERATIONS: + if not warned_too_many_loops: + warned_too_many_loops = True + _LOGGER.warning( + "Until condition %s in script `%s` looped %s times", + repeat[CONF_UNTIL], + self._script.name, + REPEAT_WARN_ITERATIONS, + ) + + if iteration >= REPEAT_TERMINATE_ITERATIONS: + _LOGGER.critical( + "Until condition %s in script `%s` " + "terminated because it looped %s times", + repeat[CONF_UNTIL], + self._script.name, + REPEAT_TERMINATE_ITERATIONS, + ) + raise _AbortScript( + f"Until condition {repeat[CONF_UNTIL]} " + "terminated because it looped " + f"{REPEAT_TERMINATE_ITERATIONS} times" + ) + + # If the user creates a script with a tight loop, + # yield to the event loop so the system stays responsive + # while all the cpu time is consumed. 
+ await asyncio.sleep(0) + if saved_repeat_vars: self._variables["repeat"] = saved_repeat_vars else: diff --git a/tests/helpers/test_script.py b/tests/helpers/test_script.py index 86fb84eb582..409b3639d43 100644 --- a/tests/helpers/test_script.py +++ b/tests/helpers/test_script.py @@ -2837,6 +2837,58 @@ async def test_repeat_nested( assert_action_trace(expected_trace) +@pytest.mark.parametrize( + ("condition", "check"), [("while", "above"), ("until", "below")] +) +async def test_repeat_limits( + hass: HomeAssistant, caplog: pytest.LogCaptureFixture, condition: str, check: str +) -> None: + """Test limits on repeats prevent the system from hanging.""" + event = "test_event" + events = async_capture_events(hass, event) + hass.states.async_set("sensor.test", "0.5") + + sequence = { + "repeat": { + "sequence": [ + { + "event": event, + }, + ], + } + } + sequence["repeat"][condition] = { + "condition": "numeric_state", + "entity_id": "sensor.test", + check: "0", + } + + with ( + patch.object(script, "REPEAT_WARN_ITERATIONS", 5), + patch.object(script, "REPEAT_TERMINATE_ITERATIONS", 10), + ): + script_obj = script.Script( + hass, cv.SCRIPT_SCHEMA(sequence), f"Test {condition}", "test_domain" + ) + + caplog.clear() + caplog.set_level(logging.WARNING) + + hass.async_create_task(script_obj.async_run(context=Context())) + await asyncio.wait_for(hass.async_block_till_done(), 1) + + title_condition = condition.title() + + assert f"{title_condition} condition" in caplog.text + assert f"in script `Test {condition}` looped 5 times" in caplog.text + assert ( + f"script `Test {condition}` terminated because it looped 10 times" + in caplog.text + ) + + assert len(events) == 10 + + async def test_choose_warning( hass: HomeAssistant, caplog: pytest.LogCaptureFixture ) -> None: From 7bea6eface6f2aa8ae2dbf103cbce74669fa7721 Mon Sep 17 00:00:00 2001 From: dontinelli <73341522+dontinelli@users.noreply.github.com> Date: Sun, 7 Apr 2024 21:07:51 +0200 Subject: [PATCH 144/426] improve 
handling of incorrect values in fyta integration (#115134) * improve handling of incorrect values * Changes based on review comment * Apply suggestions from code review Co-authored-by: Joost Lekkerkerker * update value_fn * ruff --------- Co-authored-by: Joost Lekkerkerker --- homeassistant/components/fyta/sensor.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/homeassistant/components/fyta/sensor.py b/homeassistant/components/fyta/sensor.py index 0643c69981e..2b9e8e3de07 100644 --- a/homeassistant/components/fyta/sensor.py +++ b/homeassistant/components/fyta/sensor.py @@ -46,35 +46,35 @@ SENSORS: Final[list[FytaSensorEntityDescription]] = [ translation_key="plant_status", device_class=SensorDeviceClass.ENUM, options=PLANT_STATUS_LIST, - value_fn=lambda value: PLANT_STATUS[value], + value_fn=PLANT_STATUS.get, ), FytaSensorEntityDescription( key="temperature_status", translation_key="temperature_status", device_class=SensorDeviceClass.ENUM, options=PLANT_STATUS_LIST, - value_fn=lambda value: PLANT_STATUS[value], + value_fn=PLANT_STATUS.get, ), FytaSensorEntityDescription( key="light_status", translation_key="light_status", device_class=SensorDeviceClass.ENUM, options=PLANT_STATUS_LIST, - value_fn=lambda value: PLANT_STATUS[value], + value_fn=PLANT_STATUS.get, ), FytaSensorEntityDescription( key="moisture_status", translation_key="moisture_status", device_class=SensorDeviceClass.ENUM, options=PLANT_STATUS_LIST, - value_fn=lambda value: PLANT_STATUS[value], + value_fn=PLANT_STATUS.get, ), FytaSensorEntityDescription( key="salinity_status", translation_key="salinity_status", device_class=SensorDeviceClass.ENUM, options=PLANT_STATUS_LIST, - value_fn=lambda value: PLANT_STATUS[value], + value_fn=PLANT_STATUS.get, ), FytaSensorEntityDescription( key="temperature", From fc9653581a49f0eb91264b6a09f908fd5e64fbfe Mon Sep 17 00:00:00 2001 From: dontinelli <73341522+dontinelli@users.noreply.github.com> Date: Sun, 7 Apr 2024 21:38:13 +0200 Subject: 
[PATCH 145/426] Bump fyta_cli to 0.3.5 (#115143) bump fyta_cli to 0.3.5 --- homeassistant/components/fyta/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/fyta/manifest.json b/homeassistant/components/fyta/manifest.json index a93a76a9e1d..55255777994 100644 --- a/homeassistant/components/fyta/manifest.json +++ b/homeassistant/components/fyta/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/fyta", "integration_type": "hub", "iot_class": "cloud_polling", - "requirements": ["fyta_cli==0.3.3"] + "requirements": ["fyta_cli==0.3.5"] } diff --git a/requirements_all.txt b/requirements_all.txt index e64621e3726..cbcae805bd0 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -899,7 +899,7 @@ freesms==0.2.0 fritzconnection[qr]==1.13.2 # homeassistant.components.fyta -fyta_cli==0.3.3 +fyta_cli==0.3.5 # homeassistant.components.google_translate gTTS==2.2.4 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 40386b1be2e..77dbd53a73e 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -731,7 +731,7 @@ freebox-api==1.1.0 fritzconnection[qr]==1.13.2 # homeassistant.components.fyta -fyta_cli==0.3.3 +fyta_cli==0.3.5 # homeassistant.components.google_translate gTTS==2.2.4 From 265d04c593cacd305e060a194a1643eb21b808c8 Mon Sep 17 00:00:00 2001 From: rappenze Date: Sun, 7 Apr 2024 22:53:30 +0200 Subject: [PATCH 146/426] Fix fibaro sensor additional sensor lookup (#115148) --- homeassistant/components/fibaro/sensor.py | 1 + 1 file changed, 1 insertion(+) diff --git a/homeassistant/components/fibaro/sensor.py b/homeassistant/components/fibaro/sensor.py index 6e672e9cc97..fd6ec74050d 100644 --- a/homeassistant/components/fibaro/sensor.py +++ b/homeassistant/components/fibaro/sensor.py @@ -121,6 +121,7 @@ async def async_setup_entry( Platform.COVER, Platform.LIGHT, 
Platform.LOCK, + Platform.SENSOR, Platform.SWITCH, ) for device in controller.fibaro_devices[platform] From d062ef357b76f11dcc977ebc84a2148dafcfe8fe Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Sun, 7 Apr 2024 12:51:59 -1000 Subject: [PATCH 147/426] Write timer entity state before firing events (#115151) --- homeassistant/components/timer/__init__.py | 12 ++++++------ tests/components/timer/test_init.py | 17 ++++++++++++----- 2 files changed, 18 insertions(+), 11 deletions(-) diff --git a/homeassistant/components/timer/__init__.py b/homeassistant/components/timer/__init__.py index 72e93f5655a..5da68d99dd6 100644 --- a/homeassistant/components/timer/__init__.py +++ b/homeassistant/components/timer/__init__.py @@ -325,12 +325,12 @@ class Timer(collection.CollectionEntity, RestoreEntity): self._end = start + self._remaining + self.async_write_ha_state() self.hass.bus.async_fire(event, {ATTR_ENTITY_ID: self.entity_id}) self._listener = async_track_point_in_utc_time( self.hass, self._async_finished, self._end ) - self.async_write_ha_state() @callback def async_change(self, duration: timedelta) -> None: @@ -351,11 +351,11 @@ class Timer(collection.CollectionEntity, RestoreEntity): self._listener() self._end += duration self._remaining = self._end - dt_util.utcnow().replace(microsecond=0) + self.async_write_ha_state() self.hass.bus.async_fire(EVENT_TIMER_CHANGED, {ATTR_ENTITY_ID: self.entity_id}) self._listener = async_track_point_in_utc_time( self.hass, self._async_finished, self._end ) - self.async_write_ha_state() @callback def async_pause(self) -> None: @@ -368,8 +368,8 @@ class Timer(collection.CollectionEntity, RestoreEntity): self._remaining = self._end - dt_util.utcnow().replace(microsecond=0) self._state = STATUS_PAUSED self._end = None - self.hass.bus.async_fire(EVENT_TIMER_PAUSED, {ATTR_ENTITY_ID: self.entity_id}) self.async_write_ha_state() + self.hass.bus.async_fire(EVENT_TIMER_PAUSED, {ATTR_ENTITY_ID: self.entity_id}) @callback def 
async_cancel(self) -> None: @@ -381,10 +381,10 @@ class Timer(collection.CollectionEntity, RestoreEntity): self._end = None self._remaining = None self._running_duration = self._configured_duration + self.async_write_ha_state() self.hass.bus.async_fire( EVENT_TIMER_CANCELLED, {ATTR_ENTITY_ID: self.entity_id} ) - self.async_write_ha_state() @callback def async_finish(self) -> None: @@ -400,11 +400,11 @@ class Timer(collection.CollectionEntity, RestoreEntity): self._end = None self._remaining = None self._running_duration = self._configured_duration + self.async_write_ha_state() self.hass.bus.async_fire( EVENT_TIMER_FINISHED, {ATTR_ENTITY_ID: self.entity_id, ATTR_FINISHED_AT: end.isoformat()}, ) - self.async_write_ha_state() @callback def _async_finished(self, time: datetime) -> None: @@ -418,11 +418,11 @@ class Timer(collection.CollectionEntity, RestoreEntity): self._end = None self._remaining = None self._running_duration = self._configured_duration + self.async_write_ha_state() self.hass.bus.async_fire( EVENT_TIMER_FINISHED, {ATTR_ENTITY_ID: self.entity_id, ATTR_FINISHED_AT: end.isoformat()}, ) - self.async_write_ha_state() async def async_update_config(self, config: ConfigType) -> None: """Handle when the config is updated.""" diff --git a/tests/components/timer/test_init.py b/tests/components/timer/test_init.py index 5aca1625d1f..c1c9f56094b 100644 --- a/tests/components/timer/test_init.py +++ b/tests/components/timer/test_init.py @@ -45,7 +45,7 @@ from homeassistant.const import ( EVENT_STATE_CHANGED, SERVICE_RELOAD, ) -from homeassistant.core import Context, CoreState, HomeAssistant, State +from homeassistant.core import Context, CoreState, Event, HomeAssistant, State, callback from homeassistant.exceptions import HomeAssistantError, Unauthorized from homeassistant.helpers import config_validation as cv, entity_registry as er from homeassistant.helpers.restore_state import StoredState, async_get @@ -156,11 +156,12 @@ async def test_methods_and_events(hass: 
HomeAssistant) -> None: assert state assert state.state == STATUS_IDLE - results = [] + results: list[tuple[Event, str]] = [] - def fake_event_listener(event): + @callback + def fake_event_listener(event: Event): """Fake event listener for trigger.""" - results.append(event) + results.append((event, hass.states.get("timer.test1").state)) hass.bus.async_listen(EVENT_TIMER_STARTED, fake_event_listener) hass.bus.async_listen(EVENT_TIMER_RESTARTED, fake_event_listener) @@ -262,7 +263,10 @@ async def test_methods_and_events(hass: HomeAssistant) -> None: if step["event"] is not None: expected_events += 1 - assert results[-1].event_type == step["event"] + last_result = results[-1] + event, state = last_result + assert event.event_type == step["event"] + assert state == step["state"] assert len(results) == expected_events @@ -404,6 +408,7 @@ async def test_wait_till_timer_expires(hass: HomeAssistant) -> None: results = [] + @callback def fake_event_listener(event): """Fake event listener for trigger.""" results.append(event) @@ -580,6 +585,7 @@ async def test_timer_restarted_event(hass: HomeAssistant) -> None: results = [] + @callback def fake_event_listener(event): """Fake event listener for trigger.""" results.append(event) @@ -647,6 +653,7 @@ async def test_state_changed_when_timer_restarted(hass: HomeAssistant) -> None: results = [] + @callback def fake_event_listener(event): """Fake event listener for trigger.""" results.append(event) From 9a342f87c0b78ceccc733e27fe53e7f2ef6e20a4 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Sun, 7 Apr 2024 15:25:55 -1000 Subject: [PATCH 148/426] Avoid checking for polling if an entity fails to add (#115159) * Avoid checking for polling if an entity fails to add * no need to do protected access * no need to do protected access * no need to do protected access * no need to do protected access * coverage * fix test * fix * broken one must be first --- homeassistant/helpers/entity_platform.py | 11 +++++++- tests/helpers/test_entity_platform.py | 36 +++++++++++++++++++++++- 2 files changed, 45 insertions(+), 2 deletions(-) diff --git a/homeassistant/helpers/entity_platform.py b/homeassistant/helpers/entity_platform.py index 1cff472af72..6d7ed7ed1b8 100644 --- a/homeassistant/helpers/entity_platform.py +++ b/homeassistant/helpers/entity_platform.py @@ -631,7 +631,16 @@ class EntityPlatform: if ( (self.config_entry and self.config_entry.pref_disable_polling) or self._async_unsub_polling is not None - or not any(entity.should_poll for entity in entities) + or not any( + # Entity may have failed to add or called `add_to_platform_abort` + # so we check if the entity is in self.entities before + # checking `entity.should_poll` since `should_poll` may need to + # check `self.hass` which will be `None` if the entity did not add + entity.entity_id + and entity.entity_id in self.entities + and entity.should_poll + for entity in entities + ) ): return diff --git a/tests/helpers/test_entity_platform.py b/tests/helpers/test_entity_platform.py index 31c6f8e6e30..59c4f7357f3 100644 --- a/tests/helpers/test_entity_platform.py +++ b/tests/helpers/test_entity_platform.py @@ -5,7 +5,7 @@ from collections.abc import Iterable from datetime import timedelta import logging from typing import Any -from unittest.mock import ANY, Mock, patch +from unittest.mock import ANY, AsyncMock, Mock, patch import pytest @@ -78,6 +78,40 @@ async def test_polling_only_updates_entities_it_should_poll( assert poll_ent.async_update.called +async def 
test_polling_check_works_if_entity_add_fails( + hass: HomeAssistant, +) -> None: + """Test the polling check works if an entity add fails.""" + component = EntityComponent(_LOGGER, DOMAIN, hass, timedelta(seconds=20)) + await component.async_setup({}) + + class MockEntityNeedsSelfHassInShouldPoll(MockEntity): + """Mock entity that needs self.hass in should_poll.""" + + @property + def should_poll(self) -> bool: + """Return True if entity has to be polled.""" + return self.hass.data is not None + + working_poll_ent = MockEntityNeedsSelfHassInShouldPoll(should_poll=True) + working_poll_ent.async_update = AsyncMock() + broken_poll_ent = MockEntityNeedsSelfHassInShouldPoll(should_poll=True) + broken_poll_ent.async_update = AsyncMock(side_effect=Exception("Broken")) + + await component.async_add_entities( + [broken_poll_ent, working_poll_ent], update_before_add=True + ) + + working_poll_ent.async_update.reset_mock() + broken_poll_ent.async_update.reset_mock() + + async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=20)) + await hass.async_block_till_done(wait_background_tasks=True) + + assert not broken_poll_ent.async_update.called + assert working_poll_ent.async_update.called + + async def test_polling_disabled_by_config_entry(hass: HomeAssistant) -> None: """Test the polling of only updated entities.""" entity_platform = MockEntityPlatform(hass) From 8c0b44d6d52259a1c284adc3a1df77b0c07ab7c6 Mon Sep 17 00:00:00 2001 From: gibwar Date: Mon, 8 Apr 2024 02:05:46 -0600 Subject: [PATCH 149/426] Only reset requested utility meter with no tariff (#115170) --- .../components/utility_meter/sensor.py | 8 +- tests/components/utility_meter/test_sensor.py | 133 ++++++++++++++++++ 2 files changed, 140 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/utility_meter/sensor.py b/homeassistant/components/utility_meter/sensor.py index 26582df1b44..014cd93b53b 100644 --- a/homeassistant/components/utility_meter/sensor.py +++ 
b/homeassistant/components/utility_meter/sensor.py @@ -578,7 +578,13 @@ class UtilityMeterSensor(RestoreSensor): async def async_reset_meter(self, entity_id): """Reset meter.""" - if self._tariff is not None and self._tariff_entity != entity_id: + if self._tariff_entity is not None and self._tariff_entity != entity_id: + return + if ( + self._tariff_entity is None + and entity_id is not None + and self.entity_id != entity_id + ): return _LOGGER.debug("Reset utility meter <%s>", self.entity_id) self._last_reset = dt_util.utcnow() diff --git a/tests/components/utility_meter/test_sensor.py b/tests/components/utility_meter/test_sensor.py index 99a63809329..43a71eca85e 100644 --- a/tests/components/utility_meter/test_sensor.py +++ b/tests/components/utility_meter/test_sensor.py @@ -983,6 +983,139 @@ async def test_service_reset_no_tariffs( assert state.attributes.get("last_period") == "3" +@pytest.mark.parametrize( + ("yaml_config", "config_entry_configs"), + [ + ( + { + "utility_meter": { + "energy_bill": { + "source": "sensor.energy", + }, + "water_bill": { + "source": "sensor.water", + }, + }, + }, + None, + ), + ( + None, + [ + { + "cycle": "none", + "delta_values": False, + "name": "Energy bill", + "net_consumption": False, + "offset": 0, + "periodically_resetting": True, + "source": "sensor.energy", + "tariffs": [], + }, + { + "cycle": "none", + "delta_values": False, + "name": "Water bill", + "net_consumption": False, + "offset": 0, + "periodically_resetting": True, + "source": "sensor.water", + "tariffs": [], + }, + ], + ), + ], +) +async def test_service_reset_no_tariffs_correct_with_multi( + hass: HomeAssistant, yaml_config, config_entry_configs +) -> None: + """Test complex utility sensor service reset for multiple sensors with no tarrifs. + + See GitHub issue #114864: Service "utility_meter.reset" affects all meters. 
+ """ + + # Home assistant is not runnit yet + hass.state = CoreState.not_running + last_reset = "2023-10-01T00:00:00+00:00" + + mock_restore_cache_with_extra_data( + hass, + [ + ( + State( + "sensor.energy_bill", + "3", + attributes={ + ATTR_LAST_RESET: last_reset, + }, + ), + {}, + ), + ( + State( + "sensor.water_bill", + "6", + attributes={ + ATTR_LAST_RESET: last_reset, + }, + ), + {}, + ), + ], + ) + + if yaml_config: + assert await async_setup_component(hass, DOMAIN, yaml_config) + await hass.async_block_till_done() + else: + for entry in config_entry_configs: + config_entry = MockConfigEntry( + data={}, + domain=DOMAIN, + options=entry, + title=entry["name"], + ) + config_entry.add_to_hass(hass) + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + state = hass.states.get("sensor.energy_bill") + assert state + assert state.state == "3" + assert state.attributes.get("last_reset") == last_reset + assert state.attributes.get("last_period") == "0" + + state = hass.states.get("sensor.water_bill") + assert state + assert state.state == "6" + assert state.attributes.get("last_reset") == last_reset + assert state.attributes.get("last_period") == "0" + + now = dt_util.utcnow() + with freeze_time(now): + await hass.services.async_call( + domain=DOMAIN, + service=SERVICE_RESET, + service_data={}, + target={"entity_id": "sensor.energy_bill"}, + blocking=True, + ) + + await hass.async_block_till_done() + + state = hass.states.get("sensor.energy_bill") + assert state + assert state.state == "0" + assert state.attributes.get("last_reset") == now.isoformat() + assert state.attributes.get("last_period") == "3" + + state = hass.states.get("sensor.water_bill") + assert state + assert state.state == "6" + assert state.attributes.get("last_reset") == last_reset + assert state.attributes.get("last_period") == "0" + + @pytest.mark.parametrize( ("yaml_config", "config_entry_config"), [ From 
05082fcceb9293e078131fb7206ac3cdb288a342 Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Mon, 8 Apr 2024 10:41:50 +0200 Subject: [PATCH 150/426] Bump version to 2024.4.2 --- homeassistant/const.py | 2 +- pyproject.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/homeassistant/const.py b/homeassistant/const.py index b642ce6ce8c..e4359f5bbfb 100644 --- a/homeassistant/const.py +++ b/homeassistant/const.py @@ -18,7 +18,7 @@ from .util.signal_type import SignalType APPLICATION_NAME: Final = "HomeAssistant" MAJOR_VERSION: Final = 2024 MINOR_VERSION: Final = 4 -PATCH_VERSION: Final = "1" +PATCH_VERSION: Final = "2" __short_version__: Final = f"{MAJOR_VERSION}.{MINOR_VERSION}" __version__: Final = f"{__short_version__}.{PATCH_VERSION}" REQUIRED_PYTHON_VER: Final[tuple[int, int, int]] = (3, 12, 0) diff --git a/pyproject.toml b/pyproject.toml index ff848d37b8a..a6484fa3349 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta" [project] name = "homeassistant" -version = "2024.4.1" +version = "2024.4.2" license = {text = "Apache-2.0"} description = "Open-source home automation platform running on Python 3." readme = "README.rst" From 733e2ec57aed3a196acfa50b2d3a3da019ed7e2e Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Thu, 11 Apr 2024 11:58:56 -1000 Subject: [PATCH 151/426] Bump aiohttp to 3.9.4 (#110730) * Bump aiohttp to 3.9.4 This is rc0 for now but will be updated when the full release it out * cleanup cruft * regen * fix tests (these changes are fine) * chunk size is too small to read since boundry is now enforced * chunk size is too small to read since boundry is now enforced --- homeassistant/package_constraints.txt | 2 +- pyproject.toml | 2 +- requirements.txt | 2 +- tests/components/file_upload/test_init.py | 8 ++++---- tests/components/websocket_api/test_auth.py | 2 +- tests/components/websocket_api/test_http.py | 6 +++--- tests/components/websocket_api/test_init.py | 2 +- 7 files changed, 12 insertions(+), 12 deletions(-) diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index 4ba42672c4d..b8c8b0fcb64 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -4,7 +4,7 @@ aiodhcpwatcher==1.0.0 aiodiscover==2.0.0 aiohttp-fast-url-dispatcher==0.3.0 aiohttp-zlib-ng==0.3.1 -aiohttp==3.9.3 +aiohttp==3.9.4 aiohttp_cors==0.7.0 astral==2.2 async-interrupt==1.1.1 diff --git a/pyproject.toml b/pyproject.toml index a6484fa3349..9993c8e9cb8 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -23,7 +23,7 @@ classifiers = [ ] requires-python = ">=3.12.0" dependencies = [ - "aiohttp==3.9.3", + "aiohttp==3.9.4", "aiohttp_cors==0.7.0", "aiohttp-fast-url-dispatcher==0.3.0", "aiohttp-zlib-ng==0.3.1", diff --git a/requirements.txt b/requirements.txt index 05d66a79873..519a8287d18 100644 --- a/requirements.txt +++ b/requirements.txt @@ -3,7 +3,7 @@ -c homeassistant/package_constraints.txt # Home Assistant Core -aiohttp==3.9.3 +aiohttp==3.9.4 aiohttp_cors==0.7.0 aiohttp-fast-url-dispatcher==0.3.0 aiohttp-zlib-ng==0.3.1 diff --git a/tests/components/file_upload/test_init.py b/tests/components/file_upload/test_init.py index 1ef238cafd0..fa77f6e55f5 100644 --- 
a/tests/components/file_upload/test_init.py +++ b/tests/components/file_upload/test_init.py @@ -90,9 +90,9 @@ async def test_upload_large_file( file_upload.TEMP_DIR_NAME + f"-{getrandbits(10):03x}", ), patch( - # Patch one megabyte to 8 bytes to prevent having to use big files in tests + # Patch one megabyte to 50 bytes to prevent having to use big files in tests "homeassistant.components.file_upload.ONE_MEGABYTE", - 8, + 50, ), ): res = await client.post("/api/file_upload", data={"file": large_file_io}) @@ -152,9 +152,9 @@ async def test_upload_large_file_fails( file_upload.TEMP_DIR_NAME + f"-{getrandbits(10):03x}", ), patch( - # Patch one megabyte to 8 bytes to prevent having to use big files in tests + # Patch one megabyte to 50 bytes to prevent having to use big files in tests "homeassistant.components.file_upload.ONE_MEGABYTE", - 8, + 50, ), patch( "homeassistant.components.file_upload.Path.open", return_value=_mock_open() diff --git a/tests/components/websocket_api/test_auth.py b/tests/components/websocket_api/test_auth.py index 35bf2402b6c..595dc7dcc32 100644 --- a/tests/components/websocket_api/test_auth.py +++ b/tests/components/websocket_api/test_auth.py @@ -221,7 +221,7 @@ async def test_auth_close_after_revoke( hass.auth.async_remove_refresh_token(refresh_token) msg = await websocket_client.receive() - assert msg.type == aiohttp.WSMsgType.CLOSED + assert msg.type is aiohttp.WSMsgType.CLOSE assert websocket_client.closed diff --git a/tests/components/websocket_api/test_http.py b/tests/components/websocket_api/test_http.py index db186e4811b..6ce46a5d9fe 100644 --- a/tests/components/websocket_api/test_http.py +++ b/tests/components/websocket_api/test_http.py @@ -43,7 +43,7 @@ async def test_pending_msg_overflow( for idx in range(10): await websocket_client.send_json({"id": idx + 1, "type": "ping"}) msg = await websocket_client.receive() - assert msg.type == WSMsgType.CLOSED + assert msg.type is WSMsgType.CLOSE async def test_cleanup_on_cancellation( @@ 
-249,7 +249,7 @@ async def test_pending_msg_peak( ) msg = await websocket_client.receive() - assert msg.type == WSMsgType.CLOSED + assert msg.type is WSMsgType.CLOSE assert "Client unable to keep up with pending messages" in caplog.text assert "Stayed over 5 for 5 seconds" in caplog.text assert "overload" in caplog.text @@ -297,7 +297,7 @@ async def test_pending_msg_peak_recovery( msg = await websocket_client.receive() assert msg.type == WSMsgType.TEXT msg = await websocket_client.receive() - assert msg.type == WSMsgType.CLOSED + assert msg.type is WSMsgType.CLOSE assert "Client unable to keep up with pending messages" not in caplog.text diff --git a/tests/components/websocket_api/test_init.py b/tests/components/websocket_api/test_init.py index 9360ff4ef8a..b20fd1c2f7e 100644 --- a/tests/components/websocket_api/test_init.py +++ b/tests/components/websocket_api/test_init.py @@ -41,7 +41,7 @@ async def test_quiting_hass(hass: HomeAssistant, websocket_client) -> None: msg = await websocket_client.receive() - assert msg.type == WSMsgType.CLOSED + assert msg.type is WSMsgType.CLOSE async def test_unknown_command(websocket_client) -> None: From 4c6fad8dc3d1c2a8e2e19f273d57b2193b2057aa Mon Sep 17 00:00:00 2001 From: Mike Degatano Date: Thu, 11 Apr 2024 05:23:10 -0400 Subject: [PATCH 152/426] Add support for adopt data disk repair (#114891) --- homeassistant/components/hassio/repairs.py | 2 +- homeassistant/components/hassio/strings.json | 11 +- tests/components/hassio/test_repairs.py | 113 +++++++++++++++++++ 3 files changed, 123 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/hassio/repairs.py b/homeassistant/components/hassio/repairs.py index 8458d7eaac2..63ed3d5c8a3 100644 --- a/homeassistant/components/hassio/repairs.py +++ b/homeassistant/components/hassio/repairs.py @@ -22,7 +22,7 @@ from .const import ( from .handler import async_apply_suggestion from .issues import Issue, Suggestion -SUGGESTION_CONFIRMATION_REQUIRED = {"system_execute_reboot"} 
+SUGGESTION_CONFIRMATION_REQUIRED = {"system_adopt_data_disk", "system_execute_reboot"} EXTRA_PLACEHOLDERS = { "issue_mount_mount_failed": { diff --git a/homeassistant/components/hassio/strings.json b/homeassistant/components/hassio/strings.json index 77ef408cafe..63c1da4bfd8 100644 --- a/homeassistant/components/hassio/strings.json +++ b/homeassistant/components/hassio/strings.json @@ -51,8 +51,15 @@ "title": "Multiple data disks detected", "fix_flow": { "step": { - "system_rename_data_disk": { - "description": "`{reference}` is a filesystem with the name hassos-data and is not the active data disk. This can cause Home Assistant to choose the wrong data disk at system reboot.\n\nUse the fix option to rename the filesystem to prevent this. Alternatively you can move the data disk to the drive (overwriting its contents) or remove the drive from the system." + "fix_menu": { + "description": "`{reference}` is a filesystem with the name hassos-data and is not the active data disk. This can cause Home Assistant to choose the wrong data disk at system reboot.\n\nUse the 'Rename' option to rename the filesystem to prevent this. Use the 'Adopt' option to make that your data disk and rename the existing one. Alternatively you can move the data disk to the drive (overwriting its contents) or remove the drive from the system.", + "menu_options": { + "system_rename_data_disk": "Rename", + "system_adopt_data_disk": "Adopt" + } + }, + "system_adopt_data_disk": { + "description": "This fix will initiate a system reboot which will make Home Assistant and all the Add-ons inaccessible for a brief period. After the reboot `{reference}` will be the data disk of Home Assistant and your existing data disk will be renamed and ignored." 
} }, "abort": { diff --git a/tests/components/hassio/test_repairs.py b/tests/components/hassio/test_repairs.py index d387968da46..2dffba74fef 100644 --- a/tests/components/hassio/test_repairs.py +++ b/tests/components/hassio/test_repairs.py @@ -674,3 +674,116 @@ async def test_supervisor_issue_docker_config_repair_flow( str(aioclient_mock.mock_calls[-1][1]) == "http://127.0.0.1/resolution/suggestion/1235" ) + + +async def test_supervisor_issue_repair_flow_multiple_data_disks( + hass: HomeAssistant, + aioclient_mock: AiohttpClientMocker, + hass_client: ClientSessionGenerator, + issue_registry: ir.IssueRegistry, + all_setup_requests, +) -> None: + """Test fix flow for multiple data disks supervisor issue.""" + mock_resolution_info( + aioclient_mock, + issues=[ + { + "uuid": "1234", + "type": "multiple_data_disks", + "context": "system", + "reference": "/dev/sda1", + "suggestions": [ + { + "uuid": "1235", + "type": "rename_data_disk", + "context": "system", + "reference": "/dev/sda1", + }, + { + "uuid": "1236", + "type": "adopt_data_disk", + "context": "system", + "reference": "/dev/sda1", + }, + ], + }, + ], + ) + + assert await async_setup_component(hass, "hassio", {}) + + repair_issue = issue_registry.async_get_issue(domain="hassio", issue_id="1234") + assert repair_issue + + client = await hass_client() + + resp = await client.post( + "/api/repairs/issues/fix", + json={"handler": "hassio", "issue_id": repair_issue.issue_id}, + ) + + assert resp.status == HTTPStatus.OK + data = await resp.json() + + flow_id = data["flow_id"] + assert data == { + "type": "menu", + "flow_id": flow_id, + "handler": "hassio", + "step_id": "fix_menu", + "data_schema": [ + { + "type": "select", + "options": [ + ["system_rename_data_disk", "system_rename_data_disk"], + ["system_adopt_data_disk", "system_adopt_data_disk"], + ], + "name": "next_step_id", + } + ], + "menu_options": ["system_rename_data_disk", "system_adopt_data_disk"], + "description_placeholders": {"reference": 
"/dev/sda1"}, + } + + resp = await client.post( + f"/api/repairs/issues/fix/{flow_id}", + json={"next_step_id": "system_adopt_data_disk"}, + ) + + assert resp.status == HTTPStatus.OK + data = await resp.json() + + flow_id = data["flow_id"] + assert data == { + "type": "form", + "flow_id": flow_id, + "handler": "hassio", + "step_id": "system_adopt_data_disk", + "data_schema": [], + "errors": None, + "description_placeholders": {"reference": "/dev/sda1"}, + "last_step": True, + "preview": None, + } + + resp = await client.post(f"/api/repairs/issues/fix/{flow_id}") + + assert resp.status == HTTPStatus.OK + data = await resp.json() + + flow_id = data["flow_id"] + assert data == { + "type": "create_entry", + "flow_id": flow_id, + "handler": "hassio", + "description": None, + "description_placeholders": None, + } + + assert not issue_registry.async_get_issue(domain="hassio", issue_id="1234") + + assert aioclient_mock.mock_calls[-1][0] == "post" + assert ( + str(aioclient_mock.mock_calls[-1][1]) + == "http://127.0.0.1/resolution/suggestion/1236" + ) From 922cc81a62e9ccd7ecf01127dfe70f71338aaddf Mon Sep 17 00:00:00 2001 From: On Freund Date: Tue, 9 Apr 2024 10:59:27 +0300 Subject: [PATCH 153/426] Configurable maximum concurrency in Risco local (#115226) * Configurable maximum concurrency in Risco local * Show advanced Risco options in advanced mode --- homeassistant/components/risco/__init__.py | 7 ++- homeassistant/components/risco/config_flow.py | 20 +++++-- homeassistant/components/risco/const.py | 8 ++- homeassistant/components/risco/strings.json | 3 +- tests/components/risco/test_config_flow.py | 53 ++++++++++++++++++- 5 files changed, 83 insertions(+), 8 deletions(-) diff --git a/homeassistant/components/risco/__init__.py b/homeassistant/components/risco/__init__.py index 531cd982a1e..7ca18ea77c5 100644 --- a/homeassistant/components/risco/__init__.py +++ b/homeassistant/components/risco/__init__.py @@ -38,7 +38,9 @@ from homeassistant.helpers.storage import Store 
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed from .const import ( + CONF_CONCURRENCY, DATA_COORDINATOR, + DEFAULT_CONCURRENCY, DEFAULT_SCAN_INTERVAL, DOMAIN, EVENTS_COORDINATOR, @@ -85,7 +87,10 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: async def _async_setup_local_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: data = entry.data - risco = RiscoLocal(data[CONF_HOST], data[CONF_PORT], data[CONF_PIN]) + concurrency = entry.options.get(CONF_CONCURRENCY, DEFAULT_CONCURRENCY) + risco = RiscoLocal( + data[CONF_HOST], data[CONF_PORT], data[CONF_PIN], concurrency=concurrency + ) try: await risco.connect() diff --git a/homeassistant/components/risco/config_flow.py b/homeassistant/components/risco/config_flow.py index 0f13721856c..5822177a243 100644 --- a/homeassistant/components/risco/config_flow.py +++ b/homeassistant/components/risco/config_flow.py @@ -35,8 +35,10 @@ from .const import ( CONF_CODE_ARM_REQUIRED, CONF_CODE_DISARM_REQUIRED, CONF_COMMUNICATION_DELAY, + CONF_CONCURRENCY, CONF_HA_STATES_TO_RISCO, CONF_RISCO_STATES_TO_HA, + DEFAULT_ADVANCED_OPTIONS, DEFAULT_OPTIONS, DOMAIN, MAX_COMMUNICATION_DELAY, @@ -225,11 +227,8 @@ class RiscoOptionsFlowHandler(OptionsFlow): self._data = {**DEFAULT_OPTIONS, **config_entry.options} def _options_schema(self) -> vol.Schema: - return vol.Schema( + schema = vol.Schema( { - vol.Required( - CONF_SCAN_INTERVAL, default=self._data[CONF_SCAN_INTERVAL] - ): int, vol.Required( CONF_CODE_ARM_REQUIRED, default=self._data[CONF_CODE_ARM_REQUIRED] ): bool, @@ -239,6 +238,19 @@ class RiscoOptionsFlowHandler(OptionsFlow): ): bool, } ) + if self.show_advanced_options: + self._data = {**DEFAULT_ADVANCED_OPTIONS, **self._data} + schema = schema.extend( + { + vol.Required( + CONF_SCAN_INTERVAL, default=self._data[CONF_SCAN_INTERVAL] + ): int, + vol.Required( + CONF_CONCURRENCY, default=self._data[CONF_CONCURRENCY] + ): int, + } + ) + return schema async def 
async_step_init( self, user_input: dict[str, Any] | None = None diff --git a/homeassistant/components/risco/const.py b/homeassistant/components/risco/const.py index a27aeae4bf0..f1240a704de 100644 --- a/homeassistant/components/risco/const.py +++ b/homeassistant/components/risco/const.py @@ -14,6 +14,7 @@ DATA_COORDINATOR = "risco" EVENTS_COORDINATOR = "risco_events" DEFAULT_SCAN_INTERVAL = 30 +DEFAULT_CONCURRENCY = 4 TYPE_LOCAL = "local" @@ -25,6 +26,7 @@ CONF_CODE_DISARM_REQUIRED = "code_disarm_required" CONF_RISCO_STATES_TO_HA = "risco_states_to_ha" CONF_HA_STATES_TO_RISCO = "ha_states_to_risco" CONF_COMMUNICATION_DELAY = "communication_delay" +CONF_CONCURRENCY = "concurrency" RISCO_GROUPS = ["A", "B", "C", "D"] RISCO_ARM = "arm" @@ -44,9 +46,13 @@ DEFAULT_HA_STATES_TO_RISCO = { } DEFAULT_OPTIONS = { - CONF_SCAN_INTERVAL: DEFAULT_SCAN_INTERVAL, CONF_CODE_ARM_REQUIRED: False, CONF_CODE_DISARM_REQUIRED: False, CONF_RISCO_STATES_TO_HA: DEFAULT_RISCO_STATES_TO_HA, CONF_HA_STATES_TO_RISCO: DEFAULT_HA_STATES_TO_RISCO, } + +DEFAULT_ADVANCED_OPTIONS = { + CONF_SCAN_INTERVAL: DEFAULT_SCAN_INTERVAL, + CONF_CONCURRENCY: DEFAULT_CONCURRENCY, +} diff --git a/homeassistant/components/risco/strings.json b/homeassistant/components/risco/strings.json index 69d7e571f43..e35b13394cb 100644 --- a/homeassistant/components/risco/strings.json +++ b/homeassistant/components/risco/strings.json @@ -36,7 +36,8 @@ "init": { "title": "Configure options", "data": { - "scan_interval": "How often to poll Risco (in seconds)", + "scan_interval": "How often to poll Risco Cloud (in seconds)", + "concurrency": "Maximum concurrent requests in Risco local", "code_arm_required": "Require PIN to arm", "code_disarm_required": "Require PIN to disarm" } diff --git a/tests/components/risco/test_config_flow.py b/tests/components/risco/test_config_flow.py index d031f4e8542..db39447c69a 100644 --- a/tests/components/risco/test_config_flow.py +++ b/tests/components/risco/test_config_flow.py @@ -46,11 +46,15 @@ 
TEST_HA_TO_RISCO = { } TEST_OPTIONS = { - "scan_interval": 10, "code_arm_required": True, "code_disarm_required": True, } +TEST_ADVANCED_OPTIONS = { + "scan_interval": 10, + "concurrency": 3, +} + async def test_cloud_form(hass: HomeAssistant) -> None: """Test we get the cloud form.""" @@ -387,6 +391,53 @@ async def test_options_flow(hass: HomeAssistant) -> None: } +async def test_advanced_options_flow(hass: HomeAssistant) -> None: + """Test options flow.""" + entry = MockConfigEntry( + domain=DOMAIN, + unique_id=TEST_CLOUD_DATA["username"], + data=TEST_CLOUD_DATA, + ) + + entry.add_to_hass(hass) + + result = await hass.config_entries.options.async_init( + entry.entry_id, context={"show_advanced_options": True} + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "init" + assert "concurrency" in result["data_schema"].schema + assert "scan_interval" in result["data_schema"].schema + result = await hass.config_entries.options.async_configure( + result["flow_id"], user_input={**TEST_OPTIONS, **TEST_ADVANCED_OPTIONS} + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "risco_to_ha" + + result = await hass.config_entries.options.async_configure( + result["flow_id"], + user_input=TEST_RISCO_TO_HA, + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "ha_to_risco" + + with patch("homeassistant.components.risco.async_setup_entry", return_value=True): + result = await hass.config_entries.options.async_configure( + result["flow_id"], + user_input=TEST_HA_TO_RISCO, + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert entry.options == { + **TEST_OPTIONS, + **TEST_ADVANCED_OPTIONS, + "risco_states_to_ha": TEST_RISCO_TO_HA, + "ha_states_to_risco": TEST_HA_TO_RISCO, + } + + async def test_ha_to_risco_schema(hass: HomeAssistant) -> None: """Test that the schema for the ha-to-risco mapping step is generated properly.""" entry = MockConfigEntry( From 
fc60426213406142d7824bf64b1f5f75b6201c0d Mon Sep 17 00:00:00 2001 From: On Freund Date: Thu, 11 Apr 2024 00:26:15 +0300 Subject: [PATCH 154/426] Improve Risco exception logging (#115232) --- homeassistant/components/risco/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/homeassistant/components/risco/__init__.py b/homeassistant/components/risco/__init__.py index 7ca18ea77c5..d25579343c8 100644 --- a/homeassistant/components/risco/__init__.py +++ b/homeassistant/components/risco/__init__.py @@ -101,7 +101,7 @@ async def _async_setup_local_entry(hass: HomeAssistant, entry: ConfigEntry) -> b return False async def _error(error: Exception) -> None: - _LOGGER.error("Error in Risco library: %s", error) + _LOGGER.error("Error in Risco library", exc_info=error) entry.async_on_unload(risco.add_error_handler(_error)) From f284273ef6ef8ae5e91ba9826de5da09ea464437 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Mon, 8 Apr 2024 11:09:18 -1000 Subject: [PATCH 155/426] Fix misssing timeout in caldav (#115247) --- homeassistant/components/caldav/__init__.py | 1 + 1 file changed, 1 insertion(+) diff --git a/homeassistant/components/caldav/__init__.py b/homeassistant/components/caldav/__init__.py index eed06a3a005..3111460e968 100644 --- a/homeassistant/components/caldav/__init__.py +++ b/homeassistant/components/caldav/__init__.py @@ -34,6 +34,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: username=entry.data[CONF_USERNAME], password=entry.data[CONF_PASSWORD], ssl_verify_cert=entry.data[CONF_VERIFY_SSL], + timeout=10, ) try: await hass.async_add_executor_job(client.principal) From 14da34cd4def30c32107e98e48cf5f82ac195e28 Mon Sep 17 00:00:00 2001 From: Allen Porter Date: Mon, 8 Apr 2024 22:39:31 -0700 Subject: [PATCH 156/426] Fix Google Tasks parsing of remove responses (#115258) --- homeassistant/components/google_tasks/api.py | 5 +++-- tests/components/google_tasks/test_todo.py | 10 +++++----- 2 files changed, 
8 insertions(+), 7 deletions(-) diff --git a/homeassistant/components/google_tasks/api.py b/homeassistant/components/google_tasks/api.py index 2658fdedc59..ed70f2f6f44 100644 --- a/homeassistant/components/google_tasks/api.py +++ b/homeassistant/components/google_tasks/api.py @@ -112,8 +112,9 @@ class AsyncConfigEntryAuth: raise GoogleTasksApiError( f"Google Tasks API responded with error ({exception.status_code})" ) from exception - data = json.loads(response) - _raise_if_error(data) + if response: + data = json.loads(response) + _raise_if_error(data) for task_id in task_ids: batch.add( diff --git a/tests/components/google_tasks/test_todo.py b/tests/components/google_tasks/test_todo.py index 83d419439d7..afbaabe5cd0 100644 --- a/tests/components/google_tasks/test_todo.py +++ b/tests/components/google_tasks/test_todo.py @@ -156,7 +156,7 @@ def create_response_object(api_response: dict | list) -> tuple[Response, bytes]: def create_batch_response_object( - content_ids: list[str], api_responses: list[dict | list | Response] + content_ids: list[str], api_responses: list[dict | list | Response | None] ) -> tuple[Response, bytes]: """Create a batch response in the multipart/mixed format.""" assert len(api_responses) == len(content_ids) @@ -166,7 +166,7 @@ def create_batch_response_object( body = "" if isinstance(api_response, Response): status = api_response.status - else: + elif api_response is not None: body = json.dumps(api_response) content.extend( [ @@ -194,7 +194,7 @@ def create_batch_response_object( def create_batch_response_handler( - api_responses: list[dict | list | Response], + api_responses: list[dict | list | Response | None], ) -> Callable[[Any], tuple[Response, bytes]]: """Create a fake http2lib response handler that supports generating batch responses. 
@@ -598,11 +598,11 @@ async def test_partial_update_status( [ LIST_TASK_LIST_RESPONSE, LIST_TASKS_RESPONSE_MULTIPLE, - [EMPTY_RESPONSE, EMPTY_RESPONSE, EMPTY_RESPONSE], # Delete batch + [None, None, None], # Delete batch empty responses LIST_TASKS_RESPONSE, # refresh after delete ] ) - ) + ), ], ) async def test_delete_todo_list_item( From 5723ed28d3fbc739b4295bbcf205220f2e394ff8 Mon Sep 17 00:00:00 2001 From: Klaas Schoute Date: Tue, 9 Apr 2024 18:34:04 +0200 Subject: [PATCH 157/426] Bump forecast-solar lib to v3.1.0 (#115272) --- homeassistant/components/forecast_solar/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/forecast_solar/manifest.json b/homeassistant/components/forecast_solar/manifest.json index 94b603e108c..f5dd79281e6 100644 --- a/homeassistant/components/forecast_solar/manifest.json +++ b/homeassistant/components/forecast_solar/manifest.json @@ -7,5 +7,5 @@ "integration_type": "service", "iot_class": "cloud_polling", "quality_scale": "platinum", - "requirements": ["forecast-solar==3.0.0"] + "requirements": ["forecast-solar==3.1.0"] } diff --git a/requirements_all.txt b/requirements_all.txt index cbcae805bd0..88ce477e718 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -883,7 +883,7 @@ fnv-hash-fast==0.5.0 foobot_async==1.0.0 # homeassistant.components.forecast_solar -forecast-solar==3.0.0 +forecast-solar==3.1.0 # homeassistant.components.fortios fortiosapi==1.0.5 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 77dbd53a73e..5dc0aced2c3 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -721,7 +721,7 @@ fnv-hash-fast==0.5.0 foobot_async==1.0.0 # homeassistant.components.forecast_solar -forecast-solar==3.0.0 +forecast-solar==3.1.0 # homeassistant.components.freebox freebox-api==1.1.0 From 08bd2696960319e349162fb8ae6ad1b708af8938 Mon Sep 17 00:00:00 2001 From: Stefan Agner 
Date: Fri, 12 Apr 2024 09:02:22 +0200 Subject: [PATCH 158/426] Support backup of add-ons with hyphens (#115274) Co-authored-by: J. Nick Koston --- homeassistant/components/hassio/__init__.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/hassio/__init__.py b/homeassistant/components/hassio/__init__.py index 90b155aff15..46ba00185f5 100644 --- a/homeassistant/components/hassio/__init__.py +++ b/homeassistant/components/hassio/__init__.py @@ -196,7 +196,7 @@ SCHEMA_BACKUP_PARTIAL = SCHEMA_BACKUP_FULL.extend( { vol.Optional(ATTR_HOMEASSISTANT): cv.boolean, vol.Optional(ATTR_FOLDERS): vol.All(cv.ensure_list, [cv.string]), - vol.Optional(ATTR_ADDONS): vol.All(cv.ensure_list, [cv.slug]), + vol.Optional(ATTR_ADDONS): vol.All(cv.ensure_list, [VALID_ADDON_SLUG]), } ) @@ -211,7 +211,7 @@ SCHEMA_RESTORE_PARTIAL = SCHEMA_RESTORE_FULL.extend( { vol.Optional(ATTR_HOMEASSISTANT): cv.boolean, vol.Optional(ATTR_FOLDERS): vol.All(cv.ensure_list, [cv.string]), - vol.Optional(ATTR_ADDONS): vol.All(cv.ensure_list, [cv.slug]), + vol.Optional(ATTR_ADDONS): vol.All(cv.ensure_list, [VALID_ADDON_SLUG]), } ) From db2005d4ecbf31b65988b59291c9c5c9c0847f15 Mon Sep 17 00:00:00 2001 From: jan iversen Date: Tue, 9 Apr 2024 17:09:55 +0200 Subject: [PATCH 159/426] Bump pymodbus v3.6.7 (#115279) Bump pymodbus v3.6.7. 
--- homeassistant/components/modbus/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/modbus/manifest.json b/homeassistant/components/modbus/manifest.json index 956961c7e67..0fe8c7bc42d 100644 --- a/homeassistant/components/modbus/manifest.json +++ b/homeassistant/components/modbus/manifest.json @@ -6,5 +6,5 @@ "iot_class": "local_polling", "loggers": ["pymodbus"], "quality_scale": "platinum", - "requirements": ["pymodbus==3.6.6"] + "requirements": ["pymodbus==3.6.7"] } diff --git a/requirements_all.txt b/requirements_all.txt index 88ce477e718..ad8dad10170 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1973,7 +1973,7 @@ pymitv==1.4.3 pymochad==0.2.0 # homeassistant.components.modbus -pymodbus==3.6.6 +pymodbus==3.6.7 # homeassistant.components.monoprice pymonoprice==0.4 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 5dc0aced2c3..6e9b5fd9d0d 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1533,7 +1533,7 @@ pymeteoclimatic==0.1.0 pymochad==0.2.0 # homeassistant.components.modbus -pymodbus==3.6.6 +pymodbus==3.6.7 # homeassistant.components.monoprice pymonoprice==0.4 From 150145c9b1cefe0146b649122267ccc65c7022dc Mon Sep 17 00:00:00 2001 From: Joost Lekkerkerker Date: Tue, 9 Apr 2024 21:10:22 +0200 Subject: [PATCH 160/426] Bump yt-dlp to 2024.04.09 (#115295) --- homeassistant/components/media_extractor/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/media_extractor/manifest.json b/homeassistant/components/media_extractor/manifest.json index c86099a9ea4..940d1d7bb18 100644 --- a/homeassistant/components/media_extractor/manifest.json +++ b/homeassistant/components/media_extractor/manifest.json @@ -7,5 +7,5 @@ "iot_class": "calculated", "loggers": ["yt_dlp"], 
"quality_scale": "internal", - "requirements": ["yt-dlp==2024.03.10"] + "requirements": ["yt-dlp==2024.04.09"] } diff --git a/requirements_all.txt b/requirements_all.txt index ad8dad10170..ca93ab1f7f1 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2919,7 +2919,7 @@ youless-api==1.0.1 youtubeaio==1.1.5 # homeassistant.components.media_extractor -yt-dlp==2024.03.10 +yt-dlp==2024.04.09 # homeassistant.components.zamg zamg==0.3.6 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 6e9b5fd9d0d..162822d8cee 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2257,7 +2257,7 @@ youless-api==1.0.1 youtubeaio==1.1.5 # homeassistant.components.media_extractor -yt-dlp==2024.03.10 +yt-dlp==2024.04.09 # homeassistant.components.zamg zamg==0.3.6 From f941e5d5bbd82bac747c5ce945b328b9349b16e6 Mon Sep 17 00:00:00 2001 From: Shay Levy Date: Wed, 10 Apr 2024 14:58:35 +0300 Subject: [PATCH 161/426] Fix Aranet failure when the Bluetooth proxy is not providing a device name (#115298) Co-authored-by: J. 
Nick Koston --- .../components/aranet/config_flow.py | 20 +++++++++---------- homeassistant/components/aranet/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- tests/components/aranet/__init__.py | 8 ++++++++ tests/components/aranet/test_config_flow.py | 20 +++++++++++++++++++ 6 files changed, 40 insertions(+), 14 deletions(-) diff --git a/homeassistant/components/aranet/config_flow.py b/homeassistant/components/aranet/config_flow.py index cf5f24263dd..db89124c54d 100644 --- a/homeassistant/components/aranet/config_flow.py +++ b/homeassistant/components/aranet/config_flow.py @@ -2,10 +2,10 @@ from __future__ import annotations -import logging from typing import Any from aranet4.client import Aranet4Advertisement, Version as AranetVersion +from bluetooth_data_tools import human_readable_name import voluptuous as vol from homeassistant.components.bluetooth import ( @@ -18,11 +18,15 @@ from homeassistant.data_entry_flow import AbortFlow from .const import DOMAIN -_LOGGER = logging.getLogger(__name__) - MIN_VERSION = AranetVersion(1, 2, 0) +def _title(discovery_info: BluetoothServiceInfoBleak) -> str: + return discovery_info.device.name or human_readable_name( + None, "Aranet", discovery_info.address + ) + + class AranetConfigFlow(ConfigFlow, domain=DOMAIN): """Handle a config flow for Aranet.""" @@ -61,11 +65,8 @@ class AranetConfigFlow(ConfigFlow, domain=DOMAIN): self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Confirm discovery.""" - assert self._discovered_device is not None - adv = self._discovered_device assert self._discovery_info is not None - discovery_info = self._discovery_info - title = adv.readings.name if adv.readings else discovery_info.name + title = _title(self._discovery_info) if user_input is not None: return self.async_create_entry(title=title, data={}) @@ -101,10 +102,7 @@ class AranetConfigFlow(ConfigFlow, domain=DOMAIN): discovery_info.device, discovery_info.advertisement ) if 
adv.manufacturer_data: - self._discovered_devices[address] = ( - adv.readings.name if adv.readings else discovery_info.name, - adv, - ) + self._discovered_devices[address] = (_title(discovery_info), adv) if not self._discovered_devices: return self.async_abort(reason="no_devices_found") diff --git a/homeassistant/components/aranet/manifest.json b/homeassistant/components/aranet/manifest.json index 0d22a0d1859..152c56e80f3 100644 --- a/homeassistant/components/aranet/manifest.json +++ b/homeassistant/components/aranet/manifest.json @@ -19,5 +19,5 @@ "documentation": "https://www.home-assistant.io/integrations/aranet", "integration_type": "device", "iot_class": "local_push", - "requirements": ["aranet4==2.2.2"] + "requirements": ["aranet4==2.3.3"] } diff --git a/requirements_all.txt b/requirements_all.txt index ca93ab1f7f1..0989932242a 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -467,7 +467,7 @@ aprslib==0.7.0 aqualogic==2.6 # homeassistant.components.aranet -aranet4==2.2.2 +aranet4==2.3.3 # homeassistant.components.arcam_fmj arcam-fmj==1.4.0 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 162822d8cee..13d853941bf 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -428,7 +428,7 @@ apprise==1.7.4 aprslib==0.7.0 # homeassistant.components.aranet -aranet4==2.2.2 +aranet4==2.3.3 # homeassistant.components.arcam_fmj arcam-fmj==1.4.0 diff --git a/tests/components/aranet/__init__.py b/tests/components/aranet/__init__.py index b559743067d..4dc9434bd65 100644 --- a/tests/components/aranet/__init__.py +++ b/tests/components/aranet/__init__.py @@ -58,6 +58,14 @@ VALID_DATA_SERVICE_INFO = fake_service_info( }, ) +VALID_DATA_SERVICE_INFO_WITH_NO_NAME = fake_service_info( + None, + "0000fce0-0000-1000-8000-00805f9b34fb", + { + 1794: b'\x21\x00\x02\x01\x00\x00\x00\x01\x8a\x02\xa5\x01\xb1&"Y\x01,\x01\xe8\x00\x88' + }, +) + VALID_ARANET2_DATA_SERVICE_INFO = fake_service_info( "Aranet2 12345", 
"0000fce0-0000-1000-8000-00805f9b34fb", diff --git a/tests/components/aranet/test_config_flow.py b/tests/components/aranet/test_config_flow.py index f3558c66daf..a779a93cd8f 100644 --- a/tests/components/aranet/test_config_flow.py +++ b/tests/components/aranet/test_config_flow.py @@ -12,6 +12,7 @@ from . import ( NOT_ARANET4_SERVICE_INFO, OLD_FIRMWARE_SERVICE_INFO, VALID_DATA_SERVICE_INFO, + VALID_DATA_SERVICE_INFO_WITH_NO_NAME, ) from tests.common import MockConfigEntry @@ -36,6 +37,25 @@ async def test_async_step_bluetooth_valid_device(hass: HomeAssistant) -> None: assert result2["result"].unique_id == "aa:bb:cc:dd:ee:ff" +async def test_async_step_bluetooth_device_without_name(hass: HomeAssistant) -> None: + """Test discovery via bluetooth with a valid device that has no name.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_BLUETOOTH}, + data=VALID_DATA_SERVICE_INFO_WITH_NO_NAME, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "bluetooth_confirm" + with patch("homeassistant.components.aranet.async_setup_entry", return_value=True): + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={} + ) + assert result2["type"] is FlowResultType.CREATE_ENTRY + assert result2["title"] == "Aranet (EEFF)" + assert result2["data"] == {} + assert result2["result"].unique_id == "aa:bb:cc:dd:ee:ff" + + async def test_async_step_bluetooth_not_aranet4(hass: HomeAssistant) -> None: """Test that we reject discovery via Bluetooth for an unrelated device.""" result = await hass.config_entries.flow.async_init( From 5c2e9142fa0b41c3d841316a4e624263eee1f178 Mon Sep 17 00:00:00 2001 From: TheJulianJES Date: Tue, 9 Apr 2024 21:22:46 +0200 Subject: [PATCH 162/426] Bump zha-quirks to 0.0.114 (#115299) --- homeassistant/components/zha/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) 
diff --git a/homeassistant/components/zha/manifest.json b/homeassistant/components/zha/manifest.json index e9d75584064..7741673557d 100644 --- a/homeassistant/components/zha/manifest.json +++ b/homeassistant/components/zha/manifest.json @@ -24,7 +24,7 @@ "bellows==0.38.1", "pyserial==3.5", "pyserial-asyncio==0.6", - "zha-quirks==0.0.113", + "zha-quirks==0.0.114", "zigpy-deconz==0.23.1", "zigpy==0.63.5", "zigpy-xbee==0.20.1", diff --git a/requirements_all.txt b/requirements_all.txt index 0989932242a..ab009204b9d 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2934,7 +2934,7 @@ zeroconf==0.132.0 zeversolar==0.3.1 # homeassistant.components.zha -zha-quirks==0.0.113 +zha-quirks==0.0.114 # homeassistant.components.zhong_hong zhong-hong-hvac==1.0.12 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 13d853941bf..cfb38551ce3 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2269,7 +2269,7 @@ zeroconf==0.132.0 zeversolar==0.3.1 # homeassistant.components.zha -zha-quirks==0.0.113 +zha-quirks==0.0.114 # homeassistant.components.zha zigpy-deconz==0.23.1 From db5343164fd70e333ff2ebc2780195ce05287989 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Wed, 10 Apr 2024 02:42:18 -1000 Subject: [PATCH 163/426] Ensure automations do not execute from a trigger if they are disabled (#115305) * Ensure automations are stopped as soon as the stop future is set * revert script changes and move them to #115325 --- .../components/automation/__init__.py | 18 ++++- tests/components/automation/test_init.py | 80 +++++++++++++++++++ 2 files changed, 97 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/automation/__init__.py b/homeassistant/components/automation/__init__.py index 0bd2ed87d20..fbebc82225f 100644 --- a/homeassistant/components/automation/__init__.py +++ b/homeassistant/components/automation/__init__.py @@ -812,6 +812,22 @@ class AutomationEntity(BaseAutomationEntity, RestoreEntity): """Log helper callback.""" self._logger.log(level, "%s %s", msg, self.name, **kwargs) + async def _async_trigger_if_enabled( + self, + run_variables: dict[str, Any], + context: Context | None = None, + skip_condition: bool = False, + ) -> ScriptRunResult | None: + """Trigger automation if enabled. + + If the trigger starts but has a delay, the automation will be triggered + when the delay has passed so we need to make sure its still enabled before + executing the action. 
+ """ + if not self._is_enabled: + return None + return await self.async_trigger(run_variables, context, skip_condition) + async def _async_attach_triggers( self, home_assistant_start: bool ) -> Callable[[], None] | None: @@ -835,7 +851,7 @@ class AutomationEntity(BaseAutomationEntity, RestoreEntity): return await async_initialize_triggers( self.hass, self._trigger_config, - self.async_trigger, + self._async_trigger_if_enabled, DOMAIN, str(self.name), self._log_callback, diff --git a/tests/components/automation/test_init.py b/tests/components/automation/test_init.py index 00a7e6980d7..f6567285ab0 100644 --- a/tests/components/automation/test_init.py +++ b/tests/components/automation/test_init.py @@ -2650,3 +2650,83 @@ def test_deprecated_constants( import_and_test_deprecated_constant( caplog, automation, constant_name, replacement.__name__, replacement, "2025.1" ) + + +async def test_automation_turns_off_other_automation( + hass: HomeAssistant, caplog: pytest.LogCaptureFixture +) -> None: + """Test an automation that turns off another automation.""" + hass.set_state(CoreState.not_running) + calls = async_mock_service(hass, "persistent_notification", "create") + hass.states.async_set("binary_sensor.presence", "on") + await hass.async_block_till_done() + + assert await async_setup_component( + hass, + automation.DOMAIN, + { + automation.DOMAIN: [ + { + "trigger": { + "platform": "state", + "entity_id": "binary_sensor.presence", + "from": "on", + }, + "action": { + "service": "automation.turn_off", + "target": { + "entity_id": "automation.automation_1", + }, + "data": { + "stop_actions": True, + }, + }, + "id": "automation_0", + "mode": "single", + }, + { + "trigger": { + "platform": "state", + "entity_id": "binary_sensor.presence", + "from": "on", + "for": { + "hours": 0, + "minutes": 0, + "seconds": 5, + }, + }, + "action": { + "service": "persistent_notification.create", + "metadata": {}, + "data": { + "message": "Test race", + }, + }, + "id": "automation_1", + 
"mode": "single", + }, + ] + }, + ) + await hass.async_start() + await hass.async_block_till_done() + + hass.states.async_set("binary_sensor.presence", "off") + await hass.async_block_till_done() + assert len(calls) == 0 + async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=5)) + await hass.async_block_till_done() + assert len(calls) == 0 + + await hass.services.async_call( + "automation", + "turn_on", + {"entity_id": "automation.automation_1"}, + blocking=True, + ) + hass.states.async_set("binary_sensor.presence", "off") + await hass.async_block_till_done() + assert len(calls) == 0 + async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=5)) + await hass.async_block_till_done() + assert len(calls) == 0 From 0d62e2e92a0972fbcbd3e0fb1be03670cd2bd08e Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Tue, 9 Apr 2024 18:04:52 -1000 Subject: [PATCH 164/426] Bump bleak-retry-connector 3.5.0 (#115328) --- homeassistant/components/bluetooth/manifest.json | 2 +- homeassistant/package_constraints.txt | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/bluetooth/manifest.json b/homeassistant/components/bluetooth/manifest.json index 62296ddd8b8..58009216464 100644 --- a/homeassistant/components/bluetooth/manifest.json +++ b/homeassistant/components/bluetooth/manifest.json @@ -15,7 +15,7 @@ "quality_scale": "internal", "requirements": [ "bleak==0.21.1", - "bleak-retry-connector==3.4.0", + "bleak-retry-connector==3.5.0", "bluetooth-adapters==0.18.0", "bluetooth-auto-recovery==1.4.0", "bluetooth-data-tools==1.19.0", diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index b8c8b0fcb64..b3195eb8291 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -13,7 +13,7 @@ atomicwrites-homeassistant==1.4.1 attrs==23.2.0 awesomeversion==24.2.0 bcrypt==4.1.2 
-bleak-retry-connector==3.4.0 +bleak-retry-connector==3.5.0 bleak==0.21.1 bluetooth-adapters==0.18.0 bluetooth-auto-recovery==1.4.0 diff --git a/requirements_all.txt b/requirements_all.txt index ab009204b9d..1c1df97b779 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -556,7 +556,7 @@ bizkaibus==0.1.1 bleak-esphome==1.0.0 # homeassistant.components.bluetooth -bleak-retry-connector==3.4.0 +bleak-retry-connector==3.5.0 # homeassistant.components.bluetooth bleak==0.21.1 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index cfb38551ce3..5467f57c0cd 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -478,7 +478,7 @@ bimmer-connected[china]==0.14.6 bleak-esphome==1.0.0 # homeassistant.components.bluetooth -bleak-retry-connector==3.4.0 +bleak-retry-connector==3.5.0 # homeassistant.components.bluetooth bleak==0.21.1 From 98bc7c0ed2602d578211b44b27a3765abf9f5b3f Mon Sep 17 00:00:00 2001 From: jan iversen Date: Wed, 10 Apr 2024 22:09:10 +0200 Subject: [PATCH 165/426] Secure against resetting a non active modbus (#115364) --- homeassistant/components/modbus/__init__.py | 3 +++ tests/components/modbus/test_init.py | 7 +++++++ 2 files changed, 10 insertions(+) diff --git a/homeassistant/components/modbus/__init__.py b/homeassistant/components/modbus/__init__.py index 94a84d3440d..23ad6ac1be6 100644 --- a/homeassistant/components/modbus/__init__.py +++ b/homeassistant/components/modbus/__init__.py @@ -440,6 +440,9 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: async def async_reset_platform(hass: HomeAssistant, integration_name: str) -> None: """Release modbus resources.""" + if DOMAIN not in hass.data: + _LOGGER.error("Modbus cannot reload, because it was never loaded") + return _LOGGER.info("Modbus reloading") hubs = hass.data[DOMAIN] for name in hubs: diff --git a/tests/components/modbus/test_init.py b/tests/components/modbus/test_init.py index 0ca4703aa5f..dfbc066fb8a 100644 --- 
a/tests/components/modbus/test_init.py +++ b/tests/components/modbus/test_init.py @@ -25,6 +25,7 @@ import voluptuous as vol from homeassistant import config as hass_config from homeassistant.components.binary_sensor import DOMAIN as BINARY_SENSOR_DOMAIN +from homeassistant.components.modbus import async_reset_platform from homeassistant.components.modbus.const import ( ATTR_ADDRESS, ATTR_HUB, @@ -1694,3 +1695,9 @@ async def test_no_entities(hass: HomeAssistant) -> None: ] } assert await async_setup_component(hass, DOMAIN, config) is False + + +async def test_reset_platform(hass: HomeAssistant) -> None: + """Run test for async_reset_platform.""" + await async_reset_platform(hass, "modbus") + assert DOMAIN not in hass.data From d055f987366b5c662fe9cbce09aaa2e8c12f286f Mon Sep 17 00:00:00 2001 From: jan iversen Date: Wed, 10 Apr 2024 21:39:53 +0200 Subject: [PATCH 166/426] Solve modbus test problem (#115376) Fix test. --- tests/components/modbus/conftest.py | 12 +++++++- .../modbus/fixtures/configuration.yaml | 4 +++ tests/components/modbus/test_init.py | 28 ++++++++++--------- 3 files changed, 30 insertions(+), 14 deletions(-) diff --git a/tests/components/modbus/conftest.py b/tests/components/modbus/conftest.py index f6eff0fd64b..62cf12958d3 100644 --- a/tests/components/modbus/conftest.py +++ b/tests/components/modbus/conftest.py @@ -52,6 +52,15 @@ def mock_pymodbus_fixture(): """Mock pymodbus.""" mock_pb = mock.AsyncMock() mock_pb.close = mock.MagicMock() + read_result = ReadResult([]) + mock_pb.read_coils.return_value = read_result + mock_pb.read_discrete_inputs.return_value = read_result + mock_pb.read_input_registers.return_value = read_result + mock_pb.read_holding_registers.return_value = read_result + mock_pb.write_register.return_value = read_result + mock_pb.write_registers.return_value = read_result + mock_pb.write_coil.return_value = read_result + mock_pb.write_coils.return_value = read_result with ( mock.patch( 
"homeassistant.components.modbus.modbus.AsyncModbusTcpClient", @@ -156,7 +165,7 @@ async def mock_pymodbus_exception_fixture(hass, do_exception, mock_modbus): @pytest.fixture(name="mock_pymodbus_return") async def mock_pymodbus_return_fixture(hass, register_words, mock_modbus): """Trigger update call with time_changed event.""" - read_result = ReadResult(register_words) if register_words else None + read_result = ReadResult(register_words if register_words else []) mock_modbus.read_coils.return_value = read_result mock_modbus.read_discrete_inputs.return_value = read_result mock_modbus.read_input_registers.return_value = read_result @@ -165,6 +174,7 @@ async def mock_pymodbus_return_fixture(hass, register_words, mock_modbus): mock_modbus.write_registers.return_value = read_result mock_modbus.write_coil.return_value = read_result mock_modbus.write_coils.return_value = read_result + return mock_modbus @pytest.fixture(name="mock_do_cycle") diff --git a/tests/components/modbus/fixtures/configuration.yaml b/tests/components/modbus/fixtures/configuration.yaml index 0f12ac88686..0a16d85e39d 100644 --- a/tests/components/modbus/fixtures/configuration.yaml +++ b/tests/components/modbus/fixtures/configuration.yaml @@ -3,3 +3,7 @@ modbus: host: "testHost" port: 5001 name: "testModbus" + sensors: + - name: "dummy" + address: 117 + slave: 0 diff --git a/tests/components/modbus/test_init.py b/tests/components/modbus/test_init.py index dfbc066fb8a..2c5810a7757 100644 --- a/tests/components/modbus/test_init.py +++ b/tests/components/modbus/test_init.py @@ -1561,7 +1561,7 @@ async def test_shutdown( ], ) async def test_stop_restart( - hass: HomeAssistant, caplog: pytest.LogCaptureFixture, mock_modbus + hass: HomeAssistant, caplog: pytest.LogCaptureFixture, mock_pymodbus_return ) -> None: """Run test for service stop.""" @@ -1572,7 +1572,7 @@ async def test_stop_restart( await hass.async_block_till_done() assert hass.states.get(entity_id).state == "17" - mock_modbus.reset_mock() + 
mock_pymodbus_return.reset_mock() caplog.clear() data = { ATTR_HUB: TEST_MODBUS_NAME, @@ -1580,23 +1580,23 @@ async def test_stop_restart( await hass.services.async_call(DOMAIN, SERVICE_STOP, data, blocking=True) await hass.async_block_till_done() assert hass.states.get(entity_id).state == STATE_UNAVAILABLE - assert mock_modbus.close.called + assert mock_pymodbus_return.close.called assert f"modbus {TEST_MODBUS_NAME} communication closed" in caplog.text - mock_modbus.reset_mock() + mock_pymodbus_return.reset_mock() caplog.clear() await hass.services.async_call(DOMAIN, SERVICE_RESTART, data, blocking=True) await hass.async_block_till_done() - assert not mock_modbus.close.called - assert mock_modbus.connect.called + assert not mock_pymodbus_return.close.called + assert mock_pymodbus_return.connect.called assert f"modbus {TEST_MODBUS_NAME} communication open" in caplog.text - mock_modbus.reset_mock() + mock_pymodbus_return.reset_mock() caplog.clear() await hass.services.async_call(DOMAIN, SERVICE_RESTART, data, blocking=True) await hass.async_block_till_done() - assert mock_modbus.close.called - assert mock_modbus.connect.called + assert mock_pymodbus_return.close.called + assert mock_pymodbus_return.connect.called assert f"modbus {TEST_MODBUS_NAME} communication closed" in caplog.text assert f"modbus {TEST_MODBUS_NAME} communication open" in caplog.text @@ -1626,7 +1626,7 @@ async def test_write_no_client(hass: HomeAssistant, mock_modbus) -> None: async def test_integration_reload( hass: HomeAssistant, caplog: pytest.LogCaptureFixture, - mock_modbus, + mock_pymodbus_return, freezer: FrozenDateTimeFactory, ) -> None: """Run test for integration reload.""" @@ -1647,7 +1647,7 @@ async def test_integration_reload( @pytest.mark.parametrize("do_config", [{}]) async def test_integration_reload_failed( - hass: HomeAssistant, caplog: pytest.LogCaptureFixture, mock_modbus + hass: HomeAssistant, caplog: pytest.LogCaptureFixture, mock_pymodbus_return ) -> None: """Run test for 
integration connect failure on reload.""" caplog.set_level(logging.INFO) @@ -1656,7 +1656,9 @@ async def test_integration_reload_failed( yaml_path = get_fixture_path("configuration.yaml", "modbus") with ( mock.patch.object(hass_config, "YAML_CONFIG_FILE", yaml_path), - mock.patch.object(mock_modbus, "connect", side_effect=ModbusException("error")), + mock.patch.object( + mock_pymodbus_return, "connect", side_effect=ModbusException("error") + ), ): await hass.services.async_call(DOMAIN, SERVICE_RELOAD, blocking=True) await hass.async_block_till_done() @@ -1667,7 +1669,7 @@ async def test_integration_reload_failed( @pytest.mark.parametrize("do_config", [{}]) async def test_integration_setup_failed( - hass: HomeAssistant, caplog: pytest.LogCaptureFixture, mock_modbus + hass: HomeAssistant, caplog: pytest.LogCaptureFixture, mock_pymodbus_return ) -> None: """Run test for integration setup on reload.""" with mock.patch.object( From 4aca39b49e8d62d47d247a8a19a2d4957da49091 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Wed, 10 Apr 2024 11:38:34 -1000 Subject: [PATCH 167/426] Fix deadlock in holidays dynamic loading (#115385) --- homeassistant/components/holiday/__init__.py | 23 ++++++++++++++++- homeassistant/components/workday/__init__.py | 27 +++++++++++++++----- 2 files changed, 42 insertions(+), 8 deletions(-) diff --git a/homeassistant/components/holiday/__init__.py b/homeassistant/components/holiday/__init__.py index 4f2c593d38e..c9a58f29215 100644 --- a/homeassistant/components/holiday/__init__.py +++ b/homeassistant/components/holiday/__init__.py @@ -2,15 +2,36 @@ from __future__ import annotations +from functools import partial + +from holidays import country_holidays + from homeassistant.config_entries import ConfigEntry -from homeassistant.const import Platform +from homeassistant.const import CONF_COUNTRY, Platform from homeassistant.core import HomeAssistant +from homeassistant.setup import SetupPhases, async_pause_setup + +from .const import CONF_PROVINCE PLATFORMS: list[Platform] = [Platform.CALENDAR] async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Set up Holiday from a config entry.""" + country: str = entry.data[CONF_COUNTRY] + province: str | None = entry.data.get(CONF_PROVINCE) + + # We only import here to ensure that that its not imported later + # in the event loop since the platforms will call country_holidays + # which loads python code from disk. 
+ with async_pause_setup(hass, SetupPhases.WAIT_IMPORT_PACKAGES): + # import executor job is used here because multiple integrations use + # the holidays library and it is not thread safe to import it in parallel + # https://github.com/python/cpython/issues/83065 + await hass.async_add_import_executor_job( + partial(country_holidays, country, subdiv=province) + ) + await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) return True diff --git a/homeassistant/components/workday/__init__.py b/homeassistant/components/workday/__init__.py index 077a6710b8d..f25cf41b992 100644 --- a/homeassistant/components/workday/__init__.py +++ b/homeassistant/components/workday/__init__.py @@ -11,6 +11,7 @@ from homeassistant.const import CONF_COUNTRY, CONF_LANGUAGE from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryError from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue +from homeassistant.setup import SetupPhases, async_pause_setup from .const import CONF_PROVINCE, DOMAIN, PLATFORMS @@ -23,7 +24,11 @@ async def _async_validate_country_and_province( if not country: return try: - await hass.async_add_executor_job(country_holidays, country) + with async_pause_setup(hass, SetupPhases.WAIT_IMPORT_PACKAGES): + # import executor job is used here because multiple integrations use + # the holidays library and it is not thread safe to import it in parallel + # https://github.com/python/cpython/issues/83065 + await hass.async_add_import_executor_job(country_holidays, country) except NotImplementedError as ex: async_create_issue( hass, @@ -41,9 +46,13 @@ async def _async_validate_country_and_province( if not province: return try: - await hass.async_add_executor_job( - partial(country_holidays, country, subdiv=province) - ) + with async_pause_setup(hass, SetupPhases.WAIT_IMPORT_PACKAGES): + # import executor job is used here because multiple integrations use + # the holidays library and it is not thread 
safe to import it in parallel + # https://github.com/python/cpython/issues/83065 + await hass.async_add_import_executor_job( + partial(country_holidays, country, subdiv=province) + ) except NotImplementedError as ex: async_create_issue( hass, @@ -73,9 +82,13 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: await _async_validate_country_and_province(hass, entry, country, province) if country and CONF_LANGUAGE not in entry.options: - cls: HolidayBase = await hass.async_add_executor_job( - partial(country_holidays, country, subdiv=province) - ) + with async_pause_setup(hass, SetupPhases.WAIT_IMPORT_PACKAGES): + # import executor job is used here because multiple integrations use + # the holidays library and it is not thread safe to import it in parallel + # https://github.com/python/cpython/issues/83065 + cls: HolidayBase = await hass.async_add_import_executor_job( + partial(country_holidays, country, subdiv=province) + ) default_language = cls.default_language new_options = entry.options.copy() new_options[CONF_LANGUAGE] = default_language From 5fa06e5a9c1c364e5e879bc371bfda57a5ad4da7 Mon Sep 17 00:00:00 2001 From: Jessica Smith <8505845+NodeJSmith@users.noreply.github.com> Date: Thu, 11 Apr 2024 09:26:05 -0500 Subject: [PATCH 168/426] Bump whirlpool-sixth-sense to 0.18.8 (#115393) bump whirlpool to 0.18.8 --- homeassistant/components/whirlpool/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/whirlpool/manifest.json b/homeassistant/components/whirlpool/manifest.json index ee7861588ed..5618a3f61cb 100644 --- a/homeassistant/components/whirlpool/manifest.json +++ b/homeassistant/components/whirlpool/manifest.json @@ -7,5 +7,5 @@ "integration_type": "hub", "iot_class": "cloud_push", "loggers": ["whirlpool"], - "requirements": ["whirlpool-sixth-sense==0.18.7"] + "requirements": ["whirlpool-sixth-sense==0.18.8"] } diff 
--git a/requirements_all.txt b/requirements_all.txt index 1c1df97b779..2e109a1ef13 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2850,7 +2850,7 @@ webmin-xmlrpc==0.0.2 webrtc-noise-gain==1.2.3 # homeassistant.components.whirlpool -whirlpool-sixth-sense==0.18.7 +whirlpool-sixth-sense==0.18.8 # homeassistant.components.whois whois==0.9.27 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 5467f57c0cd..cd370eb2487 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2197,7 +2197,7 @@ webmin-xmlrpc==0.0.2 webrtc-noise-gain==1.2.3 # homeassistant.components.whirlpool -whirlpool-sixth-sense==0.18.7 +whirlpool-sixth-sense==0.18.8 # homeassistant.components.whois whois==0.9.27 From a455e142ac688b43a21343d619f8d0a854ded675 Mon Sep 17 00:00:00 2001 From: Allen Porter Date: Thu, 11 Apr 2024 19:14:52 -0700 Subject: [PATCH 169/426] Fix bug in rainbird switch when turning off a switch that is already off (#115421) Fix big in rainbird switch when turning off a switch that is already off Co-authored-by: J. Nick Koston --- homeassistant/components/rainbird/switch.py | 3 ++- tests/components/rainbird/test_switch.py | 10 +++++++--- 2 files changed, 9 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/rainbird/switch.py b/homeassistant/components/rainbird/switch.py index a929f5b875b..7f43553aa41 100644 --- a/homeassistant/components/rainbird/switch.py +++ b/homeassistant/components/rainbird/switch.py @@ -123,7 +123,8 @@ class RainBirdSwitch(CoordinatorEntity[RainbirdUpdateCoordinator], SwitchEntity) # The device reflects the old state for a few moments. Update the # state manually and trigger a refresh after a short debounced delay. 
- self.coordinator.data.active_zones.remove(self._zone) + if self.is_on: + self.coordinator.data.active_zones.remove(self._zone) self.async_write_ha_state() await self.coordinator.async_request_refresh() diff --git a/tests/components/rainbird/test_switch.py b/tests/components/rainbird/test_switch.py index 0f9a139a69d..068fe03ac33 100644 --- a/tests/components/rainbird/test_switch.py +++ b/tests/components/rainbird/test_switch.py @@ -146,20 +146,24 @@ async def test_switch_on( @pytest.mark.parametrize( - "zone_state_response", - [ZONE_3_ON_RESPONSE], + ("zone_state_response", "start_state"), + [ + (ZONE_3_ON_RESPONSE, "on"), + (ZONE_OFF_RESPONSE, "off"), # Already off + ], ) async def test_switch_off( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, responses: list[AiohttpClientMockResponse], + start_state: str, ) -> None: """Test turning off irrigation switch.""" # Initially the test zone is on zone = hass.states.get("switch.rain_bird_sprinkler_3") assert zone is not None - assert zone.state == "on" + assert zone.state == start_state aioclient_mock.mock_calls.clear() responses.extend( From 2ed1cfd68d74f59188a5dd02bb01d077694a81ae Mon Sep 17 00:00:00 2001 From: Santobert Date: Thu, 11 Apr 2024 21:57:18 +0200 Subject: [PATCH 170/426] Bump pybotvac to 0.0.25 (#115435) Bump pybotvac --- homeassistant/components/neato/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/neato/manifest.json b/homeassistant/components/neato/manifest.json index 1d5edb7ca44..d6eff486b05 100644 --- a/homeassistant/components/neato/manifest.json +++ b/homeassistant/components/neato/manifest.json @@ -7,5 +7,5 @@ "documentation": "https://www.home-assistant.io/integrations/neato", "iot_class": "cloud_polling", "loggers": ["pybotvac"], - "requirements": ["pybotvac==0.0.24"] + "requirements": ["pybotvac==0.0.25"] } diff --git a/requirements_all.txt b/requirements_all.txt 
index 2e109a1ef13..4b7909958cb 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1718,7 +1718,7 @@ pybbox==0.0.5-alpha pyblackbird==0.6 # homeassistant.components.neato -pybotvac==0.0.24 +pybotvac==0.0.25 # homeassistant.components.braviatv pybravia==0.3.3 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index cd370eb2487..624da305f89 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1350,7 +1350,7 @@ pybalboa==1.0.1 pyblackbird==0.6 # homeassistant.components.neato -pybotvac==0.0.24 +pybotvac==0.0.25 # homeassistant.components.braviatv pybravia==0.3.3 From 7f6514b03c6001914b76e2e7dcadc3036d96b659 Mon Sep 17 00:00:00 2001 From: Bram Kragten Date: Fri, 12 Apr 2024 11:50:22 +0200 Subject: [PATCH 171/426] Update frontend to 20240404.2 (#115460) --- homeassistant/components/frontend/manifest.json | 2 +- homeassistant/package_constraints.txt | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/frontend/manifest.json b/homeassistant/components/frontend/manifest.json index 028fb28f01b..d711314cabb 100644 --- a/homeassistant/components/frontend/manifest.json +++ b/homeassistant/components/frontend/manifest.json @@ -20,5 +20,5 @@ "documentation": "https://www.home-assistant.io/integrations/frontend", "integration_type": "system", "quality_scale": "internal", - "requirements": ["home-assistant-frontend==20240404.1"] + "requirements": ["home-assistant-frontend==20240404.2"] } diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index b3195eb8291..366f72cd2bc 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -30,7 +30,7 @@ habluetooth==2.4.2 hass-nabucasa==0.78.0 hassil==1.6.1 home-assistant-bluetooth==1.12.0 -home-assistant-frontend==20240404.1 +home-assistant-frontend==20240404.2 home-assistant-intents==2024.4.3 httpx==0.27.0 
ifaddr==0.2.0 diff --git a/requirements_all.txt b/requirements_all.txt index 4b7909958cb..194dda7caac 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1077,7 +1077,7 @@ hole==0.8.0 holidays==0.46 # homeassistant.components.frontend -home-assistant-frontend==20240404.1 +home-assistant-frontend==20240404.2 # homeassistant.components.conversation home-assistant-intents==2024.4.3 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 624da305f89..dfa71c7ac3e 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -876,7 +876,7 @@ hole==0.8.0 holidays==0.46 # homeassistant.components.frontend -home-assistant-frontend==20240404.1 +home-assistant-frontend==20240404.2 # homeassistant.components.conversation home-assistant-intents==2024.4.3 From 62eee52aedf8f1fd28257b7aa9c6212f52ba8548 Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Fri, 12 Apr 2024 12:00:16 +0200 Subject: [PATCH 172/426] Bump version to 2024.4.3 --- homeassistant/const.py | 2 +- pyproject.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/homeassistant/const.py b/homeassistant/const.py index e4359f5bbfb..ecfc1c6259c 100644 --- a/homeassistant/const.py +++ b/homeassistant/const.py @@ -18,7 +18,7 @@ from .util.signal_type import SignalType APPLICATION_NAME: Final = "HomeAssistant" MAJOR_VERSION: Final = 2024 MINOR_VERSION: Final = 4 -PATCH_VERSION: Final = "2" +PATCH_VERSION: Final = "3" __short_version__: Final = f"{MAJOR_VERSION}.{MINOR_VERSION}" __version__: Final = f"{__short_version__}.{PATCH_VERSION}" REQUIRED_PYTHON_VER: Final[tuple[int, int, int]] = (3, 12, 0) diff --git a/pyproject.toml b/pyproject.toml index 9993c8e9cb8..74b6f6fa54e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta" [project] name = "homeassistant" -version = "2024.4.2" +version = "2024.4.3" license = {text = "Apache-2.0"} description = "Open-source home automation platform running on Python 3." 
readme = "README.rst" From 814b7a4447d4af5155068525401e931d7051e3bd Mon Sep 17 00:00:00 2001 From: Matrix Date: Fri, 19 Apr 2024 09:14:14 +0800 Subject: [PATCH 173/426] Bump yolink-api to 0.4.3 (#115794) --- homeassistant/components/yolink/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/yolink/manifest.json b/homeassistant/components/yolink/manifest.json index cd6759b5864..b7bd1d4784f 100644 --- a/homeassistant/components/yolink/manifest.json +++ b/homeassistant/components/yolink/manifest.json @@ -6,5 +6,5 @@ "dependencies": ["auth", "application_credentials"], "documentation": "https://www.home-assistant.io/integrations/yolink", "iot_class": "cloud_push", - "requirements": ["yolink-api==0.4.2"] + "requirements": ["yolink-api==0.4.3"] } diff --git a/requirements_all.txt b/requirements_all.txt index 18c4d6a0076..a2ddb9cdb02 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2911,7 +2911,7 @@ yeelight==0.7.14 yeelightsunflower==0.0.10 # homeassistant.components.yolink -yolink-api==0.4.2 +yolink-api==0.4.3 # homeassistant.components.youless youless-api==1.0.1 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index aeb38c28aa1..820dccee669 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2261,7 +2261,7 @@ yalexs==3.0.1 yeelight==0.7.14 # homeassistant.components.yolink -yolink-api==0.4.2 +yolink-api==0.4.3 # homeassistant.components.youless youless-api==1.0.1 From e62ae90d810235568cab80b1298428b5bcad94be Mon Sep 17 00:00:00 2001 From: Maciej Bieniek Date: Fri, 19 Apr 2024 05:24:10 +0200 Subject: [PATCH 174/426] Bump `accuweather` to version 3.0.0 (#115820) Bump accuweather to version 3.0.0 Co-authored-by: Maciej Bieniek <478555+bieniu@users.noreply.github.com> --- homeassistant/components/accuweather/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files 
changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/accuweather/manifest.json b/homeassistant/components/accuweather/manifest.json index fa651d98efd..24a8180eef8 100644 --- a/homeassistant/components/accuweather/manifest.json +++ b/homeassistant/components/accuweather/manifest.json @@ -8,6 +8,6 @@ "iot_class": "cloud_polling", "loggers": ["accuweather"], "quality_scale": "platinum", - "requirements": ["accuweather==2.1.1"], + "requirements": ["accuweather==3.0.0"], "single_config_entry": true } diff --git a/requirements_all.txt b/requirements_all.txt index a2ddb9cdb02..89084e920ad 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -140,7 +140,7 @@ TwitterAPI==2.7.12 WSDiscovery==2.0.0 # homeassistant.components.accuweather -accuweather==2.1.1 +accuweather==3.0.0 # homeassistant.components.adax adax==0.4.0 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 820dccee669..61a1b02f151 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -119,7 +119,7 @@ Tami4EdgeAPI==2.1 WSDiscovery==2.0.0 # homeassistant.components.accuweather -accuweather==2.1.1 +accuweather==3.0.0 # homeassistant.components.adax adax==0.4.0 From 6b6324f48ec045c4a6b451a757ec2d2f0fb89244 Mon Sep 17 00:00:00 2001 From: Robert Svensson Date: Fri, 19 Apr 2024 06:36:43 +0200 Subject: [PATCH 175/426] Bump aiounifi to v75 (#115819) --- homeassistant/components/unifi/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/unifi/manifest.json b/homeassistant/components/unifi/manifest.json index 05dc2189908..305400a4b9d 100644 --- a/homeassistant/components/unifi/manifest.json +++ b/homeassistant/components/unifi/manifest.json @@ -8,7 +8,7 @@ "iot_class": "local_push", "loggers": ["aiounifi"], "quality_scale": "platinum", - "requirements": ["aiounifi==74"], + "requirements": ["aiounifi==75"], "ssdp": [ { 
"manufacturer": "Ubiquiti Networks", diff --git a/requirements_all.txt b/requirements_all.txt index 89084e920ad..c33bcedc61d 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -383,7 +383,7 @@ aiotankerkoenig==0.4.1 aiotractive==0.5.6 # homeassistant.components.unifi -aiounifi==74 +aiounifi==75 # homeassistant.components.vlc_telnet aiovlc==0.1.0 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 61a1b02f151..28be0f25568 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -356,7 +356,7 @@ aiotankerkoenig==0.4.1 aiotractive==0.5.6 # homeassistant.components.unifi -aiounifi==74 +aiounifi==75 # homeassistant.components.vlc_telnet aiovlc==0.1.0 From 4cce75177a6abca12bf0603678b8fb3d940ef97a Mon Sep 17 00:00:00 2001 From: Joe Neuman Date: Thu, 18 Apr 2024 22:56:37 -0700 Subject: [PATCH 176/426] Add get_torrents service to qBittorrent integration (#106501) * Upgrade QBittorrent integration to show torrents This brings the QBittorrent integration to be more in line with the Transmission integration. It updates how the integration is written, along with adding sensors for Active Torrents, Inactive Torrents, Paused Torrents, Total Torrents, Seeding Torrents, Started Torrents. 
* Remove unused stuff * Correct name in comments * Make get torrents a service with a response * Add new sensors * remove service * Add service with response to get torrents list This adds a service with a response to be able to get the list of torrents within qBittorrent * update * update from rebase * Update strings.json * Update helpers.py * Update to satisfy lint * add func comment * fix lint issues * another update attempt * Fix helpers * Remove unneccesary part in services.yaml and add translations * Fix return * Add tests * Fix test * Improve tests * Fix issue from rebase * Add icon for get_torrents service * Make get torrents a service with a response * remove service * Add service with response to get torrents list This adds a service with a response to be able to get the list of torrents within qBittorrent * Update to satisfy lint * Handle multiple installed integrations * fix lint issue * Set return types for helper methods * Create the service method in async_setup * Add CONFIG_SCHEMA * Add get_all_torrents service * fix lint issues * Add return types and ServiceValidationError(s) * Fix naming * Update translations * Fix tests --- .../components/qbittorrent/__init__.py | 90 ++++++++++++++- homeassistant/components/qbittorrent/const.py | 7 ++ .../components/qbittorrent/coordinator.py | 27 ++++- .../components/qbittorrent/helpers.py | 48 ++++++++ .../components/qbittorrent/icons.json | 4 + .../components/qbittorrent/services.yaml | 35 ++++++ .../components/qbittorrent/strings.json | 37 ++++++ tests/components/qbittorrent/test_helpers.py | 108 ++++++++++++++++++ 8 files changed, 349 insertions(+), 7 deletions(-) create mode 100644 homeassistant/components/qbittorrent/services.yaml create mode 100644 tests/components/qbittorrent/test_helpers.py diff --git a/homeassistant/components/qbittorrent/__init__.py b/homeassistant/components/qbittorrent/__init__.py index 7b1a38b7e31..84f080c4d49 100644 --- a/homeassistant/components/qbittorrent/__init__.py +++ 
b/homeassistant/components/qbittorrent/__init__.py @@ -1,29 +1,111 @@ """The qbittorrent component.""" import logging +from typing import Any from qbittorrent.client import LoginRequired from requests.exceptions import RequestException from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( + ATTR_DEVICE_ID, CONF_PASSWORD, CONF_URL, CONF_USERNAME, CONF_VERIFY_SSL, Platform, ) -from homeassistant.core import HomeAssistant -from homeassistant.exceptions import ConfigEntryNotReady +from homeassistant.core import HomeAssistant, ServiceCall, SupportsResponse +from homeassistant.exceptions import ConfigEntryNotReady, ServiceValidationError +from homeassistant.helpers import config_validation as cv, device_registry as dr +from homeassistant.helpers.typing import ConfigType -from .const import DOMAIN +from .const import ( + DOMAIN, + SERVICE_GET_ALL_TORRENTS, + SERVICE_GET_TORRENTS, + STATE_ATTR_ALL_TORRENTS, + STATE_ATTR_TORRENTS, + TORRENT_FILTER, +) from .coordinator import QBittorrentDataCoordinator -from .helpers import setup_client +from .helpers import format_torrents, setup_client _LOGGER = logging.getLogger(__name__) +CONFIG_SCHEMA = cv.empty_config_schema(DOMAIN) + PLATFORMS = [Platform.SENSOR] +CONF_ENTRY = "entry" + + +async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: + """Set up qBittorrent services.""" + + async def handle_get_torrents(service_call: ServiceCall) -> dict[str, Any] | None: + device_registry = dr.async_get(hass) + device_entry = device_registry.async_get(service_call.data[ATTR_DEVICE_ID]) + + if device_entry is None: + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="invalid_device", + translation_placeholders={ + "device_id": service_call.data[ATTR_DEVICE_ID] + }, + ) + + entry_id = None + + for key, value in device_entry.identifiers: + if key == DOMAIN: + entry_id = value + break + else: + raise ServiceValidationError( + translation_domain=DOMAIN, + 
translation_key="invalid_entry_id", + translation_placeholders={"device_id": entry_id or ""}, + ) + + coordinator: QBittorrentDataCoordinator = hass.data[DOMAIN][entry_id] + items = await coordinator.get_torrents(service_call.data[TORRENT_FILTER]) + info = format_torrents(items) + return { + STATE_ATTR_TORRENTS: info, + } + + hass.services.async_register( + DOMAIN, + SERVICE_GET_TORRENTS, + handle_get_torrents, + supports_response=SupportsResponse.ONLY, + ) + + async def handle_get_all_torrents( + service_call: ServiceCall, + ) -> dict[str, Any] | None: + torrents = {} + + for key, value in hass.data[DOMAIN].items(): + coordinator: QBittorrentDataCoordinator = value + items = await coordinator.get_torrents(service_call.data[TORRENT_FILTER]) + torrents[key] = format_torrents(items) + + return { + STATE_ATTR_ALL_TORRENTS: torrents, + } + + hass.services.async_register( + DOMAIN, + SERVICE_GET_ALL_TORRENTS, + handle_get_all_torrents, + supports_response=SupportsResponse.ONLY, + ) + + return True + async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool: """Set up qBittorrent from a config entry.""" diff --git a/homeassistant/components/qbittorrent/const.py b/homeassistant/components/qbittorrent/const.py index d8fe2c012a3..73e29d06f40 100644 --- a/homeassistant/components/qbittorrent/const.py +++ b/homeassistant/components/qbittorrent/const.py @@ -7,6 +7,13 @@ DOMAIN: Final = "qbittorrent" DEFAULT_NAME = "qBittorrent" DEFAULT_URL = "http://127.0.0.1:8080" +STATE_ATTR_TORRENTS = "torrents" +STATE_ATTR_ALL_TORRENTS = "all_torrents" + STATE_UP_DOWN = "up_down" STATE_SEEDING = "seeding" STATE_DOWNLOADING = "downloading" + +SERVICE_GET_TORRENTS = "get_torrents" +SERVICE_GET_ALL_TORRENTS = "get_all_torrents" +TORRENT_FILTER = "torrent_filter" diff --git a/homeassistant/components/qbittorrent/coordinator.py b/homeassistant/components/qbittorrent/coordinator.py index 32ce4cf9711..850bcf15ca2 100644 --- 
a/homeassistant/components/qbittorrent/coordinator.py +++ b/homeassistant/components/qbittorrent/coordinator.py @@ -10,7 +10,7 @@ from qbittorrent import Client from qbittorrent.client import LoginRequired from homeassistant.core import HomeAssistant -from homeassistant.exceptions import ConfigEntryError +from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.update_coordinator import DataUpdateCoordinator from .const import DOMAIN @@ -19,11 +19,18 @@ _LOGGER = logging.getLogger(__name__) class QBittorrentDataCoordinator(DataUpdateCoordinator[dict[str, Any]]): - """Coordinator for updating QBittorrent data.""" + """Coordinator for updating qBittorrent data.""" def __init__(self, hass: HomeAssistant, client: Client) -> None: """Initialize coordinator.""" self.client = client + # self.main_data: dict[str, int] = {} + self.total_torrents: dict[str, int] = {} + self.active_torrents: dict[str, int] = {} + self.inactive_torrents: dict[str, int] = {} + self.paused_torrents: dict[str, int] = {} + self.seeding_torrents: dict[str, int] = {} + self.started_torrents: dict[str, int] = {} super().__init__( hass, @@ -33,7 +40,21 @@ class QBittorrentDataCoordinator(DataUpdateCoordinator[dict[str, Any]]): ) async def _async_update_data(self) -> dict[str, Any]: + """Async method to update QBittorrent data.""" try: return await self.hass.async_add_executor_job(self.client.sync_main_data) except LoginRequired as exc: - raise ConfigEntryError("Invalid authentication") from exc + raise HomeAssistantError(str(exc)) from exc + + async def get_torrents(self, torrent_filter: str) -> list[dict[str, Any]]: + """Async method to get QBittorrent torrents.""" + try: + torrents = await self.hass.async_add_executor_job( + lambda: self.client.torrents(filter=torrent_filter) + ) + except LoginRequired as exc: + raise HomeAssistantError( + translation_domain=DOMAIN, translation_key="login_error" + ) from exc + + return torrents diff --git 
a/homeassistant/components/qbittorrent/helpers.py b/homeassistant/components/qbittorrent/helpers.py index b9c29675473..bbe53765f8b 100644 --- a/homeassistant/components/qbittorrent/helpers.py +++ b/homeassistant/components/qbittorrent/helpers.py @@ -1,5 +1,8 @@ """Helper functions for qBittorrent.""" +from datetime import UTC, datetime +from typing import Any + from qbittorrent.client import Client @@ -10,3 +13,48 @@ def setup_client(url: str, username: str, password: str, verify_ssl: bool) -> Cl # Get an arbitrary attribute to test if connection succeeds client.get_alternative_speed_status() return client + + +def seconds_to_hhmmss(seconds) -> str: + """Convert seconds to HH:MM:SS format.""" + if seconds == 8640000: + return "None" + + minutes, seconds = divmod(seconds, 60) + hours, minutes = divmod(minutes, 60) + return f"{int(hours):02}:{int(minutes):02}:{int(seconds):02}" + + +def format_unix_timestamp(timestamp) -> str: + """Format a UNIX timestamp to a human-readable date.""" + dt_object = datetime.fromtimestamp(timestamp, tz=UTC) + return dt_object.isoformat() + + +def format_progress(torrent) -> str: + """Format the progress of a torrent.""" + progress = torrent["progress"] + progress = float(progress) * 100 + return f"{progress:.2f}" + + +def format_torrents(torrents: list[dict[str, Any]]) -> dict[str, dict[str, Any]]: + """Format a list of torrents.""" + value = {} + for torrent in torrents: + value[torrent["name"]] = format_torrent(torrent) + + return value + + +def format_torrent(torrent) -> dict[str, Any]: + """Format a single torrent.""" + value = {} + value["id"] = torrent["hash"] + value["added_date"] = format_unix_timestamp(torrent["added_on"]) + value["percent_done"] = format_progress(torrent) + value["status"] = torrent["state"] + value["eta"] = seconds_to_hhmmss(torrent["eta"]) + value["ratio"] = "{:.2f}".format(float(torrent["ratio"])) + + return value diff --git a/homeassistant/components/qbittorrent/icons.json 
b/homeassistant/components/qbittorrent/icons.json index bb458c751e1..68fc1020dae 100644 --- a/homeassistant/components/qbittorrent/icons.json +++ b/homeassistant/components/qbittorrent/icons.json @@ -8,5 +8,9 @@ "default": "mdi:cloud-upload" } } + }, + "services": { + "get_torrents": "mdi:file-arrow-up-down-outline", + "get_all_torrents": "mdi:file-arrow-up-down-outline" } } diff --git a/homeassistant/components/qbittorrent/services.yaml b/homeassistant/components/qbittorrent/services.yaml new file mode 100644 index 00000000000..f7fc6b95f64 --- /dev/null +++ b/homeassistant/components/qbittorrent/services.yaml @@ -0,0 +1,35 @@ +get_torrents: + fields: + device_id: + required: true + selector: + device: + integration: qbittorrent + torrent_filter: + required: true + example: "all" + default: "all" + selector: + select: + options: + - "active" + - "inactive" + - "paused" + - "all" + - "seeding" + - "started" +get_all_torrents: + fields: + torrent_filter: + required: true + example: "all" + default: "all" + selector: + select: + options: + - "active" + - "inactive" + - "paused" + - "all" + - "seeding" + - "started" diff --git a/homeassistant/components/qbittorrent/strings.json b/homeassistant/components/qbittorrent/strings.json index 8b20a3354dd..5376e929429 100644 --- a/homeassistant/components/qbittorrent/strings.json +++ b/homeassistant/components/qbittorrent/strings.json @@ -48,5 +48,42 @@ "name": "All torrents" } } + }, + "services": { + "get_torrents": { + "name": "Get torrents", + "description": "Gets a list of current torrents", + "fields": { + "device_id": { + "name": "[%key:common::config_flow::data::device%]", + "description": "Which service to grab the list from" + }, + "torrent_filter": { + "name": "Torrent filter", + "description": "What kind of torrents you want to return, such as All or Active." 
+ } + } + }, + "get_all_torrents": { + "name": "Get all torrents", + "description": "Gets a list of current torrents from all instances of qBittorrent", + "fields": { + "torrent_filter": { + "name": "Torrent filter", + "description": "What kind of torrents you want to return, such as All or Active." + } + } + } + }, + "exceptions": { + "invalid_device": { + "message": "No device with id {device_id} was found" + }, + "invalid_entry_id": { + "message": "No entry with id {device_id} was found" + }, + "login_error": { + "message": "A login error occured. Please check you username and password." + } } } diff --git a/tests/components/qbittorrent/test_helpers.py b/tests/components/qbittorrent/test_helpers.py new file mode 100644 index 00000000000..b308cd33aec --- /dev/null +++ b/tests/components/qbittorrent/test_helpers.py @@ -0,0 +1,108 @@ +"""Test the qBittorrent helpers.""" + +from homeassistant.components.qbittorrent.helpers import ( + format_progress, + format_torrent, + format_torrents, + format_unix_timestamp, + seconds_to_hhmmss, +) +from homeassistant.core import HomeAssistant + + +async def test_seconds_to_hhmmss( + hass: HomeAssistant, +) -> None: + """Test the seconds_to_hhmmss function.""" + assert seconds_to_hhmmss(8640000) == "None" + assert seconds_to_hhmmss(3661) == "01:01:01" + + +async def test_format_unix_timestamp( + hass: HomeAssistant, +) -> None: + """Test the format_unix_timestamp function.""" + assert format_unix_timestamp(1640995200) == "2022-01-01T00:00:00+00:00" + + +async def test_format_progress( + hass: HomeAssistant, +) -> None: + """Test the format_progress function.""" + assert format_progress({"progress": 0.5}) == "50.00" + + +async def test_format_torrents( + hass: HomeAssistant, +) -> None: + """Test the format_torrents function.""" + torrents_data = [ + { + "name": "torrent1", + "hash": "hash1", + "added_on": 1640995200, + "progress": 0.5, + "state": "paused", + "eta": 86400, + "ratio": 1.0, + }, + { + "name": "torrent2", + "hash": 
"hash1", + "added_on": 1640995200, + "progress": 0.5, + "state": "paused", + "eta": 86400, + "ratio": 1.0, + }, + ] + + expected_result = { + "torrent1": { + "id": "hash1", + "added_date": "2022-01-01T00:00:00+00:00", + "percent_done": "50.00", + "status": "paused", + "eta": "24:00:00", + "ratio": "1.00", + }, + "torrent2": { + "id": "hash1", + "added_date": "2022-01-01T00:00:00+00:00", + "percent_done": "50.00", + "status": "paused", + "eta": "24:00:00", + "ratio": "1.00", + }, + } + + result = format_torrents(torrents_data) + + assert result == expected_result + + +async def test_format_torrent( + hass: HomeAssistant, +) -> None: + """Test the format_torrent function.""" + torrent_data = { + "hash": "hash1", + "added_on": 1640995200, + "progress": 0.5, + "state": "paused", + "eta": 86400, + "ratio": 1.0, + } + + expected_result = { + "id": "hash1", + "added_date": "2022-01-01T00:00:00+00:00", + "percent_done": "50.00", + "status": "paused", + "eta": "24:00:00", + "ratio": "1.00", + } + + result = format_torrent(torrent_data) + + assert result == expected_result From ed4f00279e87d0dd171489664d8fce05f80acab1 Mon Sep 17 00:00:00 2001 From: hahn-th <15319212+hahn-th@users.noreply.github.com> Date: Fri, 19 Apr 2024 08:09:48 +0200 Subject: [PATCH 177/426] Show default profiles in homematic cloud climate entity (#107348) * Default names for visible profiles * Increase number of devices in test * remove unnecessary check * Add testcase and split another into two * Add type annotations and docstring * Remove code which not belongs to the PR * Add myself to codeowners --- CODEOWNERS | 2 + .../components/homematicip_cloud/climate.py | 39 +- .../homematicip_cloud/manifest.json | 2 +- .../fixtures/homematicip_cloud.json | 344 ++++++++++++++++++ .../homematicip_cloud/test_climate.py | 89 ++++- .../homematicip_cloud/test_device.py | 2 +- 6 files changed, 462 insertions(+), 16 deletions(-) diff --git a/CODEOWNERS b/CODEOWNERS index b2de3031cf8..98f52070ed1 100644 --- 
a/CODEOWNERS +++ b/CODEOWNERS @@ -599,6 +599,8 @@ build.json @home-assistant/supervisor /tests/components/homekit_controller/ @Jc2k @bdraco /homeassistant/components/homematic/ @pvizeli /tests/components/homematic/ @pvizeli +/homeassistant/components/homematicip_cloud/ @hahn-th +/tests/components/homematicip_cloud/ @hahn-th /homeassistant/components/homewizard/ @DCSBL /tests/components/homewizard/ @DCSBL /homeassistant/components/honeywell/ @rdfurman @mkmer diff --git a/homeassistant/components/homematicip_cloud/climate.py b/homeassistant/components/homematicip_cloud/climate.py index b0eb2a9edfa..dd89efed1c9 100644 --- a/homeassistant/components/homematicip_cloud/climate.py +++ b/homeassistant/components/homematicip_cloud/climate.py @@ -13,6 +13,7 @@ from homematicip.aio.group import AsyncHeatingGroup from homematicip.base.enums import AbsenceType from homematicip.device import Switch from homematicip.functionalHomes import IndoorClimateHome +from homematicip.group import HeatingCoolingProfile from homeassistant.components.climate import ( PRESET_AWAY, @@ -35,6 +36,14 @@ from .hap import HomematicipHAP HEATING_PROFILES = {"PROFILE_1": 0, "PROFILE_2": 1, "PROFILE_3": 2} COOLING_PROFILES = {"PROFILE_4": 3, "PROFILE_5": 4, "PROFILE_6": 5} +NICE_PROFILE_NAMES = { + "PROFILE_1": "Default", + "PROFILE_2": "Alternative 1", + "PROFILE_3": "Alternative 2", + "PROFILE_4": "Cooling 1", + "PROFILE_5": "Cooling 2", + "PROFILE_6": "Cooling 3", +} ATTR_PRESET_END_TIME = "preset_end_time" PERMANENT_END_TIME = "permanent" @@ -164,8 +173,9 @@ class HomematicipHeatingGroup(HomematicipGenericEntity, ClimateEntity): return PRESET_ECO return ( - self._device.activeProfile.name - if self._device.activeProfile.name in self._device_profile_names + self._get_qualified_profile_name(self._device.activeProfile) + if self._get_qualified_profile_name(self._device.activeProfile) + in self._device_profile_names else None ) @@ -218,9 +228,6 @@ class HomematicipHeatingGroup(HomematicipGenericEntity, 
ClimateEntity): async def async_set_preset_mode(self, preset_mode: str) -> None: """Set new preset mode.""" - if preset_mode not in self.preset_modes: - return - if self._device.boostMode and preset_mode != PRESET_BOOST: await self._device.set_boost(False) if preset_mode == PRESET_BOOST: @@ -256,20 +263,30 @@ class HomematicipHeatingGroup(HomematicipGenericEntity, ClimateEntity): return self._home.get_functionalHome(IndoorClimateHome) @property - def _device_profiles(self) -> list[Any]: + def _device_profiles(self) -> list[HeatingCoolingProfile]: """Return the relevant profiles.""" return [ profile for profile in self._device.profiles - if profile.visible - and profile.name != "" - and profile.index in self._relevant_profile_group + if profile.visible and profile.index in self._relevant_profile_group ] @property def _device_profile_names(self) -> list[str]: """Return a collection of profile names.""" - return [profile.name for profile in self._device_profiles] + return [ + self._get_qualified_profile_name(profile) + for profile in self._device_profiles + ] + + def _get_qualified_profile_name(self, profile: HeatingCoolingProfile) -> str: + """Get a name for the given profile. 
If exists, this is the name of the profile.""" + if profile.name != "": + return profile.name + if profile.index in NICE_PROFILE_NAMES: + return NICE_PROFILE_NAMES[profile.index] + + return profile.index def _get_profile_idx_by_name(self, profile_name: str) -> int: """Return a profile index by name.""" @@ -277,7 +294,7 @@ class HomematicipHeatingGroup(HomematicipGenericEntity, ClimateEntity): index_name = [ profile.index for profile in self._device_profiles - if profile.name == profile_name + if self._get_qualified_profile_name(profile) == profile_name ] return relevant_index[index_name[0]] diff --git a/homeassistant/components/homematicip_cloud/manifest.json b/homeassistant/components/homematicip_cloud/manifest.json index 580a0f637c1..9da4e1bee05 100644 --- a/homeassistant/components/homematicip_cloud/manifest.json +++ b/homeassistant/components/homematicip_cloud/manifest.json @@ -1,7 +1,7 @@ { "domain": "homematicip_cloud", "name": "HomematicIP Cloud", - "codeowners": [], + "codeowners": ["@hahn-th"], "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/homematicip_cloud", "iot_class": "cloud_push", diff --git a/tests/components/homematicip_cloud/fixtures/homematicip_cloud.json b/tests/components/homematicip_cloud/fixtures/homematicip_cloud.json index 83b5f8993bc..922601ca733 100644 --- a/tests/components/homematicip_cloud/fixtures/homematicip_cloud.json +++ b/tests/components/homematicip_cloud/fixtures/homematicip_cloud.json @@ -4791,6 +4791,59 @@ "type": "HEATING_THERMOSTAT", "updateState": "UP_TO_DATE" }, + "3014F71100000000ETRV0013": { + "automaticValveAdaptionNeeded": false, + "availableFirmwareVersion": "2.0.2", + "connectionType": "HMIP_RF", + "firmwareVersion": "2.0.2", + "firmwareVersionInteger": 131074, + "functionalChannels": { + "0": { + "configPending": false, + "deviceId": "3014F71100000000ETRV0013", + "dutyCycle": false, + "functionalChannelType": "DEVICE_OPERATIONLOCK", + "groupIndex": 0, + "groups": 
["00000000-0000-0000-0000-000000000014"], + "index": 0, + "label": "", + "lowBat": false, + "operationLockActive": false, + "routerModuleEnabled": false, + "routerModuleSupported": false, + "rssiDeviceValue": -58, + "rssiPeerValue": -58, + "unreach": false, + "supportedOptionalFeatures": {} + }, + "1": { + "deviceId": "3014F71100000000ETRV0013", + "functionalChannelType": "HEATING_THERMOSTAT_CHANNEL", + "groupIndex": 1, + "groups": ["00000000-0000-0000-0005-000000000019"], + "index": 1, + "label": "", + "valveActualTemperature": 20.0, + "setPointTemperature": 5.0, + "temperatureOffset": 0.0, + "valvePosition": 0.0, + "valveState": "ADAPTION_DONE" + } + }, + "homeId": "00000000-0000-0000-0000-000000000001", + "id": "3014F71100000000ETRV0013", + "label": "Heizkörperthermostat4", + "lastStatusUpdate": 1524514007132, + "liveUpdateState": "LIVE_UPDATE_NOT_SUPPORTED", + "manufacturerCode": 1, + "modelId": 269, + "modelType": "HMIP-eTRV", + "oem": "eQ-3", + "permanentlyReachable": true, + "serializedGlobalTradeItemNumber": "3014F71100000000ETRV0013", + "type": "HEATING_THERMOSTAT", + "updateState": "UP_TO_DATE" + }, "3014F7110000000000000014": { "automaticValveAdaptionNeeded": false, "availableFirmwareVersion": "2.0.2", @@ -8535,6 +8588,297 @@ "windowOpenTemperature": 5.0, "windowState": null }, + "00000000-0000-0000-0005-000000000019": { + "activeProfile": "PROFILE_1", + "actualTemperature": null, + "boostDuration": 15, + "boostMode": false, + "channels": [ + { + "channelIndex": 1, + "deviceId": "3014F71100000000ETRV0013" + } + ], + "controlMode": "AUTOMATIC", + "controllable": true, + "cooling": null, + "coolingAllowed": false, + "coolingIgnored": false, + "dutyCycle": false, + "ecoAllowed": true, + "ecoIgnored": false, + "externalClockCoolingTemperature": 23.0, + "externalClockEnabled": false, + "externalClockHeatingTemperature": 19.0, + "floorHeatingMode": "FLOOR_HEATING_STANDARD", + "homeId": "00000000-0000-0000-0000-000000000001", + "humidity": null, + 
"humidityLimitEnabled": true, + "humidityLimitValue": 60, + "id": "00000000-0000-0000-0005-000000000019", + "label": "Vorzimmer3", + "lastSetPointReachedTimestamp": 1557767559939, + "lastSetPointUpdatedTimestamp": 1557767559939, + "lastStatusUpdate": 1524514007132, + "lowBat": false, + "maxTemperature": 30.0, + "metaGroupId": "00000000-0000-0000-0000-000000000014", + "minTemperature": 5.0, + "partyMode": false, + "profiles": { + "PROFILE_1": { + "enabled": true, + "groupId": "00000000-0000-0000-0005-000000000019", + "index": "PROFILE_1", + "name": "", + "profileId": "00000000-0000-0000-0000-000000000058", + "visible": true + }, + "PROFILE_2": { + "enabled": true, + "groupId": "00000000-0000-0000-0005-000000000019", + "index": "PROFILE_2", + "name": "", + "profileId": "00000000-0000-0000-0000-000000000059", + "visible": true + }, + "PROFILE_3": { + "enabled": true, + "groupId": "00000000-0000-0000-0005-000000000019", + "index": "PROFILE_3", + "name": "", + "profileId": "00000000-0000-0000-0000-000000000060", + "visible": false + }, + "PROFILE_4": { + "enabled": false, + "groupId": "00000000-0000-0000-0005-000000000019", + "index": "PROFILE_4", + "name": "", + "profileId": "00000000-0000-0000-0000-000000000061", + "visible": true + }, + "PROFILE_5": { + "enabled": false, + "groupId": "00000000-0000-0000-0005-000000000019", + "index": "PROFILE_5", + "name": "", + "profileId": "00000000-0000-0000-0000-000000000062", + "visible": false + }, + "PROFILE_6": { + "enabled": false, + "groupId": "00000000-0000-0000-0005-000000000019", + "index": "PROFILE_6", + "name": "", + "profileId": "00000000-0000-0000-0000-000000000063", + "visible": false + } + }, + "setPointTemperature": 5.0, + "type": "HEATING", + "unreach": false, + "valvePosition": 0.0, + "valveSilentModeEnabled": false, + "valveSilentModeSupported": false, + "heatingFailureSupported": true, + "windowOpenTemperature": 5.0, + "windowState": null + }, + "00000000-0000-0000-0001-000000000019": { + "activeProfile": 
"PROFILE_1", + "actualTemperature": null, + "boostDuration": 15, + "boostMode": false, + "channels": [ + { + "channelIndex": 1, + "deviceId": "3014F7110000000000000013" + } + ], + "controlMode": "AUTOMATIC", + "controllable": true, + "cooling": null, + "coolingAllowed": false, + "coolingIgnored": false, + "dutyCycle": false, + "ecoAllowed": true, + "ecoIgnored": false, + "externalClockCoolingTemperature": 23.0, + "externalClockEnabled": false, + "externalClockHeatingTemperature": 19.0, + "floorHeatingMode": "FLOOR_HEATING_STANDARD", + "homeId": "00000000-0000-0000-0000-000000000001", + "humidity": null, + "humidityLimitEnabled": true, + "humidityLimitValue": 60, + "id": "00000000-0000-0000-0001-000000000019", + "label": "Vorzimmer", + "lastSetPointReachedTimestamp": 1557767559939, + "lastSetPointUpdatedTimestamp": 1557767559939, + "lastStatusUpdate": 1524514007132, + "lowBat": false, + "maxTemperature": 30.0, + "metaGroupId": "00000000-0000-0000-0000-000000000014", + "minTemperature": 5.0, + "partyMode": false, + "profiles": { + "PROFILE_1": { + "enabled": true, + "groupId": "00000000-0000-0000-0001-000000000019", + "index": "PROFILE_1", + "name": "", + "profileId": "00000000-0000-0000-0000-000000000058", + "visible": true + }, + "PROFILE_2": { + "enabled": true, + "groupId": "00000000-0000-0000-0001-000000000019", + "index": "PROFILE_2", + "name": "", + "profileId": "00000000-0000-0000-0000-000000000059", + "visible": false + }, + "PROFILE_3": { + "enabled": true, + "groupId": "00000000-0000-0000-0001-000000000019", + "index": "PROFILE_3", + "name": "", + "profileId": "00000000-0000-0000-0000-000000000060", + "visible": false + }, + "PROFILE_4": { + "enabled": false, + "groupId": "00000000-0000-0000-0001-000000000019", + "index": "PROFILE_4", + "name": "", + "profileId": "00000000-0000-0000-0000-000000000061", + "visible": true + }, + "PROFILE_5": { + "enabled": false, + "groupId": "00000000-0000-0000-0001-000000000019", + "index": "PROFILE_5", + "name": "", + 
"profileId": "00000000-0000-0000-0000-000000000062", + "visible": false + }, + "PROFILE_6": { + "enabled": false, + "groupId": "00000000-0000-0000-0001-000000000019", + "index": "PROFILE_6", + "name": "", + "profileId": "00000000-0000-0000-0000-000000000063", + "visible": false + } + }, + "setPointTemperature": 5.0, + "type": "HEATING", + "unreach": false, + "valvePosition": 0.0, + "valveSilentModeEnabled": false, + "valveSilentModeSupported": false, + "heatingFailureSupported": true, + "windowOpenTemperature": 5.0, + "windowState": null + }, + "00000000-0000-0001-0001-000000000019": { + "activeProfile": "PROFILE_1", + "actualTemperature": null, + "boostDuration": 15, + "boostMode": false, + "channels": [ + { + "channelIndex": 1, + "deviceId": "3014F7110000000000000013" + } + ], + "controlMode": "AUTOMATIC", + "controllable": true, + "cooling": null, + "coolingAllowed": false, + "coolingIgnored": false, + "dutyCycle": false, + "ecoAllowed": true, + "ecoIgnored": false, + "externalClockCoolingTemperature": 23.0, + "externalClockEnabled": false, + "externalClockHeatingTemperature": 19.0, + "floorHeatingMode": "FLOOR_HEATING_STANDARD", + "homeId": "00000000-0000-0000-0000-000000000001", + "humidity": null, + "humidityLimitEnabled": true, + "humidityLimitValue": 60, + "id": "00000000-0000-0001-0001-000000000019", + "label": "Vorzimmer2", + "lastSetPointReachedTimestamp": 1557767559939, + "lastSetPointUpdatedTimestamp": 1557767559939, + "lastStatusUpdate": 1524514007132, + "lowBat": false, + "maxTemperature": 30.0, + "metaGroupId": "00000000-0000-0000-0000-000000000014", + "minTemperature": 5.0, + "partyMode": false, + "profiles": { + "PROFILE_1": { + "enabled": true, + "groupId": "00000000-0000-0001-0001-000000000019", + "index": "PROFILE_1", + "name": "Testprofile", + "profileId": "00000000-0000-0000-0001-000000000058", + "visible": true + }, + "PROFILE_2": { + "enabled": true, + "groupId": "00000000-0000-0001-0001-000000000019", + "index": "PROFILE_2", + "name": "", 
+ "profileId": "00000000-0000-0000-0001-000000000059", + "visible": true + }, + "PROFILE_3": { + "enabled": true, + "groupId": "00000000-0000-0001-0001-000000000019", + "index": "PROFILE_3", + "name": "", + "profileId": "00000000-0000-0000-0001-000000000060", + "visible": false + }, + "PROFILE_4": { + "enabled": false, + "groupId": "00000000-0000-0001-0001-000000000019", + "index": "PROFILE_4", + "name": "", + "profileId": "00000000-0000-0000-0001-000000000061", + "visible": true + }, + "PROFILE_5": { + "enabled": false, + "groupId": "00000000-0000-0001-0001-000000000019", + "index": "PROFILE_5", + "name": "", + "profileId": "00000000-0000-0000-0001-000000000062", + "visible": false + }, + "PROFILE_6": { + "enabled": false, + "groupId": "00000000-0000-0001-0001-000000000019", + "index": "PROFILE_6", + "name": "", + "profileId": "00000000-0000-0000-0001-000000000063", + "visible": false + } + }, + "setPointTemperature": 5.0, + "type": "HEATING", + "unreach": false, + "valvePosition": 0.0, + "valveSilentModeEnabled": false, + "valveSilentModeSupported": false, + "heatingFailureSupported": true, + "windowOpenTemperature": 5.0, + "windowState": null + }, "00000000-AAAA-0000-0000-000000000001": { "actualTemperature": 15.4, "channels": [ diff --git a/tests/components/homematicip_cloud/test_climate.py b/tests/components/homematicip_cloud/test_climate.py index 9ede89859dc..f175e2060df 100644 --- a/tests/components/homematicip_cloud/test_climate.py +++ b/tests/components/homematicip_cloud/test_climate.py @@ -1,6 +1,7 @@ """Tests for HomematicIP Cloud climate.""" import datetime +from unittest.mock import patch from homematicip.base.enums import AbsenceType from homematicip.functionalHomes import IndoorClimateHome @@ -15,7 +16,6 @@ from homeassistant.components.climate import ( PRESET_AWAY, PRESET_BOOST, PRESET_ECO, - PRESET_NONE, HVACAction, HVACMode, ) @@ -217,12 +217,14 @@ async def test_hmip_heating_group_heat( ha_state = hass.states.get(entity_id) assert ha_state.state 
== HVACMode.AUTO + # hvac mode "dry" is not available. expect a valueerror. await hass.services.async_call( "climate", "set_hvac_mode", {"entity_id": entity_id, "hvac_mode": "dry"}, blocking=True, ) + assert len(hmip_device.mock_calls) == service_call_counter + 24 # Only fire event from last async_manipulate_test_data available. assert hmip_device.mock_calls[-1][0] == "fire_update_event" @@ -429,14 +431,95 @@ async def test_hmip_heating_group_heat_with_radiator( assert ha_state.attributes["min_temp"] == 5.0 assert ha_state.attributes["max_temp"] == 30.0 assert ha_state.attributes["temperature"] == 5.0 - assert ha_state.attributes[ATTR_PRESET_MODE] is None + assert ha_state.attributes[ATTR_PRESET_MODE] == "Default" assert ha_state.attributes[ATTR_PRESET_MODES] == [ - PRESET_NONE, PRESET_BOOST, PRESET_ECO, + "Default", ] +async def test_hmip_heating_profile_default_name( + hass: HomeAssistant, default_mock_hap_factory +) -> None: + """Test visible profile 1 without a name should be displayed as 'Default'.""" + entity_id = "climate.vorzimmer3" + entity_name = "Vorzimmer3" + device_model = None + mock_hap = await default_mock_hap_factory.async_get_mock_hap( + test_devices=["Heizkörperthermostat4"], + test_groups=[entity_name], + ) + ha_state, hmip_device = get_and_check_entity_basics( + hass, mock_hap, entity_id, entity_name, device_model + ) + + assert hmip_device + assert ha_state.state == HVACMode.AUTO + assert ha_state.attributes[ATTR_PRESET_MODES] == [ + PRESET_BOOST, + PRESET_ECO, + "Default", + "Alternative 1", + ] + + +async def test_hmip_heating_profile_naming( + hass: HomeAssistant, default_mock_hap_factory +) -> None: + """Test Heating Profile Naming.""" + entity_id = "climate.vorzimmer2" + entity_name = "Vorzimmer2" + device_model = None + mock_hap = await default_mock_hap_factory.async_get_mock_hap( + test_devices=["Heizkörperthermostat2"], + test_groups=[entity_name], + ) + ha_state, hmip_device = get_and_check_entity_basics( + hass, mock_hap, entity_id, 
entity_name, device_model + ) + + assert hmip_device + assert ha_state.state == HVACMode.AUTO + assert ha_state.attributes[ATTR_PRESET_MODES] == [ + PRESET_BOOST, + PRESET_ECO, + "Testprofile", + "Alternative 1", + ] + + +async def test_hmip_heating_profile_name_not_in_list( + hass: HomeAssistant, default_mock_hap_factory +) -> None: + """Test set profile when profile is not in available profiles.""" + expected_profile = "Testprofile" + entity_id = "climate.vorzimmer2" + entity_name = "Vorzimmer2" + device_model = None + mock_hap = await default_mock_hap_factory.async_get_mock_hap( + test_devices=["Heizkörperthermostat2"], + test_groups=[entity_name], + ) + ha_state, hmip_device = get_and_check_entity_basics( + hass, mock_hap, entity_id, entity_name, device_model + ) + + with patch( + "homeassistant.components.homematicip_cloud.climate.NICE_PROFILE_NAMES", + return_value={}, + ): + await hass.services.async_call( + "climate", + "set_preset_mode", + {"entity_id": entity_id, "preset_mode": expected_profile}, + blocking=True, + ) + + ha_state = hass.states.get(entity_id) + assert ha_state.attributes[ATTR_PRESET_MODE] == expected_profile + + async def test_hmip_climate_services( hass: HomeAssistant, mock_hap_with_service ) -> None: diff --git a/tests/components/homematicip_cloud/test_device.py b/tests/components/homematicip_cloud/test_device.py index 9fc1f518c64..fb7fe7d7deb 100644 --- a/tests/components/homematicip_cloud/test_device.py +++ b/tests/components/homematicip_cloud/test_device.py @@ -26,7 +26,7 @@ async def test_hmip_load_all_supported_devices( test_devices=None, test_groups=None ) - assert len(mock_hap.hmip_device_by_entity_id) == 272 + assert len(mock_hap.hmip_device_by_entity_id) == 278 async def test_hmip_remove_device( From 79c9db408964c14345a4ba3f61e98d6bb5512b3d Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Fri, 19 Apr 2024 02:43:02 -0500 Subject: [PATCH 178/426] Bump aiodiscover to 2.1.0 (#115823) --- homeassistant/components/dhcp/manifest.json | 2 +- homeassistant/package_constraints.txt | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/dhcp/manifest.json b/homeassistant/components/dhcp/manifest.json index 0d77b997e82..b8abd0a9919 100644 --- a/homeassistant/components/dhcp/manifest.json +++ b/homeassistant/components/dhcp/manifest.json @@ -15,7 +15,7 @@ "quality_scale": "internal", "requirements": [ "aiodhcpwatcher==1.0.0", - "aiodiscover==2.0.0", + "aiodiscover==2.1.0", "cached_ipaddress==0.3.0" ] } diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index 7f134b1a93d..ec6998055b0 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -1,7 +1,7 @@ # Automatically generated by gen_requirements_all.py, do not edit aiodhcpwatcher==1.0.0 -aiodiscover==2.0.0 +aiodiscover==2.1.0 aiodns==3.2.0 aiohttp-fast-url-dispatcher==0.3.0 aiohttp-isal==0.2.0 diff --git a/requirements_all.txt b/requirements_all.txt index c33bcedc61d..5724a8df371 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -222,7 +222,7 @@ aiocomelit==0.9.0 aiodhcpwatcher==1.0.0 # homeassistant.components.dhcp -aiodiscover==2.0.0 +aiodiscover==2.1.0 # homeassistant.components.dnsip aiodns==3.2.0 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 28be0f25568..c9a3668ca9d 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -201,7 +201,7 @@ aiocomelit==0.9.0 aiodhcpwatcher==1.0.0 # homeassistant.components.dhcp -aiodiscover==2.0.0 +aiodiscover==2.1.0 # homeassistant.components.dnsip aiodns==3.2.0 From 8d7ef6ea9a043c87b7ceac251e355463e2cac957 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 19 
Apr 2024 12:11:08 +0200 Subject: [PATCH 179/426] Bump actions/upload-artifact from 4.3.1 to 4.3.2 (#115842) --- .github/workflows/builder.yml | 2 +- .github/workflows/ci.yaml | 18 +++++++++--------- .github/workflows/wheels.yml | 4 ++-- 3 files changed, 12 insertions(+), 12 deletions(-) diff --git a/.github/workflows/builder.yml b/.github/workflows/builder.yml index f02a8bacce8..20ae68fbba4 100644 --- a/.github/workflows/builder.yml +++ b/.github/workflows/builder.yml @@ -69,7 +69,7 @@ jobs: run: find ./homeassistant/components/*/translations -name "*.json" | tar zcvf translations.tar.gz -T - - name: Upload translations - uses: actions/upload-artifact@v4.3.1 + uses: actions/upload-artifact@v4.3.2 with: name: translations path: translations.tar.gz diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index a5bafa0c52d..c84ae4513cc 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -715,7 +715,7 @@ jobs: . venv/bin/activate python -m script.split_tests ${{ needs.info.outputs.test_group_count }} tests - name: Upload pytest_buckets - uses: actions/upload-artifact@v4.3.1 + uses: actions/upload-artifact@v4.3.2 with: name: pytest_buckets path: pytest_buckets.txt @@ -811,14 +811,14 @@ jobs: 2>&1 | tee pytest-${{ matrix.python-version }}-${{ matrix.group }}.txt - name: Upload pytest output if: success() || failure() && steps.pytest-full.conclusion == 'failure' - uses: actions/upload-artifact@v4.3.1 + uses: actions/upload-artifact@v4.3.2 with: name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{ matrix.group }} path: pytest-*.txt overwrite: true - name: Upload coverage artifact if: needs.info.outputs.skip_coverage != 'true' - uses: actions/upload-artifact@v4.3.1 + uses: actions/upload-artifact@v4.3.2 with: name: coverage-${{ matrix.python-version }}-${{ matrix.group }} path: coverage.xml @@ -933,7 +933,7 @@ jobs: 2>&1 | tee pytest-${{ matrix.python-version }}-${mariadb}.txt - name: Upload pytest output if: success() || 
failure() && steps.pytest-partial.conclusion == 'failure' - uses: actions/upload-artifact@v4.3.1 + uses: actions/upload-artifact@v4.3.2 with: name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{ steps.pytest-partial.outputs.mariadb }} @@ -941,7 +941,7 @@ jobs: overwrite: true - name: Upload coverage artifact if: needs.info.outputs.skip_coverage != 'true' - uses: actions/upload-artifact@v4.3.1 + uses: actions/upload-artifact@v4.3.2 with: name: coverage-${{ matrix.python-version }}-${{ steps.pytest-partial.outputs.mariadb }} @@ -1056,7 +1056,7 @@ jobs: 2>&1 | tee pytest-${{ matrix.python-version }}-${postgresql}.txt - name: Upload pytest output if: success() || failure() && steps.pytest-partial.conclusion == 'failure' - uses: actions/upload-artifact@v4.3.1 + uses: actions/upload-artifact@v4.3.2 with: name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{ steps.pytest-partial.outputs.postgresql }} @@ -1064,7 +1064,7 @@ jobs: overwrite: true - name: Upload coverage artifact if: needs.info.outputs.skip_coverage != 'true' - uses: actions/upload-artifact@v4.3.1 + uses: actions/upload-artifact@v4.3.2 with: name: coverage-${{ matrix.python-version }}-${{ steps.pytest-partial.outputs.postgresql }} @@ -1193,14 +1193,14 @@ jobs: 2>&1 | tee pytest-${{ matrix.python-version }}-${{ matrix.group }}.txt - name: Upload pytest output if: success() || failure() && steps.pytest-partial.conclusion == 'failure' - uses: actions/upload-artifact@v4.3.1 + uses: actions/upload-artifact@v4.3.2 with: name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{ matrix.group }} path: pytest-*.txt overwrite: true - name: Upload coverage artifact if: needs.info.outputs.skip_coverage != 'true' - uses: actions/upload-artifact@v4.3.1 + uses: actions/upload-artifact@v4.3.2 with: name: coverage-${{ matrix.python-version }}-${{ matrix.group }} path: coverage.xml diff --git a/.github/workflows/wheels.yml b/.github/workflows/wheels.yml index 
7102df0ae4d..c6ca78b7847 100644 --- a/.github/workflows/wheels.yml +++ b/.github/workflows/wheels.yml @@ -63,14 +63,14 @@ jobs: ) > .env_file - name: Upload env_file - uses: actions/upload-artifact@v4.3.1 + uses: actions/upload-artifact@v4.3.2 with: name: env_file path: ./.env_file overwrite: true - name: Upload requirements_diff - uses: actions/upload-artifact@v4.3.1 + uses: actions/upload-artifact@v4.3.2 with: name: requirements_diff path: ./requirements_diff.txt From f8738d92631c5397328c545f4295f0036988bf96 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 19 Apr 2024 12:22:09 +0200 Subject: [PATCH 180/426] Bump actions/download-artifact from 4.1.4 to 4.1.5 (#115841) --- .github/workflows/builder.yml | 4 ++-- .github/workflows/ci.yaml | 6 +++--- .github/workflows/wheels.yml | 8 ++++---- 3 files changed, 9 insertions(+), 9 deletions(-) diff --git a/.github/workflows/builder.yml b/.github/workflows/builder.yml index 20ae68fbba4..9d992608317 100644 --- a/.github/workflows/builder.yml +++ b/.github/workflows/builder.yml @@ -175,7 +175,7 @@ jobs: sed -i "s|pykrakenapi|# pykrakenapi|g" requirements_all.txt - name: Download translations - uses: actions/download-artifact@v4.1.4 + uses: actions/download-artifact@v4.1.5 with: name: translations @@ -458,7 +458,7 @@ jobs: python-version: ${{ env.DEFAULT_PYTHON }} - name: Download translations - uses: actions/download-artifact@v4.1.4 + uses: actions/download-artifact@v4.1.5 with: name: translations diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index c84ae4513cc..5f186c32e9a 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -776,7 +776,7 @@ jobs: run: | echo "::add-matcher::.github/workflows/matchers/pytest-slow.json" - name: Download pytest_buckets - uses: actions/download-artifact@v4.1.4 + uses: actions/download-artifact@v4.1.5 with: name: pytest_buckets - name: Compile English translations @@ -1088,7 +1088,7 @@ jobs: 
- name: Check out code from GitHub uses: actions/checkout@v4.1.2 - name: Download all coverage artifacts - uses: actions/download-artifact@v4.1.4 + uses: actions/download-artifact@v4.1.5 with: pattern: coverage-* - name: Upload coverage to Codecov @@ -1221,7 +1221,7 @@ jobs: - name: Check out code from GitHub uses: actions/checkout@v4.1.2 - name: Download all coverage artifacts - uses: actions/download-artifact@v4.1.4 + uses: actions/download-artifact@v4.1.5 with: pattern: coverage-* - name: Upload coverage to Codecov diff --git a/.github/workflows/wheels.yml b/.github/workflows/wheels.yml index c6ca78b7847..36a9fa1f839 100644 --- a/.github/workflows/wheels.yml +++ b/.github/workflows/wheels.yml @@ -91,12 +91,12 @@ jobs: uses: actions/checkout@v4.1.2 - name: Download env_file - uses: actions/download-artifact@v4.1.4 + uses: actions/download-artifact@v4.1.5 with: name: env_file - name: Download requirements_diff - uses: actions/download-artifact@v4.1.4 + uses: actions/download-artifact@v4.1.5 with: name: requirements_diff @@ -129,12 +129,12 @@ jobs: uses: actions/checkout@v4.1.2 - name: Download env_file - uses: actions/download-artifact@v4.1.4 + uses: actions/download-artifact@v4.1.5 with: name: env_file - name: Download requirements_diff - uses: actions/download-artifact@v4.1.4 + uses: actions/download-artifact@v4.1.5 with: name: requirements_diff From 5b082ed6691df748833bc5c0998e91eeffa538d9 Mon Sep 17 00:00:00 2001 From: Jan Bouwhuis Date: Fri, 19 Apr 2024 14:48:18 +0200 Subject: [PATCH 181/426] Add group tests with mixed domain entities (#115849) --- tests/components/group/test_init.py | 107 ++++++++++++++++++++++++++++ 1 file changed, 107 insertions(+) diff --git a/tests/components/group/test_init.py b/tests/components/group/test_init.py index 0f8d487b340..9c2f14f5d74 100644 --- a/tests/components/group/test_init.py +++ b/tests/components/group/test_init.py @@ -2,6 +2,7 @@ from __future__ import annotations +import asyncio from collections import OrderedDict 
from typing import Any from unittest.mock import patch @@ -15,11 +16,15 @@ from homeassistant.const import ( ATTR_ICON, EVENT_HOMEASSISTANT_START, SERVICE_RELOAD, + STATE_CLOSED, STATE_HOME, + STATE_LOCKED, STATE_NOT_HOME, STATE_OFF, STATE_ON, + STATE_OPEN, STATE_UNKNOWN, + STATE_UNLOCKED, ) from homeassistant.core import CoreState, HomeAssistant from homeassistant.helpers import entity_registry as er @@ -603,6 +608,108 @@ async def test_is_on(hass: HomeAssistant) -> None: assert not group.is_on(hass, "non.existing") +@pytest.mark.parametrize( + ( + "domains", + "states_old", + "states_new", + "state_ison_group_old", + "state_ison_group_new", + ), + [ + ( + ("light", "light"), + (STATE_ON, STATE_OFF), + (STATE_OFF, STATE_OFF), + (STATE_ON, True), + (STATE_OFF, False), + ), + ( + ("cover", "cover"), + (STATE_OPEN, STATE_CLOSED), + (STATE_CLOSED, STATE_CLOSED), + (STATE_OPEN, True), + (STATE_CLOSED, False), + ), + ( + ("lock", "lock"), + (STATE_UNLOCKED, STATE_LOCKED), + (STATE_LOCKED, STATE_LOCKED), + (STATE_UNLOCKED, True), + (STATE_LOCKED, False), + ), + ( + ("cover", "lock"), + (STATE_OPEN, STATE_LOCKED), + (STATE_CLOSED, STATE_LOCKED), + (STATE_ON, True), + (STATE_OFF, False), + ), + ( + ("cover", "lock"), + (STATE_OPEN, STATE_UNLOCKED), + (STATE_CLOSED, STATE_LOCKED), + (STATE_ON, True), + (STATE_OFF, False), + ), + ( + ("cover", "lock", "light"), + (STATE_OPEN, STATE_LOCKED, STATE_ON), + (STATE_CLOSED, STATE_LOCKED, STATE_OFF), + (STATE_ON, True), + (STATE_OFF, False), + ), + ], +) +async def test_is_on_and_state_mixed_domains( + hass: HomeAssistant, + domains: tuple[str,], + states_old: tuple[str,], + states_new: tuple[str,], + state_ison_group_old: tuple[str, bool], + state_ison_group_new: tuple[str, bool], +) -> None: + """Test is_on method with mixed domains.""" + count = len(domains) + entity_ids = [f"{domains[index]}.test_{index}" for index in range(count)] + for index in range(count): + hass.states.async_set(entity_ids[index], states_old[index]) + + 
assert not group.is_on(hass, "group.none") + await asyncio.gather( + *[async_setup_component(hass, domain, {}) for domain in set(domains)] + ) + assert await async_setup_component(hass, "group", {}) + await hass.async_block_till_done() + + test_group = await group.Group.async_create_group( + hass, + "init_group", + created_by_service=True, + entity_ids=entity_ids, + icon=None, + mode=None, + object_id=None, + order=None, + ) + await hass.async_block_till_done() + + # Assert on old state + state = hass.states.get(test_group.entity_id) + assert state is not None + assert state.state == state_ison_group_old[0] + assert group.is_on(hass, test_group.entity_id) == state_ison_group_old[1] + + # Switch and assert on new state + for index in range(count): + hass.states.async_set(entity_ids[index], states_new[index]) + await hass.async_block_till_done() + state = hass.states.get(test_group.entity_id) + assert state is not None + assert state.state == state_ison_group_new[0] + assert group.is_on(hass, test_group.entity_id) == state_ison_group_new[1] + + async def test_reloading_groups(hass: HomeAssistant) -> None: """Test reloading the group config.""" assert await async_setup_component( From b462fdbf51b3cdb82556155fd2b765b8b416c5d4 Mon Sep 17 00:00:00 2001 From: Maciej Bieniek Date: Fri, 19 Apr 2024 15:52:09 +0200 Subject: [PATCH 182/426] Bump `gios` to version 4.0.0 (#115822) Bump gios to version 4.0.0 Co-authored-by: Maciej Bieniek <478555+bieniu@users.noreply.github.com> --- homeassistant/components/gios/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/gios/manifest.json b/homeassistant/components/gios/manifest.json index 2e33bc6741e..b509806d07f 100644 --- a/homeassistant/components/gios/manifest.json +++ b/homeassistant/components/gios/manifest.json @@ -8,5 +8,5 @@ "iot_class": "cloud_polling", "loggers": ["dacite", "gios"], "quality_scale": 
"platinum", - "requirements": ["gios==3.2.2"] + "requirements": ["gios==4.0.0"] } diff --git a/requirements_all.txt b/requirements_all.txt index 5724a8df371..ea6fcac34c4 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -934,7 +934,7 @@ georss-qld-bushfire-alert-client==0.7 getmac==0.9.4 # homeassistant.components.gios -gios==3.2.2 +gios==4.0.0 # homeassistant.components.gitter gitterpy==0.1.7 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index c9a3668ca9d..24034b934b8 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -766,7 +766,7 @@ georss-qld-bushfire-alert-client==0.7 getmac==0.9.4 # homeassistant.components.gios -gios==3.2.2 +gios==4.0.0 # homeassistant.components.glances glances-api==0.6.0 From ff83d9acfffec74bc8ecdc3c2fb4c879711c715a Mon Sep 17 00:00:00 2001 From: Simone Chemelli Date: Fri, 19 Apr 2024 16:45:19 +0200 Subject: [PATCH 183/426] Add missing media_player features to Samsung TV (#115788) * add missing features * fix snapshot --- .../components/samsungtv/media_player.py | 16 +++++++++------- .../samsungtv/snapshots/test_init.ambr | 4 ++-- 2 files changed, 11 insertions(+), 9 deletions(-) diff --git a/homeassistant/components/samsungtv/media_player.py b/homeassistant/components/samsungtv/media_player.py index 36715c44a9b..ff347431a4a 100644 --- a/homeassistant/components/samsungtv/media_player.py +++ b/homeassistant/components/samsungtv/media_player.py @@ -46,15 +46,17 @@ from .triggers.turn_on import async_get_turn_on_trigger SOURCES = {"TV": "KEY_TV", "HDMI": "KEY_HDMI"} SUPPORT_SAMSUNGTV = ( - MediaPlayerEntityFeature.PAUSE - | MediaPlayerEntityFeature.VOLUME_STEP - | MediaPlayerEntityFeature.VOLUME_MUTE - | MediaPlayerEntityFeature.PREVIOUS_TRACK - | MediaPlayerEntityFeature.SELECT_SOURCE - | MediaPlayerEntityFeature.NEXT_TRACK - | MediaPlayerEntityFeature.TURN_OFF + MediaPlayerEntityFeature.NEXT_TRACK + | MediaPlayerEntityFeature.PAUSE | MediaPlayerEntityFeature.PLAY | 
MediaPlayerEntityFeature.PLAY_MEDIA + | MediaPlayerEntityFeature.PREVIOUS_TRACK + | MediaPlayerEntityFeature.SELECT_SOURCE + | MediaPlayerEntityFeature.STOP + | MediaPlayerEntityFeature.TURN_OFF + | MediaPlayerEntityFeature.VOLUME_MUTE + | MediaPlayerEntityFeature.VOLUME_SET + | MediaPlayerEntityFeature.VOLUME_STEP ) diff --git a/tests/components/samsungtv/snapshots/test_init.ambr b/tests/components/samsungtv/snapshots/test_init.ambr index 404b9a6b3af..1b8cf4c999d 100644 --- a/tests/components/samsungtv/snapshots/test_init.ambr +++ b/tests/components/samsungtv/snapshots/test_init.ambr @@ -9,7 +9,7 @@ 'TV', 'HDMI', ]), - 'supported_features': , + 'supported_features': , }), 'context': , 'entity_id': 'media_player.any', @@ -51,7 +51,7 @@ 'original_name': None, 'platform': 'samsungtv', 'previous_unique_id': None, - 'supported_features': , + 'supported_features': , 'translation_key': None, 'unique_id': 'sample-entry-id', 'unit_of_measurement': None, From c108c7df3883d392d3dfd5714b7154aa058afe10 Mon Sep 17 00:00:00 2001 From: Joost Lekkerkerker Date: Fri, 19 Apr 2024 17:38:39 +0200 Subject: [PATCH 184/426] Add reauth flow to Google Tasks (#109517) * Add reauth flow to Google Tasks * Update homeassistant/components/google_tasks/config_flow.py Co-authored-by: Jan-Philipp Benecke * Add tests * Reauth * Remove insta reauth * Fix --------- Co-authored-by: Jan-Philipp Benecke --- .../components/google_tasks/__init__.py | 14 +- .../components/google_tasks/config_flow.py | 47 +++++- .../components/google_tasks/const.py | 5 +- .../components/google_tasks/strings.json | 1 + tests/components/google_tasks/conftest.py | 1 + .../google_tasks/test_config_flow.py | 158 ++++++++++++++++-- tests/components/google_tasks/test_init.py | 2 +- 7 files changed, 204 insertions(+), 24 deletions(-) diff --git a/homeassistant/components/google_tasks/__init__.py b/homeassistant/components/google_tasks/__init__.py index b62bd0fe5a2..29a1b20f2bc 100644 --- 
a/homeassistant/components/google_tasks/__init__.py +++ b/homeassistant/components/google_tasks/__init__.py @@ -2,12 +2,12 @@ from __future__ import annotations -from aiohttp import ClientError +from aiohttp import ClientError, ClientResponseError from homeassistant.config_entries import ConfigEntry from homeassistant.const import Platform from homeassistant.core import HomeAssistant -from homeassistant.exceptions import ConfigEntryNotReady +from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady from homeassistant.helpers import config_entry_oauth2_flow from . import api @@ -18,8 +18,6 @@ PLATFORMS: list[Platform] = [Platform.TODO] async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Set up Google Tasks from a config entry.""" - hass.data.setdefault(DOMAIN, {}) - implementation = ( await config_entry_oauth2_flow.async_get_config_entry_implementation( hass, entry @@ -29,10 +27,16 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: auth = api.AsyncConfigEntryAuth(hass, session) try: await auth.async_get_access_token() + except ClientResponseError as err: + if 400 <= err.status < 500: + raise ConfigEntryAuthFailed( + "OAuth session is not valid, reauth required" + ) from err + raise ConfigEntryNotReady from err except ClientError as err: raise ConfigEntryNotReady from err - hass.data[DOMAIN][entry.entry_id] = auth + hass.data.setdefault(DOMAIN, {})[entry.entry_id] = auth await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) diff --git a/homeassistant/components/google_tasks/config_flow.py b/homeassistant/components/google_tasks/config_flow.py index a8e283b55c8..a9ef5c7ff23 100644 --- a/homeassistant/components/google_tasks/config_flow.py +++ b/homeassistant/components/google_tasks/config_flow.py @@ -1,5 +1,6 @@ """Config flow for Google Tasks.""" +from collections.abc import Mapping import logging from typing import Any @@ -8,7 +9,7 @@ from googleapiclient.discovery 
import build from googleapiclient.errors import HttpError from googleapiclient.http import HttpRequest -from homeassistant.config_entries import ConfigFlowResult +from homeassistant.config_entries import ConfigEntry, ConfigFlowResult from homeassistant.const import CONF_ACCESS_TOKEN, CONF_TOKEN from homeassistant.helpers import config_entry_oauth2_flow @@ -22,6 +23,8 @@ class OAuth2FlowHandler( DOMAIN = DOMAIN + reauth_entry: ConfigEntry | None = None + @property def logger(self) -> logging.Logger: """Return logger.""" @@ -39,11 +42,21 @@ class OAuth2FlowHandler( async def async_oauth_create_entry(self, data: dict[str, Any]) -> ConfigFlowResult: """Create an entry for the flow.""" + credentials = Credentials(token=data[CONF_TOKEN][CONF_ACCESS_TOKEN]) try: + user_resource = build( + "oauth2", + "v2", + credentials=credentials, + ) + user_resource_cmd: HttpRequest = user_resource.userinfo().get() + user_resource_info = await self.hass.async_add_executor_job( + user_resource_cmd.execute + ) resource = build( "tasks", "v1", - credentials=Credentials(token=data[CONF_TOKEN][CONF_ACCESS_TOKEN]), + credentials=credentials, ) cmd: HttpRequest = resource.tasklists().list() await self.hass.async_add_executor_job(cmd.execute) @@ -56,4 +69,32 @@ class OAuth2FlowHandler( except Exception: # pylint: disable=broad-except self.logger.exception("Unknown error occurred") return self.async_abort(reason="unknown") - return self.async_create_entry(title=self.flow_impl.name, data=data) + user_id = user_resource_info["id"] + if not self.reauth_entry: + await self.async_set_unique_id(user_id) + self._abort_if_unique_id_configured() + return self.async_create_entry(title=user_resource_info["name"], data=data) + + if self.reauth_entry.unique_id == user_id or not self.reauth_entry.unique_id: + return self.async_update_reload_and_abort( + self.reauth_entry, unique_id=user_id, data=data + ) + + return self.async_abort(reason="wrong_account") + + async def async_step_reauth( + self, entry_data: 
Mapping[str, Any] + ) -> ConfigFlowResult: + """Perform reauth upon an API authentication error.""" + self.reauth_entry = self.hass.config_entries.async_get_entry( + self.context["entry_id"] + ) + return await self.async_step_reauth_confirm() + + async def async_step_reauth_confirm( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Confirm reauth dialog.""" + if user_input is None: + return self.async_show_form(step_id="reauth_confirm") + return await self.async_step_user() diff --git a/homeassistant/components/google_tasks/const.py b/homeassistant/components/google_tasks/const.py index 87253486127..0cb04bf1d4e 100644 --- a/homeassistant/components/google_tasks/const.py +++ b/homeassistant/components/google_tasks/const.py @@ -6,7 +6,10 @@ DOMAIN = "google_tasks" OAUTH2_AUTHORIZE = "https://accounts.google.com/o/oauth2/v2/auth" OAUTH2_TOKEN = "https://oauth2.googleapis.com/token" -OAUTH2_SCOPES = ["https://www.googleapis.com/auth/tasks"] +OAUTH2_SCOPES = [ + "https://www.googleapis.com/auth/tasks", + "https://www.googleapis.com/auth/userinfo.profile", +] class TaskStatus(StrEnum): diff --git a/homeassistant/components/google_tasks/strings.json b/homeassistant/components/google_tasks/strings.json index 2cf15f0d93d..4479b34935e 100644 --- a/homeassistant/components/google_tasks/strings.json +++ b/homeassistant/components/google_tasks/strings.json @@ -18,6 +18,7 @@ "user_rejected_authorize": "[%key:common::config_flow::abort::oauth2_user_rejected_authorize%]", "access_not_configured": "Unable to access the Google API:\n\n{message}", "unknown": "[%key:common::config_flow::error::unknown%]", + "wrong_account": "Wrong account: Please authenticate with the right account.", "oauth_timeout": "[%key:common::config_flow::abort::oauth2_timeout%]", "oauth_unauthorized": "[%key:common::config_flow::abort::oauth2_unauthorized%]", "oauth_failed": "[%key:common::config_flow::abort::oauth2_failed%]" diff --git a/tests/components/google_tasks/conftest.py 
b/tests/components/google_tasks/conftest.py index 87ddb2ed81d..7db78af6232 100644 --- a/tests/components/google_tasks/conftest.py +++ b/tests/components/google_tasks/conftest.py @@ -54,6 +54,7 @@ def mock_config_entry(token_entry: dict[str, Any]) -> MockConfigEntry: """Fixture for a config entry.""" return MockConfigEntry( domain=DOMAIN, + unique_id="123", data={ "auth_implementation": DOMAIN, "token": token_entry, diff --git a/tests/components/google_tasks/test_config_flow.py b/tests/components/google_tasks/test_config_flow.py index 24801959674..5b2d4f11fee 100644 --- a/tests/components/google_tasks/test_config_flow.py +++ b/tests/components/google_tasks/test_config_flow.py @@ -1,9 +1,11 @@ """Test the Google Tasks config flow.""" -from unittest.mock import patch +from collections.abc import Generator +from unittest.mock import Mock, patch from googleapiclient.errors import HttpError from httplib2 import Response +import pytest from homeassistant import config_entries from homeassistant.components.google_tasks.const import ( @@ -15,18 +17,37 @@ from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType from homeassistant.helpers import config_entry_oauth2_flow -from tests.common import load_fixture +from tests.common import MockConfigEntry, load_fixture +from tests.test_util.aiohttp import AiohttpClientMocker CLIENT_ID = "1234" CLIENT_SECRET = "5678" +@pytest.fixture +def user_identifier() -> str: + """Return a unique user ID.""" + return "123" + + +@pytest.fixture +def setup_userinfo(user_identifier: str) -> Generator[Mock, None, None]: + """Set up userinfo.""" + with patch("homeassistant.components.google_tasks.config_flow.build") as mock: + mock.return_value.userinfo.return_value.get.return_value.execute.return_value = { + "id": user_identifier, + "name": "Test Name", + } + yield mock + + async def test_full_flow( hass: HomeAssistant, hass_client_no_auth, - aioclient_mock, + aioclient_mock: AiohttpClientMocker, 
current_request_with_host, setup_credentials, + setup_userinfo, ) -> None: """Check full flow.""" result = await hass.config_entries.flow.async_init( @@ -44,7 +65,8 @@ async def test_full_flow( f"{OAUTH2_AUTHORIZE}?response_type=code&client_id={CLIENT_ID}" "&redirect_uri=https://example.com/auth/external/callback" f"&state={state}" - "&scope=https://www.googleapis.com/auth/tasks" + "&scope=https://www.googleapis.com/auth/tasks+" + "https://www.googleapis.com/auth/userinfo.profile" "&access_type=offline&prompt=consent" ) @@ -63,14 +85,13 @@ async def test_full_flow( }, ) - with ( - patch( - "homeassistant.components.google_tasks.async_setup_entry", return_value=True - ) as mock_setup, - patch("homeassistant.components.google_tasks.config_flow.build"), - ): + with patch( + "homeassistant.components.google_tasks.async_setup_entry", return_value=True + ) as mock_setup: result = await hass.config_entries.flow.async_configure(result["flow_id"]) assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["result"].unique_id == "123" + assert result["result"].title == "Test Name" assert len(hass.config_entries.async_entries(DOMAIN)) == 1 assert len(mock_setup.mock_calls) == 1 @@ -78,9 +99,10 @@ async def test_full_flow( async def test_api_not_enabled( hass: HomeAssistant, hass_client_no_auth, - aioclient_mock, + aioclient_mock: AiohttpClientMocker, current_request_with_host, setup_credentials, + setup_userinfo, ) -> None: """Check flow aborts if api is not enabled.""" result = await hass.config_entries.flow.async_init( @@ -98,7 +120,8 @@ async def test_api_not_enabled( f"{OAUTH2_AUTHORIZE}?response_type=code&client_id={CLIENT_ID}" "&redirect_uri=https://example.com/auth/external/callback" f"&state={state}" - "&scope=https://www.googleapis.com/auth/tasks" + "&scope=https://www.googleapis.com/auth/tasks+" + "https://www.googleapis.com/auth/userinfo.profile" "&access_type=offline&prompt=consent" ) @@ -137,9 +160,10 @@ async def test_api_not_enabled( async def 
test_general_exception( hass: HomeAssistant, hass_client_no_auth, - aioclient_mock, + aioclient_mock: AiohttpClientMocker, current_request_with_host, setup_credentials, + setup_userinfo, ) -> None: """Check flow aborts if exception happens.""" result = await hass.config_entries.flow.async_init( @@ -157,7 +181,8 @@ async def test_general_exception( f"{OAUTH2_AUTHORIZE}?response_type=code&client_id={CLIENT_ID}" "&redirect_uri=https://example.com/auth/external/callback" f"&state={state}" - "&scope=https://www.googleapis.com/auth/tasks" + "&scope=https://www.googleapis.com/auth/tasks+" + "https://www.googleapis.com/auth/userinfo.profile" "&access_type=offline&prompt=consent" ) @@ -184,3 +209,108 @@ async def test_general_exception( assert result["type"] is FlowResultType.ABORT assert result["reason"] == "unknown" + + +@pytest.mark.parametrize( + ("user_identifier", "abort_reason", "resulting_access_token", "starting_unique_id"), + [ + ( + "123", + "reauth_successful", + "updated-access-token", + "123", + ), + ( + "123", + "reauth_successful", + "updated-access-token", + None, + ), + ( + "345", + "wrong_account", + "mock-access", + "123", + ), + ], +) +async def test_reauth( + hass: HomeAssistant, + hass_client_no_auth, + aioclient_mock: AiohttpClientMocker, + current_request_with_host, + setup_credentials, + setup_userinfo, + user_identifier: str, + abort_reason: str, + resulting_access_token: str, + starting_unique_id: str | None, +) -> None: + """Test the re-authentication case updates the correct config entry.""" + config_entry = MockConfigEntry( + domain=DOMAIN, + unique_id=starting_unique_id, + data={ + "token": { + "refresh_token": "mock-refresh-token", + "access_token": "mock-access", + } + }, + ) + config_entry.add_to_hass(hass) + + config_entry.async_start_reauth(hass) + await hass.async_block_till_done() + + flows = hass.config_entries.flow.async_progress() + assert len(flows) == 1 + result = flows[0] + assert result["step_id"] == "reauth_confirm" + + result 
= await hass.config_entries.flow.async_configure(result["flow_id"], {}) + state = config_entry_oauth2_flow._encode_jwt( + hass, + { + "flow_id": result["flow_id"], + "redirect_uri": "https://example.com/auth/external/callback", + }, + ) + assert result["url"] == ( + f"{OAUTH2_AUTHORIZE}?response_type=code&client_id={CLIENT_ID}" + "&redirect_uri=https://example.com/auth/external/callback" + f"&state={state}" + "&scope=https://www.googleapis.com/auth/tasks+" + "https://www.googleapis.com/auth/userinfo.profile" + "&access_type=offline&prompt=consent" + ) + client = await hass_client_no_auth() + resp = await client.get(f"/auth/external/callback?code=abcd&state={state}") + assert resp.status == 200 + assert resp.headers["content-type"] == "text/html; charset=utf-8" + + aioclient_mock.clear_requests() + aioclient_mock.post( + OAUTH2_TOKEN, + json={ + "refresh_token": "mock-refresh-token", + "access_token": "updated-access-token", + "type": "Bearer", + "expires_in": 60, + }, + ) + + with patch( + "homeassistant.components.google_tasks.async_setup_entry", return_value=True + ): + result = await hass.config_entries.flow.async_configure(result["flow_id"]) + + assert len(hass.config_entries.async_entries(DOMAIN)) == 1 + + assert result["type"] == "abort" + assert result["reason"] == abort_reason + + assert config_entry.unique_id == "123" + assert "token" in config_entry.data + # Verify access token is refreshed + assert config_entry.data["token"]["access_token"] == resulting_access_token + assert config_entry.data["token"]["refresh_token"] == "mock-refresh-token" diff --git a/tests/components/google_tasks/test_init.py b/tests/components/google_tasks/test_init.py index 0abfce87133..1fe0e4a0c36 100644 --- a/tests/components/google_tasks/test_init.py +++ b/tests/components/google_tasks/test_init.py @@ -68,7 +68,7 @@ async def test_expired_token_refresh_success( ( time.time() - 3600, http.HTTPStatus.UNAUTHORIZED, - ConfigEntryState.SETUP_RETRY, # Will trigger reauth in the future 
+ ConfigEntryState.SETUP_ERROR, ), ( time.time() - 3600, From cc2e0fd9213e38de0dc1262306c019952d5900db Mon Sep 17 00:00:00 2001 From: Joost Lekkerkerker Date: Fri, 19 Apr 2024 18:18:32 +0200 Subject: [PATCH 185/426] Make Withings recoverable after internet outage (#115124) --- homeassistant/components/withings/__init__.py | 7 +- tests/components/withings/test_init.py | 105 ++++++++++++++++++ 2 files changed, 111 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/withings/__init__.py b/homeassistant/components/withings/__init__.py index 1fe85f180da..0b86a2b5201 100644 --- a/homeassistant/components/withings/__init__.py +++ b/homeassistant/components/withings/__init__.py @@ -12,6 +12,7 @@ from dataclasses import dataclass, field from datetime import timedelta from typing import TYPE_CHECKING, Any, cast +from aiohttp import ClientError from aiohttp.hdrs import METH_POST from aiohttp.web import Request, Response from aiowithings import NotificationCategory, WithingsClient @@ -274,7 +275,11 @@ class WithingsWebhookManager: async def async_unsubscribe_webhooks(client: WithingsClient) -> None: """Unsubscribe to all Withings webhooks.""" - current_webhooks = await client.list_notification_configurations() + try: + current_webhooks = await client.list_notification_configurations() + except ClientError: + LOGGER.exception("Error when unsubscribing webhooks") + return for webhook_configuration in current_webhooks: LOGGER.debug( diff --git a/tests/components/withings/test_init.py b/tests/components/withings/test_init.py index ff0a098a7cb..3ade0fb7c3a 100644 --- a/tests/components/withings/test_init.py +++ b/tests/components/withings/test_init.py @@ -5,6 +5,7 @@ from typing import Any from unittest.mock import AsyncMock, patch from urllib.parse import urlparse +from aiohttp import ClientConnectionError from aiohttp.hdrs import METH_HEAD from aiowithings import ( NotificationCategory, @@ -425,6 +426,110 @@ async def test_cloud_disconnect( assert 
withings.subscribe_notification.call_count == 12 +async def test_internet_disconnect( + hass: HomeAssistant, + withings: AsyncMock, + webhook_config_entry: MockConfigEntry, + hass_client_no_auth: ClientSessionGenerator, + freezer: FrozenDateTimeFactory, +) -> None: + """Test we can recover from internet disconnects.""" + await mock_cloud(hass) + await hass.async_block_till_done() + + with ( + patch("homeassistant.components.cloud.async_is_logged_in", return_value=True), + patch.object(cloud, "async_is_connected", return_value=True), + patch.object(cloud, "async_active_subscription", return_value=True), + patch( + "homeassistant.components.cloud.async_create_cloudhook", + return_value="https://hooks.nabu.casa/ABCD", + ), + patch( + "homeassistant.components.withings.async_get_config_entry_implementation", + ), + patch( + "homeassistant.components.cloud.async_delete_cloudhook", + ), + patch( + "homeassistant.components.withings.webhook_generate_url", + ), + ): + await setup_integration(hass, webhook_config_entry) + await prepare_webhook_setup(hass, freezer) + + assert cloud.async_active_subscription(hass) is True + assert cloud.async_is_connected(hass) is True + assert withings.revoke_notification_configurations.call_count == 3 + assert withings.subscribe_notification.call_count == 6 + + await hass.async_block_till_done() + + withings.list_notification_configurations.side_effect = ClientConnectionError + + async_mock_cloud_connection_status(hass, False) + await hass.async_block_till_done() + + assert withings.revoke_notification_configurations.call_count == 3 + + async_mock_cloud_connection_status(hass, True) + await hass.async_block_till_done() + + assert withings.subscribe_notification.call_count == 12 + + +async def test_cloud_disconnect_retry( + hass: HomeAssistant, + withings: AsyncMock, + webhook_config_entry: MockConfigEntry, + hass_client_no_auth: ClientSessionGenerator, + freezer: FrozenDateTimeFactory, +) -> None: + """Test we retry to create webhook 
connection again after cloud disconnects.""" + await mock_cloud(hass) + await hass.async_block_till_done() + + with ( + patch("homeassistant.components.cloud.async_is_logged_in", return_value=True), + patch.object(cloud, "async_is_connected", return_value=True), + patch.object( + cloud, "async_active_subscription", return_value=True + ) as mock_async_active_subscription, + patch( + "homeassistant.components.cloud.async_create_cloudhook", + return_value="https://hooks.nabu.casa/ABCD", + ), + patch( + "homeassistant.components.withings.async_get_config_entry_implementation", + ), + patch( + "homeassistant.components.cloud.async_delete_cloudhook", + ), + patch( + "homeassistant.components.withings.webhook_generate_url", + ), + ): + await setup_integration(hass, webhook_config_entry) + await prepare_webhook_setup(hass, freezer) + + assert cloud.async_active_subscription(hass) is True + assert cloud.async_is_connected(hass) is True + assert mock_async_active_subscription.call_count == 3 + + await hass.async_block_till_done() + + async_mock_cloud_connection_status(hass, False) + await hass.async_block_till_done() + + assert mock_async_active_subscription.call_count == 3 + + freezer.tick(timedelta(seconds=30)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + assert mock_async_active_subscription.call_count == 4 + + @pytest.mark.parametrize( ("body", "expected_code"), [ From 18d6581523cba47c7910fcdaa8f6c8de41c7e48e Mon Sep 17 00:00:00 2001 From: avee87 <6134677+avee87@users.noreply.github.com> Date: Fri, 19 Apr 2024 17:21:21 +0100 Subject: [PATCH 186/426] Fix Hyperion light not updating state (#115389) --- homeassistant/components/hyperion/sensor.py | 4 ++-- tests/components/hyperion/test_sensor.py | 1 - 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/hyperion/sensor.py b/homeassistant/components/hyperion/sensor.py index f537c282686..ad972806ae5 100644 --- a/homeassistant/components/hyperion/sensor.py +++ 
b/homeassistant/components/hyperion/sensor.py @@ -191,13 +191,13 @@ class HyperionVisiblePrioritySensor(HyperionSensor): if priority[KEY_COMPONENTID] == "COLOR": state_value = priority[KEY_VALUE][KEY_RGB] else: - state_value = priority[KEY_OWNER] + state_value = priority.get(KEY_OWNER) attrs = { "component_id": priority[KEY_COMPONENTID], "origin": priority[KEY_ORIGIN], "priority": priority[KEY_PRIORITY], - "owner": priority[KEY_OWNER], + "owner": priority.get(KEY_OWNER), } if priority[KEY_COMPONENTID] == "COLOR": diff --git a/tests/components/hyperion/test_sensor.py b/tests/components/hyperion/test_sensor.py index 65991b4b7e1..8900db177fc 100644 --- a/tests/components/hyperion/test_sensor.py +++ b/tests/components/hyperion/test_sensor.py @@ -159,7 +159,6 @@ async def test_visible_effect_state_changes(hass: HomeAssistant) -> None: KEY_ACTIVE: True, KEY_COMPONENTID: "COLOR", KEY_ORIGIN: "System", - KEY_OWNER: "System", KEY_PRIORITY: 250, KEY_VALUE: {KEY_RGB: [0, 0, 0]}, KEY_VISIBLE: True, From ebbcad17c64bf8eee3b493beef7b89d43811afde Mon Sep 17 00:00:00 2001 From: slyoldfox Date: Fri, 19 Apr 2024 18:22:12 +0200 Subject: [PATCH 187/426] Add scheduled mode to renault charge mode (#115427) Co-authored-by: epenet <6771947+epenet@users.noreply.github.com> --- homeassistant/components/renault/select.py | 2 +- tests/components/renault/const.py | 21 ++++++++++++++++--- .../renault/snapshots/test_select.ambr | 12 +++++++++++ 3 files changed, 31 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/renault/select.py b/homeassistant/components/renault/select.py index f6c8f73d24b..eb79e197937 100644 --- a/homeassistant/components/renault/select.py +++ b/homeassistant/components/renault/select.py @@ -71,6 +71,6 @@ SENSOR_TYPES: tuple[RenaultSelectEntityDescription, ...] 
= ( coordinator="charge_mode", data_key="chargeMode", translation_key="charge_mode", - options=["always", "always_charging", "schedule_mode"], + options=["always", "always_charging", "schedule_mode", "scheduled"], ), ) diff --git a/tests/components/renault/const.py b/tests/components/renault/const.py index d849c658149..19c40f6ec20 100644 --- a/tests/components/renault/const.py +++ b/tests/components/renault/const.py @@ -127,7 +127,12 @@ MOCK_VEHICLES = { { ATTR_ENTITY_ID: "select.reg_number_charge_mode", ATTR_ICON: "mdi:calendar-remove", - ATTR_OPTIONS: ["always", "always_charging", "schedule_mode"], + ATTR_OPTIONS: [ + "always", + "always_charging", + "schedule_mode", + "scheduled", + ], ATTR_STATE: "always", ATTR_UNIQUE_ID: "vf1aaaaa555777999_charge_mode", }, @@ -363,7 +368,12 @@ MOCK_VEHICLES = { { ATTR_ENTITY_ID: "select.reg_number_charge_mode", ATTR_ICON: "mdi:calendar-clock", - ATTR_OPTIONS: ["always", "always_charging", "schedule_mode"], + ATTR_OPTIONS: [ + "always", + "always_charging", + "schedule_mode", + "scheduled", + ], ATTR_STATE: "schedule_mode", ATTR_UNIQUE_ID: "vf1aaaaa555777999_charge_mode", }, @@ -599,7 +609,12 @@ MOCK_VEHICLES = { { ATTR_ENTITY_ID: "select.reg_number_charge_mode", ATTR_ICON: "mdi:calendar-remove", - ATTR_OPTIONS: ["always", "always_charging", "schedule_mode"], + ATTR_OPTIONS: [ + "always", + "always_charging", + "schedule_mode", + "scheduled", + ], ATTR_STATE: "always", ATTR_UNIQUE_ID: "vf1aaaaa555777123_charge_mode", }, diff --git a/tests/components/renault/snapshots/test_select.ambr b/tests/components/renault/snapshots/test_select.ambr index 7e8356ee070..0722cb5cab3 100644 --- a/tests/components/renault/snapshots/test_select.ambr +++ b/tests/components/renault/snapshots/test_select.ambr @@ -82,6 +82,7 @@ 'always', 'always_charging', 'schedule_mode', + 'scheduled', ]), }), 'config_entry_id': , @@ -121,6 +122,7 @@ 'always', 'always_charging', 'schedule_mode', + 'scheduled', ]), }), 'context': , @@ -175,6 +177,7 @@ 'always', 
'always_charging', 'schedule_mode', + 'scheduled', ]), }), 'config_entry_id': , @@ -214,6 +217,7 @@ 'always', 'always_charging', 'schedule_mode', + 'scheduled', ]), }), 'context': , @@ -268,6 +272,7 @@ 'always', 'always_charging', 'schedule_mode', + 'scheduled', ]), }), 'config_entry_id': , @@ -307,6 +312,7 @@ 'always', 'always_charging', 'schedule_mode', + 'scheduled', ]), }), 'context': , @@ -401,6 +407,7 @@ 'always', 'always_charging', 'schedule_mode', + 'scheduled', ]), }), 'config_entry_id': , @@ -440,6 +447,7 @@ 'always', 'always_charging', 'schedule_mode', + 'scheduled', ]), }), 'context': , @@ -494,6 +502,7 @@ 'always', 'always_charging', 'schedule_mode', + 'scheduled', ]), }), 'config_entry_id': , @@ -533,6 +542,7 @@ 'always', 'always_charging', 'schedule_mode', + 'scheduled', ]), }), 'context': , @@ -587,6 +597,7 @@ 'always', 'always_charging', 'schedule_mode', + 'scheduled', ]), }), 'config_entry_id': , @@ -626,6 +637,7 @@ 'always', 'always_charging', 'schedule_mode', + 'scheduled', ]), }), 'context': , From 4529268544d88aafd315ade68e0a32e3e0ce281f Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Fri, 19 Apr 2024 11:24:54 -0500 Subject: [PATCH 188/426] Ensure scripts with timeouts of zero timeout immediately (#115830) --- homeassistant/helpers/script.py | 25 ++++- tests/helpers/test_script.py | 178 ++++++++++++++++++++++++++++++++ 2 files changed, 198 insertions(+), 5 deletions(-) diff --git a/homeassistant/helpers/script.py b/homeassistant/helpers/script.py index ea5cc3e571a..62c781ae629 100644 --- a/homeassistant/helpers/script.py +++ b/homeassistant/helpers/script.py @@ -650,6 +650,12 @@ class _ScriptRun: # check if condition already okay if condition.async_template(self._hass, wait_template, self._variables, False): self._variables["wait"]["completed"] = True + self._changed() + return + + if timeout == 0: + self._changed() + self._async_handle_timeout() return futures, timeout_handle, timeout_future = self._async_futures_with_timeout( @@ -1078,6 +1084,11 @@ class _ScriptRun: self._variables["wait"] = {"remaining": timeout, "trigger": None} trace_set_result(wait=self._variables["wait"]) + if timeout == 0: + self._changed() + self._async_handle_timeout() + return + futures, timeout_handle, timeout_future = self._async_futures_with_timeout( timeout ) @@ -1108,6 +1119,14 @@ class _ScriptRun: futures, timeout_handle, timeout_future, remove_triggers ) + def _async_handle_timeout(self) -> None: + """Handle timeout.""" + self._variables["wait"]["remaining"] = 0.0 + if not self._action.get(CONF_CONTINUE_ON_TIMEOUT, True): + self._log(_TIMEOUT_MSG) + trace_set_result(wait=self._variables["wait"], timeout=True) + raise _AbortScript from TimeoutError() + async def _async_wait_with_optional_timeout( self, futures: list[asyncio.Future[None]], @@ -1118,11 +1137,7 @@ class _ScriptRun: try: await asyncio.wait(futures, return_when=asyncio.FIRST_COMPLETED) if timeout_future and timeout_future.done(): - self._variables["wait"]["remaining"] = 0.0 - if not self._action.get(CONF_CONTINUE_ON_TIMEOUT, True): - self._log(_TIMEOUT_MSG) - 
trace_set_result(wait=self._variables["wait"], timeout=True) - raise _AbortScript from TimeoutError() + self._async_handle_timeout() finally: if timeout_future and not timeout_future.done() and timeout_handle: timeout_handle.cancel() diff --git a/tests/helpers/test_script.py b/tests/helpers/test_script.py index 9d8170f9953..3d662e772e8 100644 --- a/tests/helpers/test_script.py +++ b/tests/helpers/test_script.py @@ -1311,6 +1311,184 @@ async def test_wait_timeout( assert_action_trace(expected_trace) +@pytest.mark.parametrize( + "timeout_param", [0, "{{ 0 }}", {"minutes": 0}, {"minutes": "{{ 0 }}"}] +) +async def test_wait_trigger_with_zero_timeout( + hass: HomeAssistant, caplog: pytest.LogCaptureFixture, timeout_param: int | str +) -> None: + """Test the wait trigger with zero timeout option.""" + event = "test_event" + events = async_capture_events(hass, event) + action = { + "wait_for_trigger": { + "platform": "state", + "entity_id": "switch.test", + "to": "off", + } + } + action["timeout"] = timeout_param + action["continue_on_timeout"] = True + sequence = cv.SCRIPT_SCHEMA([action, {"event": event}]) + sequence = await script.async_validate_actions_config(hass, sequence) + script_obj = script.Script(hass, sequence, "Test Name", "test_domain") + wait_started_flag = async_watch_for_action(script_obj, "wait") + hass.states.async_set("switch.test", "on") + hass.async_create_task(script_obj.async_run(context=Context())) + + try: + await asyncio.wait_for(wait_started_flag.wait(), 1) + except (AssertionError, TimeoutError): + await script_obj.async_stop() + raise + + assert not script_obj.is_running + assert len(events) == 1 + assert "(timeout: 0:00:00)" in caplog.text + + variable_wait = {"wait": {"trigger": None, "remaining": 0.0}} + expected_trace = { + "0": [ + { + "result": variable_wait, + "variables": variable_wait, + } + ], + "1": [{"result": {"event": "test_event", "event_data": {}}}], + } + assert_action_trace(expected_trace) + + +@pytest.mark.parametrize( + 
"timeout_param", [0, "{{ 0 }}", {"minutes": 0}, {"minutes": "{{ 0 }}"}] +) +async def test_wait_trigger_matches_with_zero_timeout( + hass: HomeAssistant, caplog: pytest.LogCaptureFixture, timeout_param: int | str +) -> None: + """Test the wait trigger that matches with zero timeout option.""" + event = "test_event" + events = async_capture_events(hass, event) + action = { + "wait_for_trigger": { + "platform": "state", + "entity_id": "switch.test", + "to": "off", + } + } + action["timeout"] = timeout_param + action["continue_on_timeout"] = True + sequence = cv.SCRIPT_SCHEMA([action, {"event": event}]) + sequence = await script.async_validate_actions_config(hass, sequence) + script_obj = script.Script(hass, sequence, "Test Name", "test_domain") + wait_started_flag = async_watch_for_action(script_obj, "wait") + hass.states.async_set("switch.test", "off") + hass.async_create_task(script_obj.async_run(context=Context())) + + try: + await asyncio.wait_for(wait_started_flag.wait(), 1) + except (AssertionError, TimeoutError): + await script_obj.async_stop() + raise + + assert not script_obj.is_running + assert len(events) == 1 + assert "(timeout: 0:00:00)" in caplog.text + + variable_wait = {"wait": {"trigger": None, "remaining": 0.0}} + expected_trace = { + "0": [ + { + "result": variable_wait, + "variables": variable_wait, + } + ], + "1": [{"result": {"event": "test_event", "event_data": {}}}], + } + assert_action_trace(expected_trace) + + +@pytest.mark.parametrize( + "timeout_param", [0, "{{ 0 }}", {"minutes": 0}, {"minutes": "{{ 0 }}"}] +) +async def test_wait_template_with_zero_timeout( + hass: HomeAssistant, caplog: pytest.LogCaptureFixture, timeout_param: int | str +) -> None: + """Test the wait template with zero timeout option.""" + event = "test_event" + events = async_capture_events(hass, event) + action = {"wait_template": "{{ states.switch.test.state == 'off' }}"} + action["timeout"] = timeout_param + action["continue_on_timeout"] = True + sequence = 
cv.SCRIPT_SCHEMA([action, {"event": event}]) + sequence = await script.async_validate_actions_config(hass, sequence) + script_obj = script.Script(hass, sequence, "Test Name", "test_domain") + wait_started_flag = async_watch_for_action(script_obj, "wait") + hass.states.async_set("switch.test", "on") + hass.async_create_task(script_obj.async_run(context=Context())) + + try: + await asyncio.wait_for(wait_started_flag.wait(), 1) + except (AssertionError, TimeoutError): + await script_obj.async_stop() + raise + + assert not script_obj.is_running + assert len(events) == 1 + assert "(timeout: 0:00:00)" in caplog.text + variable_wait = {"wait": {"completed": False, "remaining": 0.0}} + expected_trace = { + "0": [ + { + "result": variable_wait, + "variables": variable_wait, + } + ], + "1": [{"result": {"event": "test_event", "event_data": {}}}], + } + assert_action_trace(expected_trace) + + +@pytest.mark.parametrize( + "timeout_param", [0, "{{ 0 }}", {"minutes": 0}, {"minutes": "{{ 0 }}"}] +) +async def test_wait_template_matches_with_zero_timeout( + hass: HomeAssistant, caplog: pytest.LogCaptureFixture, timeout_param: int | str +) -> None: + """Test the wait template that matches with zero timeout option.""" + event = "test_event" + events = async_capture_events(hass, event) + action = {"wait_template": "{{ states.switch.test.state == 'off' }}"} + action["timeout"] = timeout_param + action["continue_on_timeout"] = True + sequence = cv.SCRIPT_SCHEMA([action, {"event": event}]) + sequence = await script.async_validate_actions_config(hass, sequence) + script_obj = script.Script(hass, sequence, "Test Name", "test_domain") + wait_started_flag = async_watch_for_action(script_obj, "wait") + hass.states.async_set("switch.test", "off") + hass.async_create_task(script_obj.async_run(context=Context())) + + try: + await asyncio.wait_for(wait_started_flag.wait(), 1) + except (AssertionError, TimeoutError): + await script_obj.async_stop() + raise + + assert not script_obj.is_running + 
assert len(events) == 1 + assert "(timeout: 0:00:00)" in caplog.text + variable_wait = {"wait": {"completed": True, "remaining": 0.0}} + expected_trace = { + "0": [ + { + "result": variable_wait, + "variables": variable_wait, + } + ], + "1": [{"result": {"event": "test_event", "event_data": {}}}], + } + assert_action_trace(expected_trace) + + @pytest.mark.parametrize( ("continue_on_timeout", "n_events"), [(False, 0), (True, 1), (None, 1)] ) From a8025a8606fa63be570fb43f0f0643068bef8844 Mon Sep 17 00:00:00 2001 From: Jan Bouwhuis Date: Fri, 19 Apr 2024 18:41:29 +0200 Subject: [PATCH 189/426] Fix mutable objects in group registry class (#115797) --- homeassistant/components/group/registry.py | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/group/registry.py b/homeassistant/components/group/registry.py index 1441d39d331..6cdb929d60c 100644 --- a/homeassistant/components/group/registry.py +++ b/homeassistant/components/group/registry.py @@ -47,10 +47,12 @@ def _process_group_platform( class GroupIntegrationRegistry: """Class to hold a registry of integrations.""" - on_off_mapping: dict[str, str] = {STATE_ON: STATE_OFF} - off_on_mapping: dict[str, str] = {STATE_OFF: STATE_ON} - on_states_by_domain: dict[str, set] = {} - exclude_domains: set = set() + def __init__(self) -> None: + """Imitialize registry.""" + self.on_off_mapping: dict[str, str] = {STATE_ON: STATE_OFF} + self.off_on_mapping: dict[str, str] = {STATE_OFF: STATE_ON} + self.on_states_by_domain: dict[str, set[str]] = {} + self.exclude_domains: set[str] = set() def exclude_domain(self) -> None: """Exclude the current domain.""" From f9ff3165af7d9b9f70d580af193aa072369f6b79 Mon Sep 17 00:00:00 2001 From: Maciej Bieniek Date: Fri, 19 Apr 2024 20:25:07 +0200 Subject: [PATCH 190/426] Bump `nextdns` to version 3.0.0 (#115854) Bump nextdns to version 3.0.0 Co-authored-by: Maciej Bieniek <478555+bieniu@users.noreply.github.com> --- 
homeassistant/components/nextdns/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/nextdns/manifest.json b/homeassistant/components/nextdns/manifest.json index 611021d73e4..1e7145ef6d1 100644 --- a/homeassistant/components/nextdns/manifest.json +++ b/homeassistant/components/nextdns/manifest.json @@ -8,5 +8,5 @@ "iot_class": "cloud_polling", "loggers": ["nextdns"], "quality_scale": "platinum", - "requirements": ["nextdns==2.1.0"] + "requirements": ["nextdns==3.0.0"] } diff --git a/requirements_all.txt b/requirements_all.txt index ea6fcac34c4..95b83d8f818 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1377,7 +1377,7 @@ nextcloudmonitor==1.5.0 nextcord==2.6.0 # homeassistant.components.nextdns -nextdns==2.1.0 +nextdns==3.0.0 # homeassistant.components.nibe_heatpump nibe==2.8.0 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 24034b934b8..1bcd778deef 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1110,7 +1110,7 @@ nextcloudmonitor==1.5.0 nextcord==2.6.0 # homeassistant.components.nextdns -nextdns==2.1.0 +nextdns==3.0.0 # homeassistant.components.nibe_heatpump nibe==2.8.0 From ffd6635c1424f544681165bd54c514cea1a7dd67 Mon Sep 17 00:00:00 2001 From: Maciej Bieniek Date: Fri, 19 Apr 2024 20:25:57 +0200 Subject: [PATCH 191/426] Bump `nettigo_air_monitor` to version 3.0.0 (#115853) Bump nettigo_air_monitor to version 3.0.0 Co-authored-by: Maciej Bieniek <478555+bieniu@users.noreply.github.com> --- homeassistant/components/nam/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/nam/manifest.json b/homeassistant/components/nam/manifest.json index a4ef9af9aee..7b1c584c293 100644 --- a/homeassistant/components/nam/manifest.json +++ b/homeassistant/components/nam/manifest.json 
@@ -8,7 +8,7 @@ "iot_class": "local_polling", "loggers": ["nettigo_air_monitor"], "quality_scale": "platinum", - "requirements": ["nettigo-air-monitor==2.2.2"], + "requirements": ["nettigo-air-monitor==3.0.0"], "zeroconf": [ { "type": "_http._tcp.local.", diff --git a/requirements_all.txt b/requirements_all.txt index 95b83d8f818..6186083e13d 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1362,7 +1362,7 @@ netdata==1.1.0 netmap==0.7.0.2 # homeassistant.components.nam -nettigo-air-monitor==2.2.2 +nettigo-air-monitor==3.0.0 # homeassistant.components.neurio_energy neurio==0.3.1 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 1bcd778deef..11b048fdcf6 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1098,7 +1098,7 @@ nessclient==1.0.0 netmap==0.7.0.2 # homeassistant.components.nam -nettigo-air-monitor==2.2.2 +nettigo-air-monitor==3.0.0 # homeassistant.components.nexia nexia==2.0.8 From 0ea1564248f290b5d4128d9701632639b8da6c0f Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Sat, 20 Apr 2024 01:36:46 +0200 Subject: [PATCH 192/426] Bump bluetooth-adapters to 0.19.0 (#115864) --- homeassistant/components/bluetooth/manifest.json | 2 +- homeassistant/package_constraints.txt | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/bluetooth/manifest.json b/homeassistant/components/bluetooth/manifest.json index b41c344bdf2..f6adcbed7d8 100644 --- a/homeassistant/components/bluetooth/manifest.json +++ b/homeassistant/components/bluetooth/manifest.json @@ -16,7 +16,7 @@ "requirements": [ "bleak==0.21.1", "bleak-retry-connector==3.5.0", - "bluetooth-adapters==0.18.0", + "bluetooth-adapters==0.19.0", "bluetooth-auto-recovery==1.4.1", "bluetooth-data-tools==1.19.0", "dbus-fast==2.21.1", diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index ec6998055b0..50c17024b01 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -17,7 +17,7 @@ awesomeversion==24.2.0 bcrypt==4.1.2 bleak-retry-connector==3.5.0 bleak==0.21.1 -bluetooth-adapters==0.18.0 +bluetooth-adapters==0.19.0 bluetooth-auto-recovery==1.4.1 bluetooth-data-tools==1.19.0 cached_ipaddress==0.3.0 diff --git a/requirements_all.txt b/requirements_all.txt index 6186083e13d..a7111a73737 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -576,7 +576,7 @@ bluemaestro-ble==0.2.3 # bluepy==1.3.0 # homeassistant.components.bluetooth -bluetooth-adapters==0.18.0 +bluetooth-adapters==0.19.0 # homeassistant.components.bluetooth bluetooth-auto-recovery==1.4.1 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 11b048fdcf6..70c1b2d244b 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -491,7 +491,7 @@ bluecurrent-api==1.2.3 bluemaestro-ble==0.2.3 # homeassistant.components.bluetooth -bluetooth-adapters==0.18.0 +bluetooth-adapters==0.19.0 # 
homeassistant.components.bluetooth bluetooth-auto-recovery==1.4.1 From 354c20a57b4e513a42a4b0b45fec403dc63d79b6 Mon Sep 17 00:00:00 2001 From: Michael <35783820+mib1185@users.noreply.github.com> Date: Sat, 20 Apr 2024 12:13:56 +0200 Subject: [PATCH 193/426] Automatic cleanup of entity and device registry in AVM FRITZ!SmartHome (#114601) --- homeassistant/components/fritzbox/__init__.py | 10 +- .../components/fritzbox/coordinator.py | 57 +++++++-- tests/components/fritzbox/test_coordinator.py | 111 ++++++++++++++++++ tests/components/fritzbox/test_init.py | 62 +--------- 4 files changed, 165 insertions(+), 75 deletions(-) create mode 100644 tests/components/fritzbox/test_coordinator.py diff --git a/homeassistant/components/fritzbox/__init__.py b/homeassistant/components/fritzbox/__init__.py index 7f4006768c4..904a86d21ae 100644 --- a/homeassistant/components/fritzbox/__init__.py +++ b/homeassistant/components/fritzbox/__init__.py @@ -51,12 +51,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: has_templates = await hass.async_add_executor_job(fritz.has_templates) LOGGER.debug("enable smarthome templates: %s", has_templates) - coordinator = FritzboxDataUpdateCoordinator(hass, entry, has_templates) - - await coordinator.async_config_entry_first_refresh() - - hass.data[DOMAIN][entry.entry_id][CONF_COORDINATOR] = coordinator - def _update_unique_id(entry: RegistryEntry) -> dict[str, str] | None: """Update unique ID of entity entry.""" if ( @@ -79,6 +73,10 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: await async_migrate_entries(hass, entry.entry_id, _update_unique_id) + coordinator = FritzboxDataUpdateCoordinator(hass, entry.entry_id, has_templates) + await coordinator.async_setup() + hass.data[DOMAIN][entry.entry_id][CONF_COORDINATOR] = coordinator + await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) def logout_fritzbox(event: Event) -> None: diff --git 
a/homeassistant/components/fritzbox/coordinator.py b/homeassistant/components/fritzbox/coordinator.py index c58665f2b5d..a9cfc25b223 100644 --- a/homeassistant/components/fritzbox/coordinator.py +++ b/homeassistant/components/fritzbox/coordinator.py @@ -12,6 +12,7 @@ from requests.exceptions import ConnectionError as RequestConnectionError, HTTPE from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryAuthFailed +from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed from .const import CONF_CONNECTIONS, DOMAIN, LOGGER @@ -28,27 +29,55 @@ class FritzboxCoordinatorData: class FritzboxDataUpdateCoordinator(DataUpdateCoordinator[FritzboxCoordinatorData]): """Fritzbox Smarthome device data update coordinator.""" + config_entry: ConfigEntry configuration_url: str - def __init__( - self, hass: HomeAssistant, entry: ConfigEntry, has_templates: bool - ) -> None: + def __init__(self, hass: HomeAssistant, name: str, has_templates: bool) -> None: """Initialize the Fritzbox Smarthome device coordinator.""" - self.entry = entry - self.fritz: Fritzhome = hass.data[DOMAIN][self.entry.entry_id][CONF_CONNECTIONS] + super().__init__( + hass, + LOGGER, + name=name, + update_interval=timedelta(seconds=30), + ) + + self.fritz: Fritzhome = hass.data[DOMAIN][self.config_entry.entry_id][ + CONF_CONNECTIONS + ] self.configuration_url = self.fritz.get_prefixed_host() self.has_templates = has_templates self.new_devices: set[str] = set() self.new_templates: set[str] = set() - super().__init__( - hass, - LOGGER, - name=entry.entry_id, - update_interval=timedelta(seconds=30), + self.data = FritzboxCoordinatorData({}, {}) + + async def async_setup(self) -> None: + """Set up the coordinator.""" + await self.async_config_entry_first_refresh() + self.cleanup_removed_devices( + list(self.data.devices) 
+ list(self.data.templates) ) - self.data = FritzboxCoordinatorData({}, {}) + def cleanup_removed_devices(self, avaiable_ains: list[str]) -> None: + """Cleanup entity and device registry from removed devices.""" + entity_reg = er.async_get(self.hass) + for entity in er.async_entries_for_config_entry( + entity_reg, self.config_entry.entry_id + ): + if entity.unique_id.split("_")[0] not in avaiable_ains: + LOGGER.debug("Removing obsolete entity entry %s", entity.entity_id) + entity_reg.async_remove(entity.entity_id) + + device_reg = dr.async_get(self.hass) + identifiers = {(DOMAIN, ain) for ain in avaiable_ains} + for device in dr.async_entries_for_config_entry( + device_reg, self.config_entry.entry_id + ): + if not set(device.identifiers) & identifiers: + LOGGER.debug("Removing obsolete device entry %s", device.name) + device_reg.async_update_device( + device.id, remove_config_entry_id=self.config_entry.entry_id + ) def _update_fritz_devices(self) -> FritzboxCoordinatorData: """Update all fritzbox device data.""" @@ -95,6 +124,12 @@ class FritzboxDataUpdateCoordinator(DataUpdateCoordinator[FritzboxCoordinatorDat self.new_devices = device_data.keys() - self.data.devices.keys() self.new_templates = template_data.keys() - self.data.templates.keys() + if ( + self.data.devices.keys() - device_data.keys() + or self.data.templates.keys() - template_data.keys() + ): + self.cleanup_removed_devices(list(device_data) + list(template_data)) + return FritzboxCoordinatorData(devices=device_data, templates=template_data) async def _async_update_data(self) -> FritzboxCoordinatorData: diff --git a/tests/components/fritzbox/test_coordinator.py b/tests/components/fritzbox/test_coordinator.py new file mode 100644 index 00000000000..401fab8f169 --- /dev/null +++ b/tests/components/fritzbox/test_coordinator.py @@ -0,0 +1,111 @@ +"""Tests for the AVM Fritz!Box integration.""" + +from __future__ import annotations + +from datetime import timedelta +from unittest.mock import Mock + +from 
pyfritzhome import LoginError +from requests.exceptions import ConnectionError, HTTPError + +from homeassistant.components.fritzbox.const import DOMAIN as FB_DOMAIN +from homeassistant.config_entries import ConfigEntryState +from homeassistant.const import CONF_DEVICES +from homeassistant.core import HomeAssistant +from homeassistant.helpers import device_registry as dr, entity_registry as er +from homeassistant.util.dt import utcnow + +from . import FritzDeviceCoverMock, FritzDeviceSwitchMock +from .const import MOCK_CONFIG + +from tests.common import MockConfigEntry, async_fire_time_changed + + +async def test_coordinator_update_after_reboot( + hass: HomeAssistant, fritz: Mock +) -> None: + """Test coordinator after reboot.""" + entry = MockConfigEntry( + domain=FB_DOMAIN, + data=MOCK_CONFIG[FB_DOMAIN][CONF_DEVICES][0], + unique_id="any", + ) + entry.add_to_hass(hass) + fritz().update_devices.side_effect = [HTTPError(), ""] + + assert await hass.config_entries.async_setup(entry.entry_id) + assert fritz().update_devices.call_count == 2 + assert fritz().update_templates.call_count == 1 + assert fritz().get_devices.call_count == 1 + assert fritz().get_templates.call_count == 1 + assert fritz().login.call_count == 2 + + +async def test_coordinator_update_after_password_change( + hass: HomeAssistant, fritz: Mock +) -> None: + """Test coordinator after password change.""" + entry = MockConfigEntry( + domain=FB_DOMAIN, + data=MOCK_CONFIG[FB_DOMAIN][CONF_DEVICES][0], + unique_id="any", + ) + entry.add_to_hass(hass) + fritz().update_devices.side_effect = HTTPError() + fritz().login.side_effect = ["", LoginError("some_user")] + + assert not await hass.config_entries.async_setup(entry.entry_id) + assert fritz().update_devices.call_count == 1 + assert fritz().get_devices.call_count == 0 + assert fritz().get_templates.call_count == 0 + assert fritz().login.call_count == 2 + + +async def test_coordinator_update_when_unreachable( + hass: HomeAssistant, fritz: Mock +) -> None: + 
"""Test coordinator after reboot.""" + entry = MockConfigEntry( + domain=FB_DOMAIN, + data=MOCK_CONFIG[FB_DOMAIN][CONF_DEVICES][0], + unique_id="any", + ) + entry.add_to_hass(hass) + fritz().update_devices.side_effect = [ConnectionError(), ""] + + assert not await hass.config_entries.async_setup(entry.entry_id) + assert entry.state is ConfigEntryState.SETUP_RETRY + + +async def test_coordinator_automatic_registry_cleanup( + hass: HomeAssistant, + fritz: Mock, + device_registry: dr.DeviceRegistry, + entity_registry: er.EntityRegistry, +) -> None: + """Test automatic registry cleanup.""" + fritz().get_devices.return_value = [ + FritzDeviceSwitchMock(ain="fake ain switch", name="fake_switch"), + FritzDeviceCoverMock(ain="fake ain cover", name="fake_cover"), + ] + entry = MockConfigEntry( + domain=FB_DOMAIN, + data=MOCK_CONFIG[FB_DOMAIN][CONF_DEVICES][0], + unique_id="any", + ) + entry.add_to_hass(hass) + await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done(wait_background_tasks=True) + + assert len(er.async_entries_for_config_entry(entity_registry, entry.entry_id)) == 11 + assert len(dr.async_entries_for_config_entry(device_registry, entry.entry_id)) == 2 + + fritz().get_devices.return_value = [ + FritzDeviceSwitchMock(ain="fake ain switch", name="fake_switch") + ] + + async_fire_time_changed(hass, utcnow() + timedelta(seconds=35)) + await hass.async_block_till_done(wait_background_tasks=True) + + assert len(er.async_entries_for_config_entry(entity_registry, entry.entry_id)) == 8 + assert len(dr.async_entries_for_config_entry(device_registry, entry.entry_id)) == 1 diff --git a/tests/components/fritzbox/test_init.py b/tests/components/fritzbox/test_init.py index 4ee351f7914..8d7e4249fbd 100644 --- a/tests/components/fritzbox/test_init.py +++ b/tests/components/fritzbox/test_init.py @@ -6,7 +6,7 @@ from unittest.mock import Mock, call, patch from pyfritzhome import LoginError import pytest -from requests.exceptions import 
ConnectionError, HTTPError +from requests.exceptions import ConnectionError as RequestConnectionError from homeassistant.components.binary_sensor import DOMAIN as BINARY_SENSOR_DOMAIN from homeassistant.components.fritzbox.const import DOMAIN as FB_DOMAIN @@ -80,6 +80,7 @@ async def test_update_unique_id( new_unique_id: str, ) -> None: """Test unique_id update of integration.""" + fritz().get_devices.return_value = [FritzDeviceSwitchMock()] entry = MockConfigEntry( domain=FB_DOMAIN, data=MOCK_CONFIG[FB_DOMAIN][CONF_DEVICES][0], @@ -138,6 +139,7 @@ async def test_update_unique_id_no_change( unique_id: str, ) -> None: """Test unique_id is not updated of integration.""" + fritz().get_devices.return_value = [FritzDeviceSwitchMock()] entry = MockConfigEntry( domain=FB_DOMAIN, data=MOCK_CONFIG[FB_DOMAIN][CONF_DEVICES][0], @@ -158,62 +160,6 @@ async def test_update_unique_id_no_change( assert entity_migrated.unique_id == unique_id -async def test_coordinator_update_after_reboot( - hass: HomeAssistant, fritz: Mock -) -> None: - """Test coordinator after reboot.""" - entry = MockConfigEntry( - domain=FB_DOMAIN, - data=MOCK_CONFIG[FB_DOMAIN][CONF_DEVICES][0], - unique_id="any", - ) - entry.add_to_hass(hass) - fritz().update_devices.side_effect = [HTTPError(), ""] - - assert await hass.config_entries.async_setup(entry.entry_id) - assert fritz().update_devices.call_count == 2 - assert fritz().update_templates.call_count == 1 - assert fritz().get_devices.call_count == 1 - assert fritz().get_templates.call_count == 1 - assert fritz().login.call_count == 2 - - -async def test_coordinator_update_after_password_change( - hass: HomeAssistant, fritz: Mock -) -> None: - """Test coordinator after password change.""" - entry = MockConfigEntry( - domain=FB_DOMAIN, - data=MOCK_CONFIG[FB_DOMAIN][CONF_DEVICES][0], - unique_id="any", - ) - entry.add_to_hass(hass) - fritz().update_devices.side_effect = HTTPError() - fritz().login.side_effect = ["", LoginError("some_user")] - - assert not 
await hass.config_entries.async_setup(entry.entry_id) - assert fritz().update_devices.call_count == 1 - assert fritz().get_devices.call_count == 0 - assert fritz().get_templates.call_count == 0 - assert fritz().login.call_count == 2 - - -async def test_coordinator_update_when_unreachable( - hass: HomeAssistant, fritz: Mock -) -> None: - """Test coordinator after reboot.""" - entry = MockConfigEntry( - domain=FB_DOMAIN, - data=MOCK_CONFIG[FB_DOMAIN][CONF_DEVICES][0], - unique_id="any", - ) - entry.add_to_hass(hass) - fritz().update_devices.side_effect = [ConnectionError(), ""] - - assert not await hass.config_entries.async_setup(entry.entry_id) - assert entry.state is ConfigEntryState.SETUP_RETRY - - async def test_unload_remove(hass: HomeAssistant, fritz: Mock) -> None: """Test unload and remove of integration.""" fritz().get_devices.return_value = [FritzDeviceSwitchMock()] @@ -325,7 +271,7 @@ async def test_raise_config_entry_not_ready_when_offline(hass: HomeAssistant) -> entry.add_to_hass(hass) with patch( "homeassistant.components.fritzbox.Fritzhome.login", - side_effect=ConnectionError(), + side_effect=RequestConnectionError(), ) as mock_login: await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() From c8d52c02c511ea83f3dae20bf4e3ed66e5406185 Mon Sep 17 00:00:00 2001 From: Maciej Bieniek Date: Sat, 20 Apr 2024 12:31:20 +0200 Subject: [PATCH 194/426] Use snapshot testing in NextDNS (#115879) * Use snapshot testing in NextDNS sensor * Use snapshot testing in NextDNS switch * Use snapshot testing in NextDNS binary sensor * Use snapshot testing in NextDNS button --------- Co-authored-by: Maciej Bieniek <478555+bieniu@users.noreply.github.com> --- .../nextdns/snapshots/test_binary_sensor.ambr | 2277 ++++++++ .../nextdns/snapshots/test_button.ambr | 47 + .../nextdns/snapshots/test_sensor.ambr | 4749 +++++++++++++++++ .../nextdns/snapshots/test_switch.ambr | 4749 +++++++++++++++++ .../components/nextdns/test_binary_sensor.py | 36 +- 
tests/components/nextdns/test_button.py | 25 +- tests/components/nextdns/test_sensor.py | 274 +- tests/components/nextdns/test_switch.py | 606 +-- 8 files changed, 11880 insertions(+), 883 deletions(-) create mode 100644 tests/components/nextdns/snapshots/test_binary_sensor.ambr create mode 100644 tests/components/nextdns/snapshots/test_button.ambr create mode 100644 tests/components/nextdns/snapshots/test_sensor.ambr create mode 100644 tests/components/nextdns/snapshots/test_switch.ambr diff --git a/tests/components/nextdns/snapshots/test_binary_sensor.ambr b/tests/components/nextdns/snapshots/test_binary_sensor.ambr new file mode 100644 index 00000000000..bd4ecbba084 --- /dev/null +++ b/tests/components/nextdns/snapshots/test_binary_sensor.ambr @@ -0,0 +1,2277 @@ +# serializer version: 1 +# name: test_binary_Sensor[switch.fake_profile_ai_driven_threat_detection-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_ai_driven_threat_detection', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'AI-Driven threat detection', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'ai_threat_detection', + 'unique_id': 'xyz12_ai_threat_detection', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_Sensor[switch.fake_profile_ai_driven_threat_detection-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile AI-Driven threat detection', + }), + 'context': , + 'entity_id': 'switch.fake_profile_ai_driven_threat_detection', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + 
}) +# --- +# name: test_binary_Sensor[switch.fake_profile_allow_affiliate_tracking_links-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_allow_affiliate_tracking_links', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Allow affiliate & tracking links', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'allow_affiliate', + 'unique_id': 'xyz12_allow_affiliate', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_Sensor[switch.fake_profile_allow_affiliate_tracking_links-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Allow affiliate & tracking links', + }), + 'context': , + 'entity_id': 'switch.fake_profile_allow_affiliate_tracking_links', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_Sensor[switch.fake_profile_anonymized_edns_client_subnet-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_anonymized_edns_client_subnet', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Anonymized EDNS client subnet', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'anonymized_ecs', + 
'unique_id': 'xyz12_anonymized_ecs', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_Sensor[switch.fake_profile_anonymized_edns_client_subnet-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Anonymized EDNS client subnet', + }), + 'context': , + 'entity_id': 'switch.fake_profile_anonymized_edns_client_subnet', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_Sensor[switch.fake_profile_block_9gag-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': , + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_9gag', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block 9GAG', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_9gag', + 'unique_id': 'xyz12_block_9gag', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_Sensor[switch.fake_profile_block_bypass_methods-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_bypass_methods', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block bypass methods', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_bypass_methods', + 'unique_id': 
'xyz12_block_bypass_methods', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_Sensor[switch.fake_profile_block_bypass_methods-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block bypass methods', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_bypass_methods', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_Sensor[switch.fake_profile_block_child_sexual_abuse_material-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_child_sexual_abuse_material', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block child sexual abuse material', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_csam', + 'unique_id': 'xyz12_block_csam', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_Sensor[switch.fake_profile_block_child_sexual_abuse_material-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block child sexual abuse material', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_child_sexual_abuse_material', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_Sensor[switch.fake_profile_block_disguised_third_party_trackers-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 
'switch.fake_profile_block_disguised_third_party_trackers', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block disguised third-party trackers', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_disguised_trackers', + 'unique_id': 'xyz12_block_disguised_trackers', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_Sensor[switch.fake_profile_block_disguised_third_party_trackers-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block disguised third-party trackers', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_disguised_third_party_trackers', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_Sensor[switch.fake_profile_block_dynamic_dns_hostnames-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_dynamic_dns_hostnames', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block dynamic DNS hostnames', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_ddns', + 'unique_id': 'xyz12_block_ddns', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_Sensor[switch.fake_profile_block_dynamic_dns_hostnames-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block dynamic DNS hostnames', + }), + 'context': , + 'entity_id': 
'switch.fake_profile_block_dynamic_dns_hostnames', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_Sensor[switch.fake_profile_block_newly_registered_domains-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_newly_registered_domains', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block newly registered domains', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_nrd', + 'unique_id': 'xyz12_block_nrd', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_Sensor[switch.fake_profile_block_newly_registered_domains-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block newly registered domains', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_newly_registered_domains', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_Sensor[switch.fake_profile_block_page-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_page', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block page', + 'platform': 'nextdns', + 'previous_unique_id': None, + 
'supported_features': 0, + 'translation_key': 'block_page', + 'unique_id': 'xyz12_block_page', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_Sensor[switch.fake_profile_block_page-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block page', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_page', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_Sensor[switch.fake_profile_block_parked_domains-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_parked_domains', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block parked domains', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_parked_domains', + 'unique_id': 'xyz12_block_parked_domains', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_Sensor[switch.fake_profile_block_parked_domains-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block parked domains', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_parked_domains', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_Sensor[switch.fake_profile_cache_boost-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_cache_boost', + 
'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Cache boost', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'cache_boost', + 'unique_id': 'xyz12_cache_boost', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_Sensor[switch.fake_profile_cache_boost-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Cache boost', + }), + 'context': , + 'entity_id': 'switch.fake_profile_cache_boost', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_Sensor[switch.fake_profile_cname_flattening-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_cname_flattening', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'CNAME flattening', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'cname_flattening', + 'unique_id': 'xyz12_cname_flattening', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_Sensor[switch.fake_profile_cname_flattening-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile CNAME flattening', + }), + 'context': , + 'entity_id': 'switch.fake_profile_cname_flattening', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_Sensor[switch.fake_profile_cryptojacking_protection-entry] + 
EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_cryptojacking_protection', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Cryptojacking protection', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'cryptojacking_protection', + 'unique_id': 'xyz12_cryptojacking_protection', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_Sensor[switch.fake_profile_cryptojacking_protection-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Cryptojacking protection', + }), + 'context': , + 'entity_id': 'switch.fake_profile_cryptojacking_protection', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_Sensor[switch.fake_profile_dns_rebinding_protection-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_dns_rebinding_protection', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'DNS rebinding protection', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'dns_rebinding_protection', + 'unique_id': 'xyz12_dns_rebinding_protection', + 'unit_of_measurement': None, + }) +# --- +# name: 
test_binary_Sensor[switch.fake_profile_dns_rebinding_protection-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile DNS rebinding protection', + }), + 'context': , + 'entity_id': 'switch.fake_profile_dns_rebinding_protection', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_Sensor[switch.fake_profile_domain_generation_algorithms_protection-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_domain_generation_algorithms_protection', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Domain generation algorithms protection', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'dga_protection', + 'unique_id': 'xyz12_dga_protection', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_Sensor[switch.fake_profile_domain_generation_algorithms_protection-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Domain generation algorithms protection', + }), + 'context': , + 'entity_id': 'switch.fake_profile_domain_generation_algorithms_protection', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_Sensor[switch.fake_profile_force_safesearch-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_force_safesearch', + 
'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Force SafeSearch', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'safesearch', + 'unique_id': 'xyz12_safesearch', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_Sensor[switch.fake_profile_force_safesearch-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Force SafeSearch', + }), + 'context': , + 'entity_id': 'switch.fake_profile_force_safesearch', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_Sensor[switch.fake_profile_force_youtube_restricted_mode-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_force_youtube_restricted_mode', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Force YouTube restricted mode', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'youtube_restricted_mode', + 'unique_id': 'xyz12_youtube_restricted_mode', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_Sensor[switch.fake_profile_force_youtube_restricted_mode-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Force YouTube restricted mode', + }), + 'context': , + 'entity_id': 'switch.fake_profile_force_youtube_restricted_mode', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) 
+# --- +# name: test_binary_Sensor[switch.fake_profile_google_safe_browsing-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_google_safe_browsing', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Google safe browsing', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'google_safe_browsing', + 'unique_id': 'xyz12_google_safe_browsing', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_Sensor[switch.fake_profile_google_safe_browsing-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Google safe browsing', + }), + 'context': , + 'entity_id': 'switch.fake_profile_google_safe_browsing', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_Sensor[switch.fake_profile_idn_homograph_attacks_protection-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_idn_homograph_attacks_protection', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'IDN homograph attacks protection', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'idn_homograph_attacks_protection', + 'unique_id': 
'xyz12_idn_homograph_attacks_protection', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_Sensor[switch.fake_profile_idn_homograph_attacks_protection-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile IDN homograph attacks protection', + }), + 'context': , + 'entity_id': 'switch.fake_profile_idn_homograph_attacks_protection', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_Sensor[switch.fake_profile_logs-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_logs', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Logs', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'logs', + 'unique_id': 'xyz12_logs', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_Sensor[switch.fake_profile_logs-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Logs', + }), + 'context': , + 'entity_id': 'switch.fake_profile_logs', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_Sensor[switch.fake_profile_threat_intelligence_feeds-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_threat_intelligence_feeds', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 
'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Threat intelligence feeds', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'threat_intelligence_feeds', + 'unique_id': 'xyz12_threat_intelligence_feeds', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_Sensor[switch.fake_profile_threat_intelligence_feeds-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Threat intelligence feeds', + }), + 'context': , + 'entity_id': 'switch.fake_profile_threat_intelligence_feeds', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_Sensor[switch.fake_profile_typosquatting_protection-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_typosquatting_protection', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Typosquatting protection', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'typosquatting_protection', + 'unique_id': 'xyz12_typosquatting_protection', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_Sensor[switch.fake_profile_typosquatting_protection-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Typosquatting protection', + }), + 'context': , + 'entity_id': 'switch.fake_profile_typosquatting_protection', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: 
test_binary_Sensor[switch.fake_profile_web3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_web3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Web3', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'web3', + 'unique_id': 'xyz12_web3', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_Sensor[switch.fake_profile_web3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Web3', + }), + 'context': , + 'entity_id': 'switch.fake_profile_web3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_sensor[binary_sensor.fake_profile_device_connection_status-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.fake_profile_device_connection_status', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Device connection status', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'device_connection_status', + 'unique_id': 'xyz12_this_device_nextdns_connection_status', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[binary_sensor.fake_profile_device_connection_status-state] + 
StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'connectivity', + 'friendly_name': 'Fake Profile Device connection status', + }), + 'context': , + 'entity_id': 'binary_sensor.fake_profile_device_connection_status', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_sensor[binary_sensor.fake_profile_device_profile_connection_status-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.fake_profile_device_profile_connection_status', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Device profile connection status', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'device_profile_connection_status', + 'unique_id': 'xyz12_this_device_profile_connection_status', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[binary_sensor.fake_profile_device_profile_connection_status-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'connectivity', + 'friendly_name': 'Fake Profile Device profile connection status', + }), + 'context': , + 'entity_id': 'binary_sensor.fake_profile_device_profile_connection_status', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensor[switch.fake_profile_ai_driven_threat_detection-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 
'switch.fake_profile_ai_driven_threat_detection', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'AI-Driven threat detection', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'ai_threat_detection', + 'unique_id': 'xyz12_ai_threat_detection', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[switch.fake_profile_ai_driven_threat_detection-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile AI-Driven threat detection', + }), + 'context': , + 'entity_id': 'switch.fake_profile_ai_driven_threat_detection', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_sensor[switch.fake_profile_allow_affiliate_tracking_links-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_allow_affiliate_tracking_links', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Allow affiliate & tracking links', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'allow_affiliate', + 'unique_id': 'xyz12_allow_affiliate', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[switch.fake_profile_allow_affiliate_tracking_links-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Allow affiliate & tracking links', + }), + 'context': , + 'entity_id': 
'switch.fake_profile_allow_affiliate_tracking_links', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_sensor[switch.fake_profile_anonymized_edns_client_subnet-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_anonymized_edns_client_subnet', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Anonymized EDNS client subnet', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'anonymized_ecs', + 'unique_id': 'xyz12_anonymized_ecs', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[switch.fake_profile_anonymized_edns_client_subnet-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Anonymized EDNS client subnet', + }), + 'context': , + 'entity_id': 'switch.fake_profile_anonymized_edns_client_subnet', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_sensor[switch.fake_profile_block_9gag-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': , + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_9gag', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block 9GAG', + 'platform': 'nextdns', + 'previous_unique_id': None, + 
'supported_features': 0, + 'translation_key': 'block_9gag', + 'unique_id': 'xyz12_block_9gag', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[switch.fake_profile_block_bypass_methods-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_bypass_methods', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block bypass methods', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_bypass_methods', + 'unique_id': 'xyz12_block_bypass_methods', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[switch.fake_profile_block_bypass_methods-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block bypass methods', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_bypass_methods', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_sensor[switch.fake_profile_block_child_sexual_abuse_material-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_child_sexual_abuse_material', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block child sexual abuse material', + 'platform': 'nextdns', + 
'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_csam', + 'unique_id': 'xyz12_block_csam', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[switch.fake_profile_block_child_sexual_abuse_material-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block child sexual abuse material', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_child_sexual_abuse_material', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_sensor[switch.fake_profile_block_disguised_third_party_trackers-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_disguised_third_party_trackers', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block disguised third-party trackers', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_disguised_trackers', + 'unique_id': 'xyz12_block_disguised_trackers', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[switch.fake_profile_block_disguised_third_party_trackers-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block disguised third-party trackers', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_disguised_third_party_trackers', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_sensor[switch.fake_profile_block_dynamic_dns_hostnames-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 
'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_dynamic_dns_hostnames', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block dynamic DNS hostnames', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_ddns', + 'unique_id': 'xyz12_block_ddns', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[switch.fake_profile_block_dynamic_dns_hostnames-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block dynamic DNS hostnames', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_dynamic_dns_hostnames', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_sensor[switch.fake_profile_block_newly_registered_domains-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_newly_registered_domains', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block newly registered domains', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_nrd', + 'unique_id': 'xyz12_block_nrd', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[switch.fake_profile_block_newly_registered_domains-state] + StateSnapshot({ + 'attributes': 
ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block newly registered domains', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_newly_registered_domains', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_sensor[switch.fake_profile_block_page-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_page', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block page', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_page', + 'unique_id': 'xyz12_block_page', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[switch.fake_profile_block_page-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block page', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_page', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensor[switch.fake_profile_block_parked_domains-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_parked_domains', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block parked domains', + 'platform': 'nextdns', + 
'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_parked_domains', + 'unique_id': 'xyz12_block_parked_domains', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[switch.fake_profile_block_parked_domains-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block parked domains', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_parked_domains', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_sensor[switch.fake_profile_cache_boost-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_cache_boost', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Cache boost', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'cache_boost', + 'unique_id': 'xyz12_cache_boost', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[switch.fake_profile_cache_boost-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Cache boost', + }), + 'context': , + 'entity_id': 'switch.fake_profile_cache_boost', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_sensor[switch.fake_profile_cname_flattening-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_cname_flattening', 
+ 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'CNAME flattening', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'cname_flattening', + 'unique_id': 'xyz12_cname_flattening', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[switch.fake_profile_cname_flattening-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile CNAME flattening', + }), + 'context': , + 'entity_id': 'switch.fake_profile_cname_flattening', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_sensor[switch.fake_profile_cryptojacking_protection-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_cryptojacking_protection', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Cryptojacking protection', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'cryptojacking_protection', + 'unique_id': 'xyz12_cryptojacking_protection', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[switch.fake_profile_cryptojacking_protection-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Cryptojacking protection', + }), + 'context': , + 'entity_id': 'switch.fake_profile_cryptojacking_protection', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: 
test_binary_sensor[switch.fake_profile_dns_rebinding_protection-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_dns_rebinding_protection', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'DNS rebinding protection', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'dns_rebinding_protection', + 'unique_id': 'xyz12_dns_rebinding_protection', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[switch.fake_profile_dns_rebinding_protection-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile DNS rebinding protection', + }), + 'context': , + 'entity_id': 'switch.fake_profile_dns_rebinding_protection', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_sensor[switch.fake_profile_domain_generation_algorithms_protection-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_domain_generation_algorithms_protection', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Domain generation algorithms protection', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'dga_protection', + 'unique_id': 
'xyz12_dga_protection', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[switch.fake_profile_domain_generation_algorithms_protection-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Domain generation algorithms protection', + }), + 'context': , + 'entity_id': 'switch.fake_profile_domain_generation_algorithms_protection', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_sensor[switch.fake_profile_force_safesearch-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_force_safesearch', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Force SafeSearch', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'safesearch', + 'unique_id': 'xyz12_safesearch', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[switch.fake_profile_force_safesearch-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Force SafeSearch', + }), + 'context': , + 'entity_id': 'switch.fake_profile_force_safesearch', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensor[switch.fake_profile_force_youtube_restricted_mode-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_force_youtube_restricted_mode', + 
'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Force YouTube restricted mode', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'youtube_restricted_mode', + 'unique_id': 'xyz12_youtube_restricted_mode', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[switch.fake_profile_force_youtube_restricted_mode-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Force YouTube restricted mode', + }), + 'context': , + 'entity_id': 'switch.fake_profile_force_youtube_restricted_mode', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensor[switch.fake_profile_google_safe_browsing-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_google_safe_browsing', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Google safe browsing', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'google_safe_browsing', + 'unique_id': 'xyz12_google_safe_browsing', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[switch.fake_profile_google_safe_browsing-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Google safe browsing', + }), + 'context': , + 'entity_id': 'switch.fake_profile_google_safe_browsing', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 
'state': 'off', + }) +# --- +# name: test_binary_sensor[switch.fake_profile_idn_homograph_attacks_protection-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_idn_homograph_attacks_protection', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'IDN homograph attacks protection', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'idn_homograph_attacks_protection', + 'unique_id': 'xyz12_idn_homograph_attacks_protection', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[switch.fake_profile_idn_homograph_attacks_protection-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile IDN homograph attacks protection', + }), + 'context': , + 'entity_id': 'switch.fake_profile_idn_homograph_attacks_protection', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_sensor[switch.fake_profile_logs-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_logs', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Logs', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'logs', + 'unique_id': 'xyz12_logs', + 
'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[switch.fake_profile_logs-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Logs', + }), + 'context': , + 'entity_id': 'switch.fake_profile_logs', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_sensor[switch.fake_profile_threat_intelligence_feeds-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_threat_intelligence_feeds', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Threat intelligence feeds', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'threat_intelligence_feeds', + 'unique_id': 'xyz12_threat_intelligence_feeds', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[switch.fake_profile_threat_intelligence_feeds-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Threat intelligence feeds', + }), + 'context': , + 'entity_id': 'switch.fake_profile_threat_intelligence_feeds', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_sensor[switch.fake_profile_typosquatting_protection-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_typosquatting_protection', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 
'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Typosquatting protection', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'typosquatting_protection', + 'unique_id': 'xyz12_typosquatting_protection', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[switch.fake_profile_typosquatting_protection-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Typosquatting protection', + }), + 'context': , + 'entity_id': 'switch.fake_profile_typosquatting_protection', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_sensor[switch.fake_profile_web3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_web3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Web3', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'web3', + 'unique_id': 'xyz12_web3', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[switch.fake_profile_web3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Web3', + }), + 'context': , + 'entity_id': 'switch.fake_profile_web3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- diff --git a/tests/components/nextdns/snapshots/test_button.ambr b/tests/components/nextdns/snapshots/test_button.ambr new file mode 100644 index 00000000000..32dc31eea19 --- /dev/null +++ 
b/tests/components/nextdns/snapshots/test_button.ambr @@ -0,0 +1,47 @@ +# serializer version: 1 +# name: test_button[button.fake_profile_clear_logs-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.fake_profile_clear_logs', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Clear logs', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'clear_logs', + 'unique_id': 'xyz12_clear_logs', + 'unit_of_measurement': None, + }) +# --- +# name: test_button[button.fake_profile_clear_logs-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Clear logs', + }), + 'context': , + 'entity_id': 'button.fake_profile_clear_logs', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- diff --git a/tests/components/nextdns/snapshots/test_sensor.ambr b/tests/components/nextdns/snapshots/test_sensor.ambr new file mode 100644 index 00000000000..34b40433e3b --- /dev/null +++ b/tests/components/nextdns/snapshots/test_sensor.ambr @@ -0,0 +1,4749 @@ +# serializer version: 1 +# name: test_sensor[binary_sensor.fake_profile_device_connection_status-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.fake_profile_device_connection_status', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 
'original_device_class': , + 'original_icon': None, + 'original_name': 'Device connection status', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'device_connection_status', + 'unique_id': 'xyz12_this_device_nextdns_connection_status', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[binary_sensor.fake_profile_device_connection_status-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'connectivity', + 'friendly_name': 'Fake Profile Device connection status', + }), + 'context': , + 'entity_id': 'binary_sensor.fake_profile_device_connection_status', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[binary_sensor.fake_profile_device_profile_connection_status-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.fake_profile_device_profile_connection_status', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Device profile connection status', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'device_profile_connection_status', + 'unique_id': 'xyz12_this_device_profile_connection_status', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[binary_sensor.fake_profile_device_profile_connection_status-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'connectivity', + 'friendly_name': 'Fake Profile Device profile connection status', + }), + 'context': , + 'entity_id': 'binary_sensor.fake_profile_device_profile_connection_status', + 'last_changed': , + 
'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_sensor[button.fake_profile_clear_logs-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.fake_profile_clear_logs', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Clear logs', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'clear_logs', + 'unique_id': 'xyz12_clear_logs', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[button.fake_profile_clear_logs-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Clear logs', + }), + 'context': , + 'entity_id': 'button.fake_profile_clear_logs', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_sensor[sensor.fake_profile_dns_over_http_3_queries-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.fake_profile_dns_over_http_3_queries', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'DNS-over-HTTP/3 queries', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'doh3_queries', + 'unique_id': 'xyz12_doh3_queries', + 'unit_of_measurement': 'queries', + }) +# --- +# 
name: test_sensor[sensor.fake_profile_dns_over_http_3_queries-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile DNS-over-HTTP/3 queries', + 'state_class': , + 'unit_of_measurement': 'queries', + }), + 'context': , + 'entity_id': 'sensor.fake_profile_dns_over_http_3_queries', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '15', + }) +# --- +# name: test_sensor[sensor.fake_profile_dns_over_http_3_queries_ratio-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.fake_profile_dns_over_http_3_queries_ratio', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'DNS-over-HTTP/3 queries ratio', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'doh3_queries_ratio', + 'unique_id': 'xyz12_doh3_queries_ratio', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensor[sensor.fake_profile_dns_over_http_3_queries_ratio-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile DNS-over-HTTP/3 queries ratio', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.fake_profile_dns_over_http_3_queries_ratio', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '13.0', + }) +# --- +# name: test_sensor[sensor.fake_profile_dns_over_https_queries-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 
'entity_category': , + 'entity_id': 'sensor.fake_profile_dns_over_https_queries', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'DNS-over-HTTPS queries', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'doh_queries', + 'unique_id': 'xyz12_doh_queries', + 'unit_of_measurement': 'queries', + }) +# --- +# name: test_sensor[sensor.fake_profile_dns_over_https_queries-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile DNS-over-HTTPS queries', + 'state_class': , + 'unit_of_measurement': 'queries', + }), + 'context': , + 'entity_id': 'sensor.fake_profile_dns_over_https_queries', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '20', + }) +# --- +# name: test_sensor[sensor.fake_profile_dns_over_https_queries_ratio-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.fake_profile_dns_over_https_queries_ratio', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'DNS-over-HTTPS queries ratio', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'doh_queries_ratio', + 'unique_id': 'xyz12_doh_queries_ratio', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensor[sensor.fake_profile_dns_over_https_queries_ratio-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile DNS-over-HTTPS queries ratio', + 'state_class': , 
+ 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.fake_profile_dns_over_https_queries_ratio', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '17.4', + }) +# --- +# name: test_sensor[sensor.fake_profile_dns_over_quic_queries-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.fake_profile_dns_over_quic_queries', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'DNS-over-QUIC queries', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'doq_queries', + 'unique_id': 'xyz12_doq_queries', + 'unit_of_measurement': 'queries', + }) +# --- +# name: test_sensor[sensor.fake_profile_dns_over_quic_queries-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile DNS-over-QUIC queries', + 'state_class': , + 'unit_of_measurement': 'queries', + }), + 'context': , + 'entity_id': 'sensor.fake_profile_dns_over_quic_queries', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '10', + }) +# --- +# name: test_sensor[sensor.fake_profile_dns_over_quic_queries_ratio-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.fake_profile_dns_over_quic_queries_ratio', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 
'original_device_class': None, + 'original_icon': None, + 'original_name': 'DNS-over-QUIC queries ratio', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'doq_queries_ratio', + 'unique_id': 'xyz12_doq_queries_ratio', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensor[sensor.fake_profile_dns_over_quic_queries_ratio-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile DNS-over-QUIC queries ratio', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.fake_profile_dns_over_quic_queries_ratio', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '8.7', + }) +# --- +# name: test_sensor[sensor.fake_profile_dns_over_tls_queries-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.fake_profile_dns_over_tls_queries', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'DNS-over-TLS queries', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'dot_queries', + 'unique_id': 'xyz12_dot_queries', + 'unit_of_measurement': 'queries', + }) +# --- +# name: test_sensor[sensor.fake_profile_dns_over_tls_queries-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile DNS-over-TLS queries', + 'state_class': , + 'unit_of_measurement': 'queries', + }), + 'context': , + 'entity_id': 'sensor.fake_profile_dns_over_tls_queries', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '30', + }) +# --- +# name: 
test_sensor[sensor.fake_profile_dns_over_tls_queries_ratio-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.fake_profile_dns_over_tls_queries_ratio', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'DNS-over-TLS queries ratio', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'dot_queries_ratio', + 'unique_id': 'xyz12_dot_queries_ratio', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensor[sensor.fake_profile_dns_over_tls_queries_ratio-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile DNS-over-TLS queries ratio', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.fake_profile_dns_over_tls_queries_ratio', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '26.1', + }) +# --- +# name: test_sensor[sensor.fake_profile_dns_queries-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.fake_profile_dns_queries', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'DNS queries', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'all_queries', + 'unique_id': 
'xyz12_all_queries', + 'unit_of_measurement': 'queries', + }) +# --- +# name: test_sensor[sensor.fake_profile_dns_queries-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile DNS queries', + 'state_class': , + 'unit_of_measurement': 'queries', + }), + 'context': , + 'entity_id': 'sensor.fake_profile_dns_queries', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '100', + }) +# --- +# name: test_sensor[sensor.fake_profile_dns_queries_blocked-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.fake_profile_dns_queries_blocked', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'DNS queries blocked', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'blocked_queries', + 'unique_id': 'xyz12_blocked_queries', + 'unit_of_measurement': 'queries', + }) +# --- +# name: test_sensor[sensor.fake_profile_dns_queries_blocked-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile DNS queries blocked', + 'state_class': , + 'unit_of_measurement': 'queries', + }), + 'context': , + 'entity_id': 'sensor.fake_profile_dns_queries_blocked', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '20', + }) +# --- +# name: test_sensor[sensor.fake_profile_dns_queries_blocked_ratio-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 
'entity_category': , + 'entity_id': 'sensor.fake_profile_dns_queries_blocked_ratio', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'DNS queries blocked ratio', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'blocked_queries_ratio', + 'unique_id': 'xyz12_blocked_queries_ratio', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensor[sensor.fake_profile_dns_queries_blocked_ratio-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile DNS queries blocked ratio', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.fake_profile_dns_queries_blocked_ratio', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '20.0', + }) +# --- +# name: test_sensor[sensor.fake_profile_dns_queries_relayed-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.fake_profile_dns_queries_relayed', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'DNS queries relayed', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'relayed_queries', + 'unique_id': 'xyz12_relayed_queries', + 'unit_of_measurement': 'queries', + }) +# --- +# name: test_sensor[sensor.fake_profile_dns_queries_relayed-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile DNS queries relayed', + 'state_class': , + 
'unit_of_measurement': 'queries', + }), + 'context': , + 'entity_id': 'sensor.fake_profile_dns_queries_relayed', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '10', + }) +# --- +# name: test_sensor[sensor.fake_profile_dnssec_not_validated_queries-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.fake_profile_dnssec_not_validated_queries', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'DNSSEC not validated queries', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'not_validated_queries', + 'unique_id': 'xyz12_not_validated_queries', + 'unit_of_measurement': 'queries', + }) +# --- +# name: test_sensor[sensor.fake_profile_dnssec_not_validated_queries-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile DNSSEC not validated queries', + 'state_class': , + 'unit_of_measurement': 'queries', + }), + 'context': , + 'entity_id': 'sensor.fake_profile_dnssec_not_validated_queries', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '25', + }) +# --- +# name: test_sensor[sensor.fake_profile_dnssec_validated_queries-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.fake_profile_dnssec_validated_queries', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 
'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'DNSSEC validated queries', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'validated_queries', + 'unique_id': 'xyz12_validated_queries', + 'unit_of_measurement': 'queries', + }) +# --- +# name: test_sensor[sensor.fake_profile_dnssec_validated_queries-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile DNSSEC validated queries', + 'state_class': , + 'unit_of_measurement': 'queries', + }), + 'context': , + 'entity_id': 'sensor.fake_profile_dnssec_validated_queries', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '75', + }) +# --- +# name: test_sensor[sensor.fake_profile_dnssec_validated_queries_ratio-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.fake_profile_dnssec_validated_queries_ratio', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'DNSSEC validated queries ratio', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'validated_queries_ratio', + 'unique_id': 'xyz12_validated_queries_ratio', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensor[sensor.fake_profile_dnssec_validated_queries_ratio-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile DNSSEC validated queries ratio', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.fake_profile_dnssec_validated_queries_ratio', + 'last_changed': , 
+ 'last_reported': , + 'last_updated': , + 'state': '75.0', + }) +# --- +# name: test_sensor[sensor.fake_profile_encrypted_queries-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.fake_profile_encrypted_queries', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Encrypted queries', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'encrypted_queries', + 'unique_id': 'xyz12_encrypted_queries', + 'unit_of_measurement': 'queries', + }) +# --- +# name: test_sensor[sensor.fake_profile_encrypted_queries-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Encrypted queries', + 'state_class': , + 'unit_of_measurement': 'queries', + }), + 'context': , + 'entity_id': 'sensor.fake_profile_encrypted_queries', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '60', + }) +# --- +# name: test_sensor[sensor.fake_profile_encrypted_queries_ratio-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.fake_profile_encrypted_queries_ratio', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Encrypted queries ratio', + 'platform': 'nextdns', + 'previous_unique_id': None, + 
'supported_features': 0, + 'translation_key': 'encrypted_queries_ratio', + 'unique_id': 'xyz12_encrypted_queries_ratio', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensor[sensor.fake_profile_encrypted_queries_ratio-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Encrypted queries ratio', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.fake_profile_encrypted_queries_ratio', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '60.0', + }) +# --- +# name: test_sensor[sensor.fake_profile_ipv4_queries-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.fake_profile_ipv4_queries', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'IPv4 queries', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'ipv4_queries', + 'unique_id': 'xyz12_ipv4_queries', + 'unit_of_measurement': 'queries', + }) +# --- +# name: test_sensor[sensor.fake_profile_ipv4_queries-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile IPv4 queries', + 'state_class': , + 'unit_of_measurement': 'queries', + }), + 'context': , + 'entity_id': 'sensor.fake_profile_ipv4_queries', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '90', + }) +# --- +# name: test_sensor[sensor.fake_profile_ipv6_queries-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , 
+ 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.fake_profile_ipv6_queries', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'IPv6 queries', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'ipv6_queries', + 'unique_id': 'xyz12_ipv6_queries', + 'unit_of_measurement': 'queries', + }) +# --- +# name: test_sensor[sensor.fake_profile_ipv6_queries-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile IPv6 queries', + 'state_class': , + 'unit_of_measurement': 'queries', + }), + 'context': , + 'entity_id': 'sensor.fake_profile_ipv6_queries', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '10', + }) +# --- +# name: test_sensor[sensor.fake_profile_ipv6_queries_ratio-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.fake_profile_ipv6_queries_ratio', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'IPv6 queries ratio', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'ipv6_queries_ratio', + 'unique_id': 'xyz12_ipv6_queries_ratio', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensor[sensor.fake_profile_ipv6_queries_ratio-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile IPv6 queries ratio', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': 
, + 'entity_id': 'sensor.fake_profile_ipv6_queries_ratio', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '10.0', + }) +# --- +# name: test_sensor[sensor.fake_profile_tcp_queries-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.fake_profile_tcp_queries', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'TCP queries', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'tcp_queries', + 'unique_id': 'xyz12_tcp_queries', + 'unit_of_measurement': 'queries', + }) +# --- +# name: test_sensor[sensor.fake_profile_tcp_queries-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile TCP queries', + 'state_class': , + 'unit_of_measurement': 'queries', + }), + 'context': , + 'entity_id': 'sensor.fake_profile_tcp_queries', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_sensor[sensor.fake_profile_tcp_queries_ratio-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.fake_profile_tcp_queries_ratio', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'TCP queries ratio', + 'platform': 'nextdns', + 'previous_unique_id': None, + 
'supported_features': 0, + 'translation_key': 'tcp_queries_ratio', + 'unique_id': 'xyz12_tcp_queries_ratio', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensor[sensor.fake_profile_tcp_queries_ratio-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile TCP queries ratio', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.fake_profile_tcp_queries_ratio', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_sensor[sensor.fake_profile_udp_queries-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.fake_profile_udp_queries', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'UDP queries', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'udp_queries', + 'unique_id': 'xyz12_udp_queries', + 'unit_of_measurement': 'queries', + }) +# --- +# name: test_sensor[sensor.fake_profile_udp_queries-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile UDP queries', + 'state_class': , + 'unit_of_measurement': 'queries', + }), + 'context': , + 'entity_id': 'sensor.fake_profile_udp_queries', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '40', + }) +# --- +# name: test_sensor[sensor.fake_profile_udp_queries_ratio-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 
'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.fake_profile_udp_queries_ratio', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'UDP queries ratio', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'udp_queries_ratio', + 'unique_id': 'xyz12_udp_queries_ratio', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensor[sensor.fake_profile_udp_queries_ratio-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile UDP queries ratio', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.fake_profile_udp_queries_ratio', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '34.8', + }) +# --- +# name: test_sensor[sensor.fake_profile_unencrypted_queries-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.fake_profile_unencrypted_queries', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Unencrypted queries', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'unencrypted_queries', + 'unique_id': 'xyz12_unencrypted_queries', + 'unit_of_measurement': 'queries', + }) +# --- +# name: test_sensor[sensor.fake_profile_unencrypted_queries-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Unencrypted queries', + 'state_class': , + 'unit_of_measurement': 'queries', 
+ }), + 'context': , + 'entity_id': 'sensor.fake_profile_unencrypted_queries', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '40', + }) +# --- +# name: test_sensor[switch.fake_profile_ai_driven_threat_detection-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_ai_driven_threat_detection', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'AI-Driven threat detection', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'ai_threat_detection', + 'unique_id': 'xyz12_ai_threat_detection', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_ai_driven_threat_detection-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile AI-Driven threat detection', + }), + 'context': , + 'entity_id': 'switch.fake_profile_ai_driven_threat_detection', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_allow_affiliate_tracking_links-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_allow_affiliate_tracking_links', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Allow affiliate & tracking links', + 
'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'allow_affiliate', + 'unique_id': 'xyz12_allow_affiliate', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_allow_affiliate_tracking_links-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Allow affiliate & tracking links', + }), + 'context': , + 'entity_id': 'switch.fake_profile_allow_affiliate_tracking_links', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_anonymized_edns_client_subnet-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_anonymized_edns_client_subnet', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Anonymized EDNS client subnet', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'anonymized_ecs', + 'unique_id': 'xyz12_anonymized_ecs', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_anonymized_edns_client_subnet-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Anonymized EDNS client subnet', + }), + 'context': , + 'entity_id': 'switch.fake_profile_anonymized_edns_client_subnet', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_9gag-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 
'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_9gag', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block 9GAG', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_9gag', + 'unique_id': 'xyz12_block_9gag', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_9gag-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block 9GAG', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_9gag', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_amazon-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_amazon', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block Amazon', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_amazon', + 'unique_id': 'xyz12_block_amazon', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_amazon-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block Amazon', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_amazon', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: 
test_sensor[switch.fake_profile_block_bereal-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_bereal', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block BeReal', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_bereal', + 'unique_id': 'xyz12_block_bereal', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_bereal-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block BeReal', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_bereal', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_blizzard-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_blizzard', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block Blizzard', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_blizzard', + 'unique_id': 'xyz12_block_blizzard', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_blizzard-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 
'friendly_name': 'Fake Profile Block Blizzard', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_blizzard', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_bypass_methods-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_bypass_methods', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block bypass methods', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_bypass_methods', + 'unique_id': 'xyz12_block_bypass_methods', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_bypass_methods-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block bypass methods', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_bypass_methods', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_chatgpt-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_chatgpt', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block ChatGPT', + 'platform': 'nextdns', + 
'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_chatgpt', + 'unique_id': 'xyz12_block_chatgpt', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_chatgpt-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block ChatGPT', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_chatgpt', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_child_sexual_abuse_material-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_child_sexual_abuse_material', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block child sexual abuse material', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_csam', + 'unique_id': 'xyz12_block_csam', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_child_sexual_abuse_material-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block child sexual abuse material', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_child_sexual_abuse_material', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_dailymotion-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 
'entity_category': , + 'entity_id': 'switch.fake_profile_block_dailymotion', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block Dailymotion', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_dailymotion', + 'unique_id': 'xyz12_block_dailymotion', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_dailymotion-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block Dailymotion', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_dailymotion', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_dating-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_dating', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block dating', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_dating', + 'unique_id': 'xyz12_block_dating', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_dating-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block dating', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_dating', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: 
test_sensor[switch.fake_profile_block_discord-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_discord', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block Discord', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_discord', + 'unique_id': 'xyz12_block_discord', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_discord-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block Discord', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_discord', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_disguised_third_party_trackers-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_disguised_third_party_trackers', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block disguised third-party trackers', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_disguised_trackers', + 'unique_id': 'xyz12_block_disguised_trackers', + 'unit_of_measurement': None, + }) +# --- +# name: 
test_sensor[switch.fake_profile_block_disguised_third_party_trackers-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block disguised third-party trackers', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_disguised_third_party_trackers', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_disney_plus-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_disney_plus', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block Disney Plus', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_disneyplus', + 'unique_id': 'xyz12_block_disneyplus', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_disney_plus-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block Disney Plus', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_disney_plus', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_dynamic_dns_hostnames-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_dynamic_dns_hostnames', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': 
None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block dynamic DNS hostnames', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_ddns', + 'unique_id': 'xyz12_block_ddns', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_dynamic_dns_hostnames-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block dynamic DNS hostnames', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_dynamic_dns_hostnames', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_ebay-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_ebay', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block eBay', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_ebay', + 'unique_id': 'xyz12_block_ebay', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_ebay-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block eBay', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_ebay', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_facebook-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 
'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_facebook', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block Facebook', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_facebook', + 'unique_id': 'xyz12_block_facebook', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_facebook-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block Facebook', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_facebook', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_fortnite-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_fortnite', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block Fortnite', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_fortnite', + 'unique_id': 'xyz12_block_fortnite', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_fortnite-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block Fortnite', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_fortnite', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', 
+ }) +# --- +# name: test_sensor[switch.fake_profile_block_gambling-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_gambling', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block gambling', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_gambling', + 'unique_id': 'xyz12_block_gambling', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_gambling-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block gambling', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_gambling', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_google_chat-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_google_chat', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block Google Chat', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_google_chat', + 'unique_id': 'xyz12_block_google_chat', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_google_chat-state] + 
StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block Google Chat', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_google_chat', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_hbo_max-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_hbo_max', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block HBO Max', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_hbomax', + 'unique_id': 'xyz12_block_hbomax', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_hbo_max-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block HBO Max', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_hbo_max', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_hulu-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_hulu', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block Hulu', + 'platform': 'nextdns', + 'previous_unique_id': None, + 
'supported_features': 0, + 'translation_key': None, + 'unique_id': 'xyz12_block_hulu', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_hulu-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block Hulu', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_hulu', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_imgur-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_imgur', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block Imgur', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_imgur', + 'unique_id': 'xyz12_block_imgur', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_imgur-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block Imgur', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_imgur', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_instagram-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_instagram', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 
'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block Instagram', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_instagram', + 'unique_id': 'xyz12_block_instagram', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_instagram-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block Instagram', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_instagram', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_league_of_legends-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_league_of_legends', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block League of Legends', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_leagueoflegends', + 'unique_id': 'xyz12_block_leagueoflegends', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_league_of_legends-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block League of Legends', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_league_of_legends', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_mastodon-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 
'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_mastodon', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block Mastodon', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_mastodon', + 'unique_id': 'xyz12_block_mastodon', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_mastodon-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block Mastodon', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_mastodon', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_messenger-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_messenger', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block Messenger', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_messenger', + 'unique_id': 'xyz12_block_messenger', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_messenger-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block Messenger', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_messenger', + 'last_changed': , + 
'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_minecraft-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_minecraft', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block Minecraft', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_minecraft', + 'unique_id': 'xyz12_block_minecraft', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_minecraft-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block Minecraft', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_minecraft', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_netflix-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_netflix', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block Netflix', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_netflix', + 'unique_id': 'xyz12_block_netflix', + 'unit_of_measurement': None, + }) +# --- +# name: 
test_sensor[switch.fake_profile_block_netflix-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block Netflix', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_netflix', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_newly_registered_domains-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_newly_registered_domains', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block newly registered domains', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_nrd', + 'unique_id': 'xyz12_block_nrd', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_newly_registered_domains-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block newly registered domains', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_newly_registered_domains', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_online_gaming-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_online_gaming', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': 
dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block online gaming', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_online_gaming', + 'unique_id': 'xyz12_block_online_gaming', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_online_gaming-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block online gaming', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_online_gaming', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_page-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_page', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block page', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_page', + 'unique_id': 'xyz12_block_page', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_page-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block page', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_page', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_sensor[switch.fake_profile_block_parked_domains-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': 
None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_parked_domains', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block parked domains', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_parked_domains', + 'unique_id': 'xyz12_block_parked_domains', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_parked_domains-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block parked domains', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_parked_domains', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_pinterest-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_pinterest', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block Pinterest', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_pinterest', + 'unique_id': 'xyz12_block_pinterest', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_pinterest-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block Pinterest', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_pinterest', + 'last_changed': , + 'last_reported': , + 'last_updated': 
, + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_piracy-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_piracy', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block piracy', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_piracy', + 'unique_id': 'xyz12_block_piracy', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_piracy-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block piracy', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_piracy', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_playstation_network-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_playstation_network', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block PlayStation Network', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_playstation_network', + 'unique_id': 'xyz12_block_playstation_network', + 'unit_of_measurement': None, + }) +# --- +# name: 
test_sensor[switch.fake_profile_block_playstation_network-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block PlayStation Network', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_playstation_network', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_porn-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_porn', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block porn', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_porn', + 'unique_id': 'xyz12_block_porn', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_porn-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block porn', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_porn', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_prime_video-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_prime_video', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 
'original_name': 'Block Prime Video', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_primevideo', + 'unique_id': 'xyz12_block_primevideo', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_prime_video-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block Prime Video', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_prime_video', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_reddit-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_reddit', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block Reddit', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_reddit', + 'unique_id': 'xyz12_block_reddit', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_reddit-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block Reddit', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_reddit', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_roblox-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 
'switch.fake_profile_block_roblox', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block Roblox', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_roblox', + 'unique_id': 'xyz12_block_roblox', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_roblox-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block Roblox', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_roblox', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_signal-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_signal', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block Signal', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_signal', + 'unique_id': 'xyz12_block_signal', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_signal-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block Signal', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_signal', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_skype-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + 
}), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_skype', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block Skype', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_skype', + 'unique_id': 'xyz12_block_skype', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_skype-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block Skype', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_skype', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_snapchat-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_snapchat', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block Snapchat', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_snapchat', + 'unique_id': 'xyz12_block_snapchat', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_snapchat-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block Snapchat', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_snapchat', + 
'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_social_networks-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_social_networks', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block social networks', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_social_networks', + 'unique_id': 'xyz12_block_social_networks', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_social_networks-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block social networks', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_social_networks', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_spotify-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_spotify', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block Spotify', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_spotify', + 'unique_id': 'xyz12_block_spotify', + 
'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_spotify-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block Spotify', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_spotify', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_steam-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_steam', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block Steam', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_steam', + 'unique_id': 'xyz12_block_steam', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_steam-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block Steam', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_steam', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_telegram-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_telegram', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 
'original_name': 'Block Telegram', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_telegram', + 'unique_id': 'xyz12_block_telegram', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_telegram-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block Telegram', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_telegram', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_tiktok-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_tiktok', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block TikTok', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_tiktok', + 'unique_id': 'xyz12_block_tiktok', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_tiktok-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block TikTok', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_tiktok', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_tinder-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 
'switch.fake_profile_block_tinder', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block Tinder', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_tinder', + 'unique_id': 'xyz12_block_tinder', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_tinder-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block Tinder', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_tinder', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_tumblr-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_tumblr', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block Tumblr', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_tumblr', + 'unique_id': 'xyz12_block_tumblr', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_tumblr-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block Tumblr', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_tumblr', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_twitch-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + 
}), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_twitch', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block Twitch', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_twitch', + 'unique_id': 'xyz12_block_twitch', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_twitch-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block Twitch', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_twitch', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_video_streaming-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_video_streaming', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block video streaming', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_video_streaming', + 'unique_id': 'xyz12_block_video_streaming', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_video_streaming-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block video streaming', + }), + 'context': , 
+ 'entity_id': 'switch.fake_profile_block_video_streaming', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_vimeo-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_vimeo', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block Vimeo', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_vimeo', + 'unique_id': 'xyz12_block_vimeo', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_vimeo-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block Vimeo', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_vimeo', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_vk-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_vk', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block VK', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_vk', + 'unique_id': 'xyz12_block_vk', + 'unit_of_measurement': None, + }) +# --- +# 
name: test_sensor[switch.fake_profile_block_vk-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block VK', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_vk', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_whatsapp-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_whatsapp', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block WhatsApp', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_whatsapp', + 'unique_id': 'xyz12_block_whatsapp', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_whatsapp-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block WhatsApp', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_whatsapp', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_x_formerly_twitter-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_x_formerly_twitter', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 
'original_name': 'Block X (formerly Twitter)', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_twitter', + 'unique_id': 'xyz12_block_twitter', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_x_formerly_twitter-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block X (formerly Twitter)', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_x_formerly_twitter', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_xbox_live-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_xbox_live', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block Xbox Live', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_xboxlive', + 'unique_id': 'xyz12_block_xboxlive', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_xbox_live-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block Xbox Live', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_xbox_live', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_youtube-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 
'entity_category': , + 'entity_id': 'switch.fake_profile_block_youtube', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block YouTube', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_youtube', + 'unique_id': 'xyz12_block_youtube', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_youtube-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block YouTube', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_youtube', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_zoom-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_zoom', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block Zoom', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_zoom', + 'unique_id': 'xyz12_block_zoom', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_zoom-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block Zoom', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_zoom', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_cache_boost-entry] + 
EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_cache_boost', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Cache boost', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'cache_boost', + 'unique_id': 'xyz12_cache_boost', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_cache_boost-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Cache boost', + }), + 'context': , + 'entity_id': 'switch.fake_profile_cache_boost', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_cname_flattening-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_cname_flattening', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'CNAME flattening', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'cname_flattening', + 'unique_id': 'xyz12_cname_flattening', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_cname_flattening-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile CNAME flattening', + }), + 
'context': , + 'entity_id': 'switch.fake_profile_cname_flattening', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_cryptojacking_protection-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_cryptojacking_protection', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Cryptojacking protection', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'cryptojacking_protection', + 'unique_id': 'xyz12_cryptojacking_protection', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_cryptojacking_protection-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Cryptojacking protection', + }), + 'context': , + 'entity_id': 'switch.fake_profile_cryptojacking_protection', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_dns_rebinding_protection-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_dns_rebinding_protection', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'DNS rebinding protection', + 'platform': 'nextdns', + 
'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'dns_rebinding_protection', + 'unique_id': 'xyz12_dns_rebinding_protection', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_dns_rebinding_protection-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile DNS rebinding protection', + }), + 'context': , + 'entity_id': 'switch.fake_profile_dns_rebinding_protection', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_domain_generation_algorithms_protection-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_domain_generation_algorithms_protection', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Domain generation algorithms protection', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'dga_protection', + 'unique_id': 'xyz12_dga_protection', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_domain_generation_algorithms_protection-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Domain generation algorithms protection', + }), + 'context': , + 'entity_id': 'switch.fake_profile_domain_generation_algorithms_protection', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_force_safesearch-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': 
, + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_force_safesearch', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Force SafeSearch', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'safesearch', + 'unique_id': 'xyz12_safesearch', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_force_safesearch-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Force SafeSearch', + }), + 'context': , + 'entity_id': 'switch.fake_profile_force_safesearch', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_sensor[switch.fake_profile_force_youtube_restricted_mode-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_force_youtube_restricted_mode', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Force YouTube restricted mode', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'youtube_restricted_mode', + 'unique_id': 'xyz12_youtube_restricted_mode', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_force_youtube_restricted_mode-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Force YouTube restricted mode', + }), + 'context': , + 
'entity_id': 'switch.fake_profile_force_youtube_restricted_mode', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_sensor[switch.fake_profile_google_safe_browsing-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_google_safe_browsing', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Google safe browsing', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'google_safe_browsing', + 'unique_id': 'xyz12_google_safe_browsing', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_google_safe_browsing-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Google safe browsing', + }), + 'context': , + 'entity_id': 'switch.fake_profile_google_safe_browsing', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_sensor[switch.fake_profile_idn_homograph_attacks_protection-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_idn_homograph_attacks_protection', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'IDN homograph attacks protection', + 'platform': 'nextdns', + 
'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'idn_homograph_attacks_protection', + 'unique_id': 'xyz12_idn_homograph_attacks_protection', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_idn_homograph_attacks_protection-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile IDN homograph attacks protection', + }), + 'context': , + 'entity_id': 'switch.fake_profile_idn_homograph_attacks_protection', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_logs-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_logs', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Logs', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'logs', + 'unique_id': 'xyz12_logs', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_logs-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Logs', + }), + 'context': , + 'entity_id': 'switch.fake_profile_logs', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_threat_intelligence_feeds-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_threat_intelligence_feeds', + 
'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Threat intelligence feeds', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'threat_intelligence_feeds', + 'unique_id': 'xyz12_threat_intelligence_feeds', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_threat_intelligence_feeds-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Threat intelligence feeds', + }), + 'context': , + 'entity_id': 'switch.fake_profile_threat_intelligence_feeds', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_typosquatting_protection-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_typosquatting_protection', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Typosquatting protection', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'typosquatting_protection', + 'unique_id': 'xyz12_typosquatting_protection', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_typosquatting_protection-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Typosquatting protection', + }), + 'context': , + 'entity_id': 'switch.fake_profile_typosquatting_protection', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 
'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_web3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_web3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Web3', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'web3', + 'unique_id': 'xyz12_web3', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_web3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Web3', + }), + 'context': , + 'entity_id': 'switch.fake_profile_web3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- diff --git a/tests/components/nextdns/snapshots/test_switch.ambr b/tests/components/nextdns/snapshots/test_switch.ambr new file mode 100644 index 00000000000..8472f02e8c5 --- /dev/null +++ b/tests/components/nextdns/snapshots/test_switch.ambr @@ -0,0 +1,4749 @@ +# serializer version: 1 +# name: test_switch[binary_sensor.fake_profile_device_connection_status-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.fake_profile_device_connection_status', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Device connection status', + 'platform': 
'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'device_connection_status', + 'unique_id': 'xyz12_this_device_nextdns_connection_status', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[binary_sensor.fake_profile_device_connection_status-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'connectivity', + 'friendly_name': 'Fake Profile Device connection status', + }), + 'context': , + 'entity_id': 'binary_sensor.fake_profile_device_connection_status', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch[binary_sensor.fake_profile_device_profile_connection_status-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.fake_profile_device_profile_connection_status', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Device profile connection status', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'device_profile_connection_status', + 'unique_id': 'xyz12_this_device_profile_connection_status', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[binary_sensor.fake_profile_device_profile_connection_status-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'connectivity', + 'friendly_name': 'Fake Profile Device profile connection status', + }), + 'context': , + 'entity_id': 'binary_sensor.fake_profile_device_profile_connection_status', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: 
test_switch[button.fake_profile_clear_logs-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.fake_profile_clear_logs', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Clear logs', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'clear_logs', + 'unique_id': 'xyz12_clear_logs', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[button.fake_profile_clear_logs-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Clear logs', + }), + 'context': , + 'entity_id': 'button.fake_profile_clear_logs', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_switch[sensor.fake_profile_dns_over_http_3_queries-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.fake_profile_dns_over_http_3_queries', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'DNS-over-HTTP/3 queries', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'doh3_queries', + 'unique_id': 'xyz12_doh3_queries', + 'unit_of_measurement': 'queries', + }) +# --- +# name: test_switch[sensor.fake_profile_dns_over_http_3_queries-state] + 
StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile DNS-over-HTTP/3 queries', + 'state_class': , + 'unit_of_measurement': 'queries', + }), + 'context': , + 'entity_id': 'sensor.fake_profile_dns_over_http_3_queries', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '15', + }) +# --- +# name: test_switch[sensor.fake_profile_dns_over_http_3_queries_ratio-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.fake_profile_dns_over_http_3_queries_ratio', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'DNS-over-HTTP/3 queries ratio', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'doh3_queries_ratio', + 'unique_id': 'xyz12_doh3_queries_ratio', + 'unit_of_measurement': '%', + }) +# --- +# name: test_switch[sensor.fake_profile_dns_over_http_3_queries_ratio-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile DNS-over-HTTP/3 queries ratio', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.fake_profile_dns_over_http_3_queries_ratio', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '13.0', + }) +# --- +# name: test_switch[sensor.fake_profile_dns_over_https_queries-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 
'sensor.fake_profile_dns_over_https_queries', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'DNS-over-HTTPS queries', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'doh_queries', + 'unique_id': 'xyz12_doh_queries', + 'unit_of_measurement': 'queries', + }) +# --- +# name: test_switch[sensor.fake_profile_dns_over_https_queries-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile DNS-over-HTTPS queries', + 'state_class': , + 'unit_of_measurement': 'queries', + }), + 'context': , + 'entity_id': 'sensor.fake_profile_dns_over_https_queries', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '20', + }) +# --- +# name: test_switch[sensor.fake_profile_dns_over_https_queries_ratio-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.fake_profile_dns_over_https_queries_ratio', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'DNS-over-HTTPS queries ratio', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'doh_queries_ratio', + 'unique_id': 'xyz12_doh_queries_ratio', + 'unit_of_measurement': '%', + }) +# --- +# name: test_switch[sensor.fake_profile_dns_over_https_queries_ratio-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile DNS-over-HTTPS queries ratio', + 'state_class': , + 'unit_of_measurement': '%', + }), 
+ 'context': , + 'entity_id': 'sensor.fake_profile_dns_over_https_queries_ratio', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '17.4', + }) +# --- +# name: test_switch[sensor.fake_profile_dns_over_quic_queries-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.fake_profile_dns_over_quic_queries', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'DNS-over-QUIC queries', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'doq_queries', + 'unique_id': 'xyz12_doq_queries', + 'unit_of_measurement': 'queries', + }) +# --- +# name: test_switch[sensor.fake_profile_dns_over_quic_queries-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile DNS-over-QUIC queries', + 'state_class': , + 'unit_of_measurement': 'queries', + }), + 'context': , + 'entity_id': 'sensor.fake_profile_dns_over_quic_queries', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '10', + }) +# --- +# name: test_switch[sensor.fake_profile_dns_over_quic_queries_ratio-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.fake_profile_dns_over_quic_queries_ratio', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 
None, + 'original_name': 'DNS-over-QUIC queries ratio', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'doq_queries_ratio', + 'unique_id': 'xyz12_doq_queries_ratio', + 'unit_of_measurement': '%', + }) +# --- +# name: test_switch[sensor.fake_profile_dns_over_quic_queries_ratio-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile DNS-over-QUIC queries ratio', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.fake_profile_dns_over_quic_queries_ratio', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '8.7', + }) +# --- +# name: test_switch[sensor.fake_profile_dns_over_tls_queries-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.fake_profile_dns_over_tls_queries', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'DNS-over-TLS queries', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'dot_queries', + 'unique_id': 'xyz12_dot_queries', + 'unit_of_measurement': 'queries', + }) +# --- +# name: test_switch[sensor.fake_profile_dns_over_tls_queries-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile DNS-over-TLS queries', + 'state_class': , + 'unit_of_measurement': 'queries', + }), + 'context': , + 'entity_id': 'sensor.fake_profile_dns_over_tls_queries', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '30', + }) +# --- +# name: test_switch[sensor.fake_profile_dns_over_tls_queries_ratio-entry] + 
EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.fake_profile_dns_over_tls_queries_ratio', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'DNS-over-TLS queries ratio', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'dot_queries_ratio', + 'unique_id': 'xyz12_dot_queries_ratio', + 'unit_of_measurement': '%', + }) +# --- +# name: test_switch[sensor.fake_profile_dns_over_tls_queries_ratio-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile DNS-over-TLS queries ratio', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.fake_profile_dns_over_tls_queries_ratio', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '26.1', + }) +# --- +# name: test_switch[sensor.fake_profile_dns_queries-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.fake_profile_dns_queries', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'DNS queries', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'all_queries', + 'unique_id': 'xyz12_all_queries', + 'unit_of_measurement': 'queries', + }) +# --- +# name: 
test_switch[sensor.fake_profile_dns_queries-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile DNS queries', + 'state_class': , + 'unit_of_measurement': 'queries', + }), + 'context': , + 'entity_id': 'sensor.fake_profile_dns_queries', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '100', + }) +# --- +# name: test_switch[sensor.fake_profile_dns_queries_blocked-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.fake_profile_dns_queries_blocked', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'DNS queries blocked', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'blocked_queries', + 'unique_id': 'xyz12_blocked_queries', + 'unit_of_measurement': 'queries', + }) +# --- +# name: test_switch[sensor.fake_profile_dns_queries_blocked-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile DNS queries blocked', + 'state_class': , + 'unit_of_measurement': 'queries', + }), + 'context': , + 'entity_id': 'sensor.fake_profile_dns_queries_blocked', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '20', + }) +# --- +# name: test_switch[sensor.fake_profile_dns_queries_blocked_ratio-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.fake_profile_dns_queries_blocked_ratio', + 
'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'DNS queries blocked ratio', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'blocked_queries_ratio', + 'unique_id': 'xyz12_blocked_queries_ratio', + 'unit_of_measurement': '%', + }) +# --- +# name: test_switch[sensor.fake_profile_dns_queries_blocked_ratio-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile DNS queries blocked ratio', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.fake_profile_dns_queries_blocked_ratio', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '20.0', + }) +# --- +# name: test_switch[sensor.fake_profile_dns_queries_relayed-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.fake_profile_dns_queries_relayed', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'DNS queries relayed', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'relayed_queries', + 'unique_id': 'xyz12_relayed_queries', + 'unit_of_measurement': 'queries', + }) +# --- +# name: test_switch[sensor.fake_profile_dns_queries_relayed-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile DNS queries relayed', + 'state_class': , + 'unit_of_measurement': 'queries', + }), + 'context': , + 'entity_id': 
'sensor.fake_profile_dns_queries_relayed', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '10', + }) +# --- +# name: test_switch[sensor.fake_profile_dnssec_not_validated_queries-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.fake_profile_dnssec_not_validated_queries', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'DNSSEC not validated queries', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'not_validated_queries', + 'unique_id': 'xyz12_not_validated_queries', + 'unit_of_measurement': 'queries', + }) +# --- +# name: test_switch[sensor.fake_profile_dnssec_not_validated_queries-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile DNSSEC not validated queries', + 'state_class': , + 'unit_of_measurement': 'queries', + }), + 'context': , + 'entity_id': 'sensor.fake_profile_dnssec_not_validated_queries', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '25', + }) +# --- +# name: test_switch[sensor.fake_profile_dnssec_validated_queries-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.fake_profile_dnssec_validated_queries', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 
'original_icon': None, + 'original_name': 'DNSSEC validated queries', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'validated_queries', + 'unique_id': 'xyz12_validated_queries', + 'unit_of_measurement': 'queries', + }) +# --- +# name: test_switch[sensor.fake_profile_dnssec_validated_queries-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile DNSSEC validated queries', + 'state_class': , + 'unit_of_measurement': 'queries', + }), + 'context': , + 'entity_id': 'sensor.fake_profile_dnssec_validated_queries', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '75', + }) +# --- +# name: test_switch[sensor.fake_profile_dnssec_validated_queries_ratio-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.fake_profile_dnssec_validated_queries_ratio', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'DNSSEC validated queries ratio', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'validated_queries_ratio', + 'unique_id': 'xyz12_validated_queries_ratio', + 'unit_of_measurement': '%', + }) +# --- +# name: test_switch[sensor.fake_profile_dnssec_validated_queries_ratio-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile DNSSEC validated queries ratio', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.fake_profile_dnssec_validated_queries_ratio', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '75.0', + }) +# --- +# 
name: test_switch[sensor.fake_profile_encrypted_queries-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.fake_profile_encrypted_queries', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Encrypted queries', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'encrypted_queries', + 'unique_id': 'xyz12_encrypted_queries', + 'unit_of_measurement': 'queries', + }) +# --- +# name: test_switch[sensor.fake_profile_encrypted_queries-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Encrypted queries', + 'state_class': , + 'unit_of_measurement': 'queries', + }), + 'context': , + 'entity_id': 'sensor.fake_profile_encrypted_queries', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '60', + }) +# --- +# name: test_switch[sensor.fake_profile_encrypted_queries_ratio-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.fake_profile_encrypted_queries_ratio', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Encrypted queries ratio', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'encrypted_queries_ratio', + 'unique_id': 
'xyz12_encrypted_queries_ratio', + 'unit_of_measurement': '%', + }) +# --- +# name: test_switch[sensor.fake_profile_encrypted_queries_ratio-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Encrypted queries ratio', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.fake_profile_encrypted_queries_ratio', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '60.0', + }) +# --- +# name: test_switch[sensor.fake_profile_ipv4_queries-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.fake_profile_ipv4_queries', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'IPv4 queries', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'ipv4_queries', + 'unique_id': 'xyz12_ipv4_queries', + 'unit_of_measurement': 'queries', + }) +# --- +# name: test_switch[sensor.fake_profile_ipv4_queries-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile IPv4 queries', + 'state_class': , + 'unit_of_measurement': 'queries', + }), + 'context': , + 'entity_id': 'sensor.fake_profile_ipv4_queries', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '90', + }) +# --- +# name: test_switch[sensor.fake_profile_ipv6_queries-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 
'sensor.fake_profile_ipv6_queries', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'IPv6 queries', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'ipv6_queries', + 'unique_id': 'xyz12_ipv6_queries', + 'unit_of_measurement': 'queries', + }) +# --- +# name: test_switch[sensor.fake_profile_ipv6_queries-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile IPv6 queries', + 'state_class': , + 'unit_of_measurement': 'queries', + }), + 'context': , + 'entity_id': 'sensor.fake_profile_ipv6_queries', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '10', + }) +# --- +# name: test_switch[sensor.fake_profile_ipv6_queries_ratio-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.fake_profile_ipv6_queries_ratio', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'IPv6 queries ratio', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'ipv6_queries_ratio', + 'unique_id': 'xyz12_ipv6_queries_ratio', + 'unit_of_measurement': '%', + }) +# --- +# name: test_switch[sensor.fake_profile_ipv6_queries_ratio-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile IPv6 queries ratio', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.fake_profile_ipv6_queries_ratio', + 'last_changed': , + 
'last_reported': , + 'last_updated': , + 'state': '10.0', + }) +# --- +# name: test_switch[sensor.fake_profile_tcp_queries-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.fake_profile_tcp_queries', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'TCP queries', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'tcp_queries', + 'unique_id': 'xyz12_tcp_queries', + 'unit_of_measurement': 'queries', + }) +# --- +# name: test_switch[sensor.fake_profile_tcp_queries-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile TCP queries', + 'state_class': , + 'unit_of_measurement': 'queries', + }), + 'context': , + 'entity_id': 'sensor.fake_profile_tcp_queries', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_switch[sensor.fake_profile_tcp_queries_ratio-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.fake_profile_tcp_queries_ratio', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'TCP queries ratio', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'tcp_queries_ratio', + 'unique_id': 
'xyz12_tcp_queries_ratio', + 'unit_of_measurement': '%', + }) +# --- +# name: test_switch[sensor.fake_profile_tcp_queries_ratio-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile TCP queries ratio', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.fake_profile_tcp_queries_ratio', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_switch[sensor.fake_profile_udp_queries-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.fake_profile_udp_queries', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'UDP queries', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'udp_queries', + 'unique_id': 'xyz12_udp_queries', + 'unit_of_measurement': 'queries', + }) +# --- +# name: test_switch[sensor.fake_profile_udp_queries-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile UDP queries', + 'state_class': , + 'unit_of_measurement': 'queries', + }), + 'context': , + 'entity_id': 'sensor.fake_profile_udp_queries', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '40', + }) +# --- +# name: test_switch[sensor.fake_profile_udp_queries_ratio-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 
'sensor.fake_profile_udp_queries_ratio', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'UDP queries ratio', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'udp_queries_ratio', + 'unique_id': 'xyz12_udp_queries_ratio', + 'unit_of_measurement': '%', + }) +# --- +# name: test_switch[sensor.fake_profile_udp_queries_ratio-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile UDP queries ratio', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.fake_profile_udp_queries_ratio', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '34.8', + }) +# --- +# name: test_switch[sensor.fake_profile_unencrypted_queries-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.fake_profile_unencrypted_queries', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Unencrypted queries', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'unencrypted_queries', + 'unique_id': 'xyz12_unencrypted_queries', + 'unit_of_measurement': 'queries', + }) +# --- +# name: test_switch[sensor.fake_profile_unencrypted_queries-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Unencrypted queries', + 'state_class': , + 'unit_of_measurement': 'queries', + }), + 'context': , + 'entity_id': 
'sensor.fake_profile_unencrypted_queries', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '40', + }) +# --- +# name: test_switch[switch.fake_profile_ai_driven_threat_detection-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_ai_driven_threat_detection', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'AI-Driven threat detection', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'ai_threat_detection', + 'unique_id': 'xyz12_ai_threat_detection', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[switch.fake_profile_ai_driven_threat_detection-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile AI-Driven threat detection', + }), + 'context': , + 'entity_id': 'switch.fake_profile_ai_driven_threat_detection', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch[switch.fake_profile_allow_affiliate_tracking_links-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_allow_affiliate_tracking_links', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Allow affiliate & tracking links', + 'platform': 'nextdns', + 
'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'allow_affiliate', + 'unique_id': 'xyz12_allow_affiliate', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[switch.fake_profile_allow_affiliate_tracking_links-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Allow affiliate & tracking links', + }), + 'context': , + 'entity_id': 'switch.fake_profile_allow_affiliate_tracking_links', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch[switch.fake_profile_anonymized_edns_client_subnet-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_anonymized_edns_client_subnet', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Anonymized EDNS client subnet', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'anonymized_ecs', + 'unique_id': 'xyz12_anonymized_ecs', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[switch.fake_profile_anonymized_edns_client_subnet-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Anonymized EDNS client subnet', + }), + 'context': , + 'entity_id': 'switch.fake_profile_anonymized_edns_client_subnet', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch[switch.fake_profile_block_9gag-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': 
None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_9gag', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block 9GAG', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_9gag', + 'unique_id': 'xyz12_block_9gag', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[switch.fake_profile_block_9gag-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block 9GAG', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_9gag', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch[switch.fake_profile_block_amazon-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_amazon', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block Amazon', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_amazon', + 'unique_id': 'xyz12_block_amazon', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[switch.fake_profile_block_amazon-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block Amazon', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_amazon', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch[switch.fake_profile_block_bereal-entry] + 
EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_bereal', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block BeReal', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_bereal', + 'unique_id': 'xyz12_block_bereal', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[switch.fake_profile_block_bereal-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block BeReal', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_bereal', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch[switch.fake_profile_block_blizzard-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_blizzard', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block Blizzard', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_blizzard', + 'unique_id': 'xyz12_block_blizzard', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[switch.fake_profile_block_blizzard-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block Blizzard', + }), + 'context': 
, + 'entity_id': 'switch.fake_profile_block_blizzard', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch[switch.fake_profile_block_bypass_methods-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_bypass_methods', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block bypass methods', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_bypass_methods', + 'unique_id': 'xyz12_block_bypass_methods', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[switch.fake_profile_block_bypass_methods-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block bypass methods', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_bypass_methods', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch[switch.fake_profile_block_chatgpt-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_chatgpt', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block ChatGPT', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 
'block_chatgpt', + 'unique_id': 'xyz12_block_chatgpt', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[switch.fake_profile_block_chatgpt-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block ChatGPT', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_chatgpt', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch[switch.fake_profile_block_child_sexual_abuse_material-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_child_sexual_abuse_material', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block child sexual abuse material', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_csam', + 'unique_id': 'xyz12_block_csam', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[switch.fake_profile_block_child_sexual_abuse_material-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block child sexual abuse material', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_child_sexual_abuse_material', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch[switch.fake_profile_block_dailymotion-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_dailymotion', + 
'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block Dailymotion', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_dailymotion', + 'unique_id': 'xyz12_block_dailymotion', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[switch.fake_profile_block_dailymotion-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block Dailymotion', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_dailymotion', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch[switch.fake_profile_block_dating-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_dating', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block dating', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_dating', + 'unique_id': 'xyz12_block_dating', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[switch.fake_profile_block_dating-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block dating', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_dating', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch[switch.fake_profile_block_discord-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 
'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_discord', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block Discord', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_discord', + 'unique_id': 'xyz12_block_discord', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[switch.fake_profile_block_discord-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block Discord', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_discord', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch[switch.fake_profile_block_disguised_third_party_trackers-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_disguised_third_party_trackers', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block disguised third-party trackers', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_disguised_trackers', + 'unique_id': 'xyz12_block_disguised_trackers', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[switch.fake_profile_block_disguised_third_party_trackers-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 
'friendly_name': 'Fake Profile Block disguised third-party trackers', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_disguised_third_party_trackers', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch[switch.fake_profile_block_disney_plus-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_disney_plus', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block Disney Plus', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_disneyplus', + 'unique_id': 'xyz12_block_disneyplus', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[switch.fake_profile_block_disney_plus-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block Disney Plus', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_disney_plus', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch[switch.fake_profile_block_dynamic_dns_hostnames-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_dynamic_dns_hostnames', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block dynamic DNS 
hostnames', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_ddns', + 'unique_id': 'xyz12_block_ddns', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[switch.fake_profile_block_dynamic_dns_hostnames-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block dynamic DNS hostnames', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_dynamic_dns_hostnames', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch[switch.fake_profile_block_ebay-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_ebay', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block eBay', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_ebay', + 'unique_id': 'xyz12_block_ebay', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[switch.fake_profile_block_ebay-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block eBay', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_ebay', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch[switch.fake_profile_block_facebook-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_facebook', 
+ 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block Facebook', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_facebook', + 'unique_id': 'xyz12_block_facebook', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[switch.fake_profile_block_facebook-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block Facebook', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_facebook', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch[switch.fake_profile_block_fortnite-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_fortnite', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block Fortnite', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_fortnite', + 'unique_id': 'xyz12_block_fortnite', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[switch.fake_profile_block_fortnite-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block Fortnite', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_fortnite', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch[switch.fake_profile_block_gambling-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 
'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_gambling', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block gambling', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_gambling', + 'unique_id': 'xyz12_block_gambling', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[switch.fake_profile_block_gambling-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block gambling', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_gambling', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch[switch.fake_profile_block_google_chat-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_google_chat', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block Google Chat', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_google_chat', + 'unique_id': 'xyz12_block_google_chat', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[switch.fake_profile_block_google_chat-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block Google Chat', + }), + 'context': , + 'entity_id': 
'switch.fake_profile_block_google_chat', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch[switch.fake_profile_block_hbo_max-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_hbo_max', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block HBO Max', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_hbomax', + 'unique_id': 'xyz12_block_hbomax', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[switch.fake_profile_block_hbo_max-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block HBO Max', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_hbo_max', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch[switch.fake_profile_block_hulu-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_hulu', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block Hulu', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'xyz12_block_hulu', + 'unit_of_measurement': None, + }) +# --- +# name: 
test_switch[switch.fake_profile_block_hulu-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block Hulu', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_hulu', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch[switch.fake_profile_block_imgur-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_imgur', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block Imgur', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_imgur', + 'unique_id': 'xyz12_block_imgur', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[switch.fake_profile_block_imgur-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block Imgur', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_imgur', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch[switch.fake_profile_block_instagram-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_instagram', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block Instagram', + 'platform': 
'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_instagram', + 'unique_id': 'xyz12_block_instagram', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[switch.fake_profile_block_instagram-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block Instagram', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_instagram', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch[switch.fake_profile_block_league_of_legends-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_league_of_legends', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block League of Legends', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_leagueoflegends', + 'unique_id': 'xyz12_block_leagueoflegends', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[switch.fake_profile_block_league_of_legends-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block League of Legends', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_league_of_legends', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch[switch.fake_profile_block_mastodon-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 
'entity_id': 'switch.fake_profile_block_mastodon', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block Mastodon', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_mastodon', + 'unique_id': 'xyz12_block_mastodon', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[switch.fake_profile_block_mastodon-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block Mastodon', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_mastodon', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch[switch.fake_profile_block_messenger-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_messenger', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block Messenger', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_messenger', + 'unique_id': 'xyz12_block_messenger', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[switch.fake_profile_block_messenger-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block Messenger', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_messenger', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch[switch.fake_profile_block_minecraft-entry] 
+ EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_minecraft', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block Minecraft', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_minecraft', + 'unique_id': 'xyz12_block_minecraft', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[switch.fake_profile_block_minecraft-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block Minecraft', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_minecraft', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch[switch.fake_profile_block_netflix-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_netflix', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block Netflix', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_netflix', + 'unique_id': 'xyz12_block_netflix', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[switch.fake_profile_block_netflix-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block Netflix', + 
}), + 'context': , + 'entity_id': 'switch.fake_profile_block_netflix', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch[switch.fake_profile_block_newly_registered_domains-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_newly_registered_domains', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block newly registered domains', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_nrd', + 'unique_id': 'xyz12_block_nrd', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[switch.fake_profile_block_newly_registered_domains-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block newly registered domains', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_newly_registered_domains', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch[switch.fake_profile_block_online_gaming-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_online_gaming', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block online gaming', + 'platform': 'nextdns', + 
'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_online_gaming', + 'unique_id': 'xyz12_block_online_gaming', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[switch.fake_profile_block_online_gaming-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block online gaming', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_online_gaming', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch[switch.fake_profile_block_page-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_page', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block page', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_page', + 'unique_id': 'xyz12_block_page', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[switch.fake_profile_block_page-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block page', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_page', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_switch[switch.fake_profile_block_parked_domains-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_parked_domains', + 'has_entity_name': True, + 
'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block parked domains', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_parked_domains', + 'unique_id': 'xyz12_block_parked_domains', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[switch.fake_profile_block_parked_domains-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block parked domains', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_parked_domains', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch[switch.fake_profile_block_pinterest-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_pinterest', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block Pinterest', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_pinterest', + 'unique_id': 'xyz12_block_pinterest', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[switch.fake_profile_block_pinterest-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block Pinterest', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_pinterest', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch[switch.fake_profile_block_piracy-entry] + EntityRegistryEntrySnapshot({ + 'aliases': 
set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_piracy', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block piracy', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_piracy', + 'unique_id': 'xyz12_block_piracy', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[switch.fake_profile_block_piracy-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block piracy', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_piracy', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch[switch.fake_profile_block_playstation_network-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_playstation_network', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block PlayStation Network', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_playstation_network', + 'unique_id': 'xyz12_block_playstation_network', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[switch.fake_profile_block_playstation_network-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block 
PlayStation Network', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_playstation_network', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch[switch.fake_profile_block_porn-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_porn', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block porn', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_porn', + 'unique_id': 'xyz12_block_porn', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[switch.fake_profile_block_porn-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block porn', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_porn', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch[switch.fake_profile_block_prime_video-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_prime_video', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block Prime Video', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_primevideo', + 
'unique_id': 'xyz12_block_primevideo', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[switch.fake_profile_block_prime_video-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block Prime Video', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_prime_video', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch[switch.fake_profile_block_reddit-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_reddit', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block Reddit', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_reddit', + 'unique_id': 'xyz12_block_reddit', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[switch.fake_profile_block_reddit-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block Reddit', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_reddit', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch[switch.fake_profile_block_roblox-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_roblox', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 
'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block Roblox', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_roblox', + 'unique_id': 'xyz12_block_roblox', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[switch.fake_profile_block_roblox-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block Roblox', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_roblox', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch[switch.fake_profile_block_signal-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_signal', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block Signal', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_signal', + 'unique_id': 'xyz12_block_signal', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[switch.fake_profile_block_signal-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block Signal', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_signal', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch[switch.fake_profile_block_skype-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 
'entity_id': 'switch.fake_profile_block_skype', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block Skype', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_skype', + 'unique_id': 'xyz12_block_skype', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[switch.fake_profile_block_skype-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block Skype', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_skype', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch[switch.fake_profile_block_snapchat-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_snapchat', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block Snapchat', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_snapchat', + 'unique_id': 'xyz12_block_snapchat', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[switch.fake_profile_block_snapchat-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block Snapchat', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_snapchat', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch[switch.fake_profile_block_social_networks-entry] + 
EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_social_networks', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block social networks', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_social_networks', + 'unique_id': 'xyz12_block_social_networks', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[switch.fake_profile_block_social_networks-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block social networks', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_social_networks', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch[switch.fake_profile_block_spotify-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_spotify', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block Spotify', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_spotify', + 'unique_id': 'xyz12_block_spotify', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[switch.fake_profile_block_spotify-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 
'friendly_name': 'Fake Profile Block Spotify', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_spotify', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch[switch.fake_profile_block_steam-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_steam', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block Steam', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_steam', + 'unique_id': 'xyz12_block_steam', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[switch.fake_profile_block_steam-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block Steam', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_steam', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch[switch.fake_profile_block_telegram-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_telegram', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block Telegram', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 
'block_telegram', + 'unique_id': 'xyz12_block_telegram', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[switch.fake_profile_block_telegram-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block Telegram', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_telegram', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch[switch.fake_profile_block_tiktok-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_tiktok', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block TikTok', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_tiktok', + 'unique_id': 'xyz12_block_tiktok', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[switch.fake_profile_block_tiktok-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block TikTok', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_tiktok', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch[switch.fake_profile_block_tinder-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_tinder', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ 
+ }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block Tinder', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_tinder', + 'unique_id': 'xyz12_block_tinder', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[switch.fake_profile_block_tinder-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block Tinder', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_tinder', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch[switch.fake_profile_block_tumblr-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_tumblr', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block Tumblr', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_tumblr', + 'unique_id': 'xyz12_block_tumblr', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[switch.fake_profile_block_tumblr-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block Tumblr', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_tumblr', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch[switch.fake_profile_block_twitch-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 
'entity_category': , + 'entity_id': 'switch.fake_profile_block_twitch', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block Twitch', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_twitch', + 'unique_id': 'xyz12_block_twitch', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[switch.fake_profile_block_twitch-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block Twitch', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_twitch', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch[switch.fake_profile_block_video_streaming-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_video_streaming', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block video streaming', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_video_streaming', + 'unique_id': 'xyz12_block_video_streaming', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[switch.fake_profile_block_video_streaming-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block video streaming', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_video_streaming', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# 
name: test_switch[switch.fake_profile_block_vimeo-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_vimeo', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block Vimeo', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_vimeo', + 'unique_id': 'xyz12_block_vimeo', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[switch.fake_profile_block_vimeo-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block Vimeo', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_vimeo', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch[switch.fake_profile_block_vk-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_vk', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block VK', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_vk', + 'unique_id': 'xyz12_block_vk', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[switch.fake_profile_block_vk-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block VK', + }), + 
'context': , + 'entity_id': 'switch.fake_profile_block_vk', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch[switch.fake_profile_block_whatsapp-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_whatsapp', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block WhatsApp', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_whatsapp', + 'unique_id': 'xyz12_block_whatsapp', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[switch.fake_profile_block_whatsapp-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block WhatsApp', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_whatsapp', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch[switch.fake_profile_block_x_formerly_twitter-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_x_formerly_twitter', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block X (formerly Twitter)', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_twitter', + 
'unique_id': 'xyz12_block_twitter', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[switch.fake_profile_block_x_formerly_twitter-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block X (formerly Twitter)', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_x_formerly_twitter', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch[switch.fake_profile_block_xbox_live-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_xbox_live', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block Xbox Live', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_xboxlive', + 'unique_id': 'xyz12_block_xboxlive', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[switch.fake_profile_block_xbox_live-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block Xbox Live', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_xbox_live', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch[switch.fake_profile_block_youtube-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_youtube', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), 
+ 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block YouTube', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_youtube', + 'unique_id': 'xyz12_block_youtube', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[switch.fake_profile_block_youtube-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block YouTube', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_youtube', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch[switch.fake_profile_block_zoom-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_zoom', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block Zoom', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_zoom', + 'unique_id': 'xyz12_block_zoom', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[switch.fake_profile_block_zoom-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block Zoom', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_zoom', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch[switch.fake_profile_cache_boost-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 
'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_cache_boost', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Cache boost', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'cache_boost', + 'unique_id': 'xyz12_cache_boost', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[switch.fake_profile_cache_boost-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Cache boost', + }), + 'context': , + 'entity_id': 'switch.fake_profile_cache_boost', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch[switch.fake_profile_cname_flattening-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_cname_flattening', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'CNAME flattening', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'cname_flattening', + 'unique_id': 'xyz12_cname_flattening', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[switch.fake_profile_cname_flattening-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile CNAME flattening', + }), + 'context': , + 'entity_id': 'switch.fake_profile_cname_flattening', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: 
test_switch[switch.fake_profile_cryptojacking_protection-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_cryptojacking_protection', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Cryptojacking protection', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'cryptojacking_protection', + 'unique_id': 'xyz12_cryptojacking_protection', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[switch.fake_profile_cryptojacking_protection-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Cryptojacking protection', + }), + 'context': , + 'entity_id': 'switch.fake_profile_cryptojacking_protection', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch[switch.fake_profile_dns_rebinding_protection-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_dns_rebinding_protection', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'DNS rebinding protection', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'dns_rebinding_protection', + 'unique_id': 'xyz12_dns_rebinding_protection', + 
'unit_of_measurement': None, + }) +# --- +# name: test_switch[switch.fake_profile_dns_rebinding_protection-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile DNS rebinding protection', + }), + 'context': , + 'entity_id': 'switch.fake_profile_dns_rebinding_protection', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch[switch.fake_profile_domain_generation_algorithms_protection-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_domain_generation_algorithms_protection', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Domain generation algorithms protection', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'dga_protection', + 'unique_id': 'xyz12_dga_protection', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[switch.fake_profile_domain_generation_algorithms_protection-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Domain generation algorithms protection', + }), + 'context': , + 'entity_id': 'switch.fake_profile_domain_generation_algorithms_protection', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch[switch.fake_profile_force_safesearch-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 
'switch.fake_profile_force_safesearch', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Force SafeSearch', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'safesearch', + 'unique_id': 'xyz12_safesearch', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[switch.fake_profile_force_safesearch-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Force SafeSearch', + }), + 'context': , + 'entity_id': 'switch.fake_profile_force_safesearch', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_switch[switch.fake_profile_force_youtube_restricted_mode-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_force_youtube_restricted_mode', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Force YouTube restricted mode', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'youtube_restricted_mode', + 'unique_id': 'xyz12_youtube_restricted_mode', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[switch.fake_profile_force_youtube_restricted_mode-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Force YouTube restricted mode', + }), + 'context': , + 'entity_id': 'switch.fake_profile_force_youtube_restricted_mode', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 
'state': 'off', + }) +# --- +# name: test_switch[switch.fake_profile_google_safe_browsing-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_google_safe_browsing', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Google safe browsing', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'google_safe_browsing', + 'unique_id': 'xyz12_google_safe_browsing', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[switch.fake_profile_google_safe_browsing-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Google safe browsing', + }), + 'context': , + 'entity_id': 'switch.fake_profile_google_safe_browsing', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_switch[switch.fake_profile_idn_homograph_attacks_protection-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_idn_homograph_attacks_protection', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'IDN homograph attacks protection', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'idn_homograph_attacks_protection', + 'unique_id': 
'xyz12_idn_homograph_attacks_protection', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[switch.fake_profile_idn_homograph_attacks_protection-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile IDN homograph attacks protection', + }), + 'context': , + 'entity_id': 'switch.fake_profile_idn_homograph_attacks_protection', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch[switch.fake_profile_logs-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_logs', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Logs', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'logs', + 'unique_id': 'xyz12_logs', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[switch.fake_profile_logs-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Logs', + }), + 'context': , + 'entity_id': 'switch.fake_profile_logs', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch[switch.fake_profile_threat_intelligence_feeds-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_threat_intelligence_feeds', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': 
dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Threat intelligence feeds', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'threat_intelligence_feeds', + 'unique_id': 'xyz12_threat_intelligence_feeds', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[switch.fake_profile_threat_intelligence_feeds-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Threat intelligence feeds', + }), + 'context': , + 'entity_id': 'switch.fake_profile_threat_intelligence_feeds', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch[switch.fake_profile_typosquatting_protection-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_typosquatting_protection', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Typosquatting protection', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'typosquatting_protection', + 'unique_id': 'xyz12_typosquatting_protection', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[switch.fake_profile_typosquatting_protection-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Typosquatting protection', + }), + 'context': , + 'entity_id': 'switch.fake_profile_typosquatting_protection', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch[switch.fake_profile_web3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ 
+ }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_web3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Web3', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'web3', + 'unique_id': 'xyz12_web3', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[switch.fake_profile_web3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Web3', + }), + 'context': , + 'entity_id': 'switch.fake_profile_web3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- diff --git a/tests/components/nextdns/test_binary_sensor.py b/tests/components/nextdns/test_binary_sensor.py index b69db4798d3..f83e55515e8 100644 --- a/tests/components/nextdns/test_binary_sensor.py +++ b/tests/components/nextdns/test_binary_sensor.py @@ -4,8 +4,9 @@ from datetime import timedelta from unittest.mock import patch from nextdns import ApiError +from syrupy import SnapshotAssertion -from homeassistant.const import STATE_OFF, STATE_ON, STATE_UNAVAILABLE +from homeassistant.const import STATE_ON, STATE_UNAVAILABLE, Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er from homeassistant.util.dt import utcnow @@ -15,31 +16,20 @@ from . 
import init_integration, mock_nextdns from tests.common import async_fire_time_changed -async def test_binary_Sensor(hass: HomeAssistant) -> None: +async def test_binary_sensor( + hass: HomeAssistant, entity_registry: er.EntityRegistry, snapshot: SnapshotAssertion +) -> None: """Test states of the binary sensors.""" - registry = er.async_get(hass) + with patch("homeassistant.components.nextdns.PLATFORMS", [Platform.BINARY_SENSOR]): + entry = await init_integration(hass) - await init_integration(hass) + entity_entries = er.async_entries_for_config_entry(entity_registry, entry.entry_id) - state = hass.states.get("binary_sensor.fake_profile_device_connection_status") - assert state - assert state.state == STATE_ON - - entry = registry.async_get("binary_sensor.fake_profile_device_connection_status") - assert entry - assert entry.unique_id == "xyz12_this_device_nextdns_connection_status" - - state = hass.states.get( - "binary_sensor.fake_profile_device_profile_connection_status" - ) - assert state - assert state.state == STATE_OFF - - entry = registry.async_get( - "binary_sensor.fake_profile_device_profile_connection_status" - ) - assert entry - assert entry.unique_id == "xyz12_this_device_profile_connection_status" + assert entity_entries + for entity_entry in entity_entries: + assert entity_entry == snapshot(name=f"{entity_entry.entity_id}-entry") + assert (state := hass.states.get(entity_entry.entity_id)) + assert state == snapshot(name=f"{entity_entry.entity_id}-state") async def test_availability(hass: HomeAssistant) -> None: diff --git a/tests/components/nextdns/test_button.py b/tests/components/nextdns/test_button.py index b5f7b01aee2..2007af612c8 100644 --- a/tests/components/nextdns/test_button.py +++ b/tests/components/nextdns/test_button.py @@ -2,8 +2,10 @@ from unittest.mock import patch +from syrupy import SnapshotAssertion + from homeassistant.components.button import DOMAIN as BUTTON_DOMAIN -from homeassistant.const import ATTR_ENTITY_ID, STATE_UNKNOWN 
+from homeassistant.const import ATTR_ENTITY_ID, Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er from homeassistant.util import dt as dt_util @@ -11,19 +13,20 @@ from homeassistant.util import dt as dt_util from . import init_integration -async def test_button(hass: HomeAssistant) -> None: +async def test_button( + hass: HomeAssistant, entity_registry: er.EntityRegistry, snapshot: SnapshotAssertion +) -> None: """Test states of the button.""" - registry = er.async_get(hass) + with patch("homeassistant.components.nextdns.PLATFORMS", [Platform.BUTTON]): + entry = await init_integration(hass) - await init_integration(hass) + entity_entries = er.async_entries_for_config_entry(entity_registry, entry.entry_id) - state = hass.states.get("button.fake_profile_clear_logs") - assert state - assert state.state == STATE_UNKNOWN - - entry = registry.async_get("button.fake_profile_clear_logs") - assert entry - assert entry.unique_id == "xyz12_clear_logs" + assert entity_entries + for entity_entry in entity_entries: + assert entity_entry == snapshot(name=f"{entity_entry.entity_id}-entry") + assert (state := hass.states.get(entity_entry.entity_id)) + assert state == snapshot(name=f"{entity_entry.entity_id}-state") async def test_button_press(hass: HomeAssistant) -> None: diff --git a/tests/components/nextdns/test_sensor.py b/tests/components/nextdns/test_sensor.py index 951d220eccb..9c03cf2b215 100644 --- a/tests/components/nextdns/test_sensor.py +++ b/tests/components/nextdns/test_sensor.py @@ -4,9 +4,9 @@ from datetime import timedelta from unittest.mock import patch from nextdns import ApiError +from syrupy import SnapshotAssertion -from homeassistant.components.sensor import ATTR_STATE_CLASS, SensorStateClass -from homeassistant.const import ATTR_UNIT_OF_MEASUREMENT, PERCENTAGE, STATE_UNAVAILABLE +from homeassistant.const import STATE_UNAVAILABLE, Platform from homeassistant.core import HomeAssistant from 
homeassistant.helpers import entity_registry as er from homeassistant.util.dt import utcnow @@ -17,270 +17,30 @@ from tests.common import async_fire_time_changed async def test_sensor( - hass: HomeAssistant, entity_registry_enabled_by_default: None + hass: HomeAssistant, + entity_registry_enabled_by_default: None, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, ) -> None: """Test states of sensors.""" - registry = er.async_get(hass) + with patch("homeassistant.components.nextdns.PLATFORMS", [Platform.SENSOR]): + entry = await init_integration(hass) - await init_integration(hass) + entity_entries = er.async_entries_for_config_entry(entity_registry, entry.entry_id) - state = hass.states.get("sensor.fake_profile_dns_queries") - assert state - assert state.state == "100" - assert state.attributes.get(ATTR_STATE_CLASS) is SensorStateClass.TOTAL - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == "queries" - - entry = registry.async_get("sensor.fake_profile_dns_queries") - assert entry - assert entry.unique_id == "xyz12_all_queries" - - state = hass.states.get("sensor.fake_profile_dns_queries_blocked") - assert state - assert state.state == "20" - assert state.attributes.get(ATTR_STATE_CLASS) is SensorStateClass.TOTAL - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == "queries" - - entry = registry.async_get("sensor.fake_profile_dns_queries_blocked") - assert entry - assert entry.unique_id == "xyz12_blocked_queries" - - state = hass.states.get("sensor.fake_profile_dns_queries_blocked_ratio") - assert state - assert state.state == "20.0" - assert state.attributes.get(ATTR_STATE_CLASS) is SensorStateClass.MEASUREMENT - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == PERCENTAGE - - entry = registry.async_get("sensor.fake_profile_dns_queries_blocked_ratio") - assert entry - assert entry.unique_id == "xyz12_blocked_queries_ratio" - - state = hass.states.get("sensor.fake_profile_dns_queries_relayed") - assert state - assert 
state.state == "10" - assert state.attributes.get(ATTR_STATE_CLASS) is SensorStateClass.TOTAL - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == "queries" - - entry = registry.async_get("sensor.fake_profile_dns_queries_relayed") - assert entry - assert entry.unique_id == "xyz12_relayed_queries" - - state = hass.states.get("sensor.fake_profile_dns_over_https_queries") - assert state - assert state.state == "20" - assert state.attributes.get(ATTR_STATE_CLASS) is SensorStateClass.TOTAL - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == "queries" - - entry = registry.async_get("sensor.fake_profile_dns_over_https_queries") - assert entry - assert entry.unique_id == "xyz12_doh_queries" - - state = hass.states.get("sensor.fake_profile_dns_over_https_queries_ratio") - assert state - assert state.state == "17.4" - assert state.attributes.get(ATTR_STATE_CLASS) is SensorStateClass.MEASUREMENT - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == PERCENTAGE - - entry = registry.async_get("sensor.fake_profile_dns_over_https_queries_ratio") - assert entry - assert entry.unique_id == "xyz12_doh_queries_ratio" - - state = hass.states.get("sensor.fake_profile_dns_over_http_3_queries") - assert state - assert state.state == "15" - assert state.attributes.get(ATTR_STATE_CLASS) is SensorStateClass.TOTAL - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == "queries" - - entry = registry.async_get("sensor.fake_profile_dns_over_http_3_queries") - assert entry - assert entry.unique_id == "xyz12_doh3_queries" - - state = hass.states.get("sensor.fake_profile_dns_over_http_3_queries_ratio") - assert state - assert state.state == "13.0" - assert state.attributes.get(ATTR_STATE_CLASS) is SensorStateClass.MEASUREMENT - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == PERCENTAGE - - entry = registry.async_get("sensor.fake_profile_dns_over_http_3_queries_ratio") - assert entry - assert entry.unique_id == "xyz12_doh3_queries_ratio" - - state = 
hass.states.get("sensor.fake_profile_dns_over_quic_queries") - assert state - assert state.state == "10" - assert state.attributes.get(ATTR_STATE_CLASS) is SensorStateClass.TOTAL - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == "queries" - - entry = registry.async_get("sensor.fake_profile_dns_over_quic_queries") - assert entry - assert entry.unique_id == "xyz12_doq_queries" - - state = hass.states.get("sensor.fake_profile_dns_over_quic_queries_ratio") - assert state - assert state.state == "8.7" - assert state.attributes.get(ATTR_STATE_CLASS) is SensorStateClass.MEASUREMENT - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == PERCENTAGE - - entry = registry.async_get("sensor.fake_profile_dns_over_quic_queries_ratio") - assert entry - assert entry.unique_id == "xyz12_doq_queries_ratio" - - state = hass.states.get("sensor.fake_profile_dns_over_tls_queries") - assert state - assert state.state == "30" - assert state.attributes.get(ATTR_STATE_CLASS) is SensorStateClass.TOTAL - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == "queries" - - entry = registry.async_get("sensor.fake_profile_dns_over_tls_queries") - assert entry - assert entry.unique_id == "xyz12_dot_queries" - - state = hass.states.get("sensor.fake_profile_dns_over_tls_queries_ratio") - assert state - assert state.state == "26.1" - assert state.attributes.get(ATTR_STATE_CLASS) is SensorStateClass.MEASUREMENT - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == PERCENTAGE - - entry = registry.async_get("sensor.fake_profile_dns_over_tls_queries_ratio") - assert entry - assert entry.unique_id == "xyz12_dot_queries_ratio" - - state = hass.states.get("sensor.fake_profile_dnssec_not_validated_queries") - assert state - assert state.state == "25" - assert state.attributes.get(ATTR_STATE_CLASS) is SensorStateClass.TOTAL - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == "queries" - - entry = registry.async_get("sensor.fake_profile_dnssec_not_validated_queries") - assert entry - 
assert entry.unique_id == "xyz12_not_validated_queries" - - state = hass.states.get("sensor.fake_profile_dnssec_validated_queries") - assert state - assert state.state == "75" - assert state.attributes.get(ATTR_STATE_CLASS) is SensorStateClass.TOTAL - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == "queries" - - entry = registry.async_get("sensor.fake_profile_dnssec_validated_queries") - assert entry - assert entry.unique_id == "xyz12_validated_queries" - - state = hass.states.get("sensor.fake_profile_dnssec_validated_queries_ratio") - assert state - assert state.state == "75.0" - assert state.attributes.get(ATTR_STATE_CLASS) is SensorStateClass.MEASUREMENT - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == PERCENTAGE - - entry = registry.async_get("sensor.fake_profile_dnssec_validated_queries_ratio") - assert entry - assert entry.unique_id == "xyz12_validated_queries_ratio" - - state = hass.states.get("sensor.fake_profile_encrypted_queries") - assert state - assert state.state == "60" - assert state.attributes.get(ATTR_STATE_CLASS) is SensorStateClass.TOTAL - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == "queries" - - entry = registry.async_get("sensor.fake_profile_encrypted_queries") - assert entry - assert entry.unique_id == "xyz12_encrypted_queries" - - state = hass.states.get("sensor.fake_profile_unencrypted_queries") - assert state - assert state.state == "40" - assert state.attributes.get(ATTR_STATE_CLASS) is SensorStateClass.TOTAL - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == "queries" - - entry = registry.async_get("sensor.fake_profile_unencrypted_queries") - assert entry - assert entry.unique_id == "xyz12_unencrypted_queries" - - state = hass.states.get("sensor.fake_profile_encrypted_queries_ratio") - assert state - assert state.state == "60.0" - assert state.attributes.get(ATTR_STATE_CLASS) is SensorStateClass.MEASUREMENT - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == PERCENTAGE - - entry = 
registry.async_get("sensor.fake_profile_encrypted_queries_ratio") - assert entry - assert entry.unique_id == "xyz12_encrypted_queries_ratio" - - state = hass.states.get("sensor.fake_profile_ipv4_queries") - assert state - assert state.state == "90" - assert state.attributes.get(ATTR_STATE_CLASS) is SensorStateClass.TOTAL - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == "queries" - - entry = registry.async_get("sensor.fake_profile_ipv4_queries") - assert entry - assert entry.unique_id == "xyz12_ipv4_queries" - - state = hass.states.get("sensor.fake_profile_ipv6_queries") - assert state - assert state.state == "10" - assert state.attributes.get(ATTR_STATE_CLASS) is SensorStateClass.TOTAL - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == "queries" - - entry = registry.async_get("sensor.fake_profile_ipv6_queries") - assert entry - assert entry.unique_id == "xyz12_ipv6_queries" - - state = hass.states.get("sensor.fake_profile_ipv6_queries_ratio") - assert state - assert state.state == "10.0" - assert state.attributes.get(ATTR_STATE_CLASS) is SensorStateClass.MEASUREMENT - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == PERCENTAGE - - entry = registry.async_get("sensor.fake_profile_ipv6_queries_ratio") - assert entry - assert entry.unique_id == "xyz12_ipv6_queries_ratio" - - state = hass.states.get("sensor.fake_profile_tcp_queries") - assert state - assert state.state == "0" - assert state.attributes.get(ATTR_STATE_CLASS) is SensorStateClass.TOTAL - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == "queries" - - entry = registry.async_get("sensor.fake_profile_tcp_queries") - assert entry - assert entry.unique_id == "xyz12_tcp_queries" - - state = hass.states.get("sensor.fake_profile_tcp_queries_ratio") - assert state - assert state.state == "0.0" - assert state.attributes.get(ATTR_STATE_CLASS) is SensorStateClass.MEASUREMENT - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == PERCENTAGE - - entry = 
registry.async_get("sensor.fake_profile_tcp_queries_ratio") - assert entry - assert entry.unique_id == "xyz12_tcp_queries_ratio" - - state = hass.states.get("sensor.fake_profile_udp_queries") - assert state - assert state.state == "40" - assert state.attributes.get(ATTR_STATE_CLASS) is SensorStateClass.TOTAL - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == "queries" - - entry = registry.async_get("sensor.fake_profile_udp_queries") - assert entry - assert entry.unique_id == "xyz12_udp_queries" - - state = hass.states.get("sensor.fake_profile_udp_queries_ratio") - assert state - assert state.state == "34.8" - assert state.attributes.get(ATTR_STATE_CLASS) is SensorStateClass.MEASUREMENT - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == PERCENTAGE - - entry = registry.async_get("sensor.fake_profile_udp_queries_ratio") - assert entry - assert entry.unique_id == "xyz12_udp_queries_ratio" + assert entity_entries + for entity_entry in entity_entries: + assert entity_entry == snapshot(name=f"{entity_entry.entity_id}-entry") + assert (state := hass.states.get(entity_entry.entity_id)) + assert state == snapshot(name=f"{entity_entry.entity_id}-state") async def test_availability( - hass: HomeAssistant, entity_registry_enabled_by_default: None + hass: HomeAssistant, + entity_registry_enabled_by_default: None, + entity_registry: er.EntityRegistry, ) -> None: """Ensure that we mark the entities unavailable correctly when service causes an error.""" - er.async_get(hass) - await init_integration(hass) state = hass.states.get("sensor.fake_profile_dns_queries") diff --git a/tests/components/nextdns/test_switch.py b/tests/components/nextdns/test_switch.py index a9dd0ba5cbd..5e027c6789c 100644 --- a/tests/components/nextdns/test_switch.py +++ b/tests/components/nextdns/test_switch.py @@ -7,6 +7,7 @@ from aiohttp import ClientError from aiohttp.client_exceptions import ClientConnectorError from nextdns import ApiError import pytest +from syrupy import 
SnapshotAssertion from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN from homeassistant.const import ( @@ -16,6 +17,7 @@ from homeassistant.const import ( STATE_OFF, STATE_ON, STATE_UNAVAILABLE, + Platform, ) from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError @@ -28,602 +30,22 @@ from tests.common import async_fire_time_changed async def test_switch( - hass: HomeAssistant, entity_registry_enabled_by_default: None + hass: HomeAssistant, + entity_registry_enabled_by_default: None, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, ) -> None: """Test states of the switches.""" - registry = er.async_get(hass) + with patch("homeassistant.components.nextdns.PLATFORMS", [Platform.SWITCH]): + entry = await init_integration(hass) - await init_integration(hass) + entity_entries = er.async_entries_for_config_entry(entity_registry, entry.entry_id) - state = hass.states.get("switch.fake_profile_ai_driven_threat_detection") - assert state - assert state.state == STATE_ON - - entry = registry.async_get("switch.fake_profile_ai_driven_threat_detection") - assert entry - assert entry.unique_id == "xyz12_ai_threat_detection" - - state = hass.states.get("switch.fake_profile_allow_affiliate_tracking_links") - assert state - assert state.state == STATE_ON - - entry = registry.async_get("switch.fake_profile_allow_affiliate_tracking_links") - assert entry - assert entry.unique_id == "xyz12_allow_affiliate" - - state = hass.states.get("switch.fake_profile_anonymized_edns_client_subnet") - assert state - assert state.state == STATE_ON - - entry = registry.async_get("switch.fake_profile_anonymized_edns_client_subnet") - assert entry - assert entry.unique_id == "xyz12_anonymized_ecs" - - state = hass.states.get("switch.fake_profile_block_bypass_methods") - assert state - assert state.state == STATE_ON - - entry = registry.async_get("switch.fake_profile_block_bypass_methods") - assert entry - assert 
entry.unique_id == "xyz12_block_bypass_methods" - - state = hass.states.get("switch.fake_profile_block_child_sexual_abuse_material") - assert state - assert state.state == STATE_ON - - entry = registry.async_get("switch.fake_profile_block_child_sexual_abuse_material") - assert entry - assert entry.unique_id == "xyz12_block_csam" - - state = hass.states.get("switch.fake_profile_block_disguised_third_party_trackers") - assert state - assert state.state == STATE_ON - - entry = registry.async_get( - "switch.fake_profile_block_disguised_third_party_trackers" - ) - assert entry - assert entry.unique_id == "xyz12_block_disguised_trackers" - - state = hass.states.get("switch.fake_profile_block_dynamic_dns_hostnames") - assert state - assert state.state == STATE_ON - - entry = registry.async_get("switch.fake_profile_block_dynamic_dns_hostnames") - assert entry - assert entry.unique_id == "xyz12_block_ddns" - - state = hass.states.get("switch.fake_profile_block_newly_registered_domains") - assert state - assert state.state == STATE_ON - - entry = registry.async_get("switch.fake_profile_block_newly_registered_domains") - assert entry - assert entry.unique_id == "xyz12_block_nrd" - - state = hass.states.get("switch.fake_profile_block_page") - assert state - assert state.state == STATE_OFF - - entry = registry.async_get("switch.fake_profile_block_page") - assert entry - assert entry.unique_id == "xyz12_block_page" - - state = hass.states.get("switch.fake_profile_block_parked_domains") - assert state - assert state.state == STATE_ON - - entry = registry.async_get("switch.fake_profile_block_parked_domains") - assert entry - assert entry.unique_id == "xyz12_block_parked_domains" - - state = hass.states.get("switch.fake_profile_cname_flattening") - assert state - assert state.state == STATE_ON - - entry = registry.async_get("switch.fake_profile_cname_flattening") - assert entry - assert entry.unique_id == "xyz12_cname_flattening" - - state = 
hass.states.get("switch.fake_profile_cache_boost") - assert state - assert state.state == STATE_ON - - entry = registry.async_get("switch.fake_profile_cache_boost") - assert entry - assert entry.unique_id == "xyz12_cache_boost" - - state = hass.states.get("switch.fake_profile_cryptojacking_protection") - assert state - assert state.state == STATE_ON - - entry = registry.async_get("switch.fake_profile_cryptojacking_protection") - assert entry - assert entry.unique_id == "xyz12_cryptojacking_protection" - - state = hass.states.get("switch.fake_profile_dns_rebinding_protection") - assert state - assert state.state == STATE_ON - - entry = registry.async_get("switch.fake_profile_dns_rebinding_protection") - assert entry - assert entry.unique_id == "xyz12_dns_rebinding_protection" - - state = hass.states.get( - "switch.fake_profile_domain_generation_algorithms_protection" - ) - assert state - assert state.state == STATE_ON - - entry = registry.async_get( - "switch.fake_profile_domain_generation_algorithms_protection" - ) - assert entry - assert entry.unique_id == "xyz12_dga_protection" - - state = hass.states.get("switch.fake_profile_force_safesearch") - assert state - assert state.state == STATE_OFF - - entry = registry.async_get("switch.fake_profile_force_safesearch") - assert entry - assert entry.unique_id == "xyz12_safesearch" - - state = hass.states.get("switch.fake_profile_force_youtube_restricted_mode") - assert state - assert state.state == STATE_OFF - - entry = registry.async_get("switch.fake_profile_force_youtube_restricted_mode") - assert entry - assert entry.unique_id == "xyz12_youtube_restricted_mode" - - state = hass.states.get("switch.fake_profile_google_safe_browsing") - assert state - assert state.state == STATE_OFF - - entry = registry.async_get("switch.fake_profile_google_safe_browsing") - assert entry - assert entry.unique_id == "xyz12_google_safe_browsing" - - state = hass.states.get("switch.fake_profile_idn_homograph_attacks_protection") - assert 
state - assert state.state == STATE_ON - - entry = registry.async_get("switch.fake_profile_idn_homograph_attacks_protection") - assert entry - assert entry.unique_id == "xyz12_idn_homograph_attacks_protection" - - state = hass.states.get("switch.fake_profile_logs") - assert state - assert state.state == STATE_ON - - entry = registry.async_get("switch.fake_profile_logs") - assert entry - assert entry.unique_id == "xyz12_logs" - - state = hass.states.get("switch.fake_profile_threat_intelligence_feeds") - assert state - assert state.state == STATE_ON - - entry = registry.async_get("switch.fake_profile_threat_intelligence_feeds") - assert entry - assert entry.unique_id == "xyz12_threat_intelligence_feeds" - - state = hass.states.get("switch.fake_profile_typosquatting_protection") - assert state - assert state.state == STATE_ON - - entry = registry.async_get("switch.fake_profile_typosquatting_protection") - assert entry - assert entry.unique_id == "xyz12_typosquatting_protection" - - state = hass.states.get("switch.fake_profile_web3") - assert state - assert state.state == STATE_ON - - entry = registry.async_get("switch.fake_profile_web3") - assert entry - assert entry.unique_id == "xyz12_web3" - - state = hass.states.get("switch.fake_profile_block_9gag") - assert state - assert state.state == STATE_ON - - entry = registry.async_get("switch.fake_profile_block_9gag") - assert entry - assert entry.unique_id == "xyz12_block_9gag" - - state = hass.states.get("switch.fake_profile_block_amazon") - assert state - assert state.state == STATE_ON - - entry = registry.async_get("switch.fake_profile_block_amazon") - assert entry - assert entry.unique_id == "xyz12_block_amazon" - - state = hass.states.get("switch.fake_profile_block_bereal") - assert state - assert state.state == STATE_ON - - entry = registry.async_get("switch.fake_profile_block_bereal") - assert entry - assert entry.unique_id == "xyz12_block_bereal" - - state = hass.states.get("switch.fake_profile_block_blizzard") - 
assert state - assert state.state == STATE_ON - - entry = registry.async_get("switch.fake_profile_block_blizzard") - assert entry - assert entry.unique_id == "xyz12_block_blizzard" - - state = hass.states.get("switch.fake_profile_block_chatgpt") - assert state - assert state.state == STATE_ON - - entry = registry.async_get("switch.fake_profile_block_chatgpt") - assert entry - assert entry.unique_id == "xyz12_block_chatgpt" - - state = hass.states.get("switch.fake_profile_block_dailymotion") - assert state - assert state.state == STATE_ON - - entry = registry.async_get("switch.fake_profile_block_dailymotion") - assert entry - assert entry.unique_id == "xyz12_block_dailymotion" - - state = hass.states.get("switch.fake_profile_block_discord") - assert state - assert state.state == STATE_ON - - entry = registry.async_get("switch.fake_profile_block_discord") - assert entry - assert entry.unique_id == "xyz12_block_discord" - - state = hass.states.get("switch.fake_profile_block_disney_plus") - assert state - assert state.state == STATE_ON - - entry = registry.async_get("switch.fake_profile_block_disney_plus") - assert entry - assert entry.unique_id == "xyz12_block_disneyplus" - - state = hass.states.get("switch.fake_profile_block_ebay") - assert state - assert state.state == STATE_ON - - entry = registry.async_get("switch.fake_profile_block_ebay") - assert entry - assert entry.unique_id == "xyz12_block_ebay" - - state = hass.states.get("switch.fake_profile_block_facebook") - assert state - assert state.state == STATE_ON - - entry = registry.async_get("switch.fake_profile_block_facebook") - assert entry - assert entry.unique_id == "xyz12_block_facebook" - - state = hass.states.get("switch.fake_profile_block_fortnite") - assert state - assert state.state == STATE_ON - - entry = registry.async_get("switch.fake_profile_block_fortnite") - assert entry - assert entry.unique_id == "xyz12_block_fortnite" - - state = hass.states.get("switch.fake_profile_block_google_chat") - 
assert state - assert state.state == STATE_ON - - entry = registry.async_get("switch.fake_profile_block_google_chat") - assert entry - assert entry.unique_id == "xyz12_block_google_chat" - - state = hass.states.get("switch.fake_profile_block_hbo_max") - assert state - assert state.state == STATE_ON - - entry = registry.async_get("switch.fake_profile_block_hbo_max") - assert entry - assert entry.unique_id == "xyz12_block_hbomax" - - state = hass.states.get("switch.fake_profile_block_hulu") - assert state - assert state.state == STATE_ON - - entry = registry.async_get("switch.fake_profile_block_hulu") - assert entry - assert entry.unique_id == "xyz12_block_hulu" - - state = hass.states.get("switch.fake_profile_block_imgur") - assert state - assert state.state == STATE_ON - - entry = registry.async_get("switch.fake_profile_block_imgur") - assert entry - assert entry.unique_id == "xyz12_block_imgur" - - state = hass.states.get("switch.fake_profile_block_instagram") - assert state - assert state.state == STATE_ON - - entry = registry.async_get("switch.fake_profile_block_instagram") - assert entry - assert entry.unique_id == "xyz12_block_instagram" - - state = hass.states.get("switch.fake_profile_block_league_of_legends") - assert state - assert state.state == STATE_ON - - entry = registry.async_get("switch.fake_profile_block_league_of_legends") - assert entry - assert entry.unique_id == "xyz12_block_leagueoflegends" - - state = hass.states.get("switch.fake_profile_block_mastodon") - assert state - assert state.state == STATE_ON - - entry = registry.async_get("switch.fake_profile_block_mastodon") - assert entry - assert entry.unique_id == "xyz12_block_mastodon" - - state = hass.states.get("switch.fake_profile_block_messenger") - assert state - assert state.state == STATE_ON - - entry = registry.async_get("switch.fake_profile_block_messenger") - assert entry - assert entry.unique_id == "xyz12_block_messenger" - - state = 
hass.states.get("switch.fake_profile_block_minecraft") - assert state - assert state.state == STATE_ON - - entry = registry.async_get("switch.fake_profile_block_minecraft") - assert entry - assert entry.unique_id == "xyz12_block_minecraft" - - state = hass.states.get("switch.fake_profile_block_netflix") - assert state - assert state.state == STATE_ON - - entry = registry.async_get("switch.fake_profile_block_netflix") - assert entry - assert entry.unique_id == "xyz12_block_netflix" - - state = hass.states.get("switch.fake_profile_block_pinterest") - assert state - assert state.state == STATE_ON - - entry = registry.async_get("switch.fake_profile_block_pinterest") - assert entry - assert entry.unique_id == "xyz12_block_pinterest" - - state = hass.states.get("switch.fake_profile_block_playstation_network") - assert state - assert state.state == STATE_ON - - entry = registry.async_get("switch.fake_profile_block_playstation_network") - assert entry - assert entry.unique_id == "xyz12_block_playstation_network" - - state = hass.states.get("switch.fake_profile_block_prime_video") - assert state - assert state.state == STATE_ON - - entry = registry.async_get("switch.fake_profile_block_prime_video") - assert entry - assert entry.unique_id == "xyz12_block_primevideo" - - state = hass.states.get("switch.fake_profile_block_reddit") - assert state - assert state.state == STATE_ON - - entry = registry.async_get("switch.fake_profile_block_reddit") - assert entry - assert entry.unique_id == "xyz12_block_reddit" - - state = hass.states.get("switch.fake_profile_block_roblox") - assert state - assert state.state == STATE_ON - - entry = registry.async_get("switch.fake_profile_block_roblox") - assert entry - assert entry.unique_id == "xyz12_block_roblox" - - state = hass.states.get("switch.fake_profile_block_signal") - assert state - assert state.state == STATE_ON - - entry = registry.async_get("switch.fake_profile_block_signal") - assert entry - assert entry.unique_id == 
"xyz12_block_signal" - - state = hass.states.get("switch.fake_profile_block_skype") - assert state - assert state.state == STATE_ON - - entry = registry.async_get("switch.fake_profile_block_skype") - assert entry - assert entry.unique_id == "xyz12_block_skype" - - state = hass.states.get("switch.fake_profile_block_snapchat") - assert state - assert state.state == STATE_ON - - entry = registry.async_get("switch.fake_profile_block_snapchat") - assert entry - assert entry.unique_id == "xyz12_block_snapchat" - - state = hass.states.get("switch.fake_profile_block_spotify") - assert state - assert state.state == STATE_ON - - entry = registry.async_get("switch.fake_profile_block_spotify") - assert entry - assert entry.unique_id == "xyz12_block_spotify" - - state = hass.states.get("switch.fake_profile_block_steam") - assert state - assert state.state == STATE_ON - - entry = registry.async_get("switch.fake_profile_block_steam") - assert entry - assert entry.unique_id == "xyz12_block_steam" - - state = hass.states.get("switch.fake_profile_block_telegram") - assert state - assert state.state == STATE_ON - - entry = registry.async_get("switch.fake_profile_block_telegram") - assert entry - assert entry.unique_id == "xyz12_block_telegram" - - state = hass.states.get("switch.fake_profile_block_tiktok") - assert state - assert state.state == STATE_ON - - entry = registry.async_get("switch.fake_profile_block_tiktok") - assert entry - assert entry.unique_id == "xyz12_block_tiktok" - - state = hass.states.get("switch.fake_profile_block_tinder") - assert state - assert state.state == STATE_ON - - entry = registry.async_get("switch.fake_profile_block_tinder") - assert entry - assert entry.unique_id == "xyz12_block_tinder" - - state = hass.states.get("switch.fake_profile_block_tumblr") - assert state - assert state.state == STATE_ON - - entry = registry.async_get("switch.fake_profile_block_tumblr") - assert entry - assert entry.unique_id == "xyz12_block_tumblr" - - state = 
hass.states.get("switch.fake_profile_block_twitch") - assert state - assert state.state == STATE_ON - - entry = registry.async_get("switch.fake_profile_block_twitch") - assert entry - assert entry.unique_id == "xyz12_block_twitch" - - state = hass.states.get("switch.fake_profile_block_x_formerly_twitter") - assert state - assert state.state == STATE_ON - - entry = registry.async_get("switch.fake_profile_block_x_formerly_twitter") - assert entry - assert entry.unique_id == "xyz12_block_twitter" - - state = hass.states.get("switch.fake_profile_block_vimeo") - assert state - assert state.state == STATE_ON - - entry = registry.async_get("switch.fake_profile_block_vimeo") - assert entry - assert entry.unique_id == "xyz12_block_vimeo" - - state = hass.states.get("switch.fake_profile_block_vk") - assert state - assert state.state == STATE_ON - - entry = registry.async_get("switch.fake_profile_block_vk") - assert entry - assert entry.unique_id == "xyz12_block_vk" - - state = hass.states.get("switch.fake_profile_block_whatsapp") - assert state - assert state.state == STATE_ON - - entry = registry.async_get("switch.fake_profile_block_whatsapp") - assert entry - assert entry.unique_id == "xyz12_block_whatsapp" - - state = hass.states.get("switch.fake_profile_block_xbox_live") - assert state - assert state.state == STATE_ON - - entry = registry.async_get("switch.fake_profile_block_xbox_live") - assert entry - assert entry.unique_id == "xyz12_block_xboxlive" - - state = hass.states.get("switch.fake_profile_block_youtube") - assert state - assert state.state == STATE_ON - - entry = registry.async_get("switch.fake_profile_block_youtube") - assert entry - assert entry.unique_id == "xyz12_block_youtube" - - state = hass.states.get("switch.fake_profile_block_zoom") - assert state - assert state.state == STATE_ON - - entry = registry.async_get("switch.fake_profile_block_zoom") - assert entry - assert entry.unique_id == "xyz12_block_zoom" - - state = 
hass.states.get("switch.fake_profile_block_dating") - assert state - assert state.state == STATE_ON - - entry = registry.async_get("switch.fake_profile_block_dating") - assert entry - assert entry.unique_id == "xyz12_block_dating" - - state = hass.states.get("switch.fake_profile_block_gambling") - assert state - assert state.state == STATE_ON - - entry = registry.async_get("switch.fake_profile_block_gambling") - assert entry - assert entry.unique_id == "xyz12_block_gambling" - - state = hass.states.get("switch.fake_profile_block_online_gaming") - assert state - assert state.state == STATE_ON - - entry = registry.async_get("switch.fake_profile_block_online_gaming") - assert entry - assert entry.unique_id == "xyz12_block_online_gaming" - - state = hass.states.get("switch.fake_profile_block_piracy") - assert state - assert state.state == STATE_ON - - entry = registry.async_get("switch.fake_profile_block_piracy") - assert entry - assert entry.unique_id == "xyz12_block_piracy" - - state = hass.states.get("switch.fake_profile_block_porn") - assert state - assert state.state == STATE_ON - - entry = registry.async_get("switch.fake_profile_block_porn") - assert entry - assert entry.unique_id == "xyz12_block_porn" - - state = hass.states.get("switch.fake_profile_block_social_networks") - assert state - assert state.state == STATE_ON - - entry = registry.async_get("switch.fake_profile_block_social_networks") - assert entry - assert entry.unique_id == "xyz12_block_social_networks" - - state = hass.states.get("switch.fake_profile_block_video_streaming") - assert state - assert state.state == STATE_ON - - entry = registry.async_get("switch.fake_profile_block_video_streaming") - assert entry - assert entry.unique_id == "xyz12_block_video_streaming" + assert entity_entries + for entity_entry in entity_entries: + assert entity_entry == snapshot(name=f"{entity_entry.entity_id}-entry") + assert (state := hass.states.get(entity_entry.entity_id)) + assert state == 
snapshot(name=f"{entity_entry.entity_id}-state") async def test_switch_on(hass: HomeAssistant) -> None: From c2450c111214cb760ad05abe5a1347fc0c674d49 Mon Sep 17 00:00:00 2001 From: Maciej Bieniek Date: Sat, 20 Apr 2024 12:32:11 +0200 Subject: [PATCH 195/426] Use snapshot testing in GIOS sensor (#115876) Co-authored-by: Maciej Bieniek <478555+bieniu@users.noreply.github.com> --- .../gios/snapshots/test_sensor.ambr | 774 ++++++++++++++++++ tests/components/gios/test_sensor.py | 245 +----- 2 files changed, 789 insertions(+), 230 deletions(-) create mode 100644 tests/components/gios/snapshots/test_sensor.ambr diff --git a/tests/components/gios/snapshots/test_sensor.ambr b/tests/components/gios/snapshots/test_sensor.ambr new file mode 100644 index 00000000000..c67cc3e4d7c --- /dev/null +++ b/tests/components/gios/snapshots/test_sensor.ambr @@ -0,0 +1,774 @@ +# serializer version: 1 +# name: test_sensor[sensor.home_air_quality_index-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'very_bad', + 'bad', + 'sufficient', + 'moderate', + 'good', + 'very_good', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.home_air_quality_index', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Air quality index', + 'platform': 'gios', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'aqi', + 'unique_id': '123-aqi', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[sensor.home_air_quality_index-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by GIOŚ', + 'device_class': 'enum', + 'friendly_name': 'Home Air quality index', + 'options': list([ + 
'very_bad', + 'bad', + 'sufficient', + 'moderate', + 'good', + 'very_good', + ]), + }), + 'context': , + 'entity_id': 'sensor.home_air_quality_index', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'good', + }) +# --- +# name: test_sensor[sensor.home_benzene-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.home_benzene', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 0, + }), + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Benzene', + 'platform': 'gios', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'c6h6', + 'unique_id': '123-c6h6', + 'unit_of_measurement': 'µg/m³', + }) +# --- +# name: test_sensor[sensor.home_benzene-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by GIOŚ', + 'friendly_name': 'Home Benzene', + 'state_class': , + 'unit_of_measurement': 'µg/m³', + }), + 'context': , + 'entity_id': 'sensor.home_benzene', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.23789', + }) +# --- +# name: test_sensor[sensor.home_carbon_monoxide-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.home_carbon_monoxide', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 0, + 
}), + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Carbon monoxide', + 'platform': 'gios', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'co', + 'unique_id': '123-co', + 'unit_of_measurement': 'µg/m³', + }) +# --- +# name: test_sensor[sensor.home_carbon_monoxide-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by GIOŚ', + 'friendly_name': 'Home Carbon monoxide', + 'state_class': , + 'unit_of_measurement': 'µg/m³', + }), + 'context': , + 'entity_id': 'sensor.home_carbon_monoxide', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '251.874', + }) +# --- +# name: test_sensor[sensor.home_nitrogen_dioxide-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.home_nitrogen_dioxide', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 0, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Nitrogen dioxide', + 'platform': 'gios', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '123-no2', + 'unit_of_measurement': 'µg/m³', + }) +# --- +# name: test_sensor[sensor.home_nitrogen_dioxide-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by GIOŚ', + 'device_class': 'nitrogen_dioxide', + 'friendly_name': 'Home Nitrogen dioxide', + 'state_class': , + 'unit_of_measurement': 'µg/m³', + }), + 'context': , + 'entity_id': 'sensor.home_nitrogen_dioxide', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '7.13411', + }) +# --- +# name: 
test_sensor[sensor.home_nitrogen_dioxide_index-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'very_bad', + 'bad', + 'sufficient', + 'moderate', + 'good', + 'very_good', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.home_nitrogen_dioxide_index', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Nitrogen dioxide index', + 'platform': 'gios', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'no2_index', + 'unique_id': '123-no2-index', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[sensor.home_nitrogen_dioxide_index-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by GIOŚ', + 'device_class': 'enum', + 'friendly_name': 'Home Nitrogen dioxide index', + 'options': list([ + 'very_bad', + 'bad', + 'sufficient', + 'moderate', + 'good', + 'very_good', + ]), + }), + 'context': , + 'entity_id': 'sensor.home_nitrogen_dioxide_index', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'good', + }) +# --- +# name: test_sensor[sensor.home_ozone-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.home_ozone', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 0, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 
'Ozone', + 'platform': 'gios', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '123-o3', + 'unit_of_measurement': 'µg/m³', + }) +# --- +# name: test_sensor[sensor.home_ozone-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by GIOŚ', + 'device_class': 'ozone', + 'friendly_name': 'Home Ozone', + 'state_class': , + 'unit_of_measurement': 'µg/m³', + }), + 'context': , + 'entity_id': 'sensor.home_ozone', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '95.7768', + }) +# --- +# name: test_sensor[sensor.home_ozone_index-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'very_bad', + 'bad', + 'sufficient', + 'moderate', + 'good', + 'very_good', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.home_ozone_index', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Ozone index', + 'platform': 'gios', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'o3_index', + 'unique_id': '123-o3-index', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[sensor.home_ozone_index-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by GIOŚ', + 'device_class': 'enum', + 'friendly_name': 'Home Ozone index', + 'options': list([ + 'very_bad', + 'bad', + 'sufficient', + 'moderate', + 'good', + 'very_good', + ]), + }), + 'context': , + 'entity_id': 'sensor.home_ozone_index', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'good', + }) +# --- +# name: test_sensor[sensor.home_pm10-entry] + EntityRegistryEntrySnapshot({ + 'aliases': 
set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.home_pm10', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 0, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'PM10', + 'platform': 'gios', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '123-pm10', + 'unit_of_measurement': 'µg/m³', + }) +# --- +# name: test_sensor[sensor.home_pm10-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by GIOŚ', + 'device_class': 'pm10', + 'friendly_name': 'Home PM10', + 'state_class': , + 'unit_of_measurement': 'µg/m³', + }), + 'context': , + 'entity_id': 'sensor.home_pm10', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '16.8344', + }) +# --- +# name: test_sensor[sensor.home_pm10_index-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'very_bad', + 'bad', + 'sufficient', + 'moderate', + 'good', + 'very_good', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.home_pm10_index', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'PM10 index', + 'platform': 'gios', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'pm10_index', + 'unique_id': '123-pm10-index', + 'unit_of_measurement': None, + }) +# --- +# name: 
test_sensor[sensor.home_pm10_index-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by GIOŚ', + 'device_class': 'enum', + 'friendly_name': 'Home PM10 index', + 'options': list([ + 'very_bad', + 'bad', + 'sufficient', + 'moderate', + 'good', + 'very_good', + ]), + }), + 'context': , + 'entity_id': 'sensor.home_pm10_index', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'good', + }) +# --- +# name: test_sensor[sensor.home_pm2_5-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.home_pm2_5', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 0, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'PM2.5', + 'platform': 'gios', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '123-pm25', + 'unit_of_measurement': 'µg/m³', + }) +# --- +# name: test_sensor[sensor.home_pm2_5-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by GIOŚ', + 'device_class': 'pm25', + 'friendly_name': 'Home PM2.5', + 'state_class': , + 'unit_of_measurement': 'µg/m³', + }), + 'context': , + 'entity_id': 'sensor.home_pm2_5', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '4', + }) +# --- +# name: test_sensor[sensor.home_pm2_5_index-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'very_bad', + 'bad', + 'sufficient', + 'moderate', + 'good', + 'very_good', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': 
None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.home_pm2_5_index', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'PM2.5 index', + 'platform': 'gios', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'pm25_index', + 'unique_id': '123-pm25-index', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[sensor.home_pm2_5_index-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by GIOŚ', + 'device_class': 'enum', + 'friendly_name': 'Home PM2.5 index', + 'options': list([ + 'very_bad', + 'bad', + 'sufficient', + 'moderate', + 'good', + 'very_good', + ]), + }), + 'context': , + 'entity_id': 'sensor.home_pm2_5_index', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'good', + }) +# --- +# name: test_sensor[sensor.home_sulphur_dioxide-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.home_sulphur_dioxide', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 0, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Sulphur dioxide', + 'platform': 'gios', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '123-so2', + 'unit_of_measurement': 'µg/m³', + }) +# --- +# name: test_sensor[sensor.home_sulphur_dioxide-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by GIOŚ', + 'device_class': 
'sulphur_dioxide', + 'friendly_name': 'Home Sulphur dioxide', + 'state_class': , + 'unit_of_measurement': 'µg/m³', + }), + 'context': , + 'entity_id': 'sensor.home_sulphur_dioxide', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '4.35478', + }) +# --- +# name: test_sensor[sensor.home_sulphur_dioxide_index-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'very_bad', + 'bad', + 'sufficient', + 'moderate', + 'good', + 'very_good', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.home_sulphur_dioxide_index', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Sulphur dioxide index', + 'platform': 'gios', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'so2_index', + 'unique_id': '123-so2-index', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[sensor.home_sulphur_dioxide_index-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by GIOŚ', + 'device_class': 'enum', + 'friendly_name': 'Home Sulphur dioxide index', + 'options': list([ + 'very_bad', + 'bad', + 'sufficient', + 'moderate', + 'good', + 'very_good', + ]), + }), + 'context': , + 'entity_id': 'sensor.home_sulphur_dioxide_index', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'very_good', + }) +# --- diff --git a/tests/components/gios/test_sensor.py b/tests/components/gios/test_sensor.py index 60e8722ba24..e760e050f2b 100644 --- a/tests/components/gios/test_sensor.py +++ b/tests/components/gios/test_sensor.py @@ -6,23 +6,11 @@ import json from unittest.mock import patch from gios import ApiError +from syrupy import 
SnapshotAssertion -from homeassistant.components.gios.const import ATTRIBUTION, DOMAIN -from homeassistant.components.sensor import ( - ATTR_OPTIONS, - ATTR_STATE_CLASS, - DOMAIN as PLATFORM, - SensorDeviceClass, - SensorStateClass, -) -from homeassistant.const import ( - ATTR_ATTRIBUTION, - ATTR_DEVICE_CLASS, - ATTR_ICON, - ATTR_UNIT_OF_MEASUREMENT, - CONCENTRATION_MICROGRAMS_PER_CUBIC_METER, - STATE_UNAVAILABLE, -) +from homeassistant.components.gios.const import DOMAIN +from homeassistant.components.sensor import DOMAIN as PLATFORM +from homeassistant.const import STATE_UNAVAILABLE, Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er from homeassistant.util.dt import utcnow @@ -32,223 +20,20 @@ from . import init_integration from tests.common import async_fire_time_changed, load_fixture -async def test_sensor(hass: HomeAssistant, entity_registry: er.EntityRegistry) -> None: +async def test_sensor( + hass: HomeAssistant, entity_registry: er.EntityRegistry, snapshot: SnapshotAssertion +) -> None: """Test states of the sensor.""" - await init_integration(hass) + with patch("homeassistant.components.gios.PLATFORMS", [Platform.SENSOR]): + entry = await init_integration(hass) - state = hass.states.get("sensor.home_benzene") - assert state - assert state.state == "0.23789" - assert state.attributes.get(ATTR_ATTRIBUTION) == ATTRIBUTION - assert state.attributes.get(ATTR_STATE_CLASS) == SensorStateClass.MEASUREMENT - assert ( - state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) - == CONCENTRATION_MICROGRAMS_PER_CUBIC_METER - ) - assert state.attributes.get(ATTR_ICON) is None + entity_entries = er.async_entries_for_config_entry(entity_registry, entry.entry_id) - entry = entity_registry.async_get("sensor.home_benzene") - assert entry - assert entry.unique_id == "123-c6h6" - - state = hass.states.get("sensor.home_carbon_monoxide") - assert state - assert state.state == "251.874" - assert 
state.attributes.get(ATTR_ATTRIBUTION) == ATTRIBUTION - assert state.attributes.get(ATTR_DEVICE_CLASS) is None - assert state.attributes.get(ATTR_STATE_CLASS) == SensorStateClass.MEASUREMENT - assert ( - state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) - == CONCENTRATION_MICROGRAMS_PER_CUBIC_METER - ) - - entry = entity_registry.async_get("sensor.home_carbon_monoxide") - assert entry - assert entry.unique_id == "123-co" - - state = hass.states.get("sensor.home_nitrogen_dioxide") - assert state - assert state.state == "7.13411" - assert state.attributes.get(ATTR_ATTRIBUTION) == ATTRIBUTION - assert state.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.NITROGEN_DIOXIDE - assert state.attributes.get(ATTR_STATE_CLASS) == SensorStateClass.MEASUREMENT - assert ( - state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) - == CONCENTRATION_MICROGRAMS_PER_CUBIC_METER - ) - - entry = entity_registry.async_get("sensor.home_nitrogen_dioxide") - assert entry - assert entry.unique_id == "123-no2" - - state = hass.states.get("sensor.home_nitrogen_dioxide_index") - assert state - assert state.state == "good" - assert state.attributes.get(ATTR_ATTRIBUTION) == ATTRIBUTION - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) is None - assert state.attributes.get(ATTR_OPTIONS) == [ - "very_bad", - "bad", - "sufficient", - "moderate", - "good", - "very_good", - ] - - entry = entity_registry.async_get("sensor.home_nitrogen_dioxide_index") - assert entry - assert entry.unique_id == "123-no2-index" - - state = hass.states.get("sensor.home_ozone") - assert state - assert state.state == "95.7768" - assert state.attributes.get(ATTR_ATTRIBUTION) == ATTRIBUTION - assert state.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.OZONE - assert state.attributes.get(ATTR_STATE_CLASS) == SensorStateClass.MEASUREMENT - assert ( - state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) - == CONCENTRATION_MICROGRAMS_PER_CUBIC_METER - ) - - entry = entity_registry.async_get("sensor.home_ozone") - assert entry 
- assert entry.unique_id == "123-o3" - - state = hass.states.get("sensor.home_ozone_index") - assert state - assert state.state == "good" - assert state.attributes.get(ATTR_ATTRIBUTION) == ATTRIBUTION - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) is None - assert state.attributes.get(ATTR_OPTIONS) == [ - "very_bad", - "bad", - "sufficient", - "moderate", - "good", - "very_good", - ] - - entry = entity_registry.async_get("sensor.home_ozone_index") - assert entry - assert entry.unique_id == "123-o3-index" - - state = hass.states.get("sensor.home_pm10") - assert state - assert state.state == "16.8344" - assert state.attributes.get(ATTR_ATTRIBUTION) == ATTRIBUTION - assert state.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.PM10 - assert state.attributes.get(ATTR_STATE_CLASS) == SensorStateClass.MEASUREMENT - assert ( - state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) - == CONCENTRATION_MICROGRAMS_PER_CUBIC_METER - ) - - entry = entity_registry.async_get("sensor.home_pm10") - assert entry - assert entry.unique_id == "123-pm10" - - state = hass.states.get("sensor.home_pm10_index") - assert state - assert state.state == "good" - assert state.attributes.get(ATTR_ATTRIBUTION) == ATTRIBUTION - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) is None - assert state.attributes.get(ATTR_OPTIONS) == [ - "very_bad", - "bad", - "sufficient", - "moderate", - "good", - "very_good", - ] - - entry = entity_registry.async_get("sensor.home_pm10_index") - assert entry - assert entry.unique_id == "123-pm10-index" - - state = hass.states.get("sensor.home_pm2_5") - assert state - assert state.state == "4" - assert state.attributes.get(ATTR_ATTRIBUTION) == ATTRIBUTION - assert state.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.PM25 - assert state.attributes.get(ATTR_STATE_CLASS) == SensorStateClass.MEASUREMENT - assert ( - state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) - == CONCENTRATION_MICROGRAMS_PER_CUBIC_METER - ) - - entry = 
entity_registry.async_get("sensor.home_pm2_5") - assert entry - assert entry.unique_id == "123-pm25" - - state = hass.states.get("sensor.home_pm2_5_index") - assert state - assert state.state == "good" - assert state.attributes.get(ATTR_ATTRIBUTION) == ATTRIBUTION - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) is None - assert state.attributes.get(ATTR_OPTIONS) == [ - "very_bad", - "bad", - "sufficient", - "moderate", - "good", - "very_good", - ] - - entry = entity_registry.async_get("sensor.home_pm2_5_index") - assert entry - assert entry.unique_id == "123-pm25-index" - - state = hass.states.get("sensor.home_sulphur_dioxide") - assert state - assert state.state == "4.35478" - assert state.attributes.get(ATTR_ATTRIBUTION) == ATTRIBUTION - assert state.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.SULPHUR_DIOXIDE - assert state.attributes.get(ATTR_STATE_CLASS) == SensorStateClass.MEASUREMENT - assert ( - state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) - == CONCENTRATION_MICROGRAMS_PER_CUBIC_METER - ) - - entry = entity_registry.async_get("sensor.home_sulphur_dioxide") - assert entry - assert entry.unique_id == "123-so2" - - state = hass.states.get("sensor.home_sulphur_dioxide_index") - assert state - assert state.state == "very_good" - assert state.attributes.get(ATTR_ATTRIBUTION) == ATTRIBUTION - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) is None - assert state.attributes.get(ATTR_OPTIONS) == [ - "very_bad", - "bad", - "sufficient", - "moderate", - "good", - "very_good", - ] - - entry = entity_registry.async_get("sensor.home_sulphur_dioxide_index") - assert entry - assert entry.unique_id == "123-so2-index" - - state = hass.states.get("sensor.home_air_quality_index") - assert state - assert state.state == "good" - assert state.attributes.get(ATTR_ATTRIBUTION) == ATTRIBUTION - assert state.attributes.get(ATTR_STATE_CLASS) is None - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) is None - assert state.attributes.get(ATTR_OPTIONS) == 
[ - "very_bad", - "bad", - "sufficient", - "moderate", - "good", - "very_good", - ] - - entry = entity_registry.async_get("sensor.home_air_quality_index") - assert entry - assert entry.unique_id == "123-aqi" + assert entity_entries + for entity_entry in entity_entries: + assert entity_entry == snapshot(name=f"{entity_entry.entity_id}-entry") + assert (state := hass.states.get(entity_entry.entity_id)) + assert state == snapshot(name=f"{entity_entry.entity_id}-state") async def test_availability(hass: HomeAssistant) -> None: From c4e7a7af21b761473c1a3d66c2fc30a457df5969 Mon Sep 17 00:00:00 2001 From: Maciej Bieniek Date: Sat, 20 Apr 2024 12:33:08 +0200 Subject: [PATCH 196/426] Use snapshot testing in Brother sensor (#115875) Co-authored-by: Maciej Bieniek <478555+bieniu@users.noreply.github.com> --- .../brother/snapshots/test_sensor.ambr | 1394 +++++++++++++++++ tests/components/brother/test_sensor.py | 395 +---- 2 files changed, 1420 insertions(+), 369 deletions(-) create mode 100644 tests/components/brother/snapshots/test_sensor.ambr diff --git a/tests/components/brother/snapshots/test_sensor.ambr b/tests/components/brother/snapshots/test_sensor.ambr new file mode 100644 index 00000000000..a27c5addd61 --- /dev/null +++ b/tests/components/brother/snapshots/test_sensor.ambr @@ -0,0 +1,1394 @@ +# serializer version: 1 +# name: test_sensors[sensor.hl_l2340dw_b_w_pages-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.hl_l2340dw_b_w_pages', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'B/W pages', + 'platform': 'brother', + 'previous_unique_id': None, + 
'supported_features': 0, + 'translation_key': 'bw_pages', + 'unique_id': '0123456789_bw_counter', + 'unit_of_measurement': 'p', + }) +# --- +# name: test_sensors[sensor.hl_l2340dw_b_w_pages-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'HL-L2340DW B/W pages', + 'state_class': , + 'unit_of_measurement': 'p', + }), + 'context': , + 'entity_id': 'sensor.hl_l2340dw_b_w_pages', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '709', + }) +# --- +# name: test_sensors[sensor.hl_l2340dw_belt_unit_remaining_lifetime-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.hl_l2340dw_belt_unit_remaining_lifetime', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Belt unit remaining lifetime', + 'platform': 'brother', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'belt_unit_remaining_life', + 'unique_id': '0123456789_belt_unit_remaining_life', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensors[sensor.hl_l2340dw_belt_unit_remaining_lifetime-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'HL-L2340DW Belt unit remaining lifetime', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.hl_l2340dw_belt_unit_remaining_lifetime', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '97', + }) +# --- +# name: test_sensors[sensor.hl_l2340dw_black_drum_page_counter-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 
'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.hl_l2340dw_black_drum_page_counter', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Black drum page counter', + 'platform': 'brother', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'black_drum_page_counter', + 'unique_id': '0123456789_black_drum_counter', + 'unit_of_measurement': 'p', + }) +# --- +# name: test_sensors[sensor.hl_l2340dw_black_drum_page_counter-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'HL-L2340DW Black drum page counter', + 'state_class': , + 'unit_of_measurement': 'p', + }), + 'context': , + 'entity_id': 'sensor.hl_l2340dw_black_drum_page_counter', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1611', + }) +# --- +# name: test_sensors[sensor.hl_l2340dw_black_drum_remaining_lifetime-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.hl_l2340dw_black_drum_remaining_lifetime', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Black drum remaining lifetime', + 'platform': 'brother', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'black_drum_remaining_life', + 'unique_id': '0123456789_black_drum_remaining_life', + 'unit_of_measurement': '%', + }) +# --- +# name: 
test_sensors[sensor.hl_l2340dw_black_drum_remaining_lifetime-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'HL-L2340DW Black drum remaining lifetime', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.hl_l2340dw_black_drum_remaining_lifetime', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '92', + }) +# --- +# name: test_sensors[sensor.hl_l2340dw_black_drum_remaining_pages-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.hl_l2340dw_black_drum_remaining_pages', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Black drum remaining pages', + 'platform': 'brother', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'black_drum_remaining_pages', + 'unique_id': '0123456789_black_drum_remaining_pages', + 'unit_of_measurement': 'p', + }) +# --- +# name: test_sensors[sensor.hl_l2340dw_black_drum_remaining_pages-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'HL-L2340DW Black drum remaining pages', + 'state_class': , + 'unit_of_measurement': 'p', + }), + 'context': , + 'entity_id': 'sensor.hl_l2340dw_black_drum_remaining_pages', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '16389', + }) +# --- +# name: test_sensors[sensor.hl_l2340dw_black_toner_remaining-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 
'entity_category': , + 'entity_id': 'sensor.hl_l2340dw_black_toner_remaining', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Black toner remaining', + 'platform': 'brother', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'black_toner_remaining', + 'unique_id': '0123456789_black_toner_remaining', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensors[sensor.hl_l2340dw_black_toner_remaining-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'HL-L2340DW Black toner remaining', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.hl_l2340dw_black_toner_remaining', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '75', + }) +# --- +# name: test_sensors[sensor.hl_l2340dw_color_pages-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.hl_l2340dw_color_pages', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Color pages', + 'platform': 'brother', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'color_pages', + 'unique_id': '0123456789_color_counter', + 'unit_of_measurement': 'p', + }) +# --- +# name: test_sensors[sensor.hl_l2340dw_color_pages-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'HL-L2340DW Color pages', + 'state_class': , + 'unit_of_measurement': 'p', + }), + 'context': , + 'entity_id': 
'sensor.hl_l2340dw_color_pages', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '902', + }) +# --- +# name: test_sensors[sensor.hl_l2340dw_cyan_drum_page_counter-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.hl_l2340dw_cyan_drum_page_counter', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Cyan drum page counter', + 'platform': 'brother', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'cyan_drum_page_counter', + 'unique_id': '0123456789_cyan_drum_counter', + 'unit_of_measurement': 'p', + }) +# --- +# name: test_sensors[sensor.hl_l2340dw_cyan_drum_page_counter-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'HL-L2340DW Cyan drum page counter', + 'state_class': , + 'unit_of_measurement': 'p', + }), + 'context': , + 'entity_id': 'sensor.hl_l2340dw_cyan_drum_page_counter', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1611', + }) +# --- +# name: test_sensors[sensor.hl_l2340dw_cyan_drum_remaining_lifetime-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.hl_l2340dw_cyan_drum_remaining_lifetime', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Cyan drum 
remaining lifetime', + 'platform': 'brother', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'cyan_drum_remaining_life', + 'unique_id': '0123456789_cyan_drum_remaining_life', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensors[sensor.hl_l2340dw_cyan_drum_remaining_lifetime-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'HL-L2340DW Cyan drum remaining lifetime', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.hl_l2340dw_cyan_drum_remaining_lifetime', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '92', + }) +# --- +# name: test_sensors[sensor.hl_l2340dw_cyan_drum_remaining_pages-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.hl_l2340dw_cyan_drum_remaining_pages', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Cyan drum remaining pages', + 'platform': 'brother', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'cyan_drum_remaining_pages', + 'unique_id': '0123456789_cyan_drum_remaining_pages', + 'unit_of_measurement': 'p', + }) +# --- +# name: test_sensors[sensor.hl_l2340dw_cyan_drum_remaining_pages-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'HL-L2340DW Cyan drum remaining pages', + 'state_class': , + 'unit_of_measurement': 'p', + }), + 'context': , + 'entity_id': 'sensor.hl_l2340dw_cyan_drum_remaining_pages', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '16389', + }) +# --- +# name: 
test_sensors[sensor.hl_l2340dw_cyan_toner_remaining-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.hl_l2340dw_cyan_toner_remaining', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Cyan toner remaining', + 'platform': 'brother', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'cyan_toner_remaining', + 'unique_id': '0123456789_cyan_toner_remaining', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensors[sensor.hl_l2340dw_cyan_toner_remaining-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'HL-L2340DW Cyan toner remaining', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.hl_l2340dw_cyan_toner_remaining', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '10', + }) +# --- +# name: test_sensors[sensor.hl_l2340dw_drum_page_counter-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.hl_l2340dw_drum_page_counter', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Drum page counter', + 'platform': 'brother', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'drum_page_counter', + 'unique_id': 
'0123456789_drum_counter', + 'unit_of_measurement': 'p', + }) +# --- +# name: test_sensors[sensor.hl_l2340dw_drum_page_counter-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'HL-L2340DW Drum page counter', + 'state_class': , + 'unit_of_measurement': 'p', + }), + 'context': , + 'entity_id': 'sensor.hl_l2340dw_drum_page_counter', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '986', + }) +# --- +# name: test_sensors[sensor.hl_l2340dw_drum_remaining_lifetime-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.hl_l2340dw_drum_remaining_lifetime', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Drum remaining lifetime', + 'platform': 'brother', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'drum_remaining_life', + 'unique_id': '0123456789_drum_remaining_life', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensors[sensor.hl_l2340dw_drum_remaining_lifetime-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'HL-L2340DW Drum remaining lifetime', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.hl_l2340dw_drum_remaining_lifetime', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '92', + }) +# --- +# name: test_sensors[sensor.hl_l2340dw_drum_remaining_pages-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', 
+ 'entity_category': , + 'entity_id': 'sensor.hl_l2340dw_drum_remaining_pages', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Drum remaining pages', + 'platform': 'brother', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'drum_remaining_pages', + 'unique_id': '0123456789_drum_remaining_pages', + 'unit_of_measurement': 'p', + }) +# --- +# name: test_sensors[sensor.hl_l2340dw_drum_remaining_pages-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'HL-L2340DW Drum remaining pages', + 'state_class': , + 'unit_of_measurement': 'p', + }), + 'context': , + 'entity_id': 'sensor.hl_l2340dw_drum_remaining_pages', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '11014', + }) +# --- +# name: test_sensors[sensor.hl_l2340dw_duplex_unit_page_counter-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.hl_l2340dw_duplex_unit_page_counter', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Duplex unit page counter', + 'platform': 'brother', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'duplex_unit_page_counter', + 'unique_id': '0123456789_duplex_unit_pages_counter', + 'unit_of_measurement': 'p', + }) +# --- +# name: test_sensors[sensor.hl_l2340dw_duplex_unit_page_counter-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'HL-L2340DW Duplex unit page counter', + 'state_class': , + 
'unit_of_measurement': 'p', + }), + 'context': , + 'entity_id': 'sensor.hl_l2340dw_duplex_unit_page_counter', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '538', + }) +# --- +# name: test_sensors[sensor.hl_l2340dw_fuser_remaining_lifetime-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.hl_l2340dw_fuser_remaining_lifetime', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Fuser remaining lifetime', + 'platform': 'brother', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'fuser_remaining_life', + 'unique_id': '0123456789_fuser_remaining_life', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensors[sensor.hl_l2340dw_fuser_remaining_lifetime-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'HL-L2340DW Fuser remaining lifetime', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.hl_l2340dw_fuser_remaining_lifetime', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '97', + }) +# --- +# name: test_sensors[sensor.hl_l2340dw_last_restart-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.hl_l2340dw_last_restart', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 
'original_name': 'Last restart', + 'platform': 'brother', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'last_restart', + 'unique_id': '0123456789_uptime', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensors[sensor.hl_l2340dw_last_restart-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'timestamp', + 'friendly_name': 'HL-L2340DW Last restart', + }), + 'context': , + 'entity_id': 'sensor.hl_l2340dw_last_restart', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2024-03-03T15:04:24+00:00', + }) +# --- +# name: test_sensors[sensor.hl_l2340dw_magenta_drum_page_counter-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.hl_l2340dw_magenta_drum_page_counter', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Magenta drum page counter', + 'platform': 'brother', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'magenta_drum_page_counter', + 'unique_id': '0123456789_magenta_drum_counter', + 'unit_of_measurement': 'p', + }) +# --- +# name: test_sensors[sensor.hl_l2340dw_magenta_drum_page_counter-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'HL-L2340DW Magenta drum page counter', + 'state_class': , + 'unit_of_measurement': 'p', + }), + 'context': , + 'entity_id': 'sensor.hl_l2340dw_magenta_drum_page_counter', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1611', + }) +# --- +# name: test_sensors[sensor.hl_l2340dw_magenta_drum_remaining_lifetime-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + 
}), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.hl_l2340dw_magenta_drum_remaining_lifetime', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Magenta drum remaining lifetime', + 'platform': 'brother', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'magenta_drum_remaining_life', + 'unique_id': '0123456789_magenta_drum_remaining_life', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensors[sensor.hl_l2340dw_magenta_drum_remaining_lifetime-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'HL-L2340DW Magenta drum remaining lifetime', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.hl_l2340dw_magenta_drum_remaining_lifetime', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '92', + }) +# --- +# name: test_sensors[sensor.hl_l2340dw_magenta_drum_remaining_pages-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.hl_l2340dw_magenta_drum_remaining_pages', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Magenta drum remaining pages', + 'platform': 'brother', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'magenta_drum_remaining_pages', + 'unique_id': 
'0123456789_magenta_drum_remaining_pages', + 'unit_of_measurement': 'p', + }) +# --- +# name: test_sensors[sensor.hl_l2340dw_magenta_drum_remaining_pages-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'HL-L2340DW Magenta drum remaining pages', + 'state_class': , + 'unit_of_measurement': 'p', + }), + 'context': , + 'entity_id': 'sensor.hl_l2340dw_magenta_drum_remaining_pages', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '16389', + }) +# --- +# name: test_sensors[sensor.hl_l2340dw_magenta_toner_remaining-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.hl_l2340dw_magenta_toner_remaining', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Magenta toner remaining', + 'platform': 'brother', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'magenta_toner_remaining', + 'unique_id': '0123456789_magenta_toner_remaining', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensors[sensor.hl_l2340dw_magenta_toner_remaining-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'HL-L2340DW Magenta toner remaining', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.hl_l2340dw_magenta_toner_remaining', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '8', + }) +# --- +# name: test_sensors[sensor.hl_l2340dw_page_counter-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 
'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.hl_l2340dw_page_counter', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Page counter', + 'platform': 'brother', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'page_counter', + 'unique_id': '0123456789_page_counter', + 'unit_of_measurement': 'p', + }) +# --- +# name: test_sensors[sensor.hl_l2340dw_page_counter-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'HL-L2340DW Page counter', + 'state_class': , + 'unit_of_measurement': 'p', + }), + 'context': , + 'entity_id': 'sensor.hl_l2340dw_page_counter', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '986', + }) +# --- +# name: test_sensors[sensor.hl_l2340dw_pf_kit_1_remaining_lifetime-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.hl_l2340dw_pf_kit_1_remaining_lifetime', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'PF Kit 1 remaining lifetime', + 'platform': 'brother', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'pf_kit_1_remaining_life', + 'unique_id': '0123456789_pf_kit_1_remaining_life', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensors[sensor.hl_l2340dw_pf_kit_1_remaining_lifetime-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'HL-L2340DW PF Kit 1 remaining lifetime', + 
'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.hl_l2340dw_pf_kit_1_remaining_lifetime', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '98', + }) +# --- +# name: test_sensors[sensor.hl_l2340dw_status-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.hl_l2340dw_status', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Status', + 'platform': 'brother', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'status', + 'unique_id': '0123456789_status', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensors[sensor.hl_l2340dw_status-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'HL-L2340DW Status', + }), + 'context': , + 'entity_id': 'sensor.hl_l2340dw_status', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'waiting', + }) +# --- +# name: test_sensors[sensor.hl_l2340dw_yellow_drum_page_counter-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.hl_l2340dw_yellow_drum_page_counter', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Yellow drum page counter', + 'platform': 'brother', + 'previous_unique_id': None, + 'supported_features': 0, + 
'translation_key': 'yellow_drum_page_counter', + 'unique_id': '0123456789_yellow_drum_counter', + 'unit_of_measurement': 'p', + }) +# --- +# name: test_sensors[sensor.hl_l2340dw_yellow_drum_page_counter-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'HL-L2340DW Yellow drum page counter', + 'state_class': , + 'unit_of_measurement': 'p', + }), + 'context': , + 'entity_id': 'sensor.hl_l2340dw_yellow_drum_page_counter', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1611', + }) +# --- +# name: test_sensors[sensor.hl_l2340dw_yellow_drum_remaining_lifetime-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.hl_l2340dw_yellow_drum_remaining_lifetime', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Yellow drum remaining lifetime', + 'platform': 'brother', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'yellow_drum_remaining_life', + 'unique_id': '0123456789_yellow_drum_remaining_life', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensors[sensor.hl_l2340dw_yellow_drum_remaining_lifetime-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'HL-L2340DW Yellow drum remaining lifetime', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.hl_l2340dw_yellow_drum_remaining_lifetime', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '92', + }) +# --- +# name: test_sensors[sensor.hl_l2340dw_yellow_drum_remaining_pages-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 
'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.hl_l2340dw_yellow_drum_remaining_pages', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Yellow drum remaining pages', + 'platform': 'brother', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'yellow_drum_remaining_pages', + 'unique_id': '0123456789_yellow_drum_remaining_pages', + 'unit_of_measurement': 'p', + }) +# --- +# name: test_sensors[sensor.hl_l2340dw_yellow_drum_remaining_pages-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'HL-L2340DW Yellow drum remaining pages', + 'state_class': , + 'unit_of_measurement': 'p', + }), + 'context': , + 'entity_id': 'sensor.hl_l2340dw_yellow_drum_remaining_pages', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '16389', + }) +# --- +# name: test_sensors[sensor.hl_l2340dw_yellow_toner_remaining-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.hl_l2340dw_yellow_toner_remaining', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Yellow toner remaining', + 'platform': 'brother', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'yellow_toner_remaining', + 'unique_id': '0123456789_yellow_toner_remaining', + 'unit_of_measurement': '%', + }) +# --- +# name: 
test_sensors[sensor.hl_l2340dw_yellow_toner_remaining-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'HL-L2340DW Yellow toner remaining', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.hl_l2340dw_yellow_toner_remaining', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2', + }) +# --- diff --git a/tests/components/brother/test_sensor.py b/tests/components/brother/test_sensor.py index ff29f8cb368..39aa3b83d6f 100644 --- a/tests/components/brother/test_sensor.py +++ b/tests/components/brother/test_sensor.py @@ -1,389 +1,46 @@ """Test sensor of Brother integration.""" -from datetime import datetime, timedelta +from datetime import timedelta import json -from unittest.mock import Mock, patch +from unittest.mock import patch + +from freezegun.api import FrozenDateTimeFactory +from syrupy import SnapshotAssertion from homeassistant.components.brother.const import DOMAIN -from homeassistant.components.brother.sensor import UNIT_PAGES -from homeassistant.components.sensor import ( - ATTR_STATE_CLASS, - DOMAIN as SENSOR_DOMAIN, - SensorDeviceClass, - SensorStateClass, -) -from homeassistant.const import ( - ATTR_DEVICE_CLASS, - ATTR_ENTITY_ID, - ATTR_ICON, - ATTR_UNIT_OF_MEASUREMENT, - PERCENTAGE, - STATE_UNAVAILABLE, -) +from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN +from homeassistant.const import ATTR_ENTITY_ID, STATE_UNAVAILABLE, Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er from homeassistant.setup import async_setup_component -from homeassistant.util.dt import UTC, utcnow +from homeassistant.util.dt import utcnow from . 
import init_integration from tests.common import async_fire_time_changed, load_fixture -ATTR_REMAINING_PAGES = "remaining_pages" -ATTR_COUNTER = "counter" - -async def test_sensors(hass: HomeAssistant, entity_registry: er.EntityRegistry) -> None: - """Test states of the sensors.""" - entry = await init_integration(hass, skip_setup=True) - - # Pre-create registry entries for disabled by default sensors - entity_registry.async_get_or_create( - SENSOR_DOMAIN, - DOMAIN, - "0123456789_uptime", - suggested_object_id="hl_l2340dw_last_restart", - disabled_by=None, - ) - test_time = datetime(2019, 11, 11, 9, 10, 32, tzinfo=UTC) - with ( - patch("brother.Brother.initialize"), - patch("brother.datetime", now=Mock(return_value=test_time)), - patch( - "brother.Brother._get_data", - return_value=json.loads(load_fixture("printer_data.json", "brother")), - ), - ): - await hass.config_entries.async_setup(entry.entry_id) - await hass.async_block_till_done() - - state = hass.states.get("sensor.hl_l2340dw_status") - assert state - assert state.attributes.get(ATTR_ICON) is None - assert state.state == "waiting" - assert state.attributes.get(ATTR_STATE_CLASS) is None - - entry = entity_registry.async_get("sensor.hl_l2340dw_status") - assert entry - assert entry.unique_id == "0123456789_status" - - state = hass.states.get("sensor.hl_l2340dw_black_toner_remaining") - assert state - assert state.attributes.get(ATTR_ICON) is None - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == PERCENTAGE - assert state.state == "75" - assert state.attributes.get(ATTR_STATE_CLASS) == SensorStateClass.MEASUREMENT - - entry = entity_registry.async_get("sensor.hl_l2340dw_black_toner_remaining") - assert entry - assert entry.unique_id == "0123456789_black_toner_remaining" - - state = hass.states.get("sensor.hl_l2340dw_cyan_toner_remaining") - assert state - assert state.attributes.get(ATTR_ICON) is None - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == PERCENTAGE - assert state.state == "10" 
- assert state.attributes.get(ATTR_STATE_CLASS) == SensorStateClass.MEASUREMENT - - entry = entity_registry.async_get("sensor.hl_l2340dw_cyan_toner_remaining") - assert entry - assert entry.unique_id == "0123456789_cyan_toner_remaining" - - state = hass.states.get("sensor.hl_l2340dw_magenta_toner_remaining") - assert state - assert state.attributes.get(ATTR_ICON) is None - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == PERCENTAGE - assert state.state == "8" - assert state.attributes.get(ATTR_STATE_CLASS) == SensorStateClass.MEASUREMENT - - entry = entity_registry.async_get("sensor.hl_l2340dw_magenta_toner_remaining") - assert entry - assert entry.unique_id == "0123456789_magenta_toner_remaining" - - state = hass.states.get("sensor.hl_l2340dw_yellow_toner_remaining") - assert state - assert state.attributes.get(ATTR_ICON) is None - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == PERCENTAGE - assert state.state == "2" - assert state.attributes.get(ATTR_STATE_CLASS) == SensorStateClass.MEASUREMENT - - entry = entity_registry.async_get("sensor.hl_l2340dw_yellow_toner_remaining") - assert entry - assert entry.unique_id == "0123456789_yellow_toner_remaining" - - state = hass.states.get("sensor.hl_l2340dw_drum_remaining_lifetime") - assert state - assert state.attributes.get(ATTR_ICON) is None - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == PERCENTAGE - assert state.state == "92" - assert state.attributes.get(ATTR_STATE_CLASS) == SensorStateClass.MEASUREMENT - - entry = entity_registry.async_get("sensor.hl_l2340dw_drum_remaining_lifetime") - assert entry - assert entry.unique_id == "0123456789_drum_remaining_life" - - state = hass.states.get("sensor.hl_l2340dw_drum_remaining_pages") - assert state - assert state.attributes.get(ATTR_ICON) is None - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UNIT_PAGES - assert state.state == "11014" - assert state.attributes.get(ATTR_STATE_CLASS) == SensorStateClass.MEASUREMENT - - entry = 
entity_registry.async_get("sensor.hl_l2340dw_drum_remaining_pages") - assert entry - assert entry.unique_id == "0123456789_drum_remaining_pages" - - state = hass.states.get("sensor.hl_l2340dw_drum_page_counter") - assert state - assert state.attributes.get(ATTR_ICON) is None - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UNIT_PAGES - assert state.state == "986" - assert state.attributes.get(ATTR_STATE_CLASS) == SensorStateClass.MEASUREMENT - - entry = entity_registry.async_get("sensor.hl_l2340dw_drum_page_counter") - assert entry - assert entry.unique_id == "0123456789_drum_counter" - - state = hass.states.get("sensor.hl_l2340dw_black_drum_remaining_lifetime") - assert state - assert state.attributes.get(ATTR_ICON) is None - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == PERCENTAGE - assert state.state == "92" - assert state.attributes.get(ATTR_STATE_CLASS) == SensorStateClass.MEASUREMENT - - entry = entity_registry.async_get("sensor.hl_l2340dw_black_drum_remaining_lifetime") - assert entry - assert entry.unique_id == "0123456789_black_drum_remaining_life" - - state = hass.states.get("sensor.hl_l2340dw_black_drum_remaining_pages") - assert state - assert state.attributes.get(ATTR_ICON) is None - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UNIT_PAGES - assert state.state == "16389" - assert state.attributes.get(ATTR_STATE_CLASS) == SensorStateClass.MEASUREMENT - - entry = entity_registry.async_get("sensor.hl_l2340dw_black_drum_remaining_pages") - assert entry - assert entry.unique_id == "0123456789_black_drum_remaining_pages" - - state = hass.states.get("sensor.hl_l2340dw_black_drum_page_counter") - assert state - assert state.attributes.get(ATTR_ICON) is None - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UNIT_PAGES - assert state.state == "1611" - assert state.attributes.get(ATTR_STATE_CLASS) == SensorStateClass.MEASUREMENT - - entry = entity_registry.async_get("sensor.hl_l2340dw_black_drum_page_counter") - assert 
entry - assert entry.unique_id == "0123456789_black_drum_counter" - - state = hass.states.get("sensor.hl_l2340dw_cyan_drum_remaining_lifetime") - assert state - assert state.attributes.get(ATTR_ICON) is None - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == PERCENTAGE - assert state.state == "92" - assert state.attributes.get(ATTR_STATE_CLASS) == SensorStateClass.MEASUREMENT - - entry = entity_registry.async_get("sensor.hl_l2340dw_cyan_drum_remaining_lifetime") - assert entry - assert entry.unique_id == "0123456789_cyan_drum_remaining_life" - - state = hass.states.get("sensor.hl_l2340dw_cyan_drum_remaining_pages") - assert state - assert state.attributes.get(ATTR_ICON) is None - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UNIT_PAGES - assert state.state == "16389" - assert state.attributes.get(ATTR_STATE_CLASS) == SensorStateClass.MEASUREMENT - - entry = entity_registry.async_get("sensor.hl_l2340dw_cyan_drum_remaining_pages") - assert entry - assert entry.unique_id == "0123456789_cyan_drum_remaining_pages" - - state = hass.states.get("sensor.hl_l2340dw_cyan_drum_page_counter") - assert state - assert state.attributes.get(ATTR_ICON) is None - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UNIT_PAGES - assert state.state == "1611" - assert state.attributes.get(ATTR_STATE_CLASS) == SensorStateClass.MEASUREMENT - - entry = entity_registry.async_get("sensor.hl_l2340dw_cyan_drum_page_counter") - assert entry - assert entry.unique_id == "0123456789_cyan_drum_counter" - - state = hass.states.get("sensor.hl_l2340dw_magenta_drum_remaining_lifetime") - assert state - assert state.attributes.get(ATTR_ICON) is None - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == PERCENTAGE - assert state.state == "92" - assert state.attributes.get(ATTR_STATE_CLASS) == SensorStateClass.MEASUREMENT - - entry = entity_registry.async_get( - "sensor.hl_l2340dw_magenta_drum_remaining_lifetime" - ) - assert entry - assert entry.unique_id == 
"0123456789_magenta_drum_remaining_life" - - state = hass.states.get("sensor.hl_l2340dw_magenta_drum_remaining_pages") - assert state - assert state.attributes.get(ATTR_ICON) is None - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UNIT_PAGES - assert state.state == "16389" - assert state.attributes.get(ATTR_STATE_CLASS) == SensorStateClass.MEASUREMENT - - entry = entity_registry.async_get("sensor.hl_l2340dw_magenta_drum_remaining_pages") - assert entry - assert entry.unique_id == "0123456789_magenta_drum_remaining_pages" - - state = hass.states.get("sensor.hl_l2340dw_magenta_drum_page_counter") - assert state - assert state.attributes.get(ATTR_ICON) is None - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UNIT_PAGES - assert state.state == "1611" - assert state.attributes.get(ATTR_STATE_CLASS) == SensorStateClass.MEASUREMENT - - entry = entity_registry.async_get("sensor.hl_l2340dw_magenta_drum_page_counter") - assert entry - assert entry.unique_id == "0123456789_magenta_drum_counter" - - state = hass.states.get("sensor.hl_l2340dw_yellow_drum_remaining_lifetime") - assert state - assert state.attributes.get(ATTR_ICON) is None - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == PERCENTAGE - assert state.state == "92" - assert state.attributes.get(ATTR_STATE_CLASS) == SensorStateClass.MEASUREMENT - - entry = entity_registry.async_get( - "sensor.hl_l2340dw_yellow_drum_remaining_lifetime" - ) - assert entry - assert entry.unique_id == "0123456789_yellow_drum_remaining_life" - - state = hass.states.get("sensor.hl_l2340dw_yellow_drum_remaining_pages") - assert state - assert state.attributes.get(ATTR_ICON) is None - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UNIT_PAGES - assert state.state == "16389" - assert state.attributes.get(ATTR_STATE_CLASS) == SensorStateClass.MEASUREMENT - - entry = entity_registry.async_get("sensor.hl_l2340dw_yellow_drum_remaining_pages") - assert entry - assert entry.unique_id == 
"0123456789_yellow_drum_remaining_pages" - - state = hass.states.get("sensor.hl_l2340dw_yellow_drum_page_counter") - assert state - assert state.attributes.get(ATTR_ICON) is None - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UNIT_PAGES - assert state.state == "1611" - assert state.attributes.get(ATTR_STATE_CLASS) == SensorStateClass.MEASUREMENT - - entry = entity_registry.async_get("sensor.hl_l2340dw_yellow_drum_page_counter") - assert entry - assert entry.unique_id == "0123456789_yellow_drum_counter" - - state = hass.states.get("sensor.hl_l2340dw_fuser_remaining_lifetime") - assert state - assert state.attributes.get(ATTR_ICON) is None - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == PERCENTAGE - assert state.state == "97" - assert state.attributes.get(ATTR_STATE_CLASS) == SensorStateClass.MEASUREMENT - - entry = entity_registry.async_get("sensor.hl_l2340dw_fuser_remaining_lifetime") - assert entry - assert entry.unique_id == "0123456789_fuser_remaining_life" - - state = hass.states.get("sensor.hl_l2340dw_belt_unit_remaining_lifetime") - assert state - assert state.attributes.get(ATTR_ICON) is None - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == PERCENTAGE - assert state.state == "97" - assert state.attributes.get(ATTR_STATE_CLASS) == SensorStateClass.MEASUREMENT - - entry = entity_registry.async_get("sensor.hl_l2340dw_belt_unit_remaining_lifetime") - assert entry - assert entry.unique_id == "0123456789_belt_unit_remaining_life" - - state = hass.states.get("sensor.hl_l2340dw_pf_kit_1_remaining_lifetime") - assert state - assert state.attributes.get(ATTR_ICON) is None - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == PERCENTAGE - assert state.state == "98" - assert state.attributes.get(ATTR_STATE_CLASS) == SensorStateClass.MEASUREMENT - - entry = entity_registry.async_get("sensor.hl_l2340dw_pf_kit_1_remaining_lifetime") - assert entry - assert entry.unique_id == "0123456789_pf_kit_1_remaining_life" - - state = 
hass.states.get("sensor.hl_l2340dw_page_counter") - assert state - assert state.attributes.get(ATTR_ICON) is None - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UNIT_PAGES - assert state.state == "986" - assert state.attributes.get(ATTR_STATE_CLASS) == SensorStateClass.MEASUREMENT - - entry = entity_registry.async_get("sensor.hl_l2340dw_page_counter") - assert entry - assert entry.unique_id == "0123456789_page_counter" - - state = hass.states.get("sensor.hl_l2340dw_duplex_unit_page_counter") - assert state - assert state.attributes.get(ATTR_ICON) is None - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UNIT_PAGES - assert state.state == "538" - assert state.attributes.get(ATTR_STATE_CLASS) == SensorStateClass.MEASUREMENT - - entry = entity_registry.async_get("sensor.hl_l2340dw_duplex_unit_page_counter") - assert entry - assert entry.unique_id == "0123456789_duplex_unit_pages_counter" - - state = hass.states.get("sensor.hl_l2340dw_b_w_pages") - assert state - assert state.attributes.get(ATTR_ICON) is None - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UNIT_PAGES - assert state.state == "709" - assert state.attributes.get(ATTR_STATE_CLASS) == SensorStateClass.MEASUREMENT - - entry = entity_registry.async_get("sensor.hl_l2340dw_b_w_pages") - assert entry - assert entry.unique_id == "0123456789_bw_counter" - - state = hass.states.get("sensor.hl_l2340dw_color_pages") - assert state - assert state.attributes.get(ATTR_ICON) is None - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UNIT_PAGES - assert state.state == "902" - assert state.attributes.get(ATTR_STATE_CLASS) == SensorStateClass.MEASUREMENT - - entry = entity_registry.async_get("sensor.hl_l2340dw_color_pages") - assert entry - assert entry.unique_id == "0123456789_color_counter" - - state = hass.states.get("sensor.hl_l2340dw_last_restart") - assert state - assert state.attributes.get(ATTR_ICON) is None - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) is None - 
assert state.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.TIMESTAMP - assert state.state == "2019-09-24T12:14:56+00:00" - assert state.attributes.get(ATTR_STATE_CLASS) is None - - entry = entity_registry.async_get("sensor.hl_l2340dw_last_restart") - assert entry - assert entry.unique_id == "0123456789_uptime" - - -async def test_disabled_by_default_sensors( - hass: HomeAssistant, entity_registry: er.EntityRegistry +async def test_sensors( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + entity_registry_enabled_by_default: None, + snapshot: SnapshotAssertion, + freezer: FrozenDateTimeFactory, ) -> None: - """Test the disabled by default Brother sensors.""" - await init_integration(hass) + """Test states of the sensors.""" + hass.config.set_time_zone("UTC") + freezer.move_to("2024-04-20 12:00:00+00:00") - state = hass.states.get("sensor.hl_l2340dw_last_restart") - assert state is None + with patch("homeassistant.components.brother.PLATFORMS", [Platform.SENSOR]): + entry = await init_integration(hass) - entry = entity_registry.async_get("sensor.hl_l2340dw_last_restart") - assert entry - assert entry.unique_id == "0123456789_uptime" - assert entry.disabled - assert entry.disabled_by is er.RegistryEntryDisabler.INTEGRATION + entity_entries = er.async_entries_for_config_entry(entity_registry, entry.entry_id) + + assert entity_entries + for entity_entry in entity_entries: + assert entity_entry == snapshot(name=f"{entity_entry.entity_id}-entry") + assert (state := hass.states.get(entity_entry.entity_id)) + assert state == snapshot(name=f"{entity_entry.entity_id}-state") async def test_availability(hass: HomeAssistant) -> None: From 194f3366ce47adcff2da00dafa650bddccbae7bf Mon Sep 17 00:00:00 2001 From: Maciej Bieniek Date: Sat, 20 Apr 2024 12:34:27 +0200 Subject: [PATCH 197/426] Use snapshot testing in NAM sensor and diagnostics (#115877) * Use snapshot testing in NAM diagnostics * Use snapshot testing in NAM sensor * Add NAM data fixture --------- 
Co-authored-by: Maciej Bieniek <478555+bieniu@users.noreply.github.com> --- tests/components/nam/__init__.py | 35 +- .../nam/fixtures/diagnostics_data.json | 33 - tests/components/nam/fixtures/nam_data.json | 30 + .../nam/snapshots/test_diagnostics.ambr | 41 + .../components/nam/snapshots/test_sensor.ambr | 1714 +++++++++++++++++ tests/components/nam/test_diagnostics.py | 12 +- tests/components/nam/test_sensor.py | 474 +---- 7 files changed, 1820 insertions(+), 519 deletions(-) delete mode 100644 tests/components/nam/fixtures/diagnostics_data.json create mode 100644 tests/components/nam/fixtures/nam_data.json create mode 100644 tests/components/nam/snapshots/test_diagnostics.ambr create mode 100644 tests/components/nam/snapshots/test_sensor.ambr diff --git a/tests/components/nam/__init__.py b/tests/components/nam/__init__.py index 0484fc12bd6..9b254de452c 100644 --- a/tests/components/nam/__init__.py +++ b/tests/components/nam/__init__.py @@ -4,44 +4,13 @@ from unittest.mock import AsyncMock, Mock, patch from homeassistant.components.nam.const import DOMAIN -from tests.common import MockConfigEntry +from tests.common import MockConfigEntry, load_json_object_fixture INCOMPLETE_NAM_DATA = { "software_version": "NAMF-2020-36", "sensordatavalues": [], } -nam_data = { - "software_version": "NAMF-2020-36", - "uptime": "456987", - "sensordatavalues": [ - {"value_type": "PMS_P0", "value": "6.00"}, - {"value_type": "PMS_P1", "value": "10.00"}, - {"value_type": "PMS_P2", "value": "11.00"}, - {"value_type": "SDS_P1", "value": "18.65"}, - {"value_type": "SDS_P2", "value": "11.03"}, - {"value_type": "SPS30_P0", "value": "31.23"}, - {"value_type": "SPS30_P1", "value": "21.23"}, - {"value_type": "SPS30_P2", "value": "34.32"}, - {"value_type": "SPS30_P4", "value": "24.72"}, - {"value_type": "conc_co2_ppm", "value": "865"}, - {"value_type": "BME280_temperature", "value": "7.56"}, - {"value_type": "BME280_humidity", "value": "45.69"}, - {"value_type": "BME280_pressure", "value": 
"101101.17"}, - {"value_type": "BMP_temperature", "value": "7.56"}, - {"value_type": "BMP_pressure", "value": "103201.18"}, - {"value_type": "BMP280_temperature", "value": "5.56"}, - {"value_type": "BMP280_pressure", "value": "102201.18"}, - {"value_type": "SHT3X_temperature", "value": "6.28"}, - {"value_type": "SHT3X_humidity", "value": "34.69"}, - {"value_type": "humidity", "value": "46.23"}, - {"value_type": "temperature", "value": "6.26"}, - {"value_type": "HECA_temperature", "value": "7.95"}, - {"value_type": "HECA_humidity", "value": "49.97"}, - {"value_type": "signal", "value": "-72"}, - ], -} - async def init_integration(hass, co2_sensor=True) -> MockConfigEntry: """Set up the Nettigo Air Monitor integration in Home Assistant.""" @@ -52,6 +21,8 @@ async def init_integration(hass, co2_sensor=True) -> MockConfigEntry: data={"host": "10.10.2.3"}, ) + nam_data = load_json_object_fixture("nam/nam_data.json") + if not co2_sensor: # Remove conc_co2_ppm value nam_data["sensordatavalues"].pop(6) diff --git a/tests/components/nam/fixtures/diagnostics_data.json b/tests/components/nam/fixtures/diagnostics_data.json deleted file mode 100644 index a384e8cd386..00000000000 --- a/tests/components/nam/fixtures/diagnostics_data.json +++ /dev/null @@ -1,33 +0,0 @@ -{ - "bme280_humidity": 45.7, - "bme280_pressure": 1011.012, - "bme280_temperature": 7.6, - "bmp180_pressure": 1032.012, - "bmp180_temperature": 7.6, - "bmp280_pressure": 1022.012, - "bmp280_temperature": 5.6, - "dht22_humidity": 46.2, - "dht22_temperature": 6.3, - "heca_humidity": 50.0, - "heca_temperature": 8.0, - "mhz14a_carbon_dioxide": 865.0, - "pms_caqi": 19, - "pms_caqi_level": "very_low", - "pms_p0": 6.0, - "pms_p1": 10.0, - "pms_p2": 11.0, - "sds011_caqi": 19, - "sds011_caqi_level": "very_low", - "sds011_p1": 18.6, - "sds011_p2": 11.0, - "sht3x_humidity": 34.7, - "sht3x_temperature": 6.3, - "signal": -72.0, - "sps30_caqi": 54, - "sps30_caqi_level": "medium", - "sps30_p0": 31.2, - "sps30_p1": 21.2, - 
"sps30_p2": 34.3, - "sps30_p4": 24.7, - "uptime": 456987 -} diff --git a/tests/components/nam/fixtures/nam_data.json b/tests/components/nam/fixtures/nam_data.json new file mode 100644 index 00000000000..93a33d4a552 --- /dev/null +++ b/tests/components/nam/fixtures/nam_data.json @@ -0,0 +1,30 @@ +{ + "software_version": "NAMF-2020-36", + "uptime": "456987", + "sensordatavalues": [ + { "value_type": "PMS_P0", "value": "6.00" }, + { "value_type": "PMS_P1", "value": "10.00" }, + { "value_type": "PMS_P2", "value": "11.00" }, + { "value_type": "SDS_P1", "value": "18.65" }, + { "value_type": "SDS_P2", "value": "11.03" }, + { "value_type": "SPS30_P0", "value": "31.23" }, + { "value_type": "SPS30_P1", "value": "21.23" }, + { "value_type": "SPS30_P2", "value": "34.32" }, + { "value_type": "SPS30_P4", "value": "24.72" }, + { "value_type": "conc_co2_ppm", "value": "865" }, + { "value_type": "BME280_temperature", "value": "7.56" }, + { "value_type": "BME280_humidity", "value": "45.69" }, + { "value_type": "BME280_pressure", "value": "101101.17" }, + { "value_type": "BMP_temperature", "value": "7.56" }, + { "value_type": "BMP_pressure", "value": "103201.18" }, + { "value_type": "BMP280_temperature", "value": "5.56" }, + { "value_type": "BMP280_pressure", "value": "102201.18" }, + { "value_type": "SHT3X_temperature", "value": "6.28" }, + { "value_type": "SHT3X_humidity", "value": "34.69" }, + { "value_type": "humidity", "value": "46.23" }, + { "value_type": "temperature", "value": "6.26" }, + { "value_type": "HECA_temperature", "value": "7.95" }, + { "value_type": "HECA_humidity", "value": "49.97" }, + { "value_type": "signal", "value": "-72" } + ] +} diff --git a/tests/components/nam/snapshots/test_diagnostics.ambr b/tests/components/nam/snapshots/test_diagnostics.ambr new file mode 100644 index 00000000000..2ebc0246090 --- /dev/null +++ b/tests/components/nam/snapshots/test_diagnostics.ambr @@ -0,0 +1,41 @@ +# serializer version: 1 +# name: test_entry_diagnostics + dict({ + 
'data': dict({ + 'bme280_humidity': 45.7, + 'bme280_pressure': 1011.012, + 'bme280_temperature': 7.6, + 'bmp180_pressure': 1032.012, + 'bmp180_temperature': 7.6, + 'bmp280_pressure': 1022.012, + 'bmp280_temperature': 5.6, + 'dht22_humidity': 46.2, + 'dht22_temperature': 6.3, + 'heca_humidity': 50.0, + 'heca_temperature': 8.0, + 'mhz14a_carbon_dioxide': 865.0, + 'pms_caqi': 19, + 'pms_caqi_level': 'very_low', + 'pms_p0': 6.0, + 'pms_p1': 10.0, + 'pms_p2': 11.0, + 'sds011_caqi': 19, + 'sds011_caqi_level': 'very_low', + 'sds011_p1': 18.6, + 'sds011_p2': 11.0, + 'sht3x_humidity': 34.7, + 'sht3x_temperature': 6.3, + 'signal': -72.0, + 'sps30_caqi': 54, + 'sps30_caqi_level': 'medium', + 'sps30_p0': 31.2, + 'sps30_p1': 21.2, + 'sps30_p2': 34.3, + 'sps30_p4': 24.7, + 'uptime': 456987, + }), + 'info': dict({ + 'host': '10.10.2.3', + }), + }) +# --- diff --git a/tests/components/nam/snapshots/test_sensor.ambr b/tests/components/nam/snapshots/test_sensor.ambr new file mode 100644 index 00000000000..bbc655ecbb6 --- /dev/null +++ b/tests/components/nam/snapshots/test_sensor.ambr @@ -0,0 +1,1714 @@ +# serializer version: 1 +# name: test_sensor[button.nettigo_air_monitor_restart-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.nettigo_air_monitor_restart', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Restart', + 'platform': 'nam', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'aa:bb:cc:dd:ee:ff-restart', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[button.nettigo_air_monitor_restart-state] + StateSnapshot({ + 'attributes': 
ReadOnlyDict({ + 'device_class': 'restart', + 'friendly_name': 'Nettigo Air Monitor Restart', + }), + 'context': , + 'entity_id': 'button.nettigo_air_monitor_restart', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_sensor[sensor.nettigo_air_monitor_bme280_humidity-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nettigo_air_monitor_bme280_humidity', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'BME280 humidity', + 'platform': 'nam', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'bme280_humidity', + 'unique_id': 'aa:bb:cc:dd:ee:ff-bme280_humidity', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensor[sensor.nettigo_air_monitor_bme280_humidity-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'humidity', + 'friendly_name': 'Nettigo Air Monitor BME280 humidity', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.nettigo_air_monitor_bme280_humidity', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '45.7', + }) +# --- +# name: test_sensor[sensor.nettigo_air_monitor_bme280_pressure-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nettigo_air_monitor_bme280_pressure', + 
'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 0, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'BME280 pressure', + 'platform': 'nam', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'bme280_pressure', + 'unique_id': 'aa:bb:cc:dd:ee:ff-bme280_pressure', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[sensor.nettigo_air_monitor_bme280_pressure-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'pressure', + 'friendly_name': 'Nettigo Air Monitor BME280 pressure', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.nettigo_air_monitor_bme280_pressure', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1011.012', + }) +# --- +# name: test_sensor[sensor.nettigo_air_monitor_bme280_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nettigo_air_monitor_bme280_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'BME280 temperature', + 'platform': 'nam', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'bme280_temperature', + 'unique_id': 'aa:bb:cc:dd:ee:ff-bme280_temperature', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[sensor.nettigo_air_monitor_bme280_temperature-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', 
+ 'friendly_name': 'Nettigo Air Monitor BME280 temperature', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.nettigo_air_monitor_bme280_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '7.6', + }) +# --- +# name: test_sensor[sensor.nettigo_air_monitor_bmp180_pressure-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nettigo_air_monitor_bmp180_pressure', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 0, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'BMP180 pressure', + 'platform': 'nam', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'bmp180_pressure', + 'unique_id': 'aa:bb:cc:dd:ee:ff-bmp180_pressure', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[sensor.nettigo_air_monitor_bmp180_pressure-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'pressure', + 'friendly_name': 'Nettigo Air Monitor BMP180 pressure', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.nettigo_air_monitor_bmp180_pressure', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1032.012', + }) +# --- +# name: test_sensor[sensor.nettigo_air_monitor_bmp180_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 
'sensor.nettigo_air_monitor_bmp180_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'BMP180 temperature', + 'platform': 'nam', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'bmp180_temperature', + 'unique_id': 'aa:bb:cc:dd:ee:ff-bmp180_temperature', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[sensor.nettigo_air_monitor_bmp180_temperature-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Nettigo Air Monitor BMP180 temperature', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.nettigo_air_monitor_bmp180_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '7.6', + }) +# --- +# name: test_sensor[sensor.nettigo_air_monitor_bmp280_pressure-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nettigo_air_monitor_bmp280_pressure', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 0, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'BMP280 pressure', + 'platform': 'nam', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'bmp280_pressure', + 'unique_id': 'aa:bb:cc:dd:ee:ff-bmp280_pressure', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[sensor.nettigo_air_monitor_bmp280_pressure-state] + StateSnapshot({ + 'attributes': 
ReadOnlyDict({ + 'device_class': 'pressure', + 'friendly_name': 'Nettigo Air Monitor BMP280 pressure', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.nettigo_air_monitor_bmp280_pressure', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1022.012', + }) +# --- +# name: test_sensor[sensor.nettigo_air_monitor_bmp280_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nettigo_air_monitor_bmp280_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'BMP280 temperature', + 'platform': 'nam', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'bmp280_temperature', + 'unique_id': 'aa:bb:cc:dd:ee:ff-bmp280_temperature', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[sensor.nettigo_air_monitor_bmp280_temperature-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Nettigo Air Monitor BMP280 temperature', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.nettigo_air_monitor_bmp280_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '5.6', + }) +# --- +# name: test_sensor[sensor.nettigo_air_monitor_dht22_humidity-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 
'entity_category': None, + 'entity_id': 'sensor.nettigo_air_monitor_dht22_humidity', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'DHT22 humidity', + 'platform': 'nam', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'dht22_humidity', + 'unique_id': 'aa:bb:cc:dd:ee:ff-dht22_humidity', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensor[sensor.nettigo_air_monitor_dht22_humidity-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'humidity', + 'friendly_name': 'Nettigo Air Monitor DHT22 humidity', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.nettigo_air_monitor_dht22_humidity', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '46.2', + }) +# --- +# name: test_sensor[sensor.nettigo_air_monitor_dht22_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nettigo_air_monitor_dht22_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'DHT22 temperature', + 'platform': 'nam', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'dht22_temperature', + 'unique_id': 'aa:bb:cc:dd:ee:ff-dht22_temperature', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[sensor.nettigo_air_monitor_dht22_temperature-state] + 
StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Nettigo Air Monitor DHT22 temperature', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.nettigo_air_monitor_dht22_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '6.3', + }) +# --- +# name: test_sensor[sensor.nettigo_air_monitor_heca_humidity-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nettigo_air_monitor_heca_humidity', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'HECA humidity', + 'platform': 'nam', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'heca_humidity', + 'unique_id': 'aa:bb:cc:dd:ee:ff-heca_humidity', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensor[sensor.nettigo_air_monitor_heca_humidity-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'humidity', + 'friendly_name': 'Nettigo Air Monitor HECA humidity', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.nettigo_air_monitor_heca_humidity', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '50.0', + }) +# --- +# name: test_sensor[sensor.nettigo_air_monitor_heca_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 
'entity_category': None, + 'entity_id': 'sensor.nettigo_air_monitor_heca_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'HECA temperature', + 'platform': 'nam', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'heca_temperature', + 'unique_id': 'aa:bb:cc:dd:ee:ff-heca_temperature', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[sensor.nettigo_air_monitor_heca_temperature-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Nettigo Air Monitor HECA temperature', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.nettigo_air_monitor_heca_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '8.0', + }) +# --- +# name: test_sensor[sensor.nettigo_air_monitor_last_restart-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.nettigo_air_monitor_last_restart', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Last restart', + 'platform': 'nam', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'last_restart', + 'unique_id': 'aa:bb:cc:dd:ee:ff-uptime', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[sensor.nettigo_air_monitor_last_restart-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'timestamp', + 'friendly_name': 'Nettigo Air 
Monitor Last restart', + }), + 'context': , + 'entity_id': 'sensor.nettigo_air_monitor_last_restart', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2024-04-15T05:03:33+00:00', + }) +# --- +# name: test_sensor[sensor.nettigo_air_monitor_mh_z14a_carbon_dioxide-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nettigo_air_monitor_mh_z14a_carbon_dioxide', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 0, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'MH-Z14A carbon dioxide', + 'platform': 'nam', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'mhz14a_carbon_dioxide', + 'unique_id': 'aa:bb:cc:dd:ee:ff-mhz14a_carbon_dioxide', + 'unit_of_measurement': 'ppm', + }) +# --- +# name: test_sensor[sensor.nettigo_air_monitor_mh_z14a_carbon_dioxide-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'carbon_dioxide', + 'friendly_name': 'Nettigo Air Monitor MH-Z14A carbon dioxide', + 'state_class': , + 'unit_of_measurement': 'ppm', + }), + 'context': , + 'entity_id': 'sensor.nettigo_air_monitor_mh_z14a_carbon_dioxide', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '865.0', + }) +# --- +# name: test_sensor[sensor.nettigo_air_monitor_pmsx003_common_air_quality_index-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 
'sensor.nettigo_air_monitor_pmsx003_common_air_quality_index', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'PMSx003 common air quality index', + 'platform': 'nam', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'pmsx003_caqi', + 'unique_id': 'aa:bb:cc:dd:ee:ff-pms_caqi', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[sensor.nettigo_air_monitor_pmsx003_common_air_quality_index-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Nettigo Air Monitor PMSx003 common air quality index', + }), + 'context': , + 'entity_id': 'sensor.nettigo_air_monitor_pmsx003_common_air_quality_index', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '19', + }) +# --- +# name: test_sensor[sensor.nettigo_air_monitor_pmsx003_common_air_quality_index_level-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'very_low', + 'low', + 'medium', + 'high', + 'very_high', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nettigo_air_monitor_pmsx003_common_air_quality_index_level', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'PMSx003 common air quality index level', + 'platform': 'nam', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'pmsx003_caqi_level', + 'unique_id': 'aa:bb:cc:dd:ee:ff-pms_caqi_level', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[sensor.nettigo_air_monitor_pmsx003_common_air_quality_index_level-state] + 
StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Nettigo Air Monitor PMSx003 common air quality index level', + 'options': list([ + 'very_low', + 'low', + 'medium', + 'high', + 'very_high', + ]), + }), + 'context': , + 'entity_id': 'sensor.nettigo_air_monitor_pmsx003_common_air_quality_index_level', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'very_low', + }) +# --- +# name: test_sensor[sensor.nettigo_air_monitor_pmsx003_pm1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nettigo_air_monitor_pmsx003_pm1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 0, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'PMSx003 PM1', + 'platform': 'nam', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'pmsx003_pm1', + 'unique_id': 'aa:bb:cc:dd:ee:ff-pms_p0', + 'unit_of_measurement': 'µg/m³', + }) +# --- +# name: test_sensor[sensor.nettigo_air_monitor_pmsx003_pm1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'pm1', + 'friendly_name': 'Nettigo Air Monitor PMSx003 PM1', + 'state_class': , + 'unit_of_measurement': 'µg/m³', + }), + 'context': , + 'entity_id': 'sensor.nettigo_air_monitor_pmsx003_pm1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '6.0', + }) +# --- +# name: test_sensor[sensor.nettigo_air_monitor_pmsx003_pm10-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 
'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nettigo_air_monitor_pmsx003_pm10', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 0, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'PMSx003 PM10', + 'platform': 'nam', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'pmsx003_pm10', + 'unique_id': 'aa:bb:cc:dd:ee:ff-pms_p1', + 'unit_of_measurement': 'µg/m³', + }) +# --- +# name: test_sensor[sensor.nettigo_air_monitor_pmsx003_pm10-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'pm10', + 'friendly_name': 'Nettigo Air Monitor PMSx003 PM10', + 'state_class': , + 'unit_of_measurement': 'µg/m³', + }), + 'context': , + 'entity_id': 'sensor.nettigo_air_monitor_pmsx003_pm10', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '10.0', + }) +# --- +# name: test_sensor[sensor.nettigo_air_monitor_pmsx003_pm2_5-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nettigo_air_monitor_pmsx003_pm2_5', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 0, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'PMSx003 PM2.5', + 'platform': 'nam', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'pmsx003_pm25', + 'unique_id': 'aa:bb:cc:dd:ee:ff-pms_p2', + 'unit_of_measurement': 'µg/m³', + }) +# --- +# name: test_sensor[sensor.nettigo_air_monitor_pmsx003_pm2_5-state] + 
StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'pm25', + 'friendly_name': 'Nettigo Air Monitor PMSx003 PM2.5', + 'state_class': , + 'unit_of_measurement': 'µg/m³', + }), + 'context': , + 'entity_id': 'sensor.nettigo_air_monitor_pmsx003_pm2_5', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '11.0', + }) +# --- +# name: test_sensor[sensor.nettigo_air_monitor_sds011_common_air_quality_index-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nettigo_air_monitor_sds011_common_air_quality_index', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'SDS011 common air quality index', + 'platform': 'nam', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'sds011_caqi', + 'unique_id': 'aa:bb:cc:dd:ee:ff-sds011_caqi', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[sensor.nettigo_air_monitor_sds011_common_air_quality_index-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Nettigo Air Monitor SDS011 common air quality index', + }), + 'context': , + 'entity_id': 'sensor.nettigo_air_monitor_sds011_common_air_quality_index', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '19', + }) +# --- +# name: test_sensor[sensor.nettigo_air_monitor_sds011_common_air_quality_index_level-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'very_low', + 'low', + 'medium', + 'high', + 'very_high', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 
'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nettigo_air_monitor_sds011_common_air_quality_index_level', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'SDS011 common air quality index level', + 'platform': 'nam', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'sds011_caqi_level', + 'unique_id': 'aa:bb:cc:dd:ee:ff-sds011_caqi_level', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[sensor.nettigo_air_monitor_sds011_common_air_quality_index_level-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Nettigo Air Monitor SDS011 common air quality index level', + 'options': list([ + 'very_low', + 'low', + 'medium', + 'high', + 'very_high', + ]), + }), + 'context': , + 'entity_id': 'sensor.nettigo_air_monitor_sds011_common_air_quality_index_level', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'very_low', + }) +# --- +# name: test_sensor[sensor.nettigo_air_monitor_sds011_pm10-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nettigo_air_monitor_sds011_pm10', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 0, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'SDS011 PM10', + 'platform': 'nam', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'sds011_pm10', + 'unique_id': 'aa:bb:cc:dd:ee:ff-sds011_p1', + 'unit_of_measurement': 'µg/m³', + }) 
+# --- +# name: test_sensor[sensor.nettigo_air_monitor_sds011_pm10-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'pm10', + 'friendly_name': 'Nettigo Air Monitor SDS011 PM10', + 'state_class': , + 'unit_of_measurement': 'µg/m³', + }), + 'context': , + 'entity_id': 'sensor.nettigo_air_monitor_sds011_pm10', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '18.6', + }) +# --- +# name: test_sensor[sensor.nettigo_air_monitor_sds011_pm2_5-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nettigo_air_monitor_sds011_pm2_5', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 0, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'SDS011 PM2.5', + 'platform': 'nam', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'sds011_pm25', + 'unique_id': 'aa:bb:cc:dd:ee:ff-sds011_p2', + 'unit_of_measurement': 'µg/m³', + }) +# --- +# name: test_sensor[sensor.nettigo_air_monitor_sds011_pm2_5-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'pm25', + 'friendly_name': 'Nettigo Air Monitor SDS011 PM2.5', + 'state_class': , + 'unit_of_measurement': 'µg/m³', + }), + 'context': , + 'entity_id': 'sensor.nettigo_air_monitor_sds011_pm2_5', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '11.0', + }) +# --- +# name: test_sensor[sensor.nettigo_air_monitor_sht3x_humidity-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 
'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nettigo_air_monitor_sht3x_humidity', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'SHT3X humidity', + 'platform': 'nam', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'sht3x_humidity', + 'unique_id': 'aa:bb:cc:dd:ee:ff-sht3x_humidity', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensor[sensor.nettigo_air_monitor_sht3x_humidity-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'humidity', + 'friendly_name': 'Nettigo Air Monitor SHT3X humidity', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.nettigo_air_monitor_sht3x_humidity', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '34.7', + }) +# --- +# name: test_sensor[sensor.nettigo_air_monitor_sht3x_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nettigo_air_monitor_sht3x_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'SHT3X temperature', + 'platform': 'nam', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'sht3x_temperature', + 'unique_id': 'aa:bb:cc:dd:ee:ff-sht3x_temperature', + 'unit_of_measurement': , + }) +# --- +# name: 
test_sensor[sensor.nettigo_air_monitor_sht3x_temperature-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Nettigo Air Monitor SHT3X temperature', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.nettigo_air_monitor_sht3x_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '6.3', + }) +# --- +# name: test_sensor[sensor.nettigo_air_monitor_signal_strength-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.nettigo_air_monitor_signal_strength', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 0, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Signal strength', + 'platform': 'nam', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'aa:bb:cc:dd:ee:ff-signal', + 'unit_of_measurement': 'dBm', + }) +# --- +# name: test_sensor[sensor.nettigo_air_monitor_signal_strength-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'signal_strength', + 'friendly_name': 'Nettigo Air Monitor Signal strength', + 'state_class': , + 'unit_of_measurement': 'dBm', + }), + 'context': , + 'entity_id': 'sensor.nettigo_air_monitor_signal_strength', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '-72.0', + }) +# --- +# name: test_sensor[sensor.nettigo_air_monitor_sps30_common_air_quality_index-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 
'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nettigo_air_monitor_sps30_common_air_quality_index', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'SPS30 common air quality index', + 'platform': 'nam', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'sps30_caqi', + 'unique_id': 'aa:bb:cc:dd:ee:ff-sps30_caqi', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[sensor.nettigo_air_monitor_sps30_common_air_quality_index-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Nettigo Air Monitor SPS30 common air quality index', + }), + 'context': , + 'entity_id': 'sensor.nettigo_air_monitor_sps30_common_air_quality_index', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '54', + }) +# --- +# name: test_sensor[sensor.nettigo_air_monitor_sps30_common_air_quality_index_level-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'very_low', + 'low', + 'medium', + 'high', + 'very_high', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nettigo_air_monitor_sps30_common_air_quality_index_level', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'SPS30 common air quality index level', + 'platform': 'nam', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'sps30_caqi_level', + 'unique_id': 'aa:bb:cc:dd:ee:ff-sps30_caqi_level', + 'unit_of_measurement': None, + }) +# --- +# name: 
test_sensor[sensor.nettigo_air_monitor_sps30_common_air_quality_index_level-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Nettigo Air Monitor SPS30 common air quality index level', + 'options': list([ + 'very_low', + 'low', + 'medium', + 'high', + 'very_high', + ]), + }), + 'context': , + 'entity_id': 'sensor.nettigo_air_monitor_sps30_common_air_quality_index_level', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'medium', + }) +# --- +# name: test_sensor[sensor.nettigo_air_monitor_sps30_pm1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nettigo_air_monitor_sps30_pm1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 0, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'SPS30 PM1', + 'platform': 'nam', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'sps30_pm1', + 'unique_id': 'aa:bb:cc:dd:ee:ff-sps30_p0', + 'unit_of_measurement': 'µg/m³', + }) +# --- +# name: test_sensor[sensor.nettigo_air_monitor_sps30_pm1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'pm1', + 'friendly_name': 'Nettigo Air Monitor SPS30 PM1', + 'state_class': , + 'unit_of_measurement': 'µg/m³', + }), + 'context': , + 'entity_id': 'sensor.nettigo_air_monitor_sps30_pm1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '31.2', + }) +# --- +# name: test_sensor[sensor.nettigo_air_monitor_sps30_pm10-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 
'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nettigo_air_monitor_sps30_pm10', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 0, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'SPS30 PM10', + 'platform': 'nam', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'sps30_pm10', + 'unique_id': 'aa:bb:cc:dd:ee:ff-sps30_p1', + 'unit_of_measurement': 'µg/m³', + }) +# --- +# name: test_sensor[sensor.nettigo_air_monitor_sps30_pm10-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'pm10', + 'friendly_name': 'Nettigo Air Monitor SPS30 PM10', + 'state_class': , + 'unit_of_measurement': 'µg/m³', + }), + 'context': , + 'entity_id': 'sensor.nettigo_air_monitor_sps30_pm10', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '21.2', + }) +# --- +# name: test_sensor[sensor.nettigo_air_monitor_sps30_pm2_5-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nettigo_air_monitor_sps30_pm2_5', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 0, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'SPS30 PM2.5', + 'platform': 'nam', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'sps30_pm25', + 'unique_id': 'aa:bb:cc:dd:ee:ff-sps30_p2', + 'unit_of_measurement': 'µg/m³', + }) +# --- +# name: 
test_sensor[sensor.nettigo_air_monitor_sps30_pm2_5-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'pm25', + 'friendly_name': 'Nettigo Air Monitor SPS30 PM2.5', + 'state_class': , + 'unit_of_measurement': 'µg/m³', + }), + 'context': , + 'entity_id': 'sensor.nettigo_air_monitor_sps30_pm2_5', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '34.3', + }) +# --- +# name: test_sensor[sensor.nettigo_air_monitor_sps30_pm4-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nettigo_air_monitor_sps30_pm4', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 0, + }), + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'SPS30 PM4', + 'platform': 'nam', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'sps30_pm4', + 'unique_id': 'aa:bb:cc:dd:ee:ff-sps30_p4', + 'unit_of_measurement': 'µg/m³', + }) +# --- +# name: test_sensor[sensor.nettigo_air_monitor_sps30_pm4-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Nettigo Air Monitor SPS30 PM4', + 'state_class': , + 'unit_of_measurement': 'µg/m³', + }), + 'context': , + 'entity_id': 'sensor.nettigo_air_monitor_sps30_pm4', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '24.7', + }) +# --- diff --git a/tests/components/nam/test_diagnostics.py b/tests/components/nam/test_diagnostics.py index 9d13121392f..7ed49a37e0a 100644 --- a/tests/components/nam/test_diagnostics.py +++ b/tests/components/nam/test_diagnostics.py @@ -1,25 +1,23 @@ """Test NAM diagnostics.""" -import json +from syrupy import SnapshotAssertion 
from homeassistant.core import HomeAssistant from . import init_integration -from tests.common import load_fixture from tests.components.diagnostics import get_diagnostics_for_config_entry from tests.typing import ClientSessionGenerator async def test_entry_diagnostics( - hass: HomeAssistant, hass_client: ClientSessionGenerator + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + snapshot: SnapshotAssertion, ) -> None: """Test config entry diagnostics.""" entry = await init_integration(hass) - diagnostics_data = json.loads(load_fixture("diagnostics_data.json", "nam")) - result = await get_diagnostics_for_config_entry(hass, hass_client, entry) - assert result["info"] == {"host": "10.10.2.3"} - assert result["data"] == diagnostics_data + assert result == snapshot diff --git a/tests/components/nam/test_sensor.py b/tests/components/nam/test_sensor.py index c88a34ae497..5254c444434 100644 --- a/tests/components/nam/test_sensor.py +++ b/tests/components/nam/test_sensor.py @@ -3,27 +3,18 @@ from datetime import timedelta from unittest.mock import AsyncMock, Mock, patch +from freezegun.api import FrozenDateTimeFactory from nettigo_air_monitor import ApiError +from syrupy import SnapshotAssertion from homeassistant.components.nam.const import DOMAIN -from homeassistant.components.sensor import ( - ATTR_OPTIONS, - ATTR_STATE_CLASS, - DOMAIN as SENSOR_DOMAIN, - SensorDeviceClass, - SensorStateClass, -) +from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN, SensorDeviceClass from homeassistant.const import ( ATTR_DEVICE_CLASS, ATTR_ENTITY_ID, - ATTR_ICON, ATTR_UNIT_OF_MEASUREMENT, - CONCENTRATION_MICROGRAMS_PER_CUBIC_METER, - CONCENTRATION_PARTS_PER_MILLION, - PERCENTAGE, - SIGNAL_STRENGTH_DECIBELS_MILLIWATT, STATE_UNAVAILABLE, - UnitOfPressure, + Platform, UnitOfTemperature, ) from homeassistant.core import HomeAssistant @@ -31,447 +22,32 @@ from homeassistant.helpers import entity_registry as er from homeassistant.setup import async_setup_component 
from homeassistant.util.dt import utcnow -from . import INCOMPLETE_NAM_DATA, init_integration, nam_data +from . import INCOMPLETE_NAM_DATA, init_integration -from tests.common import async_fire_time_changed +from tests.common import async_fire_time_changed, load_json_object_fixture -async def test_sensor(hass: HomeAssistant, entity_registry: er.EntityRegistry) -> None: +async def test_sensor( + hass: HomeAssistant, + entity_registry_enabled_by_default: None, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, + freezer: FrozenDateTimeFactory, +) -> None: """Test states of the air_quality.""" - entity_registry.async_get_or_create( - SENSOR_DOMAIN, - DOMAIN, - "aa:bb:cc:dd:ee:ff-signal", - suggested_object_id="nettigo_air_monitor_signal_strength", - disabled_by=None, - ) + hass.config.set_time_zone("UTC") + freezer.move_to("2024-04-20 12:00:00+00:00") - entity_registry.async_get_or_create( - SENSOR_DOMAIN, - DOMAIN, - "aa:bb:cc:dd:ee:ff-uptime", - suggested_object_id="nettigo_air_monitor_uptime", - disabled_by=None, - ) + with patch("homeassistant.components.nam.PLATFORMS", [Platform.SENSOR]): + entry = await init_integration(hass) - # Patch return value from utcnow, with offset to make sure the patch is correct - now = utcnow() - timedelta(hours=1) - with patch("homeassistant.components.nam.sensor.utcnow", return_value=now): - await init_integration(hass) + entity_entries = er.async_entries_for_config_entry(entity_registry, entry.entry_id) - state = hass.states.get("sensor.nettigo_air_monitor_bme280_humidity") - assert state - assert state.state == "45.7" - assert state.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.HUMIDITY - assert state.attributes.get(ATTR_STATE_CLASS) is SensorStateClass.MEASUREMENT - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == PERCENTAGE - - entry = entity_registry.async_get("sensor.nettigo_air_monitor_bme280_humidity") - assert entry - assert entry.unique_id == "aa:bb:cc:dd:ee:ff-bme280_humidity" - - state 
= hass.states.get("sensor.nettigo_air_monitor_bme280_temperature") - assert state - assert state.state == "7.6" - assert state.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.TEMPERATURE - assert state.attributes.get(ATTR_STATE_CLASS) is SensorStateClass.MEASUREMENT - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UnitOfTemperature.CELSIUS - - entry = entity_registry.async_get("sensor.nettigo_air_monitor_bme280_temperature") - assert entry - assert entry.unique_id == "aa:bb:cc:dd:ee:ff-bme280_temperature" - - state = hass.states.get("sensor.nettigo_air_monitor_bme280_pressure") - assert state - assert state.state == "1011.012" - assert state.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.PRESSURE - assert state.attributes.get(ATTR_STATE_CLASS) is SensorStateClass.MEASUREMENT - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UnitOfPressure.HPA - - entry = entity_registry.async_get("sensor.nettigo_air_monitor_bme280_pressure") - assert entry - assert entry.unique_id == "aa:bb:cc:dd:ee:ff-bme280_pressure" - - state = hass.states.get("sensor.nettigo_air_monitor_bmp180_temperature") - assert state - assert state.state == "7.6" - assert state.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.TEMPERATURE - assert state.attributes.get(ATTR_STATE_CLASS) is SensorStateClass.MEASUREMENT - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UnitOfTemperature.CELSIUS - - entry = entity_registry.async_get("sensor.nettigo_air_monitor_bmp180_temperature") - assert entry - assert entry.unique_id == "aa:bb:cc:dd:ee:ff-bmp180_temperature" - - state = hass.states.get("sensor.nettigo_air_monitor_bmp180_pressure") - assert state - assert state.state == "1032.012" - assert state.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.PRESSURE - assert state.attributes.get(ATTR_STATE_CLASS) is SensorStateClass.MEASUREMENT - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UnitOfPressure.HPA - - entry = 
entity_registry.async_get("sensor.nettigo_air_monitor_bmp180_pressure") - assert entry - assert entry.unique_id == "aa:bb:cc:dd:ee:ff-bmp180_pressure" - - state = hass.states.get("sensor.nettigo_air_monitor_bmp280_temperature") - assert state - assert state.state == "5.6" - assert state.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.TEMPERATURE - assert state.attributes.get(ATTR_STATE_CLASS) is SensorStateClass.MEASUREMENT - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UnitOfTemperature.CELSIUS - - entry = entity_registry.async_get("sensor.nettigo_air_monitor_bmp280_temperature") - assert entry - assert entry.unique_id == "aa:bb:cc:dd:ee:ff-bmp280_temperature" - - state = hass.states.get("sensor.nettigo_air_monitor_bmp280_pressure") - assert state - assert state.state == "1022.012" - assert state.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.PRESSURE - assert state.attributes.get(ATTR_STATE_CLASS) is SensorStateClass.MEASUREMENT - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UnitOfPressure.HPA - - entry = entity_registry.async_get("sensor.nettigo_air_monitor_bmp280_pressure") - assert entry - assert entry.unique_id == "aa:bb:cc:dd:ee:ff-bmp280_pressure" - - state = hass.states.get("sensor.nettigo_air_monitor_sht3x_humidity") - assert state - assert state.state == "34.7" - assert state.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.HUMIDITY - assert state.attributes.get(ATTR_STATE_CLASS) is SensorStateClass.MEASUREMENT - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == PERCENTAGE - - entry = entity_registry.async_get("sensor.nettigo_air_monitor_sht3x_humidity") - assert entry - assert entry.unique_id == "aa:bb:cc:dd:ee:ff-sht3x_humidity" - - state = hass.states.get("sensor.nettigo_air_monitor_sht3x_temperature") - assert state - assert state.state == "6.3" - assert state.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.TEMPERATURE - assert state.attributes.get(ATTR_STATE_CLASS) is 
SensorStateClass.MEASUREMENT - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UnitOfTemperature.CELSIUS - - entry = entity_registry.async_get("sensor.nettigo_air_monitor_sht3x_temperature") - assert entry - assert entry.unique_id == "aa:bb:cc:dd:ee:ff-sht3x_temperature" - - state = hass.states.get("sensor.nettigo_air_monitor_dht22_humidity") - assert state - assert state.state == "46.2" - assert state.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.HUMIDITY - assert state.attributes.get(ATTR_STATE_CLASS) is SensorStateClass.MEASUREMENT - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == PERCENTAGE - - entry = entity_registry.async_get("sensor.nettigo_air_monitor_dht22_humidity") - assert entry - assert entry.unique_id == "aa:bb:cc:dd:ee:ff-dht22_humidity" - - state = hass.states.get("sensor.nettigo_air_monitor_dht22_temperature") - assert state - assert state.state == "6.3" - assert state.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.TEMPERATURE - assert state.attributes.get(ATTR_STATE_CLASS) is SensorStateClass.MEASUREMENT - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UnitOfTemperature.CELSIUS - - entry = entity_registry.async_get("sensor.nettigo_air_monitor_dht22_temperature") - assert entry - assert entry.unique_id == "aa:bb:cc:dd:ee:ff-dht22_temperature" - - state = hass.states.get("sensor.nettigo_air_monitor_heca_humidity") - assert state - assert state.state == "50.0" - assert state.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.HUMIDITY - assert state.attributes.get(ATTR_STATE_CLASS) is SensorStateClass.MEASUREMENT - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == PERCENTAGE - - entry = entity_registry.async_get("sensor.nettigo_air_monitor_heca_humidity") - assert entry - assert entry.unique_id == "aa:bb:cc:dd:ee:ff-heca_humidity" - - state = hass.states.get("sensor.nettigo_air_monitor_heca_temperature") - assert state - assert state.state == "8.0" - assert state.attributes.get(ATTR_DEVICE_CLASS) 
== SensorDeviceClass.TEMPERATURE - assert state.attributes.get(ATTR_STATE_CLASS) is SensorStateClass.MEASUREMENT - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UnitOfTemperature.CELSIUS - - entry = entity_registry.async_get("sensor.nettigo_air_monitor_heca_temperature") - assert entry - assert entry.unique_id == "aa:bb:cc:dd:ee:ff-heca_temperature" - - state = hass.states.get("sensor.nettigo_air_monitor_signal_strength") - assert state - assert state.state == "-72.0" - assert state.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.SIGNAL_STRENGTH - assert state.attributes.get(ATTR_STATE_CLASS) is SensorStateClass.MEASUREMENT - assert ( - state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) - == SIGNAL_STRENGTH_DECIBELS_MILLIWATT - ) - - entry = entity_registry.async_get("sensor.nettigo_air_monitor_signal_strength") - assert entry - assert entry.unique_id == "aa:bb:cc:dd:ee:ff-signal" - - state = hass.states.get("sensor.nettigo_air_monitor_uptime") - assert state - assert ( - state.state - == (now - timedelta(seconds=456987)).replace(microsecond=0).isoformat() - ) - assert state.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.TIMESTAMP - assert state.attributes.get(ATTR_STATE_CLASS) is None - - entry = entity_registry.async_get("sensor.nettigo_air_monitor_uptime") - assert entry - assert entry.unique_id == "aa:bb:cc:dd:ee:ff-uptime" - - state = hass.states.get( - "sensor.nettigo_air_monitor_pmsx003_common_air_quality_index_level" - ) - assert state - assert state.state == "very_low" - assert state.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.ENUM - assert state.attributes.get(ATTR_OPTIONS) == [ - "very_low", - "low", - "medium", - "high", - "very_high", - ] - assert state.attributes.get(ATTR_ICON) is None - - entry = entity_registry.async_get( - "sensor.nettigo_air_monitor_pmsx003_common_air_quality_index_level" - ) - assert entry - assert entry.unique_id == "aa:bb:cc:dd:ee:ff-pms_caqi_level" - assert entry.translation_key == 
"pmsx003_caqi_level" - - state = hass.states.get( - "sensor.nettigo_air_monitor_pmsx003_common_air_quality_index" - ) - assert state - assert state.state == "19" - assert state.attributes.get(ATTR_ICON) is None - - entry = entity_registry.async_get( - "sensor.nettigo_air_monitor_pmsx003_common_air_quality_index" - ) - assert entry - assert entry.unique_id == "aa:bb:cc:dd:ee:ff-pms_caqi" - - state = hass.states.get("sensor.nettigo_air_monitor_pmsx003_pm10") - assert state - assert state.state == "10.0" - assert state.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.PM10 - assert state.attributes.get(ATTR_STATE_CLASS) is SensorStateClass.MEASUREMENT - assert ( - state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) - == CONCENTRATION_MICROGRAMS_PER_CUBIC_METER - ) - - entry = entity_registry.async_get("sensor.nettigo_air_monitor_pmsx003_pm10") - assert entry - assert entry.unique_id == "aa:bb:cc:dd:ee:ff-pms_p1" - - state = hass.states.get("sensor.nettigo_air_monitor_pmsx003_pm2_5") - assert state - assert state.state == "11.0" - assert state.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.PM25 - assert state.attributes.get(ATTR_STATE_CLASS) is SensorStateClass.MEASUREMENT - assert ( - state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) - == CONCENTRATION_MICROGRAMS_PER_CUBIC_METER - ) - - entry = entity_registry.async_get("sensor.nettigo_air_monitor_pmsx003_pm2_5") - assert entry - assert entry.unique_id == "aa:bb:cc:dd:ee:ff-pms_p2" - - state = hass.states.get("sensor.nettigo_air_monitor_pmsx003_pm1") - assert state - assert state.state == "6.0" - assert state.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.PM1 - assert state.attributes.get(ATTR_STATE_CLASS) is SensorStateClass.MEASUREMENT - assert ( - state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) - == CONCENTRATION_MICROGRAMS_PER_CUBIC_METER - ) - - entry = entity_registry.async_get("sensor.nettigo_air_monitor_pmsx003_pm1") - assert entry - assert entry.unique_id == "aa:bb:cc:dd:ee:ff-pms_p0" - - state = 
hass.states.get("sensor.nettigo_air_monitor_sds011_pm10") - assert state - assert state.state == "18.6" - assert state.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.PM10 - assert state.attributes.get(ATTR_STATE_CLASS) is SensorStateClass.MEASUREMENT - assert ( - state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) - == CONCENTRATION_MICROGRAMS_PER_CUBIC_METER - ) - - entry = entity_registry.async_get("sensor.nettigo_air_monitor_sds011_pm10") - assert entry - assert entry.unique_id == "aa:bb:cc:dd:ee:ff-sds011_p1" - - state = hass.states.get( - "sensor.nettigo_air_monitor_sds011_common_air_quality_index" - ) - assert state - assert state.state == "19" - assert state.attributes.get(ATTR_ICON) is None - - entry = entity_registry.async_get( - "sensor.nettigo_air_monitor_sds011_common_air_quality_index" - ) - assert entry - assert entry.unique_id == "aa:bb:cc:dd:ee:ff-sds011_caqi" - - state = hass.states.get( - "sensor.nettigo_air_monitor_sds011_common_air_quality_index_level" - ) - assert state - assert state.state == "very_low" - assert state.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.ENUM - assert state.attributes.get(ATTR_OPTIONS) == [ - "very_low", - "low", - "medium", - "high", - "very_high", - ] - assert state.attributes.get(ATTR_ICON) is None - - entry = entity_registry.async_get( - "sensor.nettigo_air_monitor_sds011_common_air_quality_index_level" - ) - assert entry - assert entry.unique_id == "aa:bb:cc:dd:ee:ff-sds011_caqi_level" - assert entry.translation_key == "sds011_caqi_level" - - state = hass.states.get("sensor.nettigo_air_monitor_sds011_pm2_5") - assert state - assert state.state == "11.0" - assert state.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.PM25 - assert state.attributes.get(ATTR_STATE_CLASS) is SensorStateClass.MEASUREMENT - assert ( - state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) - == CONCENTRATION_MICROGRAMS_PER_CUBIC_METER - ) - - entry = entity_registry.async_get("sensor.nettigo_air_monitor_sds011_pm2_5") - assert 
entry - assert entry.unique_id == "aa:bb:cc:dd:ee:ff-sds011_p2" - - state = hass.states.get("sensor.nettigo_air_monitor_sps30_common_air_quality_index") - assert state - assert state.state == "54" - assert state.attributes.get(ATTR_ICON) is None - - entry = entity_registry.async_get( - "sensor.nettigo_air_monitor_sps30_common_air_quality_index" - ) - assert entry - assert entry.unique_id == "aa:bb:cc:dd:ee:ff-sps30_caqi" - - state = hass.states.get( - "sensor.nettigo_air_monitor_sps30_common_air_quality_index_level" - ) - assert state - assert state.state == "medium" - assert state.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.ENUM - assert state.attributes.get(ATTR_OPTIONS) == [ - "very_low", - "low", - "medium", - "high", - "very_high", - ] - assert state.attributes.get(ATTR_ICON) is None - - entry = entity_registry.async_get( - "sensor.nettigo_air_monitor_sps30_common_air_quality_index_level" - ) - assert entry - assert entry.unique_id == "aa:bb:cc:dd:ee:ff-sps30_caqi_level" - assert entry.translation_key == "sps30_caqi_level" - - state = hass.states.get("sensor.nettigo_air_monitor_sps30_pm1") - assert state - assert state.state == "31.2" - assert state.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.PM1 - assert state.attributes.get(ATTR_STATE_CLASS) is SensorStateClass.MEASUREMENT - assert ( - state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) - == CONCENTRATION_MICROGRAMS_PER_CUBIC_METER - ) - - entry = entity_registry.async_get("sensor.nettigo_air_monitor_sps30_pm1") - assert entry - assert entry.unique_id == "aa:bb:cc:dd:ee:ff-sps30_p0" - - state = hass.states.get("sensor.nettigo_air_monitor_sps30_pm10") - assert state - assert state.state == "21.2" - assert state.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.PM10 - assert state.attributes.get(ATTR_STATE_CLASS) is SensorStateClass.MEASUREMENT - assert ( - state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) - == CONCENTRATION_MICROGRAMS_PER_CUBIC_METER - ) - - entry = 
entity_registry.async_get("sensor.nettigo_air_monitor_sps30_pm10") - assert entry - assert entry.unique_id == "aa:bb:cc:dd:ee:ff-sps30_p1" - - state = hass.states.get("sensor.nettigo_air_monitor_sps30_pm2_5") - assert state - assert state.state == "34.3" - assert state.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.PM25 - assert state.attributes.get(ATTR_STATE_CLASS) is SensorStateClass.MEASUREMENT - assert ( - state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) - == CONCENTRATION_MICROGRAMS_PER_CUBIC_METER - ) - - entry = entity_registry.async_get("sensor.nettigo_air_monitor_sps30_pm2_5") - assert entry - assert entry.unique_id == "aa:bb:cc:dd:ee:ff-sps30_p2" - - state = hass.states.get("sensor.nettigo_air_monitor_sps30_pm4") - assert state - assert state.state == "24.7" - assert state.attributes.get(ATTR_STATE_CLASS) is SensorStateClass.MEASUREMENT - assert ( - state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) - == CONCENTRATION_MICROGRAMS_PER_CUBIC_METER - ) - assert state.attributes.get(ATTR_ICON) is None - - entry = entity_registry.async_get("sensor.nettigo_air_monitor_sps30_pm4") - assert entry - assert entry.unique_id == "aa:bb:cc:dd:ee:ff-sps30_p4" - - state = hass.states.get("sensor.nettigo_air_monitor_mh_z14a_carbon_dioxide") - assert state - assert state.state == "865.0" - assert state.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.CO2 - assert state.attributes.get(ATTR_STATE_CLASS) is SensorStateClass.MEASUREMENT - assert ( - state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) - == CONCENTRATION_PARTS_PER_MILLION - ) - entry = entity_registry.async_get( - "sensor.nettigo_air_monitor_mh_z14a_carbon_dioxide" - ) - assert entry - assert entry.unique_id == "aa:bb:cc:dd:ee:ff-mhz14a_carbon_dioxide" + assert entity_entries + for entity_entry in entity_entries: + assert entity_entry == snapshot(name=f"{entity_entry.entity_id}-entry") + assert (state := hass.states.get(entity_entry.entity_id)) + assert state == 
snapshot(name=f"{entity_entry.entity_id}-state") async def test_sensor_disabled( @@ -524,6 +100,8 @@ async def test_incompleta_data_after_device_restart(hass: HomeAssistant) -> None async def test_availability(hass: HomeAssistant) -> None: """Ensure that we mark the entities unavailable correctly when device causes an error.""" + nam_data = load_json_object_fixture("nam/nam_data.json") + await init_integration(hass) state = hass.states.get("sensor.nettigo_air_monitor_bme280_temperature") @@ -566,6 +144,8 @@ async def test_availability(hass: HomeAssistant) -> None: async def test_manual_update_entity(hass: HomeAssistant) -> None: """Test manual update entity via service homeasasistant/update_entity.""" + nam_data = load_json_object_fixture("nam/nam_data.json") + await init_integration(hass) await async_setup_component(hass, "homeassistant", {}) From e3ce3ed6fd4e66e911a120c1986c3b9e92932723 Mon Sep 17 00:00:00 2001 From: jjlawren Date: Sat, 20 Apr 2024 05:36:03 -0500 Subject: [PATCH 198/426] Bump plexapi to 4.15.12 (#115872) --- homeassistant/components/plex/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/plex/manifest.json b/homeassistant/components/plex/manifest.json index 85362371715..ff0ab39b150 100644 --- a/homeassistant/components/plex/manifest.json +++ b/homeassistant/components/plex/manifest.json @@ -8,7 +8,7 @@ "iot_class": "local_push", "loggers": ["plexapi", "plexwebsocket"], "requirements": [ - "PlexAPI==4.15.11", + "PlexAPI==4.15.12", "plexauth==0.0.6", "plexwebsocket==0.0.14" ], diff --git a/requirements_all.txt b/requirements_all.txt index a7111a73737..a740150a70f 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -45,7 +45,7 @@ Mastodon.py==1.8.1 Pillow==10.3.0 # homeassistant.components.plex -PlexAPI==4.15.11 +PlexAPI==4.15.12 # homeassistant.components.progettihwsw ProgettiHWSW==0.1.3 diff --git 
a/requirements_test_all.txt b/requirements_test_all.txt index 70c1b2d244b..2258a5ba786 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -39,7 +39,7 @@ HATasmota==0.8.0 Pillow==10.3.0 # homeassistant.components.plex -PlexAPI==4.15.11 +PlexAPI==4.15.12 # homeassistant.components.progettihwsw ProgettiHWSW==0.1.3 From 8f73422ce548bd53a94816fd10b34a08da2b8aa1 Mon Sep 17 00:00:00 2001 From: Nathan Spencer Date: Sat, 20 Apr 2024 03:37:35 -0700 Subject: [PATCH 199/426] Bump pylitterbot to 2023.5.0 (#115856) --- homeassistant/components/litterrobot/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/litterrobot/manifest.json b/homeassistant/components/litterrobot/manifest.json index 66ade5f356c..88396f9f9c1 100644 --- a/homeassistant/components/litterrobot/manifest.json +++ b/homeassistant/components/litterrobot/manifest.json @@ -12,5 +12,5 @@ "integration_type": "hub", "iot_class": "cloud_push", "loggers": ["pylitterbot"], - "requirements": ["pylitterbot==2023.4.11"] + "requirements": ["pylitterbot==2023.5.0"] } diff --git a/requirements_all.txt b/requirements_all.txt index a740150a70f..a15dd411020 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1941,7 +1941,7 @@ pylibrespot-java==0.1.1 pylitejet==0.6.2 # homeassistant.components.litterrobot -pylitterbot==2023.4.11 +pylitterbot==2023.5.0 # homeassistant.components.lutron_caseta pylutron-caseta==0.20.0 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 2258a5ba786..ac721b30c22 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1516,7 +1516,7 @@ pylibrespot-java==0.1.1 pylitejet==0.6.2 # homeassistant.components.litterrobot -pylitterbot==2023.4.11 +pylitterbot==2023.5.0 # homeassistant.components.lutron_caseta pylutron-caseta==0.20.0 From 16e31d8f74d6e92675af8382d042117df4bf42e7 Mon Sep 17 00:00:00 2001 From: Joost 
Lekkerkerker Date: Sat, 20 Apr 2024 14:49:57 +0200 Subject: [PATCH 200/426] Add test helper to snapshot a platform (#115880) * Add test helper to snapshot a platform * Add test helper to snapshot a platform --- tests/common.py | 20 ++++++++++++++++++++ tests/components/withings/test_sensor.py | 13 ++++--------- 2 files changed, 24 insertions(+), 9 deletions(-) diff --git a/tests/common.py b/tests/common.py index b12f0ed37da..d53db1beb37 100644 --- a/tests/common.py +++ b/tests/common.py @@ -22,6 +22,7 @@ from unittest.mock import AsyncMock, Mock, patch from aiohttp.test_utils import unused_port as get_test_instance_port # noqa: F401 import pytest +from syrupy import SnapshotAssertion import voluptuous as vol from homeassistant import auth, bootstrap, config_entries, loader @@ -1733,3 +1734,22 @@ def setup_test_component_platform( mock_platform(hass, f"test.{domain}", platform, built_in=built_in) return platform + + +async def snapshot_platform( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, + config_entry_id: str, +) -> None: + """Snapshot a platform.""" + entity_entries = er.async_entries_for_config_entry(entity_registry, config_entry_id) + assert entity_entries + assert ( + len({entity_entry.domain for entity_entry in entity_entries}) == 1 + ), "Please limit the loaded platforms to 1 platform." + for entity_entry in entity_entries: + assert entity_entry == snapshot(name=f"{entity_entry.entity_id}-entry") + assert entity_entry.disabled_by is None, "Please enable all entities." + assert (state := hass.states.get(entity_entry.entity_id)) + assert state == snapshot(name=f"{entity_entry.entity_id}-state") diff --git a/tests/components/withings/test_sensor.py b/tests/components/withings/test_sensor.py index 72da4b9d973..8966006e47f 100644 --- a/tests/components/withings/test_sensor.py +++ b/tests/components/withings/test_sensor.py @@ -21,7 +21,7 @@ from . 
import ( setup_integration, ) -from tests.common import MockConfigEntry, async_fire_time_changed +from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform @pytest.mark.freeze_time("2023-10-21") @@ -36,15 +36,10 @@ async def test_all_entities( """Test all entities.""" with patch("homeassistant.components.withings.PLATFORMS", [Platform.SENSOR]): await setup_integration(hass, polling_config_entry) - entity_entries = er.async_entries_for_config_entry( - entity_registry, polling_config_entry.entry_id - ) - assert entity_entries - for entity_entry in entity_entries: - assert entity_entry == snapshot(name=f"{entity_entry.entity_id}-entry") - assert (state := hass.states.get(entity_entry.entity_id)) - assert state == snapshot(name=f"{entity_entry.entity_id}-state") + await snapshot_platform( + hass, entity_registry, snapshot, polling_config_entry.entry_id + ) async def test_update_failed( From 5796b651afd6edfe510124ece56a32ed985dc53e Mon Sep 17 00:00:00 2001 From: Joost Lekkerkerker Date: Sat, 20 Apr 2024 17:14:42 +0200 Subject: [PATCH 201/426] Use snapshot test helper in Brother (#115885) --- tests/components/brother/test_sensor.py | 10 ++-------- 1 file changed, 2 insertions(+), 8 deletions(-) diff --git a/tests/components/brother/test_sensor.py b/tests/components/brother/test_sensor.py index 39aa3b83d6f..069a5ddc152 100644 --- a/tests/components/brother/test_sensor.py +++ b/tests/components/brother/test_sensor.py @@ -17,7 +17,7 @@ from homeassistant.util.dt import utcnow from . 
import init_integration -from tests.common import async_fire_time_changed, load_fixture +from tests.common import async_fire_time_changed, load_fixture, snapshot_platform async def test_sensors( @@ -34,13 +34,7 @@ async def test_sensors( with patch("homeassistant.components.brother.PLATFORMS", [Platform.SENSOR]): entry = await init_integration(hass) - entity_entries = er.async_entries_for_config_entry(entity_registry, entry.entry_id) - - assert entity_entries - for entity_entry in entity_entries: - assert entity_entry == snapshot(name=f"{entity_entry.entity_id}-entry") - assert (state := hass.states.get(entity_entry.entity_id)) - assert state == snapshot(name=f"{entity_entry.entity_id}-state") + await snapshot_platform(hass, entity_registry, snapshot, entry.entry_id) async def test_availability(hass: HomeAssistant) -> None: From b328981868182df33a355b438e64fd5549aa9909 Mon Sep 17 00:00:00 2001 From: Joost Lekkerkerker Date: Sat, 20 Apr 2024 17:24:40 +0200 Subject: [PATCH 202/426] Use snapshot test helper in Accuweather (#115884) --- .../accuweather/snapshots/test_weather.ambr | 218 +++++------------- tests/components/accuweather/test_sensor.py | 10 +- tests/components/accuweather/test_weather.py | 57 +---- 3 files changed, 75 insertions(+), 210 deletions(-) diff --git a/tests/components/accuweather/snapshots/test_weather.ambr b/tests/components/accuweather/snapshots/test_weather.ambr index 081e7bf595a..1542d22aa7b 100644 --- a/tests/components/accuweather/snapshots/test_weather.ambr +++ b/tests/components/accuweather/snapshots/test_weather.ambr @@ -1,158 +1,4 @@ # serializer version: 1 -# name: test_forecast_service - dict({ - 'forecast': list([ - dict({ - 'apparent_temperature': 29.8, - 'cloud_coverage': 58, - 'condition': 'lightning-rainy', - 'datetime': '2020-07-26T05:00:00+00:00', - 'precipitation': 2.5, - 'precipitation_probability': 60, - 'temperature': 29.5, - 'templow': 15.4, - 'uv_index': 5, - 'wind_bearing': 166, - 'wind_gust_speed': 29.6, - 'wind_speed': 
13.0, - }), - dict({ - 'apparent_temperature': 28.9, - 'cloud_coverage': 52, - 'condition': 'partlycloudy', - 'datetime': '2020-07-27T05:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 25, - 'temperature': 26.2, - 'templow': 15.9, - 'uv_index': 7, - 'wind_bearing': 297, - 'wind_gust_speed': 14.8, - 'wind_speed': 9.3, - }), - dict({ - 'apparent_temperature': 31.6, - 'cloud_coverage': 65, - 'condition': 'partlycloudy', - 'datetime': '2020-07-28T05:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 10, - 'temperature': 31.7, - 'templow': 16.8, - 'uv_index': 7, - 'wind_bearing': 198, - 'wind_gust_speed': 24.1, - 'wind_speed': 16.7, - }), - dict({ - 'apparent_temperature': 26.5, - 'cloud_coverage': 45, - 'condition': 'partlycloudy', - 'datetime': '2020-07-29T05:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 9, - 'temperature': 24.0, - 'templow': 11.7, - 'uv_index': 6, - 'wind_bearing': 293, - 'wind_gust_speed': 24.1, - 'wind_speed': 13.0, - }), - dict({ - 'apparent_temperature': 22.2, - 'cloud_coverage': 50, - 'condition': 'partlycloudy', - 'datetime': '2020-07-30T05:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 1, - 'temperature': 21.4, - 'templow': 12.2, - 'uv_index': 7, - 'wind_bearing': 280, - 'wind_gust_speed': 27.8, - 'wind_speed': 18.5, - }), - ]), - }) -# --- -# name: test_forecast_service[forecast] - dict({ - 'weather.home': dict({ - 'forecast': list([ - dict({ - 'apparent_temperature': 29.8, - 'cloud_coverage': 58, - 'condition': 'lightning-rainy', - 'datetime': '2020-07-26T05:00:00+00:00', - 'precipitation': 2.5, - 'precipitation_probability': 60, - 'temperature': 29.5, - 'templow': 15.4, - 'uv_index': 5, - 'wind_bearing': 166, - 'wind_gust_speed': 29.6, - 'wind_speed': 13.0, - }), - dict({ - 'apparent_temperature': 28.9, - 'cloud_coverage': 52, - 'condition': 'partlycloudy', - 'datetime': '2020-07-27T05:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 25, 
- 'temperature': 26.2, - 'templow': 15.9, - 'uv_index': 7, - 'wind_bearing': 297, - 'wind_gust_speed': 14.8, - 'wind_speed': 9.3, - }), - dict({ - 'apparent_temperature': 31.6, - 'cloud_coverage': 65, - 'condition': 'partlycloudy', - 'datetime': '2020-07-28T05:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 10, - 'temperature': 31.7, - 'templow': 16.8, - 'uv_index': 7, - 'wind_bearing': 198, - 'wind_gust_speed': 24.1, - 'wind_speed': 16.7, - }), - dict({ - 'apparent_temperature': 26.5, - 'cloud_coverage': 45, - 'condition': 'partlycloudy', - 'datetime': '2020-07-29T05:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 9, - 'temperature': 24.0, - 'templow': 11.7, - 'uv_index': 6, - 'wind_bearing': 293, - 'wind_gust_speed': 24.1, - 'wind_speed': 13.0, - }), - dict({ - 'apparent_temperature': 22.2, - 'cloud_coverage': 50, - 'condition': 'partlycloudy', - 'datetime': '2020-07-30T05:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 1, - 'temperature': 21.4, - 'templow': 12.2, - 'uv_index': 7, - 'wind_bearing': 280, - 'wind_gust_speed': 27.8, - 'wind_speed': 18.5, - }), - ]), - }), - }) -# --- # name: test_forecast_service[get_forecast] dict({ 'forecast': list([ @@ -455,3 +301,67 @@ }), ]) # --- +# name: test_weather[weather.home-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'weather', + 'entity_category': None, + 'entity_id': 'weather.home', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'accuweather', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '0123456', + 'unit_of_measurement': None, + }) +# --- +# name: 
test_weather[weather.home-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'apparent_temperature': 22.8, + 'attribution': 'Data provided by AccuWeather', + 'cloud_coverage': 10, + 'dew_point': 16.2, + 'friendly_name': 'Home', + 'humidity': 67, + 'precipitation_unit': , + 'pressure': 1012.0, + 'pressure_unit': , + 'supported_features': , + 'temperature': 22.6, + 'temperature_unit': , + 'uv_index': 6, + 'visibility': 16.1, + 'visibility_unit': , + 'wind_bearing': 180, + 'wind_gust_speed': 20.3, + 'wind_speed': 14.5, + 'wind_speed_unit': , + }), + 'context': , + 'entity_id': 'weather.home', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'sunny', + }) +# --- diff --git a/tests/components/accuweather/test_sensor.py b/tests/components/accuweather/test_sensor.py index e79e49db96d..127e4d74cd8 100644 --- a/tests/components/accuweather/test_sensor.py +++ b/tests/components/accuweather/test_sensor.py @@ -30,6 +30,7 @@ from tests.common import ( async_fire_time_changed, load_json_array_fixture, load_json_object_fixture, + snapshot_platform, ) @@ -42,14 +43,7 @@ async def test_sensor( """Test states of the sensor.""" with patch("homeassistant.components.accuweather.PLATFORMS", [Platform.SENSOR]): entry = await init_integration(hass) - - entity_entries = er.async_entries_for_config_entry(entity_registry, entry.entry_id) - - assert entity_entries - for entity_entry in entity_entries: - assert entity_entry == snapshot(name=f"{entity_entry.entity_id}-entry") - assert (state := hass.states.get(entity_entry.entity_id)) - assert state == snapshot(name=f"{entity_entry.entity_id}-state") + await snapshot_platform(hass, entity_registry, snapshot, entry.entry_id) async def test_availability(hass: HomeAssistant) -> None: diff --git a/tests/components/accuweather/test_weather.py b/tests/components/accuweather/test_weather.py index b3237ca2958..d97a5d3da3c 100644 --- a/tests/components/accuweather/test_weather.py +++ 
b/tests/components/accuweather/test_weather.py @@ -7,34 +7,14 @@ from freezegun.api import FrozenDateTimeFactory import pytest from syrupy.assertion import SnapshotAssertion -from homeassistant.components.accuweather.const import ( - ATTRIBUTION, - UPDATE_INTERVAL_DAILY_FORECAST, -) +from homeassistant.components.accuweather.const import UPDATE_INTERVAL_DAILY_FORECAST from homeassistant.components.weather import ( ATTR_FORECAST_CONDITION, - ATTR_WEATHER_APPARENT_TEMPERATURE, - ATTR_WEATHER_CLOUD_COVERAGE, - ATTR_WEATHER_DEW_POINT, - ATTR_WEATHER_HUMIDITY, - ATTR_WEATHER_PRESSURE, - ATTR_WEATHER_TEMPERATURE, - ATTR_WEATHER_UV_INDEX, - ATTR_WEATHER_VISIBILITY, - ATTR_WEATHER_WIND_BEARING, - ATTR_WEATHER_WIND_GUST_SPEED, - ATTR_WEATHER_WIND_SPEED, DOMAIN as WEATHER_DOMAIN, LEGACY_SERVICE_GET_FORECAST, SERVICE_GET_FORECASTS, - WeatherEntityFeature, -) -from homeassistant.const import ( - ATTR_ATTRIBUTION, - ATTR_ENTITY_ID, - ATTR_SUPPORTED_FEATURES, - STATE_UNAVAILABLE, ) +from homeassistant.const import ATTR_ENTITY_ID, STATE_UNAVAILABLE, Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er from homeassistant.setup import async_setup_component @@ -46,37 +26,18 @@ from tests.common import ( async_fire_time_changed, load_json_array_fixture, load_json_object_fixture, + snapshot_platform, ) from tests.typing import WebSocketGenerator -async def test_weather(hass: HomeAssistant, entity_registry: er.EntityRegistry) -> None: +async def test_weather( + hass: HomeAssistant, entity_registry: er.EntityRegistry, snapshot: SnapshotAssertion +) -> None: """Test states of the weather without forecast.""" - await init_integration(hass) - - state = hass.states.get("weather.home") - assert state - assert state.state == "sunny" - assert state.attributes.get(ATTR_WEATHER_HUMIDITY) == 67 - assert state.attributes.get(ATTR_WEATHER_PRESSURE) == 1012.0 - assert state.attributes.get(ATTR_WEATHER_TEMPERATURE) == 22.6 - assert 
state.attributes.get(ATTR_WEATHER_VISIBILITY) == 16.1 - assert state.attributes.get(ATTR_WEATHER_WIND_BEARING) == 180 - assert state.attributes.get(ATTR_WEATHER_WIND_SPEED) == 14.5 # 4.03 m/s -> km/h - assert state.attributes.get(ATTR_WEATHER_APPARENT_TEMPERATURE) == 22.8 - assert state.attributes.get(ATTR_WEATHER_DEW_POINT) == 16.2 - assert state.attributes.get(ATTR_WEATHER_CLOUD_COVERAGE) == 10 - assert state.attributes.get(ATTR_WEATHER_WIND_GUST_SPEED) == 20.3 - assert state.attributes.get(ATTR_WEATHER_UV_INDEX) == 6 - assert state.attributes.get(ATTR_ATTRIBUTION) == ATTRIBUTION - assert ( - state.attributes.get(ATTR_SUPPORTED_FEATURES) - is WeatherEntityFeature.FORECAST_DAILY - ) - - entry = entity_registry.async_get("weather.home") - assert entry - assert entry.unique_id == "0123456" + with patch("homeassistant.components.accuweather.PLATFORMS", [Platform.WEATHER]): + entry = await init_integration(hass) + await snapshot_platform(hass, entity_registry, snapshot, entry.entry_id) async def test_availability(hass: HomeAssistant) -> None: From de1312f7e4189b60dd1df491d1d168d52f11de00 Mon Sep 17 00:00:00 2001 From: Maciej Bieniek Date: Sat, 20 Apr 2024 18:43:25 +0200 Subject: [PATCH 203/426] Use snapshot test helper in GIOS (#115893) Co-authored-by: Maciej Bieniek <478555+bieniu@users.noreply.github.com> --- tests/components/gios/test_sensor.py | 10 ++-------- 1 file changed, 2 insertions(+), 8 deletions(-) diff --git a/tests/components/gios/test_sensor.py b/tests/components/gios/test_sensor.py index e760e050f2b..b24d88ccb8d 100644 --- a/tests/components/gios/test_sensor.py +++ b/tests/components/gios/test_sensor.py @@ -17,7 +17,7 @@ from homeassistant.util.dt import utcnow from . 
import init_integration -from tests.common import async_fire_time_changed, load_fixture +from tests.common import async_fire_time_changed, load_fixture, snapshot_platform async def test_sensor( @@ -27,13 +27,7 @@ async def test_sensor( with patch("homeassistant.components.gios.PLATFORMS", [Platform.SENSOR]): entry = await init_integration(hass) - entity_entries = er.async_entries_for_config_entry(entity_registry, entry.entry_id) - - assert entity_entries - for entity_entry in entity_entries: - assert entity_entry == snapshot(name=f"{entity_entry.entity_id}-entry") - assert (state := hass.states.get(entity_entry.entity_id)) - assert state == snapshot(name=f"{entity_entry.entity_id}-state") + await snapshot_platform(hass, entity_registry, snapshot, entry.entry_id) async def test_availability(hass: HomeAssistant) -> None: From 5e345b7129b01d89b98589153374d98924e31804 Mon Sep 17 00:00:00 2001 From: Maciej Bieniek Date: Sat, 20 Apr 2024 18:43:33 +0200 Subject: [PATCH 204/426] Use snapshot test helper in NAM (#115894) Co-authored-by: Maciej Bieniek <478555+bieniu@users.noreply.github.com> --- tests/components/nam/test_sensor.py | 14 ++++++-------- 1 file changed, 6 insertions(+), 8 deletions(-) diff --git a/tests/components/nam/test_sensor.py b/tests/components/nam/test_sensor.py index 5254c444434..2b307b4b02a 100644 --- a/tests/components/nam/test_sensor.py +++ b/tests/components/nam/test_sensor.py @@ -24,7 +24,11 @@ from homeassistant.util.dt import utcnow from . 
import INCOMPLETE_NAM_DATA, init_integration -from tests.common import async_fire_time_changed, load_json_object_fixture +from tests.common import ( + async_fire_time_changed, + load_json_object_fixture, + snapshot_platform, +) async def test_sensor( @@ -41,13 +45,7 @@ async def test_sensor( with patch("homeassistant.components.nam.PLATFORMS", [Platform.SENSOR]): entry = await init_integration(hass) - entity_entries = er.async_entries_for_config_entry(entity_registry, entry.entry_id) - - assert entity_entries - for entity_entry in entity_entries: - assert entity_entry == snapshot(name=f"{entity_entry.entity_id}-entry") - assert (state := hass.states.get(entity_entry.entity_id)) - assert state == snapshot(name=f"{entity_entry.entity_id}-state") + await snapshot_platform(hass, entity_registry, snapshot, entry.entry_id) async def test_sensor_disabled( From 10be2cc0044b6b010cb2d16c4a31961dc4cc4ea1 Mon Sep 17 00:00:00 2001 From: Maciej Bieniek Date: Sat, 20 Apr 2024 18:43:40 +0200 Subject: [PATCH 205/426] Use snapshot test helper in NextDNS (#115895) Co-authored-by: Maciej Bieniek <478555+bieniu@users.noreply.github.com> --- tests/components/nextdns/test_binary_sensor.py | 10 ++-------- tests/components/nextdns/test_button.py | 10 +++------- tests/components/nextdns/test_sensor.py | 10 ++-------- tests/components/nextdns/test_switch.py | 10 ++-------- 4 files changed, 9 insertions(+), 31 deletions(-) diff --git a/tests/components/nextdns/test_binary_sensor.py b/tests/components/nextdns/test_binary_sensor.py index f83e55515e8..19cad755fb4 100644 --- a/tests/components/nextdns/test_binary_sensor.py +++ b/tests/components/nextdns/test_binary_sensor.py @@ -13,7 +13,7 @@ from homeassistant.util.dt import utcnow from . 
import init_integration, mock_nextdns -from tests.common import async_fire_time_changed +from tests.common import async_fire_time_changed, snapshot_platform async def test_binary_sensor( @@ -23,13 +23,7 @@ async def test_binary_sensor( with patch("homeassistant.components.nextdns.PLATFORMS", [Platform.BINARY_SENSOR]): entry = await init_integration(hass) - entity_entries = er.async_entries_for_config_entry(entity_registry, entry.entry_id) - - assert entity_entries - for entity_entry in entity_entries: - assert entity_entry == snapshot(name=f"{entity_entry.entity_id}-entry") - assert (state := hass.states.get(entity_entry.entity_id)) - assert state == snapshot(name=f"{entity_entry.entity_id}-state") + await snapshot_platform(hass, entity_registry, snapshot, entry.entry_id) async def test_availability(hass: HomeAssistant) -> None: diff --git a/tests/components/nextdns/test_button.py b/tests/components/nextdns/test_button.py index 2007af612c8..51970b9bb48 100644 --- a/tests/components/nextdns/test_button.py +++ b/tests/components/nextdns/test_button.py @@ -12,6 +12,8 @@ from homeassistant.util import dt as dt_util from . 
import init_integration +from tests.common import snapshot_platform + async def test_button( hass: HomeAssistant, entity_registry: er.EntityRegistry, snapshot: SnapshotAssertion @@ -20,13 +22,7 @@ async def test_button( with patch("homeassistant.components.nextdns.PLATFORMS", [Platform.BUTTON]): entry = await init_integration(hass) - entity_entries = er.async_entries_for_config_entry(entity_registry, entry.entry_id) - - assert entity_entries - for entity_entry in entity_entries: - assert entity_entry == snapshot(name=f"{entity_entry.entity_id}-entry") - assert (state := hass.states.get(entity_entry.entity_id)) - assert state == snapshot(name=f"{entity_entry.entity_id}-state") + await snapshot_platform(hass, entity_registry, snapshot, entry.entry_id) async def test_button_press(hass: HomeAssistant) -> None: diff --git a/tests/components/nextdns/test_sensor.py b/tests/components/nextdns/test_sensor.py index 9c03cf2b215..e7ea7a3f56b 100644 --- a/tests/components/nextdns/test_sensor.py +++ b/tests/components/nextdns/test_sensor.py @@ -13,7 +13,7 @@ from homeassistant.util.dt import utcnow from . 
import init_integration, mock_nextdns -from tests.common import async_fire_time_changed +from tests.common import async_fire_time_changed, snapshot_platform async def test_sensor( @@ -26,13 +26,7 @@ async def test_sensor( with patch("homeassistant.components.nextdns.PLATFORMS", [Platform.SENSOR]): entry = await init_integration(hass) - entity_entries = er.async_entries_for_config_entry(entity_registry, entry.entry_id) - - assert entity_entries - for entity_entry in entity_entries: - assert entity_entry == snapshot(name=f"{entity_entry.entity_id}-entry") - assert (state := hass.states.get(entity_entry.entity_id)) - assert state == snapshot(name=f"{entity_entry.entity_id}-state") + await snapshot_platform(hass, entity_registry, snapshot, entry.entry_id) async def test_availability( diff --git a/tests/components/nextdns/test_switch.py b/tests/components/nextdns/test_switch.py index 5e027c6789c..2936bad1c67 100644 --- a/tests/components/nextdns/test_switch.py +++ b/tests/components/nextdns/test_switch.py @@ -26,7 +26,7 @@ from homeassistant.util.dt import utcnow from . 
import init_integration, mock_nextdns -from tests.common import async_fire_time_changed +from tests.common import async_fire_time_changed, snapshot_platform async def test_switch( @@ -39,13 +39,7 @@ async def test_switch( with patch("homeassistant.components.nextdns.PLATFORMS", [Platform.SWITCH]): entry = await init_integration(hass) - entity_entries = er.async_entries_for_config_entry(entity_registry, entry.entry_id) - - assert entity_entries - for entity_entry in entity_entries: - assert entity_entry == snapshot(name=f"{entity_entry.entity_id}-entry") - assert (state := hass.states.get(entity_entry.entity_id)) - assert state == snapshot(name=f"{entity_entry.entity_id}-state") + await snapshot_platform(hass, entity_registry, snapshot, entry.entry_id) async def test_switch_on(hass: HomeAssistant) -> None: From d478b87af79cf7f2e62b8f1a73bf92bf893d2cb4 Mon Sep 17 00:00:00 2001 From: mtielen <6302356+mtielen@users.noreply.github.com> Date: Sat, 20 Apr 2024 19:09:32 +0200 Subject: [PATCH 206/426] Fix Wolf Smart Set Authentication and Session Management (#115815) * Fix Wolf Smart Set Authentication and Session Management Fix in the library to respect Wolf API token lifetime and implement Session Management * Updatie requirments * Update Code Owner --- CODEOWNERS | 4 ++-- homeassistant/components/wolflink/manifest.json | 4 ++-- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 4 files changed, 6 insertions(+), 6 deletions(-) diff --git a/CODEOWNERS b/CODEOWNERS index 98f52070ed1..0a833a94e4e 100644 --- a/CODEOWNERS +++ b/CODEOWNERS @@ -1582,8 +1582,8 @@ build.json @home-assistant/supervisor /tests/components/wiz/ @sbidy /homeassistant/components/wled/ @frenck /tests/components/wled/ @frenck -/homeassistant/components/wolflink/ @adamkrol93 -/tests/components/wolflink/ @adamkrol93 +/homeassistant/components/wolflink/ @adamkrol93 @mtielen +/tests/components/wolflink/ @adamkrol93 @mtielen /homeassistant/components/workday/ @fabaff @gjohansson-ST 
/tests/components/workday/ @fabaff @gjohansson-ST /homeassistant/components/worldclock/ @fabaff diff --git a/homeassistant/components/wolflink/manifest.json b/homeassistant/components/wolflink/manifest.json index 6b51c0fb2cb..88dcce39993 100644 --- a/homeassistant/components/wolflink/manifest.json +++ b/homeassistant/components/wolflink/manifest.json @@ -1,10 +1,10 @@ { "domain": "wolflink", "name": "Wolf SmartSet Service", - "codeowners": ["@adamkrol93"], + "codeowners": ["@adamkrol93", "@mtielen"], "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/wolflink", "iot_class": "cloud_polling", "loggers": ["wolf_comm"], - "requirements": ["wolf-comm==0.0.6"] + "requirements": ["wolf-comm==0.0.7"] } diff --git a/requirements_all.txt b/requirements_all.txt index a15dd411020..1f066526f58 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2866,7 +2866,7 @@ wirelesstagpy==0.8.1 wled==0.17.0 # homeassistant.components.wolflink -wolf-comm==0.0.6 +wolf-comm==0.0.7 # homeassistant.components.wyoming wyoming==1.5.3 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index ac721b30c22..91a3c65c3fd 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2222,7 +2222,7 @@ wiffi==1.1.2 wled==0.17.0 # homeassistant.components.wolflink -wolf-comm==0.0.6 +wolf-comm==0.0.7 # homeassistant.components.wyoming wyoming==1.5.3 From c7530937410c925e7bebe9eedc36fac6fc75c6fa Mon Sep 17 00:00:00 2001 From: Joost Lekkerkerker Date: Sat, 20 Apr 2024 19:10:56 +0200 Subject: [PATCH 207/426] Use snapshot test helper in AO Smith (#115890) --- .../aosmith/snapshots/test_sensor.ambr | 81 +++++++++++- .../aosmith/snapshots/test_water_heater.ambr | 121 ++++++++++++++---- tests/components/aosmith/test_sensor.py | 44 ++----- tests/components/aosmith/test_water_heater.py | 53 +++----- 4 files changed, 207 insertions(+), 92 deletions(-) diff --git a/tests/components/aosmith/snapshots/test_sensor.ambr 
b/tests/components/aosmith/snapshots/test_sensor.ambr index 150e0c2934f..7aae9713037 100644 --- a/tests/components/aosmith/snapshots/test_sensor.ambr +++ b/tests/components/aosmith/snapshots/test_sensor.ambr @@ -1,5 +1,43 @@ # serializer version: 1 -# name: test_state[sensor.my_water_heater_energy_usage] +# name: test_state[sensor.my_water_heater_energy_usage-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.my_water_heater_energy_usage', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Energy usage', + 'platform': 'aosmith', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'energy_usage', + 'unique_id': 'energy_usage_junctionId', + 'unit_of_measurement': , + }) +# --- +# name: test_state[sensor.my_water_heater_energy_usage-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'energy', @@ -15,7 +53,46 @@ 'state': '132.825', }) # --- -# name: test_state[sensor.my_water_heater_hot_water_availability] +# name: test_state[sensor.my_water_heater_hot_water_availability-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'low', + 'medium', + 'high', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.my_water_heater_hot_water_availability', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 
}), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Hot water availability', + 'platform': 'aosmith', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'hot_water_availability', + 'unique_id': 'hot_water_availability_junctionId', + 'unit_of_measurement': None, + }) +# --- +# name: test_state[sensor.my_water_heater_hot_water_availability-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'enum', diff --git a/tests/components/aosmith/snapshots/test_water_heater.ambr b/tests/components/aosmith/snapshots/test_water_heater.ambr index c3740341c17..deb079570f1 100644 --- a/tests/components/aosmith/snapshots/test_water_heater.ambr +++ b/tests/components/aosmith/snapshots/test_water_heater.ambr @@ -1,5 +1,103 @@ # serializer version: 1 -# name: test_state +# name: test_state[False][water_heater.my_water_heater-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max_temp': 130, + 'min_temp': 95, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'water_heater', + 'entity_category': None, + 'entity_id': 'water_heater.my_water_heater', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'aosmith', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': 'junctionId', + 'unit_of_measurement': None, + }) +# --- +# name: test_state[False][water_heater.my_water_heater-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'away_mode': 'off', + 'current_temperature': None, + 'friendly_name': 'My water heater', + 'max_temp': 130, + 'min_temp': 95, + 'supported_features': , + 'target_temp_high': None, + 'target_temp_low': None, + 'temperature': 130, + }), + 'context': , + 
'entity_id': 'water_heater.my_water_heater', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'electric', + }) +# --- +# name: test_state[True][water_heater.my_water_heater-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max_temp': 130, + 'min_temp': 95, + 'operation_list': list([ + 'electric', + 'eco', + 'heat_pump', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'water_heater', + 'entity_category': None, + 'entity_id': 'water_heater.my_water_heater', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'aosmith', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': 'junctionId', + 'unit_of_measurement': None, + }) +# --- +# name: test_state[True][water_heater.my_water_heater-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'away_mode': 'off', @@ -26,24 +124,3 @@ 'state': 'heat_pump', }) # --- -# name: test_state_non_heat_pump[False] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'away_mode': 'off', - 'current_temperature': None, - 'friendly_name': 'My water heater', - 'max_temp': 130, - 'min_temp': 95, - 'supported_features': , - 'target_temp_high': None, - 'target_temp_low': None, - 'temperature': 130, - }), - 'context': , - 'entity_id': 'water_heater.my_water_heater', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'electric', - }) -# --- diff --git a/tests/components/aosmith/test_sensor.py b/tests/components/aosmith/test_sensor.py index f94dfdb710c..d6acd8865d8 100644 --- a/tests/components/aosmith/test_sensor.py +++ b/tests/components/aosmith/test_sensor.py @@ -1,50 +1,30 @@ """Tests for the sensor platform of the A. O. 
Smith integration.""" +from collections.abc import AsyncGenerator +from unittest.mock import patch + import pytest from syrupy.assertion import SnapshotAssertion +from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er -from tests.common import MockConfigEntry +from tests.common import MockConfigEntry, snapshot_platform -@pytest.mark.parametrize( - ("entity_id", "unique_id"), - [ - ( - "sensor.my_water_heater_hot_water_availability", - "hot_water_availability_junctionId", - ), - ("sensor.my_water_heater_energy_usage", "energy_usage_junctionId"), - ], -) -async def test_setup( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - init_integration: MockConfigEntry, - entity_id: str, - unique_id: str, -) -> None: - """Test the setup of the sensor entities.""" - entry = entity_registry.async_get(entity_id) - assert entry - assert entry.unique_id == unique_id +@pytest.fixture(autouse=True) +async def platforms() -> AsyncGenerator[list[str], None]: + """Return the platforms to be loaded for this test.""" + with patch("homeassistant.components.aosmith.PLATFORMS", [Platform.SENSOR]): + yield -@pytest.mark.parametrize( - ("entity_id"), - [ - "sensor.my_water_heater_hot_water_availability", - "sensor.my_water_heater_energy_usage", - ], -) async def test_state( hass: HomeAssistant, init_integration: MockConfigEntry, snapshot: SnapshotAssertion, - entity_id: str, + entity_registry: er.EntityRegistry, ) -> None: """Test the state of the sensor entities.""" - state = hass.states.get(entity_id) - assert state == snapshot + await snapshot_platform(hass, entity_registry, snapshot, init_integration.entry_id) diff --git a/tests/components/aosmith/test_water_heater.py b/tests/components/aosmith/test_water_heater.py index a256f720c0a..567121ac0b0 100644 --- a/tests/components/aosmith/test_water_heater.py +++ b/tests/components/aosmith/test_water_heater.py @@ -1,6 +1,7 @@ """Tests for the 
water heater platform of the A. O. Smith integration.""" -from unittest.mock import MagicMock +from collections.abc import AsyncGenerator +from unittest.mock import MagicMock, patch from py_aosmith.models import OperationMode import pytest @@ -19,53 +20,33 @@ from homeassistant.components.water_heater import ( STATE_HEAT_PUMP, WaterHeaterEntityFeature, ) -from homeassistant.const import ( - ATTR_ENTITY_ID, - ATTR_FRIENDLY_NAME, - ATTR_SUPPORTED_FEATURES, -) +from homeassistant.const import ATTR_ENTITY_ID, ATTR_SUPPORTED_FEATURES, Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import entity_registry as er -from tests.common import MockConfigEntry +from tests.common import MockConfigEntry, snapshot_platform -async def test_setup( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - init_integration: MockConfigEntry, -) -> None: - """Test the setup of the water heater entity.""" - entry = entity_registry.async_get("water_heater.my_water_heater") - assert entry - assert entry.unique_id == "junctionId" - - state = hass.states.get("water_heater.my_water_heater") - assert state - assert state.attributes.get(ATTR_FRIENDLY_NAME) == "My water heater" - - -async def test_state( - hass: HomeAssistant, init_integration: MockConfigEntry, snapshot: SnapshotAssertion -) -> None: - """Test the state of the water heater entity.""" - state = hass.states.get("water_heater.my_water_heater") - assert state == snapshot +@pytest.fixture(autouse=True) +async def platforms() -> AsyncGenerator[list[str], None]: + """Return the platforms to be loaded for this test.""" + with patch("homeassistant.components.aosmith.PLATFORMS", [Platform.WATER_HEATER]): + yield @pytest.mark.parametrize( ("get_devices_fixture_heat_pump"), - [ - False, - ], + [False, True], ) -async def test_state_non_heat_pump( - hass: HomeAssistant, init_integration: MockConfigEntry, snapshot: SnapshotAssertion +async def 
test_state( + hass: HomeAssistant, + init_integration: MockConfigEntry, + snapshot: SnapshotAssertion, + entity_registry: er.EntityRegistry, ) -> None: - """Test the state of the water heater entity for a non heat pump device.""" - state = hass.states.get("water_heater.my_water_heater") - assert state == snapshot + """Test the state of the water heater entities.""" + await snapshot_platform(hass, entity_registry, snapshot, init_integration.entry_id) @pytest.mark.parametrize( From c94b0a82ca8ee6df1f8d5d11cb99be610141ffb0 Mon Sep 17 00:00:00 2001 From: Alberto Montes Date: Sat, 20 Apr 2024 20:01:49 +0200 Subject: [PATCH 208/426] Make release channel a hardcoded enum rather than a free form string (#115595) * Make release channel a hardcoded enum rather than a free form string * Update enum comparison to remove equality and us identity comparison * Fix comparison condition to match the previous implementation * Update tests to use Enum instead of string --- homeassistant/core.py | 18 ++++++++++------ homeassistant/helpers/device_registry.py | 26 ++++++++++++++++-------- homeassistant/helpers/entity.py | 3 ++- tests/helpers/test_device_registry.py | 10 ++++----- tests/helpers/test_entity.py | 9 ++++---- tests/test_core.py | 13 +++++++----- 6 files changed, 50 insertions(+), 29 deletions(-) diff --git a/homeassistant/core.py b/homeassistant/core.py index 01536f8ffdb..919e0adb758 100644 --- a/homeassistant/core.py +++ b/homeassistant/core.py @@ -36,7 +36,6 @@ from typing import ( TYPE_CHECKING, Any, Generic, - Literal, NotRequired, ParamSpec, Self, @@ -279,17 +278,24 @@ def async_get_hass() -> HomeAssistant: return _hass.hass +class ReleaseChannel(enum.StrEnum): + BETA = "beta" + DEV = "dev" + NIGHTLY = "nightly" + STABLE = "stable" + + @callback -def get_release_channel() -> Literal["beta", "dev", "nightly", "stable"]: +def get_release_channel() -> ReleaseChannel: """Find release channel based on version number.""" version = __version__ if "dev0" in version: - return 
"dev" + return ReleaseChannel.DEV if "dev" in version: - return "nightly" + return ReleaseChannel.NIGHTLY if "b" in version: - return "beta" - return "stable" + return ReleaseChannel.BETA + return ReleaseChannel.STABLE @enum.unique diff --git a/homeassistant/helpers/device_registry.py b/homeassistant/helpers/device_registry.py index 3a9d047810b..00d0a0ba62f 100644 --- a/homeassistant/helpers/device_registry.py +++ b/homeassistant/helpers/device_registry.py @@ -13,7 +13,13 @@ import attr from yarl import URL from homeassistant.const import EVENT_HOMEASSISTANT_STARTED, EVENT_HOMEASSISTANT_STOP -from homeassistant.core import Event, HomeAssistant, callback, get_release_channel +from homeassistant.core import ( + Event, + HomeAssistant, + ReleaseChannel, + callback, + get_release_channel, +) from homeassistant.exceptions import HomeAssistantError from homeassistant.loader import async_suggest_report_issue from homeassistant.util.event_type import EventType @@ -608,7 +614,7 @@ class DeviceRegistry(BaseRegistry[dict[str, list[dict[str, Any]]]]): try: return name.format(**translation_placeholders) except KeyError as err: - if get_release_channel() != "stable": + if get_release_channel() is not ReleaseChannel.STABLE: raise HomeAssistantError("Missing placeholder %s" % err) from err report_issue = async_suggest_report_issue( self.hass, integration_domain=domain @@ -963,12 +969,16 @@ class DeviceRegistry(BaseRegistry[dict[str, list[dict[str, Any]]]]): tuple(conn) # type: ignore[misc] for conn in device["connections"] }, - disabled_by=DeviceEntryDisabler(device["disabled_by"]) - if device["disabled_by"] - else None, - entry_type=DeviceEntryType(device["entry_type"]) - if device["entry_type"] - else None, + disabled_by=( + DeviceEntryDisabler(device["disabled_by"]) + if device["disabled_by"] + else None + ), + entry_type=( + DeviceEntryType(device["entry_type"]) + if device["entry_type"] + else None + ), hw_version=device["hw_version"], id=device["id"], identifiers={ diff 
--git a/homeassistant/helpers/entity.py b/homeassistant/helpers/entity.py index 20948a7130a..086def8a8be 100644 --- a/homeassistant/helpers/entity.py +++ b/homeassistant/helpers/entity.py @@ -52,6 +52,7 @@ from homeassistant.core import ( Event, HassJobType, HomeAssistant, + ReleaseChannel, callback, get_hassjob_callable_job_type, get_release_channel, @@ -657,7 +658,7 @@ class Entity( return name.format(**self.translation_placeholders) except KeyError as err: if not self._name_translation_placeholders_reported: - if get_release_channel() != "stable": + if get_release_channel() is not ReleaseChannel.STABLE: raise HomeAssistantError("Missing placeholder %s" % err) from err report_issue = self._suggest_report_issue() _LOGGER.warning( diff --git a/tests/helpers/test_device_registry.py b/tests/helpers/test_device_registry.py index bed3dea4dc1..ee895e3fd3e 100644 --- a/tests/helpers/test_device_registry.py +++ b/tests/helpers/test_device_registry.py @@ -11,7 +11,7 @@ from yarl import URL from homeassistant import config_entries from homeassistant.const import EVENT_HOMEASSISTANT_STARTED -from homeassistant.core import CoreState, HomeAssistant +from homeassistant.core import CoreState, HomeAssistant, ReleaseChannel from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import ( area_registry as ar, @@ -2390,7 +2390,7 @@ async def test_device_name_translation_placeholders( }, }, {"placeholder": "special"}, - "stable", + ReleaseChannel.STABLE, nullcontext(), ( "has translation placeholders '{'placeholder': 'special'}' which do " @@ -2405,7 +2405,7 @@ async def test_device_name_translation_placeholders( }, }, {"placeholder": "special"}, - "beta", + ReleaseChannel.BETA, pytest.raises( HomeAssistantError, match="Missing placeholder '2ndplaceholder'" ), @@ -2419,7 +2419,7 @@ async def test_device_name_translation_placeholders( }, }, None, - "stable", + ReleaseChannel.STABLE, nullcontext(), ( "has translation placeholders '{}' which do " @@ -2434,7 
+2434,7 @@ async def test_device_name_translation_placeholders_errors( translation_key: str | None, translations: dict[str, str] | None, placeholders: dict[str, str] | None, - release_channel: str, + release_channel: ReleaseChannel, expectation: AbstractContextManager, expected_error: str, caplog: pytest.LogCaptureFixture, diff --git a/tests/helpers/test_entity.py b/tests/helpers/test_entity.py index 70d917dbc7b..fb2793a75c7 100644 --- a/tests/helpers/test_entity.py +++ b/tests/helpers/test_entity.py @@ -28,6 +28,7 @@ from homeassistant.core import ( HassJobType, HomeAssistant, HomeAssistantError, + ReleaseChannel, callback, ) from homeassistant.helpers import device_registry as dr, entity, entity_registry as er @@ -1249,7 +1250,7 @@ async def test_entity_name_translation_placeholders( }, }, {"placeholder": "special"}, - "stable", + ReleaseChannel.STABLE, ( "has translation placeholders '{'placeholder': 'special'}' which do " "not match the name '{placeholder} English ent {2ndplaceholder}'" @@ -1263,7 +1264,7 @@ async def test_entity_name_translation_placeholders( }, }, {"placeholder": "special"}, - "beta", + ReleaseChannel.BETA, "HomeAssistantError: Missing placeholder '2ndplaceholder'", ), ( @@ -1274,7 +1275,7 @@ async def test_entity_name_translation_placeholders( }, }, None, - "stable", + ReleaseChannel.STABLE, ( "has translation placeholders '{}' which do " "not match the name '{placeholder} English ent'" @@ -1287,7 +1288,7 @@ async def test_entity_name_translation_placeholder_errors( translation_key: str | None, translations: dict[str, str] | None, placeholders: dict[str, str] | None, - release_channel: str, + release_channel: ReleaseChannel, expected_error: str, caplog: pytest.LogCaptureFixture, ) -> None: diff --git a/tests/test_core.py b/tests/test_core.py index 5d687d89833..8f0d7f53277 100644 --- a/tests/test_core.py +++ b/tests/test_core.py @@ -42,6 +42,7 @@ from homeassistant.core import ( CoreState, HassJob, HomeAssistant, + ReleaseChannel, 
ServiceCall, ServiceResponse, State, @@ -3060,13 +3061,15 @@ async def test_validate_state(hass: HomeAssistant) -> None: @pytest.mark.parametrize( ("version", "release_channel"), [ - ("0.115.0.dev20200815", "nightly"), - ("0.115.0", "stable"), - ("0.115.0b4", "beta"), - ("0.115.0dev0", "dev"), + ("0.115.0.dev20200815", ReleaseChannel.NIGHTLY), + ("0.115.0", ReleaseChannel.STABLE), + ("0.115.0b4", ReleaseChannel.BETA), + ("0.115.0dev0", ReleaseChannel.DEV), ], ) -async def test_get_release_channel(version: str, release_channel: str) -> None: +async def test_get_release_channel( + version: str, release_channel: ReleaseChannel +) -> None: """Test if release channel detection works from Home Assistant version number.""" with patch("homeassistant.core.__version__", f"{version}"): assert get_release_channel() == release_channel From ee116713cf2838de4b99ef4bd1b6694210a7f6f8 Mon Sep 17 00:00:00 2001 From: Joost Lekkerkerker Date: Sat, 20 Apr 2024 21:27:54 +0200 Subject: [PATCH 209/426] Use snapshot test helper in Analytics insights (#115889) --- tests/components/analytics_insights/test_sensor.py | 13 +++---------- 1 file changed, 3 insertions(+), 10 deletions(-) diff --git a/tests/components/analytics_insights/test_sensor.py b/tests/components/analytics_insights/test_sensor.py index e0850bbd55b..3ede971c8f8 100644 --- a/tests/components/analytics_insights/test_sensor.py +++ b/tests/components/analytics_insights/test_sensor.py @@ -16,7 +16,7 @@ from homeassistant.helpers import entity_registry as er from . 
import setup_integration -from tests.common import MockConfigEntry, async_fire_time_changed +from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform async def test_all_entities( @@ -32,17 +32,10 @@ async def test_all_entities( [Platform.SENSOR], ): await setup_integration(hass, mock_config_entry) - entity_entries = er.async_entries_for_config_entry( - entity_registry, mock_config_entry.entry_id + await snapshot_platform( + hass, entity_registry, snapshot, mock_config_entry.entry_id ) - assert entity_entries - for entity_entry in entity_entries: - assert hass.states.get(entity_entry.entity_id) == snapshot( - name=f"{entity_entry.entity_id}-state" - ) - assert entity_entry == snapshot(name=f"{entity_entry.entity_id}-entry") - async def test_connection_error( hass: HomeAssistant, From 48d1692cd6e26c9aec6e2e10fc562257f5222631 Mon Sep 17 00:00:00 2001 From: Joost Lekkerkerker Date: Sat, 20 Apr 2024 21:29:14 +0200 Subject: [PATCH 210/426] Use snapshot test helper in Ambient Network (#115887) --- .../snapshots/test_sensor.ambr | 119 ++++++++++++++++-- .../components/ambient_network/test_sensor.py | 14 +-- 2 files changed, 110 insertions(+), 23 deletions(-) diff --git a/tests/components/ambient_network/snapshots/test_sensor.ambr b/tests/components/ambient_network/snapshots/test_sensor.ambr index 377018c54be..fadb15ad015 100644 --- a/tests/components/ambient_network/snapshots/test_sensor.ambr +++ b/tests/components/ambient_network/snapshots/test_sensor.ambr @@ -10,7 +10,7 @@ 'config_entry_id': , 'device_class': None, 'device_id': , - 'disabled_by': , + 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, 'entity_id': 'sensor.station_a_absolute_pressure', @@ -22,6 +22,9 @@ }), 'name': None, 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), 'sensor.private': dict({ 'suggested_unit_of_measurement': , }), @@ -38,7 +41,21 @@ }) # --- # name: 
test_sensors[AA:AA:AA:AA:AA:AA][sensor.station_a_absolute_pressure-state] - None + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by ambientnetwork.net', + 'device_class': 'pressure', + 'friendly_name': 'Station A Absolute pressure', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.station_a_absolute_pressure', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '977.616536580043', + }) # --- # name: test_sensors[AA:AA:AA:AA:AA:AA][sensor.station_a_daily_rain-entry] EntityRegistryEntrySnapshot({ @@ -332,7 +349,7 @@ 'config_entry_id': , 'device_class': None, 'device_id': , - 'disabled_by': , + 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, 'entity_id': 'sensor.station_a_irradiance', @@ -344,6 +361,9 @@ }), 'name': None, 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), }), 'original_device_class': , 'original_icon': None, @@ -357,7 +377,21 @@ }) # --- # name: test_sensors[AA:AA:AA:AA:AA:AA][sensor.station_a_irradiance-state] - None + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by ambientnetwork.net', + 'device_class': 'irradiance', + 'friendly_name': 'Station A Irradiance', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.station_a_irradiance', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '37.64', + }) # --- # name: test_sensors[AA:AA:AA:AA:AA:AA][sensor.station_a_last_rain-entry] EntityRegistryEntrySnapshot({ @@ -368,7 +402,7 @@ 'config_entry_id': , 'device_class': None, 'device_id': , - 'disabled_by': , + 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, 'entity_id': 'sensor.station_a_last_rain', @@ -393,7 +427,19 @@ }) # --- # name: test_sensors[AA:AA:AA:AA:AA:AA][sensor.station_a_last_rain-state] - None + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by ambientnetwork.net', 
+ 'device_class': 'timestamp', + 'friendly_name': 'Station A Last rain', + }), + 'context': , + 'entity_id': 'sensor.station_a_last_rain', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2023-10-30T09:45:00+00:00', + }) # --- # name: test_sensors[AA:AA:AA:AA:AA:AA][sensor.station_a_max_daily_gust-entry] EntityRegistryEntrySnapshot({ @@ -464,7 +510,7 @@ 'config_entry_id': , 'device_class': None, 'device_id': , - 'disabled_by': , + 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, 'entity_id': 'sensor.station_a_monthly_rain', @@ -476,6 +522,9 @@ }), 'name': None, 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), 'sensor.private': dict({ 'suggested_unit_of_measurement': , }), @@ -492,7 +541,21 @@ }) # --- # name: test_sensors[AA:AA:AA:AA:AA:AA][sensor.station_a_monthly_rain-state] - None + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by ambientnetwork.net', + 'device_class': 'precipitation', + 'friendly_name': 'Station A Monthly rain', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.station_a_monthly_rain', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) # --- # name: test_sensors[AA:AA:AA:AA:AA:AA][sensor.station_a_relative_pressure-entry] EntityRegistryEntrySnapshot({ @@ -672,7 +735,7 @@ 'config_entry_id': , 'device_class': None, 'device_id': , - 'disabled_by': , + 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, 'entity_id': 'sensor.station_a_weekly_rain', @@ -684,6 +747,9 @@ }), 'name': None, 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), 'sensor.private': dict({ 'suggested_unit_of_measurement': , }), @@ -700,7 +766,21 @@ }) # --- # name: test_sensors[AA:AA:AA:AA:AA:AA][sensor.station_a_weekly_rain-state] - None + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by ambientnetwork.net', + 'device_class': 
'precipitation', + 'friendly_name': 'Station A Weekly rain', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.station_a_weekly_rain', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) # --- # name: test_sensors[AA:AA:AA:AA:AA:AA][sensor.station_a_wind_direction-entry] EntityRegistryEntrySnapshot({ @@ -711,7 +791,7 @@ 'config_entry_id': , 'device_class': None, 'device_id': , - 'disabled_by': , + 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, 'entity_id': 'sensor.station_a_wind_direction', @@ -723,6 +803,9 @@ }), 'name': None, 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 0, + }), }), 'original_device_class': None, 'original_icon': None, @@ -736,7 +819,19 @@ }) # --- # name: test_sensors[AA:AA:AA:AA:AA:AA][sensor.station_a_wind_direction-state] - None + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by ambientnetwork.net', + 'friendly_name': 'Station A Wind direction', + 'unit_of_measurement': '°', + }), + 'context': , + 'entity_id': 'sensor.station_a_wind_direction', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '11', + }) # --- # name: test_sensors[AA:AA:AA:AA:AA:AA][sensor.station_a_wind_gust-entry] EntityRegistryEntrySnapshot({ diff --git a/tests/components/ambient_network/test_sensor.py b/tests/components/ambient_network/test_sensor.py index b556c0c9c7c..35aa90ffe05 100644 --- a/tests/components/ambient_network/test_sensor.py +++ b/tests/components/ambient_network/test_sensor.py @@ -14,11 +14,12 @@ from homeassistant.helpers import entity_registry as er from .conftest import setup_platform -from tests.common import async_fire_time_changed +from tests.common import async_fire_time_changed, snapshot_platform @freeze_time("2023-11-08") @pytest.mark.parametrize("config_entry", ["AA:AA:AA:AA:AA:AA"], indirect=True) +@pytest.mark.usefixtures("entity_registry_enabled_by_default") async def 
test_sensors( hass: HomeAssistant, open_api: OpenAPI, @@ -30,16 +31,7 @@ async def test_sensors( """Test all sensors under normal operation.""" await setup_platform(True, hass, config_entry) - entity_entries = er.async_entries_for_config_entry( - entity_registry, config_entry.entry_id - ) - - assert entity_entries - for entity_entry in entity_entries: - assert hass.states.get(entity_entry.entity_id) == snapshot( - name=f"{entity_entry.entity_id}-state" - ) - assert entity_entry == snapshot(name=f"{entity_entry.entity_id}-entry") + await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) @freeze_time("2023-11-09") From b450918f66c951e8dc4cc36fadfb0f5e07805141 Mon Sep 17 00:00:00 2001 From: Sid <27780930+autinerd@users.noreply.github.com> Date: Sat, 20 Apr 2024 21:35:02 +0200 Subject: [PATCH 211/426] Bump ruff to 0.4.1 (#115873) --- .pre-commit-config.yaml | 2 +- pyproject.toml | 2 +- requirements_test_pre_commit.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index cd42fecbfa1..ceb8ee7f9c4 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,6 +1,6 @@ repos: - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.3.7 + rev: v0.4.1 hooks: - id: ruff args: diff --git a/pyproject.toml b/pyproject.toml index 4b3b15f7bde..91f75c96fd6 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -659,7 +659,7 @@ filterwarnings = [ ] [tool.ruff] -required-version = ">=0.3.7" +required-version = ">=0.4.1" [tool.ruff.lint] select = [ diff --git a/requirements_test_pre_commit.txt b/requirements_test_pre_commit.txt index 46ade953da2..4f21f6d4a0c 100644 --- a/requirements_test_pre_commit.txt +++ b/requirements_test_pre_commit.txt @@ -1,5 +1,5 @@ # Automatically generated from .pre-commit-config.yaml by gen_requirements_all.py, do not edit codespell==2.2.6 -ruff==0.3.7 +ruff==0.4.1 yamllint==1.35.1 From 68225abce557d73dc401618428e8d200a8139bbc Mon Sep 17 00:00:00 
2001 From: r-binder <40315895+r-binder@users.noreply.github.com> Date: Sat, 20 Apr 2024 23:08:29 +0200 Subject: [PATCH 212/426] Add tls support for AVM Fritz!Tools (#112714) --- homeassistant/components/fritz/__init__.py | 10 +- homeassistant/components/fritz/common.py | 8 +- homeassistant/components/fritz/config_flow.py | 39 ++++- homeassistant/components/fritz/const.py | 4 +- homeassistant/components/fritz/strings.json | 6 +- tests/components/fritz/conftest.py | 14 +- tests/components/fritz/const.py | 11 ++ tests/components/fritz/test_config_flow.py | 143 +++++++++++++++--- tests/components/fritz/test_switch.py | 26 +++- 9 files changed, 210 insertions(+), 51 deletions(-) diff --git a/homeassistant/components/fritz/__init__.py b/homeassistant/components/fritz/__init__.py index ba9e2191901..bab97569eda 100644 --- a/homeassistant/components/fritz/__init__.py +++ b/homeassistant/components/fritz/__init__.py @@ -3,13 +3,20 @@ import logging from homeassistant.config_entries import ConfigEntry -from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_PORT, CONF_USERNAME +from homeassistant.const import ( + CONF_HOST, + CONF_PASSWORD, + CONF_PORT, + CONF_SSL, + CONF_USERNAME, +) from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady from .common import AvmWrapper, FritzData from .const import ( DATA_FRITZ, + DEFAULT_SSL, DOMAIN, FRITZ_AUTH_EXCEPTIONS, FRITZ_EXCEPTIONS, @@ -29,6 +36,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: port=entry.data[CONF_PORT], username=entry.data[CONF_USERNAME], password=entry.data[CONF_PASSWORD], + use_tls=entry.data.get(CONF_SSL, DEFAULT_SSL), ) try: diff --git a/homeassistant/components/fritz/common.py b/homeassistant/components/fritz/common.py index e4d5e92b742..f051c824847 100644 --- a/homeassistant/components/fritz/common.py +++ b/homeassistant/components/fritz/common.py @@ -48,7 +48,7 @@ from .const import ( 
DEFAULT_CONF_OLD_DISCOVERY, DEFAULT_DEVICE_NAME, DEFAULT_HOST, - DEFAULT_PORT, + DEFAULT_SSL, DEFAULT_USERNAME, DOMAIN, FRITZ_EXCEPTIONS, @@ -184,9 +184,10 @@ class FritzBoxTools( self, hass: HomeAssistant, password: str, + port: int, username: str = DEFAULT_USERNAME, host: str = DEFAULT_HOST, - port: int = DEFAULT_PORT, + use_tls: bool = DEFAULT_SSL, ) -> None: """Initialize FritzboxTools class.""" super().__init__( @@ -211,6 +212,7 @@ class FritzBoxTools( self.password = password self.port = port self.username = username + self.use_tls = use_tls self.has_call_deflections: bool = False self._model: str | None = None self._current_firmware: str | None = None @@ -230,11 +232,13 @@ class FritzBoxTools( def setup(self) -> None: """Set up FritzboxTools class.""" + self.connection = FritzConnection( address=self.host, port=self.port, user=self.username, password=self.password, + use_tls=self.use_tls, timeout=60.0, pool_maxsize=30, ) diff --git a/homeassistant/components/fritz/config_flow.py b/homeassistant/components/fritz/config_flow.py index a217adf935c..1cfa3af39fb 100644 --- a/homeassistant/components/fritz/config_flow.py +++ b/homeassistant/components/fritz/config_flow.py @@ -25,14 +25,22 @@ from homeassistant.config_entries import ( OptionsFlow, OptionsFlowWithConfigEntry, ) -from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_PORT, CONF_USERNAME +from homeassistant.const import ( + CONF_HOST, + CONF_PASSWORD, + CONF_PORT, + CONF_SSL, + CONF_USERNAME, +) from homeassistant.core import callback from .const import ( CONF_OLD_DISCOVERY, DEFAULT_CONF_OLD_DISCOVERY, DEFAULT_HOST, - DEFAULT_PORT, + DEFAULT_HTTP_PORT, + DEFAULT_HTTPS_PORT, + DEFAULT_SSL, DOMAIN, ERROR_AUTH_INVALID, ERROR_CANNOT_CONNECT, @@ -61,6 +69,7 @@ class FritzBoxToolsFlowHandler(ConfigFlow, domain=DOMAIN): self._entry: ConfigEntry | None = None self._name: str = "" self._password: str = "" + self._use_tls: bool = False self._port: int | None = None self._username: str = "" self._model: 
str = "" @@ -74,6 +83,7 @@ class FritzBoxToolsFlowHandler(ConfigFlow, domain=DOMAIN): port=self._port, user=self._username, password=self._password, + use_tls=self._use_tls, timeout=60.0, pool_maxsize=30, ) @@ -120,6 +130,7 @@ class FritzBoxToolsFlowHandler(ConfigFlow, domain=DOMAIN): CONF_PASSWORD: self._password, CONF_PORT: self._port, CONF_USERNAME: self._username, + CONF_SSL: self._use_tls, }, options={ CONF_CONSIDER_HOME: DEFAULT_CONSIDER_HOME.total_seconds(), @@ -133,7 +144,6 @@ class FritzBoxToolsFlowHandler(ConfigFlow, domain=DOMAIN): """Handle a flow initialized by discovery.""" ssdp_location: ParseResult = urlparse(discovery_info.ssdp_location or "") self._host = ssdp_location.hostname - self._port = ssdp_location.port self._name = ( discovery_info.upnp.get(ssdp.ATTR_UPNP_FRIENDLY_NAME) or discovery_info.upnp[ssdp.ATTR_UPNP_MODEL_NAME] @@ -178,6 +188,8 @@ class FritzBoxToolsFlowHandler(ConfigFlow, domain=DOMAIN): self._username = user_input[CONF_USERNAME] self._password = user_input[CONF_PASSWORD] + self._use_tls = user_input[CONF_SSL] + self._port = DEFAULT_HTTPS_PORT if self._use_tls else DEFAULT_HTTP_PORT error = await self.hass.async_add_executor_job(self.fritz_tools_init) @@ -191,14 +203,22 @@ class FritzBoxToolsFlowHandler(ConfigFlow, domain=DOMAIN): self, errors: dict[str, str] | None = None ) -> ConfigFlowResult: """Show the setup form to the user.""" + + advanced_data_schema = {} + if self.show_advanced_options: + advanced_data_schema = { + vol.Optional(CONF_PORT): vol.Coerce(int), + } + return self.async_show_form( step_id="user", data_schema=vol.Schema( { vol.Optional(CONF_HOST, default=DEFAULT_HOST): str, - vol.Optional(CONF_PORT, default=DEFAULT_PORT): vol.Coerce(int), + **advanced_data_schema, vol.Required(CONF_USERNAME): str, vol.Required(CONF_PASSWORD): str, + vol.Optional(CONF_SSL, default=DEFAULT_SSL): bool, } ), errors=errors or {}, @@ -214,6 +234,7 @@ class FritzBoxToolsFlowHandler(ConfigFlow, domain=DOMAIN): { 
vol.Required(CONF_USERNAME): str, vol.Required(CONF_PASSWORD): str, + vol.Optional(CONF_SSL, default=DEFAULT_SSL): bool, } ), description_placeholders={"name": self._name}, @@ -227,9 +248,14 @@ class FritzBoxToolsFlowHandler(ConfigFlow, domain=DOMAIN): if user_input is None: return self._show_setup_form_init() self._host = user_input[CONF_HOST] - self._port = user_input[CONF_PORT] self._username = user_input[CONF_USERNAME] self._password = user_input[CONF_PASSWORD] + self._use_tls = user_input[CONF_SSL] + + if (port := user_input.get(CONF_PORT)) is None: + self._port = DEFAULT_HTTPS_PORT if self._use_tls else DEFAULT_HTTP_PORT + else: + self._port = port if not (error := await self.hass.async_add_executor_job(self.fritz_tools_init)): self._name = self._model @@ -251,6 +277,8 @@ class FritzBoxToolsFlowHandler(ConfigFlow, domain=DOMAIN): self._port = entry_data[CONF_PORT] self._username = entry_data[CONF_USERNAME] self._password = entry_data[CONF_PASSWORD] + self._use_tls = entry_data[CONF_SSL] + return await self.async_step_reauth_confirm() def _show_setup_form_reauth_confirm( @@ -295,6 +323,7 @@ class FritzBoxToolsFlowHandler(ConfigFlow, domain=DOMAIN): CONF_PASSWORD: self._password, CONF_PORT: self._port, CONF_USERNAME: self._username, + CONF_SSL: self._use_tls, }, ) await self.hass.config_entries.async_reload(self._entry.entry_id) diff --git a/homeassistant/components/fritz/const.py b/homeassistant/components/fritz/const.py index caa7d44c378..3794a83dd7f 100644 --- a/homeassistant/components/fritz/const.py +++ b/homeassistant/components/fritz/const.py @@ -46,8 +46,10 @@ DSL_CONNECTION: Literal["dsl"] = "dsl" DEFAULT_DEVICE_NAME = "Unknown device" DEFAULT_HOST = "192.168.178.1" -DEFAULT_PORT = 49000 +DEFAULT_HTTP_PORT = 49000 +DEFAULT_HTTPS_PORT = 49443 DEFAULT_USERNAME = "" +DEFAULT_SSL = False ERROR_AUTH_INVALID = "invalid_auth" ERROR_CANNOT_CONNECT = "cannot_connect" diff --git a/homeassistant/components/fritz/strings.json 
b/homeassistant/components/fritz/strings.json index 5eed2f59fc4..4899edb6938 100644 --- a/homeassistant/components/fritz/strings.json +++ b/homeassistant/components/fritz/strings.json @@ -25,10 +25,12 @@ "host": "[%key:common::config_flow::data::host%]", "port": "[%key:common::config_flow::data::port%]", "username": "[%key:common::config_flow::data::username%]", - "password": "[%key:common::config_flow::data::password%]" + "password": "[%key:common::config_flow::data::password%]", + "ssl": "[%key:common::config_flow::data::ssl%]" }, "data_description": { - "host": "The hostname or IP address of your FRITZ!Box router." + "host": "The hostname or IP address of your FRITZ!Box router.", + "port": "Leave it empty to use the default port." } } }, diff --git a/tests/components/fritz/conftest.py b/tests/components/fritz/conftest.py index e32ca55f65d..acf6b0e98cd 100644 --- a/tests/components/fritz/conftest.py +++ b/tests/components/fritz/conftest.py @@ -74,16 +74,6 @@ class FritzConnectionMock: return self._services[service][action] -class FritzHostMock(FritzHosts): - """FritzHosts mocking.""" - - get_mesh_topology = MagicMock() - get_mesh_topology.return_value = MOCK_MESH_DATA - - get_hosts_attributes = MagicMock() - get_hosts_attributes.return_value = MOCK_HOST_ATTRIBUTES_DATA - - @pytest.fixture(name="fc_data") def fc_data_mock(): """Fixture for default fc_data.""" @@ -105,6 +95,8 @@ def fh_class_mock(): """Fixture that sets up a mocked FritzHosts class.""" with patch( "homeassistant.components.fritz.common.FritzHosts", - new=FritzHostMock, + new=FritzHosts, ) as result: + result.get_mesh_topology = MagicMock(return_value=MOCK_MESH_DATA) + result.get_hosts_attributes = MagicMock(return_value=MOCK_HOST_ATTRIBUTES_DATA) yield result diff --git a/tests/components/fritz/const.py b/tests/components/fritz/const.py index ce530e32964..0d1222dfcda 100644 --- a/tests/components/fritz/const.py +++ b/tests/components/fritz/const.py @@ -8,6 +8,7 @@ from homeassistant.const import ( 
CONF_HOST, CONF_PASSWORD, CONF_PORT, + CONF_SSL, CONF_USERNAME, ) @@ -22,10 +23,12 @@ MOCK_CONFIG = { CONF_PORT: "1234", CONF_PASSWORD: "fake_pass", CONF_USERNAME: "fake_user", + CONF_SSL: False, } ] } } + MOCK_HOST = "fake_host" MOCK_IPS = { "fritz.box": "192.168.178.1", @@ -902,6 +905,14 @@ MOCK_HOST_ATTRIBUTES_DATA = [ ] MOCK_USER_DATA = MOCK_CONFIG[DOMAIN][CONF_DEVICES][0] +MOCK_USER_INPUT_ADVANCED = MOCK_USER_DATA +MOCK_USER_INPUT_SIMPLE = { + CONF_HOST: "fake_host", + CONF_PASSWORD: "fake_pass", + CONF_USERNAME: "fake_user", + CONF_SSL: False, +} + MOCK_DEVICE_INFO = { ATTR_HOST: MOCK_HOST, ATTR_NEW_SERIAL_NUMBER: MOCK_SERIAL_NUMBER, diff --git a/tests/components/fritz/test_config_flow.py b/tests/components/fritz/test_config_flow.py index 074d32bf0ca..64bf3cd9064 100644 --- a/tests/components/fritz/test_config_flow.py +++ b/tests/components/fritz/test_config_flow.py @@ -24,7 +24,13 @@ from homeassistant.components.fritz.const import ( ) from homeassistant.components.ssdp import ATTR_UPNP_UDN from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_SSDP, SOURCE_USER -from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME +from homeassistant.const import ( + CONF_HOST, + CONF_PASSWORD, + CONF_PORT, + CONF_SSL, + CONF_USERNAME, +) from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -34,12 +40,59 @@ from .const import ( MOCK_REQUEST, MOCK_SSDP_DATA, MOCK_USER_DATA, + MOCK_USER_INPUT_ADVANCED, + MOCK_USER_INPUT_SIMPLE, ) from tests.common import MockConfigEntry -async def test_user(hass: HomeAssistant, fc_class_mock, mock_get_source_ip) -> None: +@pytest.mark.parametrize( + ("show_advanced_options", "user_input", "expected_config"), + [ + ( + True, + MOCK_USER_INPUT_ADVANCED, + { + CONF_HOST: "fake_host", + CONF_PASSWORD: "fake_pass", + CONF_USERNAME: "fake_user", + CONF_PORT: 1234, + CONF_SSL: False, + }, + ), + ( + False, + MOCK_USER_INPUT_SIMPLE, + { + CONF_HOST: "fake_host", + 
CONF_PASSWORD: "fake_pass", + CONF_USERNAME: "fake_user", + CONF_PORT: 49000, + CONF_SSL: False, + }, + ), + ( + False, + {**MOCK_USER_INPUT_SIMPLE, CONF_SSL: True}, + { + CONF_HOST: "fake_host", + CONF_PASSWORD: "fake_pass", + CONF_USERNAME: "fake_user", + CONF_PORT: 49443, + CONF_SSL: True, + }, + ), + ], +) +async def test_user( + hass: HomeAssistant, + fc_class_mock, + mock_get_source_ip, + show_advanced_options: bool, + user_input: dict, + expected_config: dict, +) -> None: """Test starting a flow by user.""" with ( patch( @@ -68,18 +121,20 @@ async def test_user(hass: HomeAssistant, fc_class_mock, mock_get_source_ip) -> N mock_request_post.return_value.text = MOCK_REQUEST result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_USER} + DOMAIN, + context={ + "source": SOURCE_USER, + "show_advanced_options": show_advanced_options, + }, ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "user" result = await hass.config_entries.flow.async_configure( - result["flow_id"], user_input=MOCK_USER_DATA + result["flow_id"], user_input=user_input ) assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["data"][CONF_HOST] == "fake_host" - assert result["data"][CONF_PASSWORD] == "fake_pass" - assert result["data"][CONF_USERNAME] == "fake_user" + assert result["data"] == expected_config assert ( result["options"][CONF_CONSIDER_HOME] == DEFAULT_CONSIDER_HOME.total_seconds() @@ -90,12 +145,20 @@ async def test_user(hass: HomeAssistant, fc_class_mock, mock_get_source_ip) -> N assert mock_setup_entry.called +@pytest.mark.parametrize( + ("show_advanced_options", "user_input"), + [(True, MOCK_USER_INPUT_ADVANCED), (False, MOCK_USER_INPUT_SIMPLE)], +) async def test_user_already_configured( - hass: HomeAssistant, fc_class_mock, mock_get_source_ip + hass: HomeAssistant, + fc_class_mock, + mock_get_source_ip, + show_advanced_options: bool, + user_input, ) -> None: """Test starting a flow by user with an already 
configured device.""" - mock_config = MockConfigEntry(domain=DOMAIN, data=MOCK_USER_DATA) + mock_config = MockConfigEntry(domain=DOMAIN, data=user_input) mock_config.add_to_hass(hass) with ( @@ -124,13 +187,17 @@ async def test_user_already_configured( mock_request_post.return_value.text = MOCK_REQUEST result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_USER} + DOMAIN, + context={ + "source": SOURCE_USER, + "show_advanced_options": show_advanced_options, + }, ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "user" result = await hass.config_entries.flow.async_configure( - result["flow_id"], user_input=MOCK_USER_DATA + result["flow_id"], user_input=MOCK_USER_INPUT_SIMPLE ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "user" @@ -141,13 +208,22 @@ async def test_user_already_configured( "error", FRITZ_AUTH_EXCEPTIONS, ) +@pytest.mark.parametrize( + ("show_advanced_options", "user_input"), + [(True, MOCK_USER_INPUT_ADVANCED), (False, MOCK_USER_INPUT_SIMPLE)], +) async def test_exception_security( - hass: HomeAssistant, mock_get_source_ip, error + hass: HomeAssistant, + mock_get_source_ip, + error, + show_advanced_options: bool, + user_input, ) -> None: """Test starting a flow by user with invalid credentials.""" result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_USER} + DOMAIN, + context={"source": SOURCE_USER, "show_advanced_options": show_advanced_options}, ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "user" @@ -157,7 +233,7 @@ async def test_exception_security( side_effect=error, ): result = await hass.config_entries.flow.async_configure( - result["flow_id"], user_input=MOCK_USER_DATA + result["flow_id"], user_input=user_input ) assert result["type"] is FlowResultType.FORM @@ -165,11 +241,21 @@ async def test_exception_security( assert result["errors"]["base"] == ERROR_AUTH_INVALID -async def 
test_exception_connection(hass: HomeAssistant, mock_get_source_ip) -> None: +@pytest.mark.parametrize( + ("show_advanced_options", "user_input"), + [(True, MOCK_USER_INPUT_ADVANCED), (False, MOCK_USER_INPUT_SIMPLE)], +) +async def test_exception_connection( + hass: HomeAssistant, + mock_get_source_ip, + show_advanced_options: bool, + user_input, +) -> None: """Test starting a flow by user with a connection error.""" result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_USER} + DOMAIN, + context={"source": SOURCE_USER, "show_advanced_options": show_advanced_options}, ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "user" @@ -179,7 +265,7 @@ async def test_exception_connection(hass: HomeAssistant, mock_get_source_ip) -> side_effect=FritzConnectionException, ): result = await hass.config_entries.flow.async_configure( - result["flow_id"], user_input=MOCK_USER_DATA + result["flow_id"], user_input=user_input ) assert result["type"] is FlowResultType.FORM @@ -187,11 +273,18 @@ async def test_exception_connection(hass: HomeAssistant, mock_get_source_ip) -> assert result["errors"]["base"] == ERROR_CANNOT_CONNECT -async def test_exception_unknown(hass: HomeAssistant, mock_get_source_ip) -> None: +@pytest.mark.parametrize( + ("show_advanced_options", "user_input"), + [(True, MOCK_USER_INPUT_ADVANCED), (False, MOCK_USER_INPUT_SIMPLE)], +) +async def test_exception_unknown( + hass: HomeAssistant, mock_get_source_ip, show_advanced_options: bool, user_input +) -> None: """Test starting a flow by user with an unknown exception.""" result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_USER} + DOMAIN, + context={"source": SOURCE_USER, "show_advanced_options": show_advanced_options}, ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "user" @@ -201,7 +294,7 @@ async def test_exception_unknown(hass: HomeAssistant, mock_get_source_ip) -> Non side_effect=OSError, ): 
result = await hass.config_entries.flow.async_configure( - result["flow_id"], user_input=MOCK_USER_DATA + result["flow_id"], user_input=user_input ) assert result["type"] is FlowResultType.FORM @@ -210,7 +303,9 @@ async def test_exception_unknown(hass: HomeAssistant, mock_get_source_ip) -> Non async def test_reauth_successful( - hass: HomeAssistant, fc_class_mock, mock_get_source_ip + hass: HomeAssistant, + fc_class_mock, + mock_get_source_ip, ) -> None: """Test starting a reauthentication flow.""" @@ -273,7 +368,11 @@ async def test_reauth_successful( ], ) async def test_reauth_not_successful( - hass: HomeAssistant, fc_class_mock, mock_get_source_ip, side_effect, error + hass: HomeAssistant, + fc_class_mock, + mock_get_source_ip, + side_effect, + error, ) -> None: """Test starting a reauthentication flow but no connection found.""" diff --git a/tests/components/fritz/test_switch.py b/tests/components/fritz/test_switch.py index adb5c3f6799..b82587d42bd 100644 --- a/tests/components/fritz/test_switch.py +++ b/tests/components/fritz/test_switch.py @@ -15,6 +15,8 @@ from tests.common import MockConfigEntry MOCK_WLANCONFIGS_SAME_SSID: dict[str, dict] = { "WLANConfiguration1": { + "GetSSID": {"NewSSID": "WiFi"}, + "GetSecurityKeys": {"NewKeyPassphrase": "mysecret"}, "GetInfo": { "NewEnable": True, "NewStatus": "Up", @@ -34,9 +36,11 @@ MOCK_WLANCONFIGS_SAME_SSID: dict[str, dict] = { "NewMinCharsPSK": 64, "NewMaxCharsPSK": 64, "NewAllowedCharsPSK": "0123456789ABCDEFabcdef", - } + }, }, "WLANConfiguration2": { + "GetSSID": {"NewSSID": "WiFi"}, + "GetSecurityKeys": {"NewKeyPassphrase": "mysecret"}, "GetInfo": { "NewEnable": True, "NewStatus": "Up", @@ -56,11 +60,13 @@ MOCK_WLANCONFIGS_SAME_SSID: dict[str, dict] = { "NewMinCharsPSK": 64, "NewMaxCharsPSK": 64, "NewAllowedCharsPSK": "0123456789ABCDEFabcdef", - } + }, }, } MOCK_WLANCONFIGS_DIFF_SSID: dict[str, dict] = { "WLANConfiguration1": { + "GetSSID": {"NewSSID": "WiFi"}, + "GetSecurityKeys": {"NewKeyPassphrase": 
"mysecret"}, "GetInfo": { "NewEnable": True, "NewStatus": "Up", @@ -80,9 +86,11 @@ MOCK_WLANCONFIGS_DIFF_SSID: dict[str, dict] = { "NewMinCharsPSK": 64, "NewMaxCharsPSK": 64, "NewAllowedCharsPSK": "0123456789ABCDEFabcdef", - } + }, }, "WLANConfiguration2": { + "GetSSID": {"NewSSID": "WiFi2"}, + "GetSecurityKeys": {"NewKeyPassphrase": "mysecret"}, "GetInfo": { "NewEnable": True, "NewStatus": "Up", @@ -102,11 +110,13 @@ MOCK_WLANCONFIGS_DIFF_SSID: dict[str, dict] = { "NewMinCharsPSK": 64, "NewMaxCharsPSK": 64, "NewAllowedCharsPSK": "0123456789ABCDEFabcdef", - } + }, }, } MOCK_WLANCONFIGS_DIFF2_SSID: dict[str, dict] = { "WLANConfiguration1": { + "GetSSID": {"NewSSID": "WiFi"}, + "GetSecurityKeys": {"NewKeyPassphrase": "mysecret"}, "GetInfo": { "NewEnable": True, "NewStatus": "Up", @@ -126,9 +136,11 @@ MOCK_WLANCONFIGS_DIFF2_SSID: dict[str, dict] = { "NewMinCharsPSK": 64, "NewMaxCharsPSK": 64, "NewAllowedCharsPSK": "0123456789ABCDEFabcdef", - } + }, }, "WLANConfiguration2": { + "GetSSID": {"NewSSID": "WiFi+"}, + "GetSecurityKeys": {"NewKeyPassphrase": "mysecret"}, "GetInfo": { "NewEnable": True, "NewStatus": "Up", @@ -148,7 +160,7 @@ MOCK_WLANCONFIGS_DIFF2_SSID: dict[str, dict] = { "NewMinCharsPSK": 64, "NewMaxCharsPSK": 64, "NewAllowedCharsPSK": "0123456789ABCDEFabcdef", - } + }, }, } @@ -179,7 +191,7 @@ async def test_switch_setup( entry.add_to_hass(hass) await hass.config_entries.async_setup(entry.entry_id) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) assert entry.state is ConfigEntryState.LOADED switches = hass.states.async_all(Platform.SWITCH) From 7d386b0d26592c08d46e6ef13c70003bb12ae1a2 Mon Sep 17 00:00:00 2001 From: Brett Adams Date: Sun, 21 Apr 2024 07:54:24 +1000 Subject: [PATCH 213/426] Fix sensor entity description in Teslemetry (#115614) Add description back to sensor entity --- homeassistant/components/teslemetry/sensor.py | 5 +- .../teslemetry/snapshots/test_sensor.ambr | 538 ++++++++++++++---- 2 
files changed, 428 insertions(+), 115 deletions(-) diff --git a/homeassistant/components/teslemetry/sensor.py b/homeassistant/components/teslemetry/sensor.py index cced1090e2a..6380a4d0c71 100644 --- a/homeassistant/components/teslemetry/sensor.py +++ b/homeassistant/components/teslemetry/sensor.py @@ -58,7 +58,7 @@ SHIFT_STATES = {"P": "p", "D": "d", "R": "r", "N": "n"} class TeslemetrySensorEntityDescription(SensorEntityDescription): """Describes Teslemetry Sensor entity.""" - value_fn: Callable[[StateType], StateType | datetime] = lambda x: x + value_fn: Callable[[StateType], StateType] = lambda x: x VEHICLE_DESCRIPTIONS: tuple[TeslemetrySensorEntityDescription, ...] = ( @@ -447,12 +447,13 @@ class TeslemetryVehicleSensorEntity(TeslemetryVehicleEntity, SensorEntity): description: TeslemetrySensorEntityDescription, ) -> None: """Initialize the sensor.""" + self.entity_description = description super().__init__(vehicle, description.key) @property def native_value(self) -> StateType: """Return the state of the sensor.""" - return self._value + return self.entity_description.value_fn(self._value) class TeslemetryVehicleTimeSensorEntity(TeslemetryVehicleEntity, SensorEntity): diff --git a/tests/components/teslemetry/snapshots/test_sensor.ambr b/tests/components/teslemetry/snapshots/test_sensor.ambr index 81142e40901..0d817ad1f7e 100644 --- a/tests/components/teslemetry/snapshots/test_sensor.ambr +++ b/tests/components/teslemetry/snapshots/test_sensor.ambr @@ -719,7 +719,9 @@ 'aliases': set({ }), 'area_id': None, - 'capabilities': None, + 'capabilities': dict({ + 'state_class': , + }), 'config_entry_id': , 'device_class': None, 'device_id': , @@ -736,7 +738,7 @@ 'name': None, 'options': dict({ }), - 'original_device_class': None, + 'original_device_class': , 'original_icon': None, 'original_name': 'Battery level', 'platform': 'teslemetry', @@ -744,13 +746,16 @@ 'supported_features': 0, 'translation_key': 'charge_state_battery_level', 'unique_id': 
'VINVINVIN-charge_state_battery_level', - 'unit_of_measurement': None, + 'unit_of_measurement': '%', }) # --- # name: test_sensors[sensor.test_battery_level-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'device_class': 'battery', 'friendly_name': 'Test Battery level', + 'state_class': , + 'unit_of_measurement': '%', }), 'context': , 'entity_id': 'sensor.test_battery_level', @@ -763,7 +768,10 @@ # name: test_sensors[sensor.test_battery_level-statealt] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'device_class': 'battery', 'friendly_name': 'Test Battery level', + 'state_class': , + 'unit_of_measurement': '%', }), 'context': , 'entity_id': 'sensor.test_battery_level', @@ -778,7 +786,9 @@ 'aliases': set({ }), 'area_id': None, - 'capabilities': None, + 'capabilities': dict({ + 'state_class': , + }), 'config_entry_id': , 'device_class': None, 'device_id': , @@ -794,8 +804,14 @@ }), 'name': None, 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), }), - 'original_device_class': None, + 'original_device_class': , 'original_icon': None, 'original_name': 'Battery range', 'platform': 'teslemetry', @@ -803,33 +819,39 @@ 'supported_features': 0, 'translation_key': 'charge_state_battery_range', 'unique_id': 'VINVINVIN-charge_state_battery_range', - 'unit_of_measurement': None, + 'unit_of_measurement': , }) # --- # name: test_sensors[sensor.test_battery_range-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'device_class': 'distance', 'friendly_name': 'Test Battery range', + 'state_class': , + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.test_battery_range', 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '266.87', + 'state': '429.48563328', }) # --- # name: test_sensors[sensor.test_battery_range-statealt] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'device_class': 'distance', 'friendly_name': 'Test Battery range', + 'state_class': 
, + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.test_battery_range', 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '266.87', + 'state': '429.48563328', }) # --- # name: test_sensors[sensor.test_charge_cable-entry] @@ -843,7 +865,7 @@ 'device_id': , 'disabled_by': None, 'domain': 'sensor', - 'entity_category': None, + 'entity_category': , 'entity_id': 'sensor.test_charge_cable', 'has_entity_name': True, 'hidden_by': None, @@ -896,7 +918,9 @@ 'aliases': set({ }), 'area_id': None, - 'capabilities': None, + 'capabilities': dict({ + 'state_class': , + }), 'config_entry_id': , 'device_class': None, 'device_id': , @@ -912,8 +936,11 @@ }), 'name': None, 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), }), - 'original_device_class': None, + 'original_device_class': , 'original_icon': None, 'original_name': 'Charge energy added', 'platform': 'teslemetry', @@ -921,13 +948,16 @@ 'supported_features': 0, 'translation_key': 'charge_state_charge_energy_added', 'unique_id': 'VINVINVIN-charge_state_charge_energy_added', - 'unit_of_measurement': None, + 'unit_of_measurement': , }) # --- # name: test_sensors[sensor.test_charge_energy_added-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'device_class': 'energy', 'friendly_name': 'Test Charge energy added', + 'state_class': , + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.test_charge_energy_added', @@ -940,7 +970,10 @@ # name: test_sensors[sensor.test_charge_energy_added-statealt] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'device_class': 'energy', 'friendly_name': 'Test Charge energy added', + 'state_class': , + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.test_charge_energy_added', @@ -955,13 +988,15 @@ 'aliases': set({ }), 'area_id': None, - 'capabilities': None, + 'capabilities': dict({ + 'state_class': , + }), 'config_entry_id': , 'device_class': None, 'device_id': , 'disabled_by': None, 'domain': 'sensor', - 
'entity_category': None, + 'entity_category': , 'entity_id': 'sensor.test_charge_rate', 'has_entity_name': True, 'hidden_by': None, @@ -971,8 +1006,11 @@ }), 'name': None, 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), }), - 'original_device_class': None, + 'original_device_class': , 'original_icon': None, 'original_name': 'Charge rate', 'platform': 'teslemetry', @@ -980,13 +1018,16 @@ 'supported_features': 0, 'translation_key': 'charge_state_charge_rate', 'unique_id': 'VINVINVIN-charge_state_charge_rate', - 'unit_of_measurement': None, + 'unit_of_measurement': , }) # --- # name: test_sensors[sensor.test_charge_rate-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'device_class': 'speed', 'friendly_name': 'Test Charge rate', + 'state_class': , + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.test_charge_rate', @@ -999,7 +1040,10 @@ # name: test_sensors[sensor.test_charge_rate-statealt] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'device_class': 'speed', 'friendly_name': 'Test Charge rate', + 'state_class': , + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.test_charge_rate', @@ -1014,13 +1058,15 @@ 'aliases': set({ }), 'area_id': None, - 'capabilities': None, + 'capabilities': dict({ + 'state_class': , + }), 'config_entry_id': , 'device_class': None, 'device_id': , 'disabled_by': None, 'domain': 'sensor', - 'entity_category': None, + 'entity_category': , 'entity_id': 'sensor.test_charger_current', 'has_entity_name': True, 'hidden_by': None, @@ -1031,7 +1077,7 @@ 'name': None, 'options': dict({ }), - 'original_device_class': None, + 'original_device_class': , 'original_icon': None, 'original_name': 'Charger current', 'platform': 'teslemetry', @@ -1039,13 +1085,16 @@ 'supported_features': 0, 'translation_key': 'charge_state_charger_actual_current', 'unique_id': 'VINVINVIN-charge_state_charger_actual_current', - 'unit_of_measurement': None, + 'unit_of_measurement': , }) # --- # name: 
test_sensors[sensor.test_charger_current-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'device_class': 'current', 'friendly_name': 'Test Charger current', + 'state_class': , + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.test_charger_current', @@ -1058,7 +1107,10 @@ # name: test_sensors[sensor.test_charger_current-statealt] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'device_class': 'current', 'friendly_name': 'Test Charger current', + 'state_class': , + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.test_charger_current', @@ -1073,7 +1125,9 @@ 'aliases': set({ }), 'area_id': None, - 'capabilities': None, + 'capabilities': dict({ + 'state_class': , + }), 'config_entry_id': , 'device_class': None, 'device_id': , @@ -1090,7 +1144,7 @@ 'name': None, 'options': dict({ }), - 'original_device_class': None, + 'original_device_class': , 'original_icon': None, 'original_name': 'Charger power', 'platform': 'teslemetry', @@ -1098,13 +1152,16 @@ 'supported_features': 0, 'translation_key': 'charge_state_charger_power', 'unique_id': 'VINVINVIN-charge_state_charger_power', - 'unit_of_measurement': None, + 'unit_of_measurement': , }) # --- # name: test_sensors[sensor.test_charger_power-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'device_class': 'power', 'friendly_name': 'Test Charger power', + 'state_class': , + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.test_charger_power', @@ -1117,7 +1174,10 @@ # name: test_sensors[sensor.test_charger_power-statealt] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'device_class': 'power', 'friendly_name': 'Test Charger power', + 'state_class': , + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.test_charger_power', @@ -1132,13 +1192,15 @@ 'aliases': set({ }), 'area_id': None, - 'capabilities': None, + 'capabilities': dict({ + 'state_class': , + }), 'config_entry_id': , 'device_class': None, 'device_id': , 'disabled_by': None, 'domain': 'sensor', - 
'entity_category': None, + 'entity_category': , 'entity_id': 'sensor.test_charger_voltage', 'has_entity_name': True, 'hidden_by': None, @@ -1149,7 +1211,7 @@ 'name': None, 'options': dict({ }), - 'original_device_class': None, + 'original_device_class': , 'original_icon': None, 'original_name': 'Charger voltage', 'platform': 'teslemetry', @@ -1157,13 +1219,16 @@ 'supported_features': 0, 'translation_key': 'charge_state_charger_voltage', 'unique_id': 'VINVINVIN-charge_state_charger_voltage', - 'unit_of_measurement': None, + 'unit_of_measurement': , }) # --- # name: test_sensors[sensor.test_charger_voltage-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', 'friendly_name': 'Test Charger voltage', + 'state_class': , + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.test_charger_voltage', @@ -1176,7 +1241,10 @@ # name: test_sensors[sensor.test_charger_voltage-statealt] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', 'friendly_name': 'Test Charger voltage', + 'state_class': , + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.test_charger_voltage', @@ -1191,7 +1259,16 @@ 'aliases': set({ }), 'area_id': None, - 'capabilities': None, + 'capabilities': dict({ + 'options': list([ + 'starting', + 'charging', + 'stopped', + 'complete', + 'disconnected', + 'no_power', + ]), + }), 'config_entry_id': , 'device_class': None, 'device_id': , @@ -1208,7 +1285,7 @@ 'name': None, 'options': dict({ }), - 'original_device_class': None, + 'original_device_class': , 'original_icon': None, 'original_name': 'Charging', 'platform': 'teslemetry', @@ -1222,27 +1299,45 @@ # name: test_sensors[sensor.test_charging-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'device_class': 'enum', 'friendly_name': 'Test Charging', + 'options': list([ + 'starting', + 'charging', + 'stopped', + 'complete', + 'disconnected', + 'no_power', + ]), }), 'context': , 'entity_id': 'sensor.test_charging', 'last_changed': , 
'last_reported': , 'last_updated': , - 'state': 'Stopped', + 'state': 'stopped', }) # --- # name: test_sensors[sensor.test_charging-statealt] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'device_class': 'enum', 'friendly_name': 'Test Charging', + 'options': list([ + 'starting', + 'charging', + 'stopped', + 'complete', + 'disconnected', + 'no_power', + ]), }), 'context': , 'entity_id': 'sensor.test_charging', 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'Stopped', + 'state': 'stopped', }) # --- # name: test_sensors[sensor.test_distance_to_arrival-entry] @@ -1250,7 +1345,9 @@ 'aliases': set({ }), 'area_id': None, - 'capabilities': None, + 'capabilities': dict({ + 'state_class': , + }), 'config_entry_id': , 'device_class': None, 'device_id': , @@ -1266,8 +1363,11 @@ }), 'name': None, 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), }), - 'original_device_class': None, + 'original_device_class': , 'original_icon': None, 'original_name': 'Distance to arrival', 'platform': 'teslemetry', @@ -1275,26 +1375,32 @@ 'supported_features': 0, 'translation_key': 'drive_state_active_route_miles_to_arrival', 'unique_id': 'VINVINVIN-drive_state_active_route_miles_to_arrival', - 'unit_of_measurement': None, + 'unit_of_measurement': , }) # --- # name: test_sensors[sensor.test_distance_to_arrival-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'device_class': 'distance', 'friendly_name': 'Test Distance to arrival', + 'state_class': , + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.test_distance_to_arrival', 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '0.039491', + 'state': '0.063555', }) # --- # name: test_sensors[sensor.test_distance_to_arrival-statealt] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'device_class': 'distance', 'friendly_name': 'Test Distance to arrival', + 'state_class': , + 'unit_of_measurement': , }), 'context': , 'entity_id': 
'sensor.test_distance_to_arrival', @@ -1309,13 +1415,15 @@ 'aliases': set({ }), 'area_id': None, - 'capabilities': None, + 'capabilities': dict({ + 'state_class': , + }), 'config_entry_id': , 'device_class': None, 'device_id': , 'disabled_by': None, 'domain': 'sensor', - 'entity_category': None, + 'entity_category': , 'entity_id': 'sensor.test_driver_temperature_setting', 'has_entity_name': True, 'hidden_by': None, @@ -1325,8 +1433,11 @@ }), 'name': None, 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), }), - 'original_device_class': None, + 'original_device_class': , 'original_icon': None, 'original_name': 'Driver temperature setting', 'platform': 'teslemetry', @@ -1334,13 +1445,16 @@ 'supported_features': 0, 'translation_key': 'climate_state_driver_temp_setting', 'unique_id': 'VINVINVIN-climate_state_driver_temp_setting', - 'unit_of_measurement': None, + 'unit_of_measurement': , }) # --- # name: test_sensors[sensor.test_driver_temperature_setting-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', 'friendly_name': 'Test Driver temperature setting', + 'state_class': , + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.test_driver_temperature_setting', @@ -1353,7 +1467,10 @@ # name: test_sensors[sensor.test_driver_temperature_setting-statealt] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', 'friendly_name': 'Test Driver temperature setting', + 'state_class': , + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.test_driver_temperature_setting', @@ -1368,7 +1485,9 @@ 'aliases': set({ }), 'area_id': None, - 'capabilities': None, + 'capabilities': dict({ + 'state_class': , + }), 'config_entry_id': , 'device_class': None, 'device_id': , @@ -1384,8 +1503,14 @@ }), 'name': None, 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), }), - 
'original_device_class': None, + 'original_device_class': , 'original_icon': None, 'original_name': 'Estimate battery range', 'platform': 'teslemetry', @@ -1393,33 +1518,39 @@ 'supported_features': 0, 'translation_key': 'charge_state_est_battery_range', 'unique_id': 'VINVINVIN-charge_state_est_battery_range', - 'unit_of_measurement': None, + 'unit_of_measurement': , }) # --- # name: test_sensors[sensor.test_estimate_battery_range-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'device_class': 'distance', 'friendly_name': 'Test Estimate battery range', + 'state_class': , + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.test_estimate_battery_range', 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '275.04', + 'state': '442.63397376', }) # --- # name: test_sensors[sensor.test_estimate_battery_range-statealt] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'device_class': 'distance', 'friendly_name': 'Test Estimate battery range', + 'state_class': , + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.test_estimate_battery_range', 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '275.04', + 'state': '442.63397376', }) # --- # name: test_sensors[sensor.test_fast_charger_type-entry] @@ -1433,7 +1564,7 @@ 'device_id': , 'disabled_by': None, 'domain': 'sensor', - 'entity_category': None, + 'entity_category': , 'entity_id': 'sensor.test_fast_charger_type', 'has_entity_name': True, 'hidden_by': None, @@ -1486,7 +1617,9 @@ 'aliases': set({ }), 'area_id': None, - 'capabilities': None, + 'capabilities': dict({ + 'state_class': , + }), 'config_entry_id': , 'device_class': None, 'device_id': , @@ -1502,8 +1635,14 @@ }), 'name': None, 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), }), - 'original_device_class': None, + 'original_device_class': , 'original_icon': None, 'original_name': 'Ideal battery range', 
'platform': 'teslemetry', @@ -1511,33 +1650,39 @@ 'supported_features': 0, 'translation_key': 'charge_state_ideal_battery_range', 'unique_id': 'VINVINVIN-charge_state_ideal_battery_range', - 'unit_of_measurement': None, + 'unit_of_measurement': , }) # --- # name: test_sensors[sensor.test_ideal_battery_range-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'device_class': 'distance', 'friendly_name': 'Test Ideal battery range', + 'state_class': , + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.test_ideal_battery_range', 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '266.87', + 'state': '429.48563328', }) # --- # name: test_sensors[sensor.test_ideal_battery_range-statealt] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'device_class': 'distance', 'friendly_name': 'Test Ideal battery range', + 'state_class': , + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.test_ideal_battery_range', 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '266.87', + 'state': '429.48563328', }) # --- # name: test_sensors[sensor.test_inside_temperature-entry] @@ -1545,7 +1690,9 @@ 'aliases': set({ }), 'area_id': None, - 'capabilities': None, + 'capabilities': dict({ + 'state_class': , + }), 'config_entry_id': , 'device_class': None, 'device_id': , @@ -1561,8 +1708,11 @@ }), 'name': None, 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), }), - 'original_device_class': None, + 'original_device_class': , 'original_icon': None, 'original_name': 'Inside temperature', 'platform': 'teslemetry', @@ -1570,13 +1720,16 @@ 'supported_features': 0, 'translation_key': 'climate_state_inside_temp', 'unique_id': 'VINVINVIN-climate_state_inside_temp', - 'unit_of_measurement': None, + 'unit_of_measurement': , }) # --- # name: test_sensors[sensor.test_inside_temperature-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', 'friendly_name': 'Test Inside temperature', + 
'state_class': , + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.test_inside_temperature', @@ -1589,7 +1742,10 @@ # name: test_sensors[sensor.test_inside_temperature-statealt] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', 'friendly_name': 'Test Inside temperature', + 'state_class': , + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.test_inside_temperature', @@ -1604,13 +1760,15 @@ 'aliases': set({ }), 'area_id': None, - 'capabilities': None, + 'capabilities': dict({ + 'state_class': , + }), 'config_entry_id': , 'device_class': None, 'device_id': , 'disabled_by': None, 'domain': 'sensor', - 'entity_category': None, + 'entity_category': , 'entity_id': 'sensor.test_odometer', 'has_entity_name': True, 'hidden_by': None, @@ -1620,8 +1778,14 @@ }), 'name': None, 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 0, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), }), - 'original_device_class': None, + 'original_device_class': , 'original_icon': None, 'original_name': 'Odometer', 'platform': 'teslemetry', @@ -1629,33 +1793,39 @@ 'supported_features': 0, 'translation_key': 'vehicle_state_odometer', 'unique_id': 'VINVINVIN-vehicle_state_odometer', - 'unit_of_measurement': None, + 'unit_of_measurement': , }) # --- # name: test_sensors[sensor.test_odometer-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'device_class': 'distance', 'friendly_name': 'Test Odometer', + 'state_class': , + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.test_odometer', 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '6481.019282', + 'state': '10430.189495371', }) # --- # name: test_sensors[sensor.test_odometer-statealt] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'device_class': 'distance', 'friendly_name': 'Test Odometer', + 'state_class': , + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.test_odometer', 'last_changed': , 
'last_reported': , 'last_updated': , - 'state': '6481.019282', + 'state': '10430.189495371', }) # --- # name: test_sensors[sensor.test_outside_temperature-entry] @@ -1663,7 +1833,9 @@ 'aliases': set({ }), 'area_id': None, - 'capabilities': None, + 'capabilities': dict({ + 'state_class': , + }), 'config_entry_id': , 'device_class': None, 'device_id': , @@ -1679,8 +1851,11 @@ }), 'name': None, 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), }), - 'original_device_class': None, + 'original_device_class': , 'original_icon': None, 'original_name': 'Outside temperature', 'platform': 'teslemetry', @@ -1688,13 +1863,16 @@ 'supported_features': 0, 'translation_key': 'climate_state_outside_temp', 'unique_id': 'VINVINVIN-climate_state_outside_temp', - 'unit_of_measurement': None, + 'unit_of_measurement': , }) # --- # name: test_sensors[sensor.test_outside_temperature-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', 'friendly_name': 'Test Outside temperature', + 'state_class': , + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.test_outside_temperature', @@ -1707,7 +1885,10 @@ # name: test_sensors[sensor.test_outside_temperature-statealt] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', 'friendly_name': 'Test Outside temperature', + 'state_class': , + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.test_outside_temperature', @@ -1722,13 +1903,15 @@ 'aliases': set({ }), 'area_id': None, - 'capabilities': None, + 'capabilities': dict({ + 'state_class': , + }), 'config_entry_id': , 'device_class': None, 'device_id': , 'disabled_by': None, 'domain': 'sensor', - 'entity_category': None, + 'entity_category': , 'entity_id': 'sensor.test_passenger_temperature_setting', 'has_entity_name': True, 'hidden_by': None, @@ -1738,8 +1921,11 @@ }), 'name': None, 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), }), - 'original_device_class': 
None, + 'original_device_class': , 'original_icon': None, 'original_name': 'Passenger temperature setting', 'platform': 'teslemetry', @@ -1747,13 +1933,16 @@ 'supported_features': 0, 'translation_key': 'climate_state_passenger_temp_setting', 'unique_id': 'VINVINVIN-climate_state_passenger_temp_setting', - 'unit_of_measurement': None, + 'unit_of_measurement': , }) # --- # name: test_sensors[sensor.test_passenger_temperature_setting-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', 'friendly_name': 'Test Passenger temperature setting', + 'state_class': , + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.test_passenger_temperature_setting', @@ -1766,7 +1955,10 @@ # name: test_sensors[sensor.test_passenger_temperature_setting-statealt] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', 'friendly_name': 'Test Passenger temperature setting', + 'state_class': , + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.test_passenger_temperature_setting', @@ -1781,13 +1973,15 @@ 'aliases': set({ }), 'area_id': None, - 'capabilities': None, + 'capabilities': dict({ + 'state_class': , + }), 'config_entry_id': , 'device_class': None, 'device_id': , 'disabled_by': None, 'domain': 'sensor', - 'entity_category': None, + 'entity_category': , 'entity_id': 'sensor.test_power', 'has_entity_name': True, 'hidden_by': None, @@ -1798,7 +1992,7 @@ 'name': None, 'options': dict({ }), - 'original_device_class': None, + 'original_device_class': , 'original_icon': None, 'original_name': 'Power', 'platform': 'teslemetry', @@ -1806,13 +2000,16 @@ 'supported_features': 0, 'translation_key': 'drive_state_power', 'unique_id': 'VINVINVIN-drive_state_power', - 'unit_of_measurement': None, + 'unit_of_measurement': , }) # --- # name: test_sensors[sensor.test_power-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'device_class': 'power', 'friendly_name': 'Test Power', + 'state_class': , + 'unit_of_measurement': , 
}), 'context': , 'entity_id': 'sensor.test_power', @@ -1825,7 +2022,10 @@ # name: test_sensors[sensor.test_power-statealt] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'device_class': 'power', 'friendly_name': 'Test Power', + 'state_class': , + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.test_power', @@ -1840,7 +2040,14 @@ 'aliases': set({ }), 'area_id': None, - 'capabilities': None, + 'capabilities': dict({ + 'options': list([ + 'p', + 'd', + 'r', + 'n', + ]), + }), 'config_entry_id': , 'device_class': None, 'device_id': , @@ -1857,7 +2064,7 @@ 'name': None, 'options': dict({ }), - 'original_device_class': None, + 'original_device_class': , 'original_icon': None, 'original_name': 'Shift state', 'platform': 'teslemetry', @@ -1871,27 +2078,41 @@ # name: test_sensors[sensor.test_shift_state-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'device_class': 'enum', 'friendly_name': 'Test Shift state', + 'options': list([ + 'p', + 'd', + 'r', + 'n', + ]), }), 'context': , 'entity_id': 'sensor.test_shift_state', 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'unknown', + 'state': 'p', }) # --- # name: test_sensors[sensor.test_shift_state-statealt] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'device_class': 'enum', 'friendly_name': 'Test Shift state', + 'options': list([ + 'p', + 'd', + 'r', + 'n', + ]), }), 'context': , 'entity_id': 'sensor.test_shift_state', 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'unknown', + 'state': 'p', }) # --- # name: test_sensors[sensor.test_speed-entry] @@ -1899,7 +2120,9 @@ 'aliases': set({ }), 'area_id': None, - 'capabilities': None, + 'capabilities': dict({ + 'state_class': , + }), 'config_entry_id': , 'device_class': None, 'device_id': , @@ -1915,8 +2138,11 @@ }), 'name': None, 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), }), - 'original_device_class': None, + 'original_device_class': , 'original_icon': None, 'original_name': 
'Speed', 'platform': 'teslemetry', @@ -1924,33 +2150,39 @@ 'supported_features': 0, 'translation_key': 'drive_state_speed', 'unique_id': 'VINVINVIN-drive_state_speed', - 'unit_of_measurement': None, + 'unit_of_measurement': , }) # --- # name: test_sensors[sensor.test_speed-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'device_class': 'speed', 'friendly_name': 'Test Speed', + 'state_class': , + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.test_speed', 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'unknown', + 'state': '0', }) # --- # name: test_sensors[sensor.test_speed-statealt] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'device_class': 'speed', 'friendly_name': 'Test Speed', + 'state_class': , + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.test_speed', 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'unknown', + 'state': '0', }) # --- # name: test_sensors[sensor.test_state_of_charge_at_arrival-entry] @@ -1958,13 +2190,15 @@ 'aliases': set({ }), 'area_id': None, - 'capabilities': None, + 'capabilities': dict({ + 'state_class': , + }), 'config_entry_id': , 'device_class': None, 'device_id': , 'disabled_by': None, 'domain': 'sensor', - 'entity_category': None, + 'entity_category': , 'entity_id': 'sensor.test_state_of_charge_at_arrival', 'has_entity_name': True, 'hidden_by': None, @@ -1975,7 +2209,7 @@ 'name': None, 'options': dict({ }), - 'original_device_class': None, + 'original_device_class': , 'original_icon': None, 'original_name': 'State of charge at arrival', 'platform': 'teslemetry', @@ -1983,13 +2217,16 @@ 'supported_features': 0, 'translation_key': 'drive_state_active_route_energy_at_arrival', 'unique_id': 'VINVINVIN-drive_state_active_route_energy_at_arrival', - 'unit_of_measurement': None, + 'unit_of_measurement': '%', }) # --- # name: test_sensors[sensor.test_state_of_charge_at_arrival-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'device_class': 'battery', 
'friendly_name': 'Test State of charge at arrival', + 'state_class': , + 'unit_of_measurement': '%', }), 'context': , 'entity_id': 'sensor.test_state_of_charge_at_arrival', @@ -2002,7 +2239,10 @@ # name: test_sensors[sensor.test_state_of_charge_at_arrival-statealt] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'device_class': 'battery', 'friendly_name': 'Test State of charge at arrival', + 'state_class': , + 'unit_of_measurement': '%', }), 'context': , 'entity_id': 'sensor.test_state_of_charge_at_arrival', @@ -2139,13 +2379,15 @@ 'aliases': set({ }), 'area_id': None, - 'capabilities': None, + 'capabilities': dict({ + 'state_class': , + }), 'config_entry_id': , 'device_class': None, 'device_id': , 'disabled_by': None, 'domain': 'sensor', - 'entity_category': None, + 'entity_category': , 'entity_id': 'sensor.test_tire_pressure_front_left', 'has_entity_name': True, 'hidden_by': None, @@ -2155,8 +2397,14 @@ }), 'name': None, 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), }), - 'original_device_class': None, + 'original_device_class': , 'original_icon': None, 'original_name': 'Tire pressure front left', 'platform': 'teslemetry', @@ -2164,33 +2412,39 @@ 'supported_features': 0, 'translation_key': 'vehicle_state_tpms_pressure_fl', 'unique_id': 'VINVINVIN-vehicle_state_tpms_pressure_fl', - 'unit_of_measurement': None, + 'unit_of_measurement': , }) # --- # name: test_sensors[sensor.test_tire_pressure_front_left-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'device_class': 'pressure', 'friendly_name': 'Test Tire pressure front left', + 'state_class': , + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.test_tire_pressure_front_left', 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '2.775', + 'state': '40.2479739314961', }) # --- # name: test_sensors[sensor.test_tire_pressure_front_left-statealt] StateSnapshot({ 'attributes': 
ReadOnlyDict({ + 'device_class': 'pressure', 'friendly_name': 'Test Tire pressure front left', + 'state_class': , + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.test_tire_pressure_front_left', 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '2.775', + 'state': '40.2479739314961', }) # --- # name: test_sensors[sensor.test_tire_pressure_front_right-entry] @@ -2198,13 +2452,15 @@ 'aliases': set({ }), 'area_id': None, - 'capabilities': None, + 'capabilities': dict({ + 'state_class': , + }), 'config_entry_id': , 'device_class': None, 'device_id': , 'disabled_by': None, 'domain': 'sensor', - 'entity_category': None, + 'entity_category': , 'entity_id': 'sensor.test_tire_pressure_front_right', 'has_entity_name': True, 'hidden_by': None, @@ -2214,8 +2470,14 @@ }), 'name': None, 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), }), - 'original_device_class': None, + 'original_device_class': , 'original_icon': None, 'original_name': 'Tire pressure front right', 'platform': 'teslemetry', @@ -2223,33 +2485,39 @@ 'supported_features': 0, 'translation_key': 'vehicle_state_tpms_pressure_fr', 'unique_id': 'VINVINVIN-vehicle_state_tpms_pressure_fr', - 'unit_of_measurement': None, + 'unit_of_measurement': , }) # --- # name: test_sensors[sensor.test_tire_pressure_front_right-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'device_class': 'pressure', 'friendly_name': 'Test Tire pressure front right', + 'state_class': , + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.test_tire_pressure_front_right', 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '2.8', + 'state': '40.6105682912393', }) # --- # name: test_sensors[sensor.test_tire_pressure_front_right-statealt] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'device_class': 'pressure', 'friendly_name': 'Test Tire pressure front right', + 'state_class': , + 
'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.test_tire_pressure_front_right', 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '2.8', + 'state': '40.6105682912393', }) # --- # name: test_sensors[sensor.test_tire_pressure_rear_left-entry] @@ -2257,13 +2525,15 @@ 'aliases': set({ }), 'area_id': None, - 'capabilities': None, + 'capabilities': dict({ + 'state_class': , + }), 'config_entry_id': , 'device_class': None, 'device_id': , 'disabled_by': None, 'domain': 'sensor', - 'entity_category': None, + 'entity_category': , 'entity_id': 'sensor.test_tire_pressure_rear_left', 'has_entity_name': True, 'hidden_by': None, @@ -2273,8 +2543,14 @@ }), 'name': None, 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), }), - 'original_device_class': None, + 'original_device_class': , 'original_icon': None, 'original_name': 'Tire pressure rear left', 'platform': 'teslemetry', @@ -2282,33 +2558,39 @@ 'supported_features': 0, 'translation_key': 'vehicle_state_tpms_pressure_rl', 'unique_id': 'VINVINVIN-vehicle_state_tpms_pressure_rl', - 'unit_of_measurement': None, + 'unit_of_measurement': , }) # --- # name: test_sensors[sensor.test_tire_pressure_rear_left-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'device_class': 'pressure', 'friendly_name': 'Test Tire pressure rear left', + 'state_class': , + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.test_tire_pressure_rear_left', 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '2.775', + 'state': '40.2479739314961', }) # --- # name: test_sensors[sensor.test_tire_pressure_rear_left-statealt] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'device_class': 'pressure', 'friendly_name': 'Test Tire pressure rear left', + 'state_class': , + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.test_tire_pressure_rear_left', 'last_changed': , 'last_reported': , 
'last_updated': , - 'state': '2.775', + 'state': '40.2479739314961', }) # --- # name: test_sensors[sensor.test_tire_pressure_rear_right-entry] @@ -2316,13 +2598,15 @@ 'aliases': set({ }), 'area_id': None, - 'capabilities': None, + 'capabilities': dict({ + 'state_class': , + }), 'config_entry_id': , 'device_class': None, 'device_id': , 'disabled_by': None, 'domain': 'sensor', - 'entity_category': None, + 'entity_category': , 'entity_id': 'sensor.test_tire_pressure_rear_right', 'has_entity_name': True, 'hidden_by': None, @@ -2332,8 +2616,14 @@ }), 'name': None, 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), }), - 'original_device_class': None, + 'original_device_class': , 'original_icon': None, 'original_name': 'Tire pressure rear right', 'platform': 'teslemetry', @@ -2341,33 +2631,39 @@ 'supported_features': 0, 'translation_key': 'vehicle_state_tpms_pressure_rr', 'unique_id': 'VINVINVIN-vehicle_state_tpms_pressure_rr', - 'unit_of_measurement': None, + 'unit_of_measurement': , }) # --- # name: test_sensors[sensor.test_tire_pressure_rear_right-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'device_class': 'pressure', 'friendly_name': 'Test Tire pressure rear right', + 'state_class': , + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.test_tire_pressure_rear_right', 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '2.775', + 'state': '40.2479739314961', }) # --- # name: test_sensors[sensor.test_tire_pressure_rear_right-statealt] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'device_class': 'pressure', 'friendly_name': 'Test Tire pressure rear right', + 'state_class': , + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.test_tire_pressure_rear_right', 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '2.775', + 'state': '40.2479739314961', }) # --- # name: test_sensors[sensor.test_traffic_delay-entry] 
@@ -2375,7 +2671,9 @@ 'aliases': set({ }), 'area_id': None, - 'capabilities': None, + 'capabilities': dict({ + 'state_class': , + }), 'config_entry_id': , 'device_class': None, 'device_id': , @@ -2392,7 +2690,7 @@ 'name': None, 'options': dict({ }), - 'original_device_class': None, + 'original_device_class': , 'original_icon': None, 'original_name': 'Traffic delay', 'platform': 'teslemetry', @@ -2400,13 +2698,16 @@ 'supported_features': 0, 'translation_key': 'drive_state_active_route_traffic_minutes_delay', 'unique_id': 'VINVINVIN-drive_state_active_route_traffic_minutes_delay', - 'unit_of_measurement': None, + 'unit_of_measurement': , }) # --- # name: test_sensors[sensor.test_traffic_delay-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'device_class': 'duration', 'friendly_name': 'Test Traffic delay', + 'state_class': , + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.test_traffic_delay', @@ -2419,7 +2720,10 @@ # name: test_sensors[sensor.test_traffic_delay-statealt] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'device_class': 'duration', 'friendly_name': 'Test Traffic delay', + 'state_class': , + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.test_traffic_delay', @@ -2434,7 +2738,9 @@ 'aliases': set({ }), 'area_id': None, - 'capabilities': None, + 'capabilities': dict({ + 'state_class': , + }), 'config_entry_id': , 'device_class': None, 'device_id': , @@ -2451,7 +2757,7 @@ 'name': None, 'options': dict({ }), - 'original_device_class': None, + 'original_device_class': , 'original_icon': None, 'original_name': 'Usable battery level', 'platform': 'teslemetry', @@ -2459,13 +2765,16 @@ 'supported_features': 0, 'translation_key': 'charge_state_usable_battery_level', 'unique_id': 'VINVINVIN-charge_state_usable_battery_level', - 'unit_of_measurement': None, + 'unit_of_measurement': '%', }) # --- # name: test_sensors[sensor.test_usable_battery_level-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'device_class': 'battery', 
'friendly_name': 'Test Usable battery level', + 'state_class': , + 'unit_of_measurement': '%', }), 'context': , 'entity_id': 'sensor.test_usable_battery_level', @@ -2478,7 +2787,10 @@ # name: test_sensors[sensor.test_usable_battery_level-statealt] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'device_class': 'battery', 'friendly_name': 'Test Usable battery level', + 'state_class': , + 'unit_of_measurement': '%', }), 'context': , 'entity_id': 'sensor.test_usable_battery_level', From 29bfed72f70b18c525e21ba060cc5abd772258ce Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Sun, 21 Apr 2024 01:08:40 +0200 Subject: [PATCH 214/426] Fix flaky history stats test (#115824) --- tests/components/history_stats/test_sensor.py | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/tests/components/history_stats/test_sensor.py b/tests/components/history_stats/test_sensor.py index 9a7d8ef110a..4b4592c2104 100644 --- a/tests/components/history_stats/test_sensor.py +++ b/tests/components/history_stats/test_sensor.py @@ -1376,9 +1376,12 @@ async def test_measure_cet(recorder_mock: Recorder, hass: HomeAssistant) -> None ] } - with patch( - "homeassistant.components.recorder.history.state_changes_during_period", - _fake_states, + with ( + patch( + "homeassistant.components.recorder.history.state_changes_during_period", + _fake_states, + ), + freeze_time(start_time), ): await async_setup_component( hass, From d8117fd2bd93f81bfc5fc9cd3d5ffc3ddcaf53f5 Mon Sep 17 00:00:00 2001 From: Luke Lashley Date: Sat, 20 Apr 2024 22:57:05 -0400 Subject: [PATCH 215/426] Fix Roborock status not correctly mapping for some devices (#115646) Use device_info.model instead of name --- homeassistant/components/roborock/__init__.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/roborock/__init__.py b/homeassistant/components/roborock/__init__.py index b72fec5a8e1..12a884dba48 100644 --- a/homeassistant/components/roborock/__init__.py +++ 
b/homeassistant/components/roborock/__init__.py @@ -107,7 +107,9 @@ async def setup_device( home_data_rooms: list[HomeDataRoom], ) -> RoborockDataUpdateCoordinator | None: """Set up a device Coordinator.""" - mqtt_client = RoborockMqttClientV1(user_data, DeviceData(device, product_info.name)) + mqtt_client = RoborockMqttClientV1( + user_data, DeviceData(device, product_info.model) + ) try: networking = await mqtt_client.get_networking() if networking is None: From 30a60fd38b03009b5dc807962b533b3cfbb91ca2 Mon Sep 17 00:00:00 2001 From: David Bonnes Date: Sun, 21 Apr 2024 04:17:11 +0100 Subject: [PATCH 216/426] Improve debug logging for evohome (#110256) better logging --- homeassistant/components/evohome/__init__.py | 21 +++++++++++++------- 1 file changed, 14 insertions(+), 7 deletions(-) diff --git a/homeassistant/components/evohome/__init__.py b/homeassistant/components/evohome/__init__.py index 3017685a307..49920d79ff3 100644 --- a/homeassistant/components/evohome/__init__.py +++ b/homeassistant/components/evohome/__init__.py @@ -19,7 +19,10 @@ from evohomeasync2.schema.const import ( SZ_ALLOWED_SYSTEM_MODES, SZ_AUTO_WITH_RESET, SZ_CAN_BE_TEMPORARY, + SZ_GATEWAY_ID, + SZ_GATEWAY_INFO, SZ_HEAT_SETPOINT, + SZ_LOCATION_ID, SZ_LOCATION_INFO, SZ_SETPOINT_STATUS, SZ_STATE_STATUS, @@ -261,14 +264,18 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: return False if _LOGGER.isEnabledFor(logging.DEBUG): - _config: dict[str, Any] = { - SZ_LOCATION_INFO: {SZ_TIME_ZONE: None}, - GWS: [{TCS: None}], + loc_info = { + SZ_LOCATION_ID: loc_config[SZ_LOCATION_INFO][SZ_LOCATION_ID], + SZ_TIME_ZONE: loc_config[SZ_LOCATION_INFO][SZ_TIME_ZONE], + } + gwy_info = { + SZ_GATEWAY_ID: loc_config[GWS][0][SZ_GATEWAY_INFO][SZ_GATEWAY_ID], + TCS: loc_config[GWS][0][TCS], + } + _config = { + SZ_LOCATION_INFO: loc_info, + GWS: [{SZ_GATEWAY_INFO: gwy_info, TCS: loc_config[GWS][0][TCS]}], } - _config[SZ_LOCATION_INFO][SZ_TIME_ZONE] = loc_config[SZ_LOCATION_INFO][ - 
SZ_TIME_ZONE - ] - _config[GWS][0][TCS] = loc_config[GWS][0][TCS] _LOGGER.debug("Config = %s", _config) client_v1 = ev1.EvohomeClient( From 27bccf0b2447c574541142909ec48bf195c6379a Mon Sep 17 00:00:00 2001 From: Adam Goode Date: Sat, 20 Apr 2024 23:20:01 -0400 Subject: [PATCH 217/426] Add test for prometheus export of entities becoming unavailable and available again (#112157) Add test for state change to unavailable and back --- tests/components/prometheus/test_init.py | 121 +++++++++++++++++++++++ 1 file changed, 121 insertions(+) diff --git a/tests/components/prometheus/test_init.py b/tests/components/prometheus/test_init.py index 99b73209ad7..499d1a5df14 100644 --- a/tests/components/prometheus/test_init.py +++ b/tests/components/prometheus/test_init.py @@ -57,6 +57,7 @@ from homeassistant.const import ( STATE_ON, STATE_OPEN, STATE_OPENING, + STATE_UNAVAILABLE, STATE_UNLOCKED, UnitOfEnergy, UnitOfTemperature, @@ -1053,6 +1054,126 @@ async def test_disabling_entity( ) +@pytest.mark.parametrize("namespace", [""]) +async def test_entity_becomes_unavailable_with_export( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + client: ClientSessionGenerator, + sensor_entities: dict[str, er.RegistryEntry], +) -> None: + """Test an entity that becomes unavailable is still exported.""" + data = {**sensor_entities} + + await hass.async_block_till_done() + body = await generate_latest_metrics(client) + + assert ( + 'sensor_temperature_celsius{domain="sensor",' + 'entity="sensor.outside_temperature",' + 'friendly_name="Outside Temperature"} 15.6' in body + ) + + assert ( + 'state_change_total{domain="sensor",' + 'entity="sensor.outside_temperature",' + 'friendly_name="Outside Temperature"} 1.0' in body + ) + + assert ( + 'entity_available{domain="sensor",' + 'entity="sensor.outside_temperature",' + 'friendly_name="Outside Temperature"} 1.0' in body + ) + + assert ( + 'sensor_humidity_percent{domain="sensor",' + 'entity="sensor.outside_humidity",' + 
'friendly_name="Outside Humidity"} 54.0' in body + ) + + assert ( + 'state_change_total{domain="sensor",' + 'entity="sensor.outside_humidity",' + 'friendly_name="Outside Humidity"} 1.0' in body + ) + + assert ( + 'entity_available{domain="sensor",' + 'entity="sensor.outside_humidity",' + 'friendly_name="Outside Humidity"} 1.0' in body + ) + + # Make sensor_1 unavailable. + set_state_with_entry( + hass, data["sensor_1"], STATE_UNAVAILABLE, data["sensor_1_attributes"] + ) + + await hass.async_block_till_done() + body = await generate_latest_metrics(client) + + # Check that only the availability changed on sensor_1. + assert ( + 'sensor_temperature_celsius{domain="sensor",' + 'entity="sensor.outside_temperature",' + 'friendly_name="Outside Temperature"} 15.6' in body + ) + + assert ( + 'state_change_total{domain="sensor",' + 'entity="sensor.outside_temperature",' + 'friendly_name="Outside Temperature"} 2.0' in body + ) + + assert ( + 'entity_available{domain="sensor",' + 'entity="sensor.outside_temperature",' + 'friendly_name="Outside Temperature"} 0.0' in body + ) + + # The other sensor should be unchanged. + assert ( + 'sensor_humidity_percent{domain="sensor",' + 'entity="sensor.outside_humidity",' + 'friendly_name="Outside Humidity"} 54.0' in body + ) + + assert ( + 'state_change_total{domain="sensor",' + 'entity="sensor.outside_humidity",' + 'friendly_name="Outside Humidity"} 1.0' in body + ) + + assert ( + 'entity_available{domain="sensor",' + 'entity="sensor.outside_humidity",' + 'friendly_name="Outside Humidity"} 1.0' in body + ) + + # Bring sensor_1 back and check that it is correct. 
+ set_state_with_entry(hass, data["sensor_1"], 200.0, data["sensor_1_attributes"]) + + await hass.async_block_till_done() + body = await generate_latest_metrics(client) + + assert ( + 'sensor_temperature_celsius{domain="sensor",' + 'entity="sensor.outside_temperature",' + 'friendly_name="Outside Temperature"} 200.0' in body + ) + + assert ( + 'state_change_total{domain="sensor",' + 'entity="sensor.outside_temperature",' + 'friendly_name="Outside Temperature"} 3.0' in body + ) + + assert ( + 'entity_available{domain="sensor",' + 'entity="sensor.outside_temperature",' + 'friendly_name="Outside Temperature"} 1.0' in body + ) + + @pytest.fixture(name="sensor_entities") async def sensor_fixture( hass: HomeAssistant, entity_registry: er.EntityRegistry From b592225a8720aeb3a8e7b0b7dface7e7c78116e5 Mon Sep 17 00:00:00 2001 From: Jan Bouwhuis Date: Sun, 21 Apr 2024 08:54:23 +0200 Subject: [PATCH 218/426] Improve service validation exception test and translation key (#115843) * Small improvement to service validation exception test and translation key * Apply suggestions from code review Co-authored-by: Martin Hjelmare * Refactor string assertion --------- Co-authored-by: Martin Hjelmare --- homeassistant/components/homeassistant/strings.json | 2 +- homeassistant/core.py | 2 +- tests/test_core.py | 6 +++--- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/homeassistant/components/homeassistant/strings.json b/homeassistant/components/homeassistant/strings.json index d46a2e50bfd..09b2f17c947 100644 --- a/homeassistant/components/homeassistant/strings.json +++ b/homeassistant/components/homeassistant/strings.json @@ -192,7 +192,7 @@ "service_not_found": { "message": "Service {domain}.{service} not found." }, - "service_does_not_supports_reponse": { + "service_does_not_support_response": { "message": "A service which does not return responses can't be called with {return_response}." 
}, "service_lacks_response_request": { diff --git a/homeassistant/core.py b/homeassistant/core.py index 919e0adb758..8471d2c4dcc 100644 --- a/homeassistant/core.py +++ b/homeassistant/core.py @@ -2589,7 +2589,7 @@ class ServiceRegistry: if handler.supports_response is SupportsResponse.NONE: raise ServiceValidationError( translation_domain=DOMAIN, - translation_key="service_does_not_supports_reponse", + translation_key="service_does_not_support_response", translation_placeholders={ "return_response": "return_response=True" }, diff --git a/tests/test_core.py b/tests/test_core.py index 8f0d7f53277..ce71fcd42e5 100644 --- a/tests/test_core.py +++ b/tests/test_core.py @@ -1803,9 +1803,9 @@ async def test_services_call_return_response_requires_blocking( blocking=False, return_response=True, ) - assert ( - str(exc.value) - == "A non blocking service call with argument blocking=False can't be used together with argument return_response=True" + assert str(exc.value) == ( + "A non blocking service call with argument blocking=False " + "can't be used together with argument return_response=True" ) From 1c0c0bb0bc0e3e0ecdae11c84dc4154e22358265 Mon Sep 17 00:00:00 2001 From: wittypluck Date: Sun, 21 Apr 2024 11:08:39 +0200 Subject: [PATCH 219/426] Allow manual delete of stale Unifi device from UI (#115267) * Allow manual delete of stale device from UI * Add unit tests for remove_config_entry_device --- homeassistant/components/unifi/__init__.py | 13 +++ tests/components/unifi/test_init.py | 100 ++++++++++++++++++++- 2 files changed, 112 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/unifi/__init__.py b/homeassistant/components/unifi/__init__.py index 5174a1a7796..69a6ec423ae 100644 --- a/homeassistant/components/unifi/__init__.py +++ b/homeassistant/components/unifi/__init__.py @@ -7,6 +7,7 @@ from homeassistant.const import EVENT_HOMEASSISTANT_STOP from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import 
ConfigEntryAuthFailed, ConfigEntryNotReady from homeassistant.helpers import config_validation as cv +from homeassistant.helpers.device_registry import DeviceEntry from homeassistant.helpers.storage import Store from homeassistant.helpers.typing import ConfigType @@ -73,6 +74,18 @@ async def async_unload_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> return await hub.async_reset() +async def async_remove_config_entry_device( + hass: HomeAssistant, config_entry: ConfigEntry, device_entry: DeviceEntry +) -> bool: + """Remove config entry from a device.""" + hub: UnifiHub = hass.data[UNIFI_DOMAIN][config_entry.entry_id] + return not any( + identifier + for _, identifier in device_entry.connections + if identifier in hub.api.clients or identifier in hub.api.devices + ) + + class UnifiWirelessClients: """Class to store clients known to be wireless. diff --git a/tests/components/unifi/test_init.py b/tests/components/unifi/test_init.py index 9053b47cbaf..bd9a29f2c8b 100644 --- a/tests/components/unifi/test_init.py +++ b/tests/components/unifi/test_init.py @@ -3,10 +3,20 @@ from typing import Any from unittest.mock import patch +from aiounifi.models.message import MessageKey + +from homeassistant import loader from homeassistant.components import unifi -from homeassistant.components.unifi.const import DOMAIN as UNIFI_DOMAIN +from homeassistant.components.unifi.const import ( + CONF_ALLOW_BANDWIDTH_SENSORS, + CONF_ALLOW_UPTIME_SENSORS, + CONF_TRACK_CLIENTS, + CONF_TRACK_DEVICES, + DOMAIN as UNIFI_DOMAIN, +) from homeassistant.components.unifi.errors import AuthenticationRequired, CannotConnect from homeassistant.core import HomeAssistant +from homeassistant.helpers import device_registry as dr from homeassistant.setup import async_setup_component from .test_hub import DEFAULT_CONFIG_ENTRY_ID, setup_unifi_integration @@ -103,3 +113,91 @@ async def test_wireless_clients( "00:00:00:00:00:01", "00:00:00:00:00:02", ] + + +async def test_remove_config_entry_device( + 
hass: HomeAssistant, + hass_storage: dict[str, Any], + aioclient_mock: AiohttpClientMocker, + device_registry: dr.DeviceRegistry, + mock_unifi_websocket, +) -> None: + """Verify removing a device manually.""" + client_1 = { + "hostname": "Wired client", + "is_wired": True, + "mac": "00:00:00:00:00:01", + "oui": "Producer", + "wired-rx_bytes": 1234000000, + "wired-tx_bytes": 5678000000, + "uptime": 1600094505, + } + client_2 = { + "is_wired": False, + "mac": "00:00:00:00:00:02", + "name": "Wireless client", + "oui": "Producer", + "rx_bytes": 2345000000, + "tx_bytes": 6789000000, + "uptime": 60, + } + device_1 = { + "board_rev": 3, + "device_id": "mock-id", + "has_fan": True, + "fan_level": 0, + "ip": "10.0.1.1", + "last_seen": 1562600145, + "mac": "00:00:00:00:01:01", + "model": "US16P150", + "name": "Device 1", + "next_interval": 20, + "overheating": True, + "state": 1, + "type": "usw", + "upgradable": True, + "version": "4.0.42.10433", + } + options = { + CONF_ALLOW_BANDWIDTH_SENSORS: True, + CONF_ALLOW_UPTIME_SENSORS: True, + CONF_TRACK_CLIENTS: True, + CONF_TRACK_DEVICES: True, + } + + config_entry = await setup_unifi_integration( + hass, + aioclient_mock, + options=options, + clients_response=[client_1, client_2], + devices_response=[device_1], + ) + + integration = await loader.async_get_integration(hass, config_entry.domain) + component = await integration.async_get_component() + + # Remove a client + mock_unifi_websocket(message=MessageKey.CLIENT_REMOVED, data=[client_2]) + await hass.async_block_till_done() + + # Try to remove an active client: not allowed + device_entry = device_registry.async_get_device( + connections={(dr.CONNECTION_NETWORK_MAC, client_1["mac"])} + ) + assert not await component.async_remove_config_entry_device( + hass, config_entry, device_entry + ) + # Try to remove an active device: not allowed + device_entry = device_registry.async_get_device( + connections={(dr.CONNECTION_NETWORK_MAC, device_1["mac"])} + ) + assert not await 
component.async_remove_config_entry_device( + hass, config_entry, device_entry + ) + # Try to remove an inactive client: allowed + device_entry = device_registry.async_get_device( + connections={(dr.CONNECTION_NETWORK_MAC, client_2["mac"])} + ) + assert await component.async_remove_config_entry_device( + hass, config_entry, device_entry + ) From ec066472ae47b5533f77784c3c4c1d4b2bb92ebe Mon Sep 17 00:00:00 2001 From: Michael <35783820+mib1185@users.noreply.github.com> Date: Sun, 21 Apr 2024 11:44:58 +0200 Subject: [PATCH 220/426] Fix geo location attributes of Tankerkoenig sensors (#115914) * geo location attributes needs to be float * make mypy happy --- homeassistant/components/tankerkoenig/sensor.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/tankerkoenig/sensor.py b/homeassistant/components/tankerkoenig/sensor.py index f2fdc2c45b7..33476e75262 100644 --- a/homeassistant/components/tankerkoenig/sensor.py +++ b/homeassistant/components/tankerkoenig/sensor.py @@ -91,7 +91,7 @@ class FuelPriceSensor(TankerkoenigCoordinatorEntity, SensorEntity): self._fuel_type = fuel_type self._attr_translation_key = fuel_type self._attr_unique_id = f"{station.id}_{fuel_type}" - attrs = { + attrs: dict[str, int | str | float | None] = { ATTR_BRAND: station.brand, ATTR_FUEL_TYPE: fuel_type, ATTR_STATION_NAME: station.name, @@ -102,8 +102,8 @@ class FuelPriceSensor(TankerkoenigCoordinatorEntity, SensorEntity): } if coordinator.show_on_map: - attrs[ATTR_LATITUDE] = str(station.lat) - attrs[ATTR_LONGITUDE] = str(station.lng) + attrs[ATTR_LATITUDE] = station.lat + attrs[ATTR_LONGITUDE] = station.lng self._attr_extra_state_attributes = attrs @property From 95b858648eb30eac0def1873b91de3d00ef1ec4e Mon Sep 17 00:00:00 2001 From: Joost Lekkerkerker Date: Sun, 21 Apr 2024 17:36:19 +0200 Subject: [PATCH 221/426] Refactor Totalconnect binary sensor (#115629) --- .../components/totalconnect/binary_sensor.py | 289 ++++++++++-------- 1 file 
changed, 159 insertions(+), 130 deletions(-) diff --git a/homeassistant/components/totalconnect/binary_sensor.py b/homeassistant/components/totalconnect/binary_sensor.py index 6043d15d2d4..696f0dbcf6f 100644 --- a/homeassistant/components/totalconnect/binary_sensor.py +++ b/homeassistant/components/totalconnect/binary_sensor.py @@ -1,7 +1,12 @@ """Interfaces with TotalConnect sensors.""" +from collections.abc import Callable +from dataclasses import dataclass import logging +from total_connect_client.location import TotalConnectLocation +from total_connect_client.zone import TotalConnectZone + from homeassistant.components.binary_sensor import ( BinarySensorDeviceClass, BinarySensorEntity, @@ -12,7 +17,9 @@ from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.update_coordinator import CoordinatorEntity +from . 
import TotalConnectDataUpdateCoordinator from .const import DOMAIN LOW_BATTERY = "low_battery" @@ -23,172 +30,194 @@ ZONE = "zone" _LOGGER = logging.getLogger(__name__) +@dataclass(frozen=True, kw_only=True) +class TotalConnectZoneBinarySensorEntityDescription(BinarySensorEntityDescription): + """Describes TotalConnect binary sensor entity.""" + + device_class_fn: Callable[[TotalConnectZone], BinarySensorDeviceClass] | None = None + is_on_fn: Callable[[TotalConnectZone], bool] + + +def get_security_zone_device_class(zone: TotalConnectZone) -> BinarySensorDeviceClass: + """Return the device class of a TotalConnect security zone.""" + if zone.is_type_fire(): + return BinarySensorDeviceClass.SMOKE + if zone.is_type_carbon_monoxide(): + return BinarySensorDeviceClass.GAS + if zone.is_type_motion(): + return BinarySensorDeviceClass.MOTION + if zone.is_type_medical(): + return BinarySensorDeviceClass.SAFETY + if zone.is_type_temperature(): + return BinarySensorDeviceClass.PROBLEM + return BinarySensorDeviceClass.DOOR + + +SECURITY_BINARY_SENSOR = TotalConnectZoneBinarySensorEntityDescription( + key=ZONE, + name="", + device_class_fn=get_security_zone_device_class, + is_on_fn=lambda zone: zone.is_faulted() or zone.is_triggered(), +) + +NO_BUTTON_BINARY_SENSORS: tuple[TotalConnectZoneBinarySensorEntityDescription, ...] 
= ( + TotalConnectZoneBinarySensorEntityDescription( + key=LOW_BATTERY, + device_class=BinarySensorDeviceClass.BATTERY, + entity_category=EntityCategory.DIAGNOSTIC, + name=" low battery", + is_on_fn=lambda zone: zone.is_low_battery(), + ), + TotalConnectZoneBinarySensorEntityDescription( + key=TAMPER, + device_class=BinarySensorDeviceClass.TAMPER, + entity_category=EntityCategory.DIAGNOSTIC, + name=f" {TAMPER}", + is_on_fn=lambda zone: zone.is_tampered(), + ), +) + + +@dataclass(frozen=True, kw_only=True) +class TotalConnectAlarmBinarySensorEntityDescription(BinarySensorEntityDescription): + """Describes TotalConnect binary sensor entity.""" + + is_on_fn: Callable[[TotalConnectLocation], bool] + + +LOCATION_BINARY_SENSORS: tuple[TotalConnectAlarmBinarySensorEntityDescription, ...] = ( + TotalConnectAlarmBinarySensorEntityDescription( + key=LOW_BATTERY, + device_class=BinarySensorDeviceClass.BATTERY, + entity_category=EntityCategory.DIAGNOSTIC, + name=" low battery", + is_on_fn=lambda location: location.is_low_battery(), + ), + TotalConnectAlarmBinarySensorEntityDescription( + key=TAMPER, + device_class=BinarySensorDeviceClass.TAMPER, + entity_category=EntityCategory.DIAGNOSTIC, + name=f" {TAMPER}", + is_on_fn=lambda location: location.is_cover_tampered(), + ), + TotalConnectAlarmBinarySensorEntityDescription( + key=POWER, + device_class=BinarySensorDeviceClass.POWER, + entity_category=EntityCategory.DIAGNOSTIC, + name=f" {POWER}", + is_on_fn=lambda location: location.is_ac_loss(), + ), +) + + async def async_setup_entry( hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback ) -> None: """Set up TotalConnect device sensors based on a config entry.""" sensors: list = [] - client_locations = hass.data[DOMAIN][entry.entry_id].client.locations + coordinator: TotalConnectDataUpdateCoordinator = hass.data[DOMAIN][entry.entry_id] + + client_locations = coordinator.client.locations for location_id, location in client_locations.items(): - 
sensors.append(TotalConnectAlarmLowBatteryBinarySensor(location)) - sensors.append(TotalConnectAlarmTamperBinarySensor(location)) - sensors.append(TotalConnectAlarmPowerBinarySensor(location)) + sensors.extend( + TotalConnectAlarmBinarySensor(coordinator, description, location) + for description in LOCATION_BINARY_SENSORS + ) for zone in location.zones.values(): - sensors.append(TotalConnectZoneSecurityBinarySensor(location_id, zone)) + sensors.append( + TotalConnectZoneBinarySensor( + coordinator, SECURITY_BINARY_SENSOR, location_id, zone + ) + ) if not zone.is_type_button(): - sensors.append(TotalConnectLowBatteryBinarySensor(location_id, zone)) - sensors.append(TotalConnectTamperBinarySensor(location_id, zone)) + sensors.extend( + TotalConnectZoneBinarySensor( + coordinator, + description, + location_id, + zone, + ) + for description in NO_BUTTON_BINARY_SENSORS + ) - async_add_entities(sensors, True) + async_add_entities(sensors) -class TotalConnectZoneBinarySensor(BinarySensorEntity): +class TotalConnectZoneBinarySensor( + CoordinatorEntity[TotalConnectDataUpdateCoordinator], BinarySensorEntity +): """Represent an TotalConnect zone.""" - def __init__(self, location_id, zone): + entity_description: TotalConnectZoneBinarySensorEntityDescription + + def __init__( + self, + coordinator: TotalConnectDataUpdateCoordinator, + entity_description: TotalConnectZoneBinarySensorEntityDescription, + location_id: str, + zone: TotalConnectZone, + ) -> None: """Initialize the TotalConnect status.""" + super().__init__(coordinator) + self.entity_description = entity_description self._location_id = location_id self._zone = zone - self._attr_name = f"{zone.description}{self.entity_description.name}" - self._attr_unique_id = ( - f"{location_id}_{zone.zoneid}_{self.entity_description.key}" - ) + self._attr_name = f"{zone.description}{entity_description.name}" + self._attr_unique_id = f"{location_id}_{zone.zoneid}_{entity_description.key}" self._attr_is_on = None 
self._attr_extra_state_attributes = { - "zone_id": self._zone.zoneid, + "zone_id": zone.zoneid, "location_id": self._location_id, - "partition": self._zone.partition, + "partition": zone.partition, } - - @property - def device_info(self) -> DeviceInfo: - """Return device info.""" - identifier = self._zone.sensor_serial_number or f"zone_{self._zone.zoneid}" - return DeviceInfo( - name=self._zone.description, + identifier = zone.sensor_serial_number or f"zone_{zone.zoneid}" + self._attr_device_info = DeviceInfo( + name=zone.description, identifiers={(DOMAIN, identifier)}, - serial_number=self._zone.sensor_serial_number, + serial_number=zone.sensor_serial_number, ) - -class TotalConnectZoneSecurityBinarySensor(TotalConnectZoneBinarySensor): - """Represent an TotalConnect security zone.""" - - entity_description: BinarySensorEntityDescription = BinarySensorEntityDescription( - key=ZONE, name="" - ) + @property + def is_on(self) -> bool: + """Return the state of the entity.""" + return self.entity_description.is_on_fn(self._zone) @property - def device_class(self): + def device_class(self) -> BinarySensorDeviceClass | None: """Return the class of this zone.""" - if self._zone.is_type_fire(): - return BinarySensorDeviceClass.SMOKE - if self._zone.is_type_carbon_monoxide(): - return BinarySensorDeviceClass.GAS - if self._zone.is_type_motion(): - return BinarySensorDeviceClass.MOTION - if self._zone.is_type_medical(): - return BinarySensorDeviceClass.SAFETY - if self._zone.is_type_temperature(): - return BinarySensorDeviceClass.PROBLEM - return BinarySensorDeviceClass.DOOR - - def update(self): - """Return the state of the device.""" - if self._zone.is_faulted() or self._zone.is_triggered(): - self._attr_is_on = True - else: - self._attr_is_on = False + if self.entity_description.device_class_fn: + return self.entity_description.device_class_fn(self._zone) + return super().device_class -class TotalConnectLowBatteryBinarySensor(TotalConnectZoneBinarySensor): - """Represent 
an TotalConnect zone low battery status.""" +class TotalConnectAlarmBinarySensor( + CoordinatorEntity[TotalConnectDataUpdateCoordinator], BinarySensorEntity +): + """Represent a TotalConnect alarm device binary sensors.""" - entity_description: BinarySensorEntityDescription = BinarySensorEntityDescription( - key=LOW_BATTERY, - device_class=BinarySensorDeviceClass.BATTERY, - entity_category=EntityCategory.DIAGNOSTIC, - name=" low battery", - ) + entity_description: TotalConnectAlarmBinarySensorEntityDescription - def update(self): - """Return the state of the device.""" - self._attr_is_on = self._zone.is_low_battery() - - -class TotalConnectTamperBinarySensor(TotalConnectZoneBinarySensor): - """Represent an TotalConnect zone tamper status.""" - - entity_description: BinarySensorEntityDescription = BinarySensorEntityDescription( - key=TAMPER, - device_class=BinarySensorDeviceClass.TAMPER, - entity_category=EntityCategory.DIAGNOSTIC, - name=f" {TAMPER}", - ) - - def update(self): - """Return the state of the device.""" - self._attr_is_on = self._zone.is_tampered() - - -class TotalConnectAlarmBinarySensor(BinarySensorEntity): - """Represent an TotalConnect alarm device binary sensors.""" - - def __init__(self, location): + def __init__( + self, + coordinator: TotalConnectDataUpdateCoordinator, + entity_description: TotalConnectAlarmBinarySensorEntityDescription, + location: TotalConnectLocation, + ) -> None: """Initialize the TotalConnect alarm device binary sensor.""" + super().__init__(coordinator) + self.entity_description = entity_description self._location = location - self._attr_name = f"{location.location_name}{self.entity_description.name}" - self._attr_unique_id = f"{location.location_id}_{self.entity_description.key}" - self._attr_is_on = None + self._attr_name = f"{location.location_name}{entity_description.name}" + self._attr_unique_id = f"{location.location_id}_{entity_description.key}" self._attr_extra_state_attributes = { - "location_id": 
self._location.location_id, + "location_id": location.location_id, } - -class TotalConnectAlarmLowBatteryBinarySensor(TotalConnectAlarmBinarySensor): - """Represent an TotalConnect Alarm low battery status.""" - - entity_description: BinarySensorEntityDescription = BinarySensorEntityDescription( - key=LOW_BATTERY, - device_class=BinarySensorDeviceClass.BATTERY, - entity_category=EntityCategory.DIAGNOSTIC, - name=" low battery", - ) - - def update(self): - """Return the state of the device.""" - self._attr_is_on = self._location.is_low_battery() - - -class TotalConnectAlarmTamperBinarySensor(TotalConnectAlarmBinarySensor): - """Represent an TotalConnect alarm tamper status.""" - - entity_description: BinarySensorEntityDescription = BinarySensorEntityDescription( - key=TAMPER, - device_class=BinarySensorDeviceClass.TAMPER, - entity_category=EntityCategory.DIAGNOSTIC, - name=f" {TAMPER}", - ) - - def update(self): - """Return the state of the device.""" - self._attr_is_on = self._location.is_cover_tampered() - - -class TotalConnectAlarmPowerBinarySensor(TotalConnectAlarmBinarySensor): - """Represent an TotalConnect alarm power status.""" - - entity_description: BinarySensorEntityDescription = BinarySensorEntityDescription( - key=POWER, - device_class=BinarySensorDeviceClass.POWER, - entity_category=EntityCategory.DIAGNOSTIC, - name=f" {POWER}", - ) - - def update(self): - """Return the state of the device.""" - self._attr_is_on = not self._location.is_ac_loss() + @property + def is_on(self) -> bool: + """Return the state of the entity.""" + return self.entity_description.is_on_fn(self._location) From 83370a5bde1c386c668431d97b851ac415956227 Mon Sep 17 00:00:00 2001 From: Robert Svensson Date: Sun, 21 Apr 2024 20:27:44 +0200 Subject: [PATCH 222/426] Remove sensor exposing UniFi WLAN password (#115929) --- homeassistant/components/unifi/sensor.py | 13 ----- tests/components/unifi/test_sensor.py | 71 ------------------------ 2 files changed, 84 deletions(-) diff --git 
a/homeassistant/components/unifi/sensor.py b/homeassistant/components/unifi/sensor.py index 360f40384c9..7d9720cde1a 100644 --- a/homeassistant/components/unifi/sensor.py +++ b/homeassistant/components/unifi/sensor.py @@ -350,19 +350,6 @@ ENTITY_DESCRIPTIONS: tuple[UnifiSensorEntityDescription, ...] = ( value_fn=async_device_state_value_fn, options=list(DEVICE_STATES.values()), ), - UnifiSensorEntityDescription[Wlans, Wlan]( - key="WLAN password", - entity_category=EntityCategory.DIAGNOSTIC, - entity_registry_enabled_default=False, - api_handler_fn=lambda api: api.wlans, - available_fn=async_wlan_available_fn, - device_info_fn=async_wlan_device_info_fn, - name_fn=lambda wlan: "Password", - object_fn=lambda api, obj_id: api.wlans[obj_id], - supported_fn=lambda hub, obj_id: hub.api.wlans[obj_id].x_passphrase is not None, - unique_id_fn=lambda hub, obj_id: f"password-{obj_id}", - value_fn=lambda hub, obj: obj.x_passphrase, - ), UnifiSensorEntityDescription[Devices, Device]( key="Device CPU utilization", entity_category=EntityCategory.DIAGNOSTIC, diff --git a/tests/components/unifi/test_sensor.py b/tests/components/unifi/test_sensor.py index e8f9f763409..e3b4ddd3b63 100644 --- a/tests/components/unifi/test_sensor.py +++ b/tests/components/unifi/test_sensor.py @@ -1000,77 +1000,6 @@ async def test_device_state( assert hass.states.get("sensor.device_state").state == DEVICE_STATES[i] -async def test_wlan_password( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - aioclient_mock: AiohttpClientMocker, - mock_unifi_websocket, - websocket_mock, -) -> None: - """Test the WLAN password sensor behavior.""" - await setup_unifi_integration(hass, aioclient_mock, wlans_response=[WLAN]) - - sensor_password = "sensor.ssid_1_password" - password = "password" - new_password = "new_password" - - ent_reg_entry = entity_registry.async_get(sensor_password) - assert ent_reg_entry.unique_id == "password-012345678910111213141516" - assert ent_reg_entry.disabled_by == 
RegistryEntryDisabler.INTEGRATION - assert ent_reg_entry.entity_category is EntityCategory.DIAGNOSTIC - - # Enable entity - entity_registry.async_update_entity(entity_id=sensor_password, disabled_by=None) - await hass.async_block_till_done() - - async_fire_time_changed( - hass, - dt_util.utcnow() + timedelta(seconds=RELOAD_AFTER_UPDATE_DELAY + 1), - ) - await hass.async_block_till_done() - - # Validate state object - wlan_password_sensor_1 = hass.states.get(sensor_password) - assert wlan_password_sensor_1.state == password - - # Update state object - same password - no change to state - mock_unifi_websocket(message=MessageKey.WLAN_CONF_UPDATED, data=WLAN) - await hass.async_block_till_done() - wlan_password_sensor_2 = hass.states.get(sensor_password) - assert wlan_password_sensor_1.state == wlan_password_sensor_2.state - - # Update state object - changed password - new state - data = deepcopy(WLAN) - data["x_passphrase"] = new_password - mock_unifi_websocket(message=MessageKey.WLAN_CONF_UPDATED, data=data) - await hass.async_block_till_done() - wlan_password_sensor_3 = hass.states.get(sensor_password) - assert wlan_password_sensor_1.state != wlan_password_sensor_3.state - - # Availability signaling - - # Controller disconnects - await websocket_mock.disconnect() - assert hass.states.get(sensor_password).state == STATE_UNAVAILABLE - - # Controller reconnects - await websocket_mock.reconnect() - assert hass.states.get(sensor_password).state == new_password - - # WLAN gets disabled - wlan_1 = deepcopy(WLAN) - wlan_1["enabled"] = False - mock_unifi_websocket(message=MessageKey.WLAN_CONF_UPDATED, data=wlan_1) - await hass.async_block_till_done() - assert hass.states.get(sensor_password).state == STATE_UNAVAILABLE - - # WLAN gets re-enabled - wlan_1["enabled"] = True - mock_unifi_websocket(message=MessageKey.WLAN_CONF_UPDATED, data=wlan_1) - await hass.async_block_till_done() - assert hass.states.get(sensor_password).state == password - - async def 
test_device_system_stats( hass: HomeAssistant, entity_registry: er.EntityRegistry, From ddb415b77e655e6b56366aef8fc0ffa9ff997f7b Mon Sep 17 00:00:00 2001 From: Austin Mroczek Date: Sun, 21 Apr 2024 11:27:50 -0700 Subject: [PATCH 223/426] Bump total_connect_client to 2023.12.1 (#115928) bump total_connect_client to 2023.12.1 --- homeassistant/components/totalconnect/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/totalconnect/manifest.json b/homeassistant/components/totalconnect/manifest.json index 183919f05f2..d1afb01210d 100644 --- a/homeassistant/components/totalconnect/manifest.json +++ b/homeassistant/components/totalconnect/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/totalconnect", "iot_class": "cloud_polling", "loggers": ["total_connect_client"], - "requirements": ["total-connect-client==2023.2"] + "requirements": ["total-connect-client==2023.12.1"] } diff --git a/requirements_all.txt b/requirements_all.txt index 1f066526f58..4a4ef23b583 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2731,7 +2731,7 @@ tololib==1.1.0 toonapi==0.3.0 # homeassistant.components.totalconnect -total-connect-client==2023.2 +total-connect-client==2023.12.1 # homeassistant.components.tplink_lte tp-connected==0.0.4 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 91a3c65c3fd..b935fcbaf42 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2108,7 +2108,7 @@ tololib==1.1.0 toonapi==0.3.0 # homeassistant.components.totalconnect -total-connect-client==2023.2 +total-connect-client==2023.12.1 # homeassistant.components.tplink_omada tplink-omada-client==1.3.12 From 2620443a888ab683fd8dd4c89e13e885b8b718c2 Mon Sep 17 00:00:00 2001 From: mkmer Date: Sun, 21 Apr 2024 15:19:48 -0400 Subject: [PATCH 224/426] Add error translations to Blink (#115924) * Add translations 
Catch timeout in snap * Grammer cleanup --- homeassistant/components/blink/camera.py | 19 +++++++++++++++---- homeassistant/components/blink/strings.json | 21 ++++++++++++++++++--- homeassistant/components/blink/switch.py | 6 ++++-- 3 files changed, 37 insertions(+), 9 deletions(-) diff --git a/homeassistant/components/blink/camera.py b/homeassistant/components/blink/camera.py index 318bb18772a..7461d7b2a2b 100644 --- a/homeassistant/components/blink/camera.py +++ b/homeassistant/components/blink/camera.py @@ -3,7 +3,6 @@ from __future__ import annotations from collections.abc import Mapping -import contextlib import logging from typing import Any @@ -97,7 +96,10 @@ class BlinkCamera(CoordinatorEntity[BlinkUpdateCoordinator], Camera): await self._camera.async_arm(True) except TimeoutError as er: - raise HomeAssistantError("Blink failed to arm camera") from er + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="failed_arm", + ) from er self._camera.motion_enabled = True await self.coordinator.async_refresh() @@ -107,7 +109,10 @@ class BlinkCamera(CoordinatorEntity[BlinkUpdateCoordinator], Camera): try: await self._camera.async_arm(False) except TimeoutError as er: - raise HomeAssistantError("Blink failed to disarm camera") from er + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="failed_disarm", + ) from er self._camera.motion_enabled = False await self.coordinator.async_refresh() @@ -124,8 +129,14 @@ class BlinkCamera(CoordinatorEntity[BlinkUpdateCoordinator], Camera): async def trigger_camera(self) -> None: """Trigger camera to take a snapshot.""" - with contextlib.suppress(TimeoutError): + try: await self._camera.snap_picture() + except TimeoutError as er: + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="failed_snap", + ) from er + self.async_write_ha_state() def camera_image( diff --git a/homeassistant/components/blink/strings.json b/homeassistant/components/blink/strings.json index 
2260acede1c..2c0be3d972c 100644 --- a/homeassistant/components/blink/strings.json +++ b/homeassistant/components/blink/strings.json @@ -106,16 +106,31 @@ }, "exceptions": { "integration_not_found": { - "message": "Integration \"{target}\" not found in registry" + "message": "Integration \"{target}\" not found in registry." }, "no_path": { "message": "Can't write to directory {target}, no access to path!" }, "cant_write": { - "message": "Can't write to file" + "message": "Can't write to file." }, "not_loaded": { - "message": "{target} is not loaded" + "message": "{target} is not loaded." + }, + "failed_arm": { + "message": "Blink failed to arm camera." + }, + "failed_disarm": { + "message": "Blink failed to disarm camera." + }, + "failed_snap": { + "message": "Blink failed to snap a picture." + }, + "failed_arm_motion": { + "message": "Blink failed to arm camera motion detection." + }, + "failed_disarm_motion": { + "message": "Blink failed to disarm camera motion detection." } }, "issues": { diff --git a/homeassistant/components/blink/switch.py b/homeassistant/components/blink/switch.py index 1bfd257ecbe..ab9b825ded1 100644 --- a/homeassistant/components/blink/switch.py +++ b/homeassistant/components/blink/switch.py @@ -75,7 +75,8 @@ class BlinkSwitch(CoordinatorEntity[BlinkUpdateCoordinator], SwitchEntity): except TimeoutError as er: raise HomeAssistantError( - "Blink failed to arm camera motion detection" + translation_domain=DOMAIN, + translation_key="failed_arm_motion", ) from er await self.coordinator.async_refresh() @@ -87,7 +88,8 @@ class BlinkSwitch(CoordinatorEntity[BlinkUpdateCoordinator], SwitchEntity): except TimeoutError as er: raise HomeAssistantError( - "Blink failed to dis-arm camera motion detection" + translation_domain=DOMAIN, + translation_key="failed_disarm_motion", ) from er await self.coordinator.async_refresh() From 5a24690d795d9c233541e3fc55d0a733a12c90dc Mon Sep 17 00:00:00 2001 From: Michael <35783820+mib1185@users.noreply.github.com> 
Date: Sun, 21 Apr 2024 22:26:57 +0200 Subject: [PATCH 225/426] Make use of snapshot testing in Synology DSM (#115931) --- .../snapshots/test_config_flow.ambr | 86 +++++++++++++++++ .../synology_dsm/test_config_flow.py | 95 +++++-------------- 2 files changed, 110 insertions(+), 71 deletions(-) create mode 100644 tests/components/synology_dsm/snapshots/test_config_flow.ambr diff --git a/tests/components/synology_dsm/snapshots/test_config_flow.ambr b/tests/components/synology_dsm/snapshots/test_config_flow.ambr new file mode 100644 index 00000000000..807ec764e52 --- /dev/null +++ b/tests/components/synology_dsm/snapshots/test_config_flow.ambr @@ -0,0 +1,86 @@ +# serializer version: 1 +# name: test_discovered_via_zeroconf + dict({ + 'host': '192.168.1.5', + 'mac': list([ + '00-11-32-XX-XX-59', + '00-11-32-XX-XX-5A', + ]), + 'password': 'password', + 'port': 5001, + 'ssl': True, + 'username': 'Home_Assistant', + 'verify_ssl': False, + }) +# --- +# name: test_form_ssdp + dict({ + 'host': '192.168.1.5', + 'mac': list([ + '00-11-32-XX-XX-59', + '00-11-32-XX-XX-5A', + ]), + 'password': 'password', + 'port': 5001, + 'ssl': True, + 'username': 'Home_Assistant', + 'verify_ssl': False, + }) +# --- +# name: test_user + dict({ + 'host': 'nas.meontheinternet.com', + 'mac': list([ + '00-11-32-XX-XX-59', + '00-11-32-XX-XX-5A', + ]), + 'password': 'password', + 'port': 1234, + 'ssl': True, + 'username': 'Home_Assistant', + 'verify_ssl': False, + }) +# --- +# name: test_user.1 + dict({ + 'host': 'nas.meontheinternet.com', + 'mac': list([ + '00-11-32-XX-XX-59', + '00-11-32-XX-XX-5A', + ]), + 'password': 'password', + 'port': 5000, + 'ssl': False, + 'username': 'Home_Assistant', + 'verify_ssl': False, + }) +# --- +# name: test_user_2sa + dict({ + 'device_token': 'Dév!cè_T0k€ñ', + 'host': 'nas.meontheinternet.com', + 'mac': list([ + '00-11-32-XX-XX-59', + '00-11-32-XX-XX-5A', + ]), + 'password': 'password', + 'port': 5001, + 'ssl': True, + 'username': 'Home_Assistant', + 'verify_ssl': 
False, + }) +# --- +# name: test_user_vdsm + dict({ + 'host': 'nas.meontheinternet.com', + 'mac': list([ + '00-11-32-XX-XX-59', + '00-11-32-XX-XX-5A', + ]), + 'password': 'password', + 'port': 1234, + 'ssl': True, + 'username': 'Home_Assistant', + 'verify_ssl': False, + }) +# --- diff --git a/tests/components/synology_dsm/test_config_flow.py b/tests/components/synology_dsm/test_config_flow.py index 483e22f2359..85814f84aad 100644 --- a/tests/components/synology_dsm/test_config_flow.py +++ b/tests/components/synology_dsm/test_config_flow.py @@ -11,19 +11,15 @@ from synology_dsm.exceptions import ( SynologyDSMLoginInvalidException, SynologyDSMRequestException, ) +from syrupy import SnapshotAssertion from homeassistant.components import ssdp, zeroconf from homeassistant.components.synology_dsm.config_flow import CONF_OTP_CODE from homeassistant.components.synology_dsm.const import ( CONF_SNAPSHOT_QUALITY, - CONF_VOLUMES, - DEFAULT_PORT, - DEFAULT_PORT_SSL, DEFAULT_SCAN_INTERVAL, DEFAULT_SNAPSHOT_QUALITY, DEFAULT_TIMEOUT, - DEFAULT_USE_SSL, - DEFAULT_VERIFY_SSL, DOMAIN, ) from homeassistant.config_entries import ( @@ -33,7 +29,6 @@ from homeassistant.config_entries import ( SOURCE_ZEROCONF, ) from homeassistant.const import ( - CONF_DISKS, CONF_HOST, CONF_MAC, CONF_PASSWORD, @@ -149,7 +144,11 @@ def mock_controller_service_failed(): @pytest.mark.usefixtures("mock_setup_entry") -async def test_user(hass: HomeAssistant, service: MagicMock) -> None: +async def test_user( + hass: HomeAssistant, + service: MagicMock, + snapshot: SnapshotAssertion, +) -> None: """Test user config.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_USER}, data=None @@ -177,16 +176,7 @@ async def test_user(hass: HomeAssistant, service: MagicMock) -> None: assert result["type"] is FlowResultType.CREATE_ENTRY assert result["result"].unique_id == SERIAL assert result["title"] == HOST - assert result["data"][CONF_HOST] == HOST - assert 
result["data"][CONF_PORT] == PORT - assert result["data"][CONF_SSL] == USE_SSL - assert result["data"][CONF_VERIFY_SSL] == VERIFY_SSL - assert result["data"][CONF_USERNAME] == USERNAME - assert result["data"][CONF_PASSWORD] == PASSWORD - assert result["data"][CONF_MAC] == MACS - assert result["data"].get("device_token") is None - assert result["data"].get(CONF_DISKS) is None - assert result["data"].get(CONF_VOLUMES) is None + assert result["data"] == snapshot service.information.serial = SERIAL_2 with patch( @@ -208,20 +198,13 @@ async def test_user(hass: HomeAssistant, service: MagicMock) -> None: assert result["type"] is FlowResultType.CREATE_ENTRY assert result["result"].unique_id == SERIAL_2 assert result["title"] == HOST - assert result["data"][CONF_HOST] == HOST - assert result["data"][CONF_PORT] == DEFAULT_PORT - assert not result["data"][CONF_SSL] - assert result["data"][CONF_VERIFY_SSL] == VERIFY_SSL - assert result["data"][CONF_USERNAME] == USERNAME - assert result["data"][CONF_PASSWORD] == PASSWORD - assert result["data"][CONF_MAC] == MACS - assert result["data"].get("device_token") is None - assert result["data"].get(CONF_DISKS) is None - assert result["data"].get(CONF_VOLUMES) is None + assert result["data"] == snapshot @pytest.mark.usefixtures("mock_setup_entry") -async def test_user_2sa(hass: HomeAssistant, service_2sa: MagicMock) -> None: +async def test_user_2sa( + hass: HomeAssistant, service_2sa: MagicMock, snapshot: SnapshotAssertion +) -> None: """Test user with 2sa authentication config.""" with patch( "homeassistant.components.synology_dsm.config_flow.SynologyDSM", @@ -261,20 +244,13 @@ async def test_user_2sa(hass: HomeAssistant, service_2sa: MagicMock) -> None: assert result["type"] is FlowResultType.CREATE_ENTRY assert result["result"].unique_id == SERIAL assert result["title"] == HOST - assert result["data"][CONF_HOST] == HOST - assert result["data"][CONF_PORT] == DEFAULT_PORT_SSL - assert result["data"][CONF_SSL] == DEFAULT_USE_SSL - 
assert result["data"][CONF_VERIFY_SSL] == DEFAULT_VERIFY_SSL - assert result["data"][CONF_USERNAME] == USERNAME - assert result["data"][CONF_PASSWORD] == PASSWORD - assert result["data"][CONF_MAC] == MACS - assert result["data"].get("device_token") == DEVICE_TOKEN - assert result["data"].get(CONF_DISKS) is None - assert result["data"].get(CONF_VOLUMES) is None + assert result["data"] == snapshot @pytest.mark.usefixtures("mock_setup_entry") -async def test_user_vdsm(hass: HomeAssistant, service_vdsm: MagicMock) -> None: +async def test_user_vdsm( + hass: HomeAssistant, service_vdsm: MagicMock, snapshot: SnapshotAssertion +) -> None: """Test user config.""" with patch( "homeassistant.components.synology_dsm.config_flow.SynologyDSM", @@ -306,16 +282,7 @@ async def test_user_vdsm(hass: HomeAssistant, service_vdsm: MagicMock) -> None: assert result["type"] is FlowResultType.CREATE_ENTRY assert result["result"].unique_id == SERIAL assert result["title"] == HOST - assert result["data"][CONF_HOST] == HOST - assert result["data"][CONF_PORT] == PORT - assert result["data"][CONF_SSL] == USE_SSL - assert result["data"][CONF_VERIFY_SSL] == VERIFY_SSL - assert result["data"][CONF_USERNAME] == USERNAME - assert result["data"][CONF_PASSWORD] == PASSWORD - assert result["data"][CONF_MAC] == MACS - assert result["data"].get("device_token") is None - assert result["data"].get(CONF_DISKS) is None - assert result["data"].get(CONF_VOLUMES) is None + assert result["data"] == snapshot @pytest.mark.usefixtures("mock_setup_entry") @@ -467,7 +434,9 @@ async def test_missing_data_after_login( @pytest.mark.usefixtures("mock_setup_entry") -async def test_form_ssdp(hass: HomeAssistant, service: MagicMock) -> None: +async def test_form_ssdp( + hass: HomeAssistant, service: MagicMock, snapshot: SnapshotAssertion +) -> None: """Test we can setup from ssdp.""" result = await hass.config_entries.flow.async_init( @@ -498,16 +467,7 @@ async def test_form_ssdp(hass: HomeAssistant, service: MagicMock) -> 
None: assert result["type"] is FlowResultType.CREATE_ENTRY assert result["result"].unique_id == SERIAL assert result["title"] == "mydsm" - assert result["data"][CONF_HOST] == "192.168.1.5" - assert result["data"][CONF_PORT] == 5001 - assert result["data"][CONF_SSL] == DEFAULT_USE_SSL - assert result["data"][CONF_VERIFY_SSL] == DEFAULT_VERIFY_SSL - assert result["data"][CONF_USERNAME] == USERNAME - assert result["data"][CONF_PASSWORD] == PASSWORD - assert result["data"][CONF_MAC] == MACS - assert result["data"].get("device_token") is None - assert result["data"].get(CONF_DISKS) is None - assert result["data"].get(CONF_VOLUMES) is None + assert result["data"] == snapshot @pytest.mark.usefixtures("mock_setup_entry") @@ -664,7 +624,9 @@ async def test_options_flow(hass: HomeAssistant, service: MagicMock) -> None: @pytest.mark.usefixtures("mock_setup_entry") -async def test_discovered_via_zeroconf(hass: HomeAssistant, service: MagicMock) -> None: +async def test_discovered_via_zeroconf( + hass: HomeAssistant, service: MagicMock, snapshot: SnapshotAssertion +) -> None: """Test we can setup from zeroconf.""" result = await hass.config_entries.flow.async_init( @@ -697,16 +659,7 @@ async def test_discovered_via_zeroconf(hass: HomeAssistant, service: MagicMock) assert result["type"] is FlowResultType.CREATE_ENTRY assert result["result"].unique_id == SERIAL assert result["title"] == "mydsm" - assert result["data"][CONF_HOST] == "192.168.1.5" - assert result["data"][CONF_PORT] == 5001 - assert result["data"][CONF_SSL] == DEFAULT_USE_SSL - assert result["data"][CONF_VERIFY_SSL] == DEFAULT_VERIFY_SSL - assert result["data"][CONF_USERNAME] == USERNAME - assert result["data"][CONF_PASSWORD] == PASSWORD - assert result["data"][CONF_MAC] == MACS - assert result["data"].get("device_token") is None - assert result["data"].get(CONF_DISKS) is None - assert result["data"].get(CONF_VOLUMES) is None + assert result["data"] == snapshot @pytest.mark.usefixtures("mock_setup_entry") From 
423544401ea191482c56fd03b7954c22c7c8ea6c Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Sun, 21 Apr 2024 22:33:58 +0200 Subject: [PATCH 226/426] Convert MQTT to use asyncio (#115910) --- homeassistant/components/mqtt/__init__.py | 14 +- homeassistant/components/mqtt/client.py | 292 +++++++++++++++++----- tests/common.py | 2 +- tests/components/mqtt/test_init.py | 206 ++++++++++++++- tests/components/tasmota/test_common.py | 12 +- tests/conftest.py | 28 ++- 6 files changed, 464 insertions(+), 90 deletions(-) diff --git a/homeassistant/components/mqtt/__init__.py b/homeassistant/components/mqtt/__init__.py index 28cb7d0944b..cc1ae3ddce1 100644 --- a/homeassistant/components/mqtt/__init__.py +++ b/homeassistant/components/mqtt/__init__.py @@ -265,7 +265,9 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: conf: dict[str, Any] mqtt_data: MqttData - async def _setup_client() -> tuple[MqttData, dict[str, Any]]: + async def _setup_client( + client_available: asyncio.Future[bool], + ) -> tuple[MqttData, dict[str, Any]]: """Set up the MQTT client.""" # Fetch configuration conf = dict(entry.data) @@ -294,7 +296,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: entry.add_update_listener(_async_config_entry_updated) ) - await mqtt_data.client.async_connect() + await mqtt_data.client.async_connect(client_available) return (mqtt_data, conf) client_available: asyncio.Future[bool] @@ -303,13 +305,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: else: client_available = hass.data[DATA_MQTT_AVAILABLE] - setup_ok: bool = False - try: - mqtt_data, conf = await _setup_client() - setup_ok = True - finally: - if not client_available.done(): - client_available.set_result(setup_ok) + mqtt_data, conf = await _setup_client(client_available) async def async_publish_service(call: ServiceCall) -> None: """Handle MQTT publish service calls.""" diff --git 
a/homeassistant/components/mqtt/client.py b/homeassistant/components/mqtt/client.py index 978123e169c..021ecf1cc36 100644 --- a/homeassistant/components/mqtt/client.py +++ b/homeassistant/components/mqtt/client.py @@ -3,12 +3,14 @@ from __future__ import annotations import asyncio -from collections.abc import Callable, Coroutine, Iterable +from collections.abc import AsyncGenerator, Callable, Coroutine, Iterable +import contextlib from dataclasses import dataclass -from functools import lru_cache +from functools import lru_cache, partial from itertools import chain, groupby import logging from operator import attrgetter +import socket import ssl import time from typing import TYPE_CHECKING, Any @@ -35,7 +37,7 @@ from homeassistant.core import ( callback, ) from homeassistant.exceptions import HomeAssistantError -from homeassistant.helpers.dispatcher import dispatcher_send +from homeassistant.helpers.dispatcher import async_dispatcher_send from homeassistant.helpers.typing import ConfigType from homeassistant.loader import bind_hass from homeassistant.util import dt as dt_util @@ -92,6 +94,9 @@ INITIAL_SUBSCRIBE_COOLDOWN = 1.0 SUBSCRIBE_COOLDOWN = 0.1 UNSUBSCRIBE_COOLDOWN = 0.1 TIMEOUT_ACK = 10 +RECONNECT_INTERVAL_SECONDS = 10 + +SocketType = socket.socket | ssl.SSLSocket | Any SubscribePayloadType = str | bytes # Only bytes if encoding is None @@ -258,7 +263,9 @@ class MqttClientSetup: # However, that feature is not mandatory so we generate our own. 
client_id = mqtt.base62(uuid.uuid4().int, padding=22) transport = config.get(CONF_TRANSPORT, DEFAULT_TRANSPORT) - self._client = mqtt.Client(client_id, protocol=proto, transport=transport) + self._client = mqtt.Client( + client_id, protocol=proto, transport=transport, reconnect_on_failure=False + ) # Enable logging self._client.enable_logger() @@ -404,12 +411,17 @@ class MQTT: self._ha_started = asyncio.Event() self._cleanup_on_unload: list[Callable[[], None]] = [] - self._paho_lock = asyncio.Lock() # Prevents parallel calls to the MQTT client + self._connection_lock = asyncio.Lock() self._pending_operations: dict[int, asyncio.Event] = {} self._pending_operations_condition = asyncio.Condition() self._subscribe_debouncer = EnsureJobAfterCooldown( INITIAL_SUBSCRIBE_COOLDOWN, self._async_perform_subscriptions ) + self._misc_task: asyncio.Task | None = None + self._reconnect_task: asyncio.Task | None = None + self._should_reconnect: bool = True + self._available_future: asyncio.Future[bool] | None = None + self._max_qos: dict[str, int] = {} # topic, max qos self._pending_subscriptions: dict[str, int] = {} # topic, qos self._unsubscribe_debouncer = EnsureJobAfterCooldown( @@ -456,25 +468,140 @@ class MQTT: while self._cleanup_on_unload: self._cleanup_on_unload.pop()() + @contextlib.asynccontextmanager + async def _async_connect_in_executor(self) -> AsyncGenerator[None, None]: + # While we are connecting in the executor we need to + # handle on_socket_open and on_socket_register_write + # in the executor as well. + mqttc = self._mqttc + try: + mqttc.on_socket_open = self._on_socket_open + mqttc.on_socket_register_write = self._on_socket_register_write + yield + finally: + # Once the executor job is done, we can switch back to + # handling these in the event loop. 
+ mqttc.on_socket_open = self._async_on_socket_open + mqttc.on_socket_register_write = self._async_on_socket_register_write + def init_client(self) -> None: """Initialize paho client.""" - self._mqttc = MqttClientSetup(self.conf).client - self._mqttc.on_connect = self._mqtt_on_connect - self._mqttc.on_disconnect = self._mqtt_on_disconnect - self._mqttc.on_message = self._mqtt_on_message - self._mqttc.on_publish = self._mqtt_on_callback - self._mqttc.on_subscribe = self._mqtt_on_callback - self._mqttc.on_unsubscribe = self._mqtt_on_callback + mqttc = MqttClientSetup(self.conf).client + # on_socket_unregister_write and _async_on_socket_close + # are only ever called in the event loop + mqttc.on_socket_close = self._async_on_socket_close + mqttc.on_socket_unregister_write = self._async_on_socket_unregister_write + + # These will be called in the event loop + mqttc.on_connect = self._async_mqtt_on_connect + mqttc.on_disconnect = self._async_mqtt_on_disconnect + mqttc.on_message = self._async_mqtt_on_message + mqttc.on_publish = self._async_mqtt_on_callback + mqttc.on_subscribe = self._async_mqtt_on_callback + mqttc.on_unsubscribe = self._async_mqtt_on_callback if will := self.conf.get(CONF_WILL_MESSAGE, DEFAULT_WILL): will_message = PublishMessage(**will) - self._mqttc.will_set( + mqttc.will_set( topic=will_message.topic, payload=will_message.payload, qos=will_message.qos, retain=will_message.retain, ) + self._mqttc = mqttc + + async def _misc_loop(self) -> None: + """Start the MQTT client misc loop.""" + # pylint: disable=import-outside-toplevel + import paho.mqtt.client as mqtt + + while self._mqttc.loop_misc() == mqtt.MQTT_ERR_SUCCESS: + await asyncio.sleep(1) + + @callback + def _async_reader_callback(self, client: mqtt.Client) -> None: + """Handle reading data from the socket.""" + if (status := client.loop_read()) != 0: + self._async_on_disconnect(status) + + @callback + def _async_start_misc_loop(self) -> None: + """Start the misc loop.""" + if self._misc_task 
is None or self._misc_task.done(): + _LOGGER.debug("%s: Starting client misc loop", self.config_entry.title) + self._misc_task = self.config_entry.async_create_background_task( + self.hass, self._misc_loop(), name="mqtt misc loop" + ) + + def _on_socket_open( + self, client: mqtt.Client, userdata: Any, sock: SocketType + ) -> None: + """Handle socket open.""" + self.loop.call_soon_threadsafe( + self._async_on_socket_open, client, userdata, sock + ) + + @callback + def _async_on_socket_open( + self, client: mqtt.Client, userdata: Any, sock: SocketType + ) -> None: + """Handle socket open.""" + fileno = sock.fileno() + _LOGGER.debug("%s: connection opened %s", self.config_entry.title, fileno) + if fileno > -1: + self.loop.add_reader(sock, partial(self._async_reader_callback, client)) + self._async_start_misc_loop() + + @callback + def _async_on_socket_close( + self, client: mqtt.Client, userdata: Any, sock: SocketType + ) -> None: + """Handle socket close.""" + fileno = sock.fileno() + _LOGGER.debug("%s: connection closed %s", self.config_entry.title, fileno) + # If socket close is called before the connect + # result is set make sure the first connection result is set + self._async_connection_result(False) + if fileno > -1: + self.loop.remove_reader(sock) + if self._misc_task is not None and not self._misc_task.done(): + self._misc_task.cancel() + + @callback + def _async_writer_callback(self, client: mqtt.Client) -> None: + """Handle writing data to the socket.""" + if (status := client.loop_write()) != 0: + self._async_on_disconnect(status) + + def _on_socket_register_write( + self, client: mqtt.Client, userdata: Any, sock: SocketType + ) -> None: + """Register the socket for writing.""" + self.loop.call_soon_threadsafe( + self._async_on_socket_register_write, client, None, sock + ) + + @callback + def _async_on_socket_register_write( + self, client: mqtt.Client, userdata: Any, sock: SocketType + ) -> None: + """Register the socket for writing.""" + fileno = 
sock.fileno() + _LOGGER.debug("%s: register write %s", self.config_entry.title, fileno) + if fileno > -1: + self.loop.add_writer(sock, partial(self._async_writer_callback, client)) + + @callback + def _async_on_socket_unregister_write( + self, client: mqtt.Client, userdata: Any, sock: SocketType + ) -> None: + """Unregister the socket for writing.""" + fileno = sock.fileno() + _LOGGER.debug("%s: unregister write %s", self.config_entry.title, fileno) + if fileno > -1: + self.loop.remove_writer(sock) + def _is_active_subscription(self, topic: str) -> bool: """Check if a topic has an active subscription.""" return topic in self._simple_subscriptions or any( @@ -485,10 +612,7 @@ class MQTT: self, topic: str, payload: PublishPayloadType, qos: int, retain: bool ) -> None: """Publish a MQTT message.""" - async with self._paho_lock: - msg_info = await self.hass.async_add_executor_job( - self._mqttc.publish, topic, payload, qos, retain - ) + msg_info = self._mqttc.publish(topic, payload, qos, retain) _LOGGER.debug( "Transmitting%s message on %s: '%s', mid: %s, qos: %s", " retained" if retain else "", @@ -500,37 +624,71 @@ class MQTT: _raise_on_error(msg_info.rc) await self._wait_for_mid(msg_info.mid) - async def async_connect(self) -> None: + async def async_connect(self, client_available: asyncio.Future[bool]) -> None: """Connect to the host. 
Does not process messages yet.""" # pylint: disable-next=import-outside-toplevel import paho.mqtt.client as mqtt result: int | None = None + self._available_future = client_available + self._should_reconnect = True try: - result = await self.hass.async_add_executor_job( - self._mqttc.connect, - self.conf[CONF_BROKER], - self.conf.get(CONF_PORT, DEFAULT_PORT), - self.conf.get(CONF_KEEPALIVE, DEFAULT_KEEPALIVE), - ) + async with self._connection_lock, self._async_connect_in_executor(): + result = await self.hass.async_add_executor_job( + self._mqttc.connect, + self.conf[CONF_BROKER], + self.conf.get(CONF_PORT, DEFAULT_PORT), + self.conf.get(CONF_KEEPALIVE, DEFAULT_KEEPALIVE), + ) except OSError as err: _LOGGER.error("Failed to connect to MQTT server due to exception: %s", err) + self._async_connection_result(False) + finally: + if result is not None and result != 0: + if result is not None: + _LOGGER.error( + "Failed to connect to MQTT server: %s", + mqtt.error_string(result), + ) + self._async_connection_result(False) - if result is not None and result != 0: - _LOGGER.error( - "Failed to connect to MQTT server: %s", mqtt.error_string(result) + @callback + def _async_connection_result(self, connected: bool) -> None: + """Handle a connection result.""" + if self._available_future and not self._available_future.done(): + self._available_future.set_result(connected) + + if connected: + self._async_cancel_reconnect() + elif self._should_reconnect and not self._reconnect_task: + self._reconnect_task = self.config_entry.async_create_background_task( + self.hass, self._reconnect_loop(), "mqtt reconnect loop" ) - self._mqttc.loop_start() + @callback + def _async_cancel_reconnect(self) -> None: + """Cancel the reconnect task.""" + if self._reconnect_task: + self._reconnect_task.cancel() + self._reconnect_task = None + + async def _reconnect_loop(self) -> None: + """Reconnect to the MQTT server.""" + while True: + if not self.connected: + try: + async with 
self._connection_lock, self._async_connect_in_executor(): + await self.hass.async_add_executor_job(self._mqttc.reconnect) + except OSError as err: + _LOGGER.debug( + "Error re-connecting to MQTT server due to exception: %s", err + ) + + await asyncio.sleep(RECONNECT_INTERVAL_SECONDS) async def async_disconnect(self) -> None: """Stop the MQTT client.""" - def stop() -> None: - """Stop the MQTT client.""" - # Do not disconnect, we want the broker to always publish will - self._mqttc.loop_stop() - def no_more_acks() -> bool: """Return False if there are unprocessed ACKs.""" return not any(not op.is_set() for op in self._pending_operations.values()) @@ -549,8 +707,10 @@ class MQTT: await self._pending_operations_condition.wait_for(no_more_acks) # stop the MQTT loop - async with self._paho_lock: - await self.hass.async_add_executor_job(stop) + async with self._connection_lock: + self._should_reconnect = False + self._async_cancel_reconnect() + self._mqttc.disconnect() @callback def async_restore_tracked_subscriptions( @@ -689,11 +849,8 @@ class MQTT: subscriptions: dict[str, int] = self._pending_subscriptions self._pending_subscriptions = {} - async with self._paho_lock: - subscription_list = list(subscriptions.items()) - result, mid = await self.hass.async_add_executor_job( - self._mqttc.subscribe, subscription_list - ) + subscription_list = list(subscriptions.items()) + result, mid = self._mqttc.subscribe(subscription_list) for topic, qos in subscriptions.items(): _LOGGER.debug("Subscribing to %s, mid: %s, qos: %s", topic, mid, qos) @@ -712,17 +869,15 @@ class MQTT: topics = list(self._pending_unsubscribes) self._pending_unsubscribes = set() - async with self._paho_lock: - result, mid = await self.hass.async_add_executor_job( - self._mqttc.unsubscribe, topics - ) + result, mid = self._mqttc.unsubscribe(topics) _raise_on_error(result) for topic in topics: _LOGGER.debug("Unsubscribing from %s, mid: %s", topic, mid) await self._wait_for_mid(mid) - def _mqtt_on_connect( + 
@callback + def _async_mqtt_on_connect( self, _mqttc: mqtt.Client, _userdata: None, @@ -746,7 +901,7 @@ class MQTT: return self.connected = True - dispatcher_send(self.hass, MQTT_CONNECTED) + async_dispatcher_send(self.hass, MQTT_CONNECTED) _LOGGER.info( "Connected to MQTT server %s:%s (%s)", self.conf[CONF_BROKER], @@ -754,7 +909,7 @@ class MQTT: result_code, ) - self.hass.create_task(self._async_resubscribe()) + self.hass.async_create_task(self._async_resubscribe()) if birth := self.conf.get(CONF_BIRTH_MESSAGE, DEFAULT_BIRTH): @@ -771,13 +926,17 @@ class MQTT: ) birth_message = PublishMessage(**birth) - asyncio.run_coroutine_threadsafe( - publish_birth_message(birth_message), self.hass.loop + self.config_entry.async_create_background_task( + self.hass, + publish_birth_message(birth_message), + name="mqtt birth message", ) else: # Update subscribe cooldown period to a shorter time self._subscribe_debouncer.set_timeout(SUBSCRIBE_COOLDOWN) + self._async_connection_result(True) + async def _async_resubscribe(self) -> None: """Resubscribe on reconnect.""" self._max_qos.clear() @@ -796,16 +955,6 @@ class MQTT: ) await self._async_perform_subscriptions() - def _mqtt_on_message( - self, _mqttc: mqtt.Client, _userdata: None, msg: mqtt.MQTTMessage - ) -> None: - """Message received callback.""" - # MQTT messages tend to be high volume, - # and since they come in via a thread and need to be processed in the event loop, - # we want to avoid hass.add_job since most of the time is spent calling - # inspect to figure out how to run the callback. 
- self.loop.call_soon_threadsafe(self._mqtt_handle_message, msg) - @lru_cache(None) # pylint: disable=method-cache-max-size-none def _matching_subscriptions(self, topic: str) -> list[Subscription]: subscriptions: list[Subscription] = [] @@ -819,7 +968,9 @@ class MQTT: return subscriptions @callback - def _mqtt_handle_message(self, msg: mqtt.MQTTMessage) -> None: + def _async_mqtt_on_message( + self, _mqttc: mqtt.Client, _userdata: None, msg: mqtt.MQTTMessage + ) -> None: topic = msg.topic # msg.topic is a property that decodes the topic to a string # every time it is accessed. Save the result to avoid @@ -878,7 +1029,8 @@ class MQTT: self.hass.async_run_hass_job(subscription.job, receive_msg) self._mqtt_data.state_write_requests.process_write_state_requests(msg) - def _mqtt_on_callback( + @callback + def _async_mqtt_on_callback( self, _mqttc: mqtt.Client, _userdata: None, @@ -890,7 +1042,7 @@ class MQTT: # The callback signature for on_unsubscribe is different from on_subscribe # see https://github.com/eclipse/paho.mqtt.python/issues/687 # properties and reasoncodes are not used in Home Assistant - self.hass.create_task(self._mqtt_handle_mid(mid)) + self.hass.async_create_task(self._mqtt_handle_mid(mid)) async def _mqtt_handle_mid(self, mid: int) -> None: # Create the mid event if not created, either _mqtt_handle_mid or _wait_for_mid @@ -906,7 +1058,8 @@ class MQTT: if mid not in self._pending_operations: self._pending_operations[mid] = asyncio.Event() - def _mqtt_on_disconnect( + @callback + def _async_mqtt_on_disconnect( self, _mqttc: mqtt.Client, _userdata: None, @@ -914,8 +1067,19 @@ class MQTT: properties: mqtt.Properties | None = None, ) -> None: """Disconnected callback.""" + self._async_on_disconnect(result_code) + + @callback + def _async_on_disconnect(self, result_code: int) -> None: + if not self.connected: + # This function is re-entrant and may be called multiple times + # when there is a broken pipe error. 
+ return + # If disconnect is called before the connect + # result is set make sure the first connection result is set + self._async_connection_result(False) self.connected = False - dispatcher_send(self.hass, MQTT_DISCONNECTED) + async_dispatcher_send(self.hass, MQTT_DISCONNECTED) _LOGGER.warning( "Disconnected from MQTT server %s:%s (%s)", self.conf[CONF_BROKER], diff --git a/tests/common.py b/tests/common.py index d53db1beb37..b5fe0f7bae1 100644 --- a/tests/common.py +++ b/tests/common.py @@ -452,7 +452,7 @@ def async_fire_mqtt_message( mqtt_data: MqttData = hass.data["mqtt"] assert mqtt_data.client - mqtt_data.client._mqtt_handle_message(msg) + mqtt_data.client._async_mqtt_on_message(Mock(), None, msg) fire_mqtt_message = threadsafe_callback_factory(async_fire_mqtt_message) diff --git a/tests/components/mqtt/test_init.py b/tests/components/mqtt/test_init.py index 3e444e8d4c8..37f7e0cf587 100644 --- a/tests/components/mqtt/test_init.py +++ b/tests/components/mqtt/test_init.py @@ -4,17 +4,22 @@ import asyncio from copy import deepcopy from datetime import datetime, timedelta import json +import socket import ssl from typing import Any, TypedDict from unittest.mock import ANY, MagicMock, call, mock_open, patch from freezegun.api import FrozenDateTimeFactory +import paho.mqtt.client as paho_mqtt import pytest import voluptuous as vol from homeassistant.components import mqtt from homeassistant.components.mqtt import debug_info -from homeassistant.components.mqtt.client import EnsureJobAfterCooldown +from homeassistant.components.mqtt.client import ( + RECONNECT_INTERVAL_SECONDS, + EnsureJobAfterCooldown, +) from homeassistant.components.mqtt.mixins import MQTT_ENTITY_DEVICE_INFO_SCHEMA from homeassistant.components.mqtt.models import ( MessageCallbackType, @@ -146,7 +151,7 @@ async def test_mqtt_disconnects_on_home_assistant_stop( hass.bus.fire(EVENT_HOMEASSISTANT_STOP) await hass.async_block_till_done() await hass.async_block_till_done() - assert 
mqtt_client_mock.loop_stop.call_count == 1 + assert mqtt_client_mock.disconnect.call_count == 1 async def test_mqtt_await_ack_at_disconnect( @@ -161,8 +166,14 @@ async def test_mqtt_await_ack_at_disconnect( rc = 0 with patch("paho.mqtt.client.Client") as mock_client: - mock_client().connect = MagicMock(return_value=0) - mock_client().publish = MagicMock(return_value=FakeInfo()) + mqtt_client = mock_client.return_value + mqtt_client.connect = MagicMock( + return_value=0, + side_effect=lambda *args, **kwargs: hass.loop.call_soon_threadsafe( + mqtt_client.on_connect, mqtt_client, None, 0, 0, 0 + ), + ) + mqtt_client.publish = MagicMock(return_value=FakeInfo()) entry = MockConfigEntry( domain=mqtt.DOMAIN, data={"certificate": "auto", mqtt.CONF_BROKER: "test-broker"}, @@ -1669,6 +1680,7 @@ async def test_not_calling_subscribe_when_unsubscribed_within_cooldown( the subscribe cool down period has ended. """ mqtt_mock = await mqtt_mock_entry() + mqtt_client_mock.subscribe.reset_mock() # Fake that the client is connected mqtt_mock().connected = True @@ -1925,6 +1937,7 @@ async def test_canceling_debouncer_on_shutdown( """Test canceling the debouncer when HA shuts down.""" mqtt_mock = await mqtt_mock_entry() + mqtt_client_mock.subscribe.reset_mock() # Fake that the client is connected mqtt_mock().connected = True @@ -2008,7 +2021,7 @@ async def test_initial_setup_logs_error( """Test for setup failure if initial client connection fails.""" entry = MockConfigEntry(domain=mqtt.DOMAIN, data={mqtt.CONF_BROKER: "test-broker"}) entry.add_to_hass(hass) - mqtt_client_mock.connect.return_value = 1 + mqtt_client_mock.connect.side_effect = MagicMock(return_value=1) try: assert await hass.config_entries.async_setup(entry.entry_id) except HomeAssistantError: @@ -2230,7 +2243,12 @@ async def test_handle_mqtt_timeout_on_callback( mock_client = mock_client.return_value mock_client.publish.return_value = FakeInfo() mock_client.subscribe.side_effect = _mock_ack - 
mock_client.connect.return_value = 0 + mock_client.connect = MagicMock( + return_value=0, + side_effect=lambda *args, **kwargs: hass.loop.call_soon_threadsafe( + mock_client.on_connect, mock_client, None, 0, 0, 0 + ), + ) entry = MockConfigEntry( domain=mqtt.DOMAIN, data={mqtt.CONF_BROKER: "test-broker"} @@ -4144,3 +4162,179 @@ async def test_multi_platform_discovery( ) is not None ) + + +@patch("homeassistant.components.mqtt.client.INITIAL_SUBSCRIBE_COOLDOWN", 0.0) +@patch("homeassistant.components.mqtt.client.DISCOVERY_COOLDOWN", 0.0) +@patch("homeassistant.components.mqtt.client.SUBSCRIBE_COOLDOWN", 0.0) +async def test_auto_reconnect( + hass: HomeAssistant, + mqtt_client_mock: MqttMockPahoClient, + mqtt_mock_entry: MqttMockHAClientGenerator, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test reconnection is automatically done.""" + mqtt_mock = await mqtt_mock_entry() + await hass.async_block_till_done() + assert mqtt_mock.connected is True + mqtt_client_mock.reconnect.reset_mock() + + mqtt_client_mock.disconnect() + mqtt_client_mock.on_disconnect(None, None, 0) + await hass.async_block_till_done() + + mqtt_client_mock.reconnect.side_effect = OSError("foo") + async_fire_time_changed( + hass, utcnow() + timedelta(seconds=RECONNECT_INTERVAL_SECONDS) + ) + await hass.async_block_till_done() + assert len(mqtt_client_mock.reconnect.mock_calls) == 1 + assert "Error re-connecting to MQTT server due to exception: foo" in caplog.text + + mqtt_client_mock.reconnect.side_effect = None + async_fire_time_changed( + hass, utcnow() + timedelta(seconds=RECONNECT_INTERVAL_SECONDS) + ) + await hass.async_block_till_done() + assert len(mqtt_client_mock.reconnect.mock_calls) == 2 + + hass.bus.async_fire(EVENT_HOMEASSISTANT_STOP) + + mqtt_client_mock.disconnect() + mqtt_client_mock.on_disconnect(None, None, 0) + await hass.async_block_till_done() + + async_fire_time_changed( + hass, utcnow() + timedelta(seconds=RECONNECT_INTERVAL_SECONDS) + ) + await 
hass.async_block_till_done() + # Should not reconnect after stop + assert len(mqtt_client_mock.reconnect.mock_calls) == 2 + + +@patch("homeassistant.components.mqtt.client.INITIAL_SUBSCRIBE_COOLDOWN", 0.0) +@patch("homeassistant.components.mqtt.client.DISCOVERY_COOLDOWN", 0.0) +@patch("homeassistant.components.mqtt.client.SUBSCRIBE_COOLDOWN", 0.0) +async def test_server_sock_connect_and_disconnect( + hass: HomeAssistant, + mqtt_client_mock: MqttMockPahoClient, + mqtt_mock_entry: MqttMockHAClientGenerator, + calls: list[ReceiveMessage], + record_calls: MessageCallbackType, +) -> None: + """Test handling the socket connected and disconnected.""" + mqtt_mock = await mqtt_mock_entry() + await hass.async_block_till_done() + assert mqtt_mock.connected is True + + mqtt_client_mock.loop_misc.return_value = paho_mqtt.MQTT_ERR_SUCCESS + + client, server = socket.socketpair( + family=socket.AF_UNIX, type=socket.SOCK_STREAM, proto=0 + ) + client.setblocking(False) + server.setblocking(False) + mqtt_client_mock.on_socket_open(mqtt_client_mock, None, client) + mqtt_client_mock.on_socket_register_write(mqtt_client_mock, None, client) + await hass.async_block_till_done() + + server.close() # mock the server closing the connection on us + + unsub = await mqtt.async_subscribe(hass, "test-topic", record_calls) + + mqtt_client_mock.loop_misc.return_value = paho_mqtt.MQTT_ERR_CONN_LOST + mqtt_client_mock.on_socket_unregister_write(mqtt_client_mock, None, client) + mqtt_client_mock.on_socket_close(mqtt_client_mock, None, client) + mqtt_client_mock.on_disconnect(mqtt_client_mock, None, client) + await hass.async_block_till_done() + unsub() + + # Should have failed + assert len(calls) == 0 + + +@patch("homeassistant.components.mqtt.client.INITIAL_SUBSCRIBE_COOLDOWN", 0.0) +@patch("homeassistant.components.mqtt.client.DISCOVERY_COOLDOWN", 0.0) +@patch("homeassistant.components.mqtt.client.SUBSCRIBE_COOLDOWN", 0.0) +async def test_client_sock_failure_after_connect( + hass: HomeAssistant, + 
mqtt_client_mock: MqttMockPahoClient, + mqtt_mock_entry: MqttMockHAClientGenerator, + calls: list[ReceiveMessage], + record_calls: MessageCallbackType, +) -> None: + """Test handling the socket connected and disconnected.""" + mqtt_mock = await mqtt_mock_entry() + # Fake that the client is connected + mqtt_mock().connected = True + await hass.async_block_till_done() + assert mqtt_mock.connected is True + + mqtt_client_mock.loop_misc.return_value = paho_mqtt.MQTT_ERR_SUCCESS + + client, server = socket.socketpair( + family=socket.AF_UNIX, type=socket.SOCK_STREAM, proto=0 + ) + client.setblocking(False) + server.setblocking(False) + mqtt_client_mock.on_socket_open(mqtt_client_mock, None, client) + mqtt_client_mock.on_socket_register_writer(mqtt_client_mock, None, client) + await hass.async_block_till_done() + + mqtt_client_mock.loop_write.side_effect = OSError("foo") + client.close() # close the client socket out from under the client + + assert mqtt_mock.connected is True + unsub = await mqtt.async_subscribe(hass, "test-topic", record_calls) + async_fire_time_changed(hass, utcnow() + timedelta(seconds=5)) + await hass.async_block_till_done() + + unsub() + # Should have failed + assert len(calls) == 0 + + +@patch("homeassistant.components.mqtt.client.INITIAL_SUBSCRIBE_COOLDOWN", 0.0) +@patch("homeassistant.components.mqtt.client.DISCOVERY_COOLDOWN", 0.0) +@patch("homeassistant.components.mqtt.client.SUBSCRIBE_COOLDOWN", 0.0) +async def test_loop_write_failure( + hass: HomeAssistant, + mqtt_client_mock: MqttMockPahoClient, + mqtt_mock_entry: MqttMockHAClientGenerator, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test handling the socket connected and disconnected.""" + mqtt_mock = await mqtt_mock_entry() + await hass.async_block_till_done() + assert mqtt_mock.connected is True + + mqtt_client_mock.loop_misc.return_value = paho_mqtt.MQTT_ERR_SUCCESS + + client, server = socket.socketpair( + family=socket.AF_UNIX, type=socket.SOCK_STREAM, proto=0 + ) + 
client.setblocking(False) + server.setblocking(False) + mqtt_client_mock.on_socket_open(mqtt_client_mock, None, client) + mqtt_client_mock.on_socket_register_write(mqtt_client_mock, None, client) + mqtt_client_mock.loop_write.return_value = paho_mqtt.MQTT_ERR_CONN_LOST + mqtt_client_mock.loop_read.return_value = paho_mqtt.MQTT_ERR_CONN_LOST + + # Fill up the outgoing buffer to ensure that loop_write + # and loop_read are called that next time control is + # returned to the event loop + try: + for _ in range(1000): + server.send(b"long" * 100) + except BlockingIOError: + pass + + server.close() + # Once for the reader callback + await hass.async_block_till_done() + # Another for the writer callback + await hass.async_block_till_done() + # Final for the disconnect callback + await hass.async_block_till_done() + + assert "Disconnected from MQTT server mock-broker:1883 (7)" in caplog.text diff --git a/tests/components/tasmota/test_common.py b/tests/components/tasmota/test_common.py index 360794e280f..499e732719c 100644 --- a/tests/components/tasmota/test_common.py +++ b/tests/components/tasmota/test_common.py @@ -163,7 +163,7 @@ async def help_test_availability_when_connection_lost( # Disconnected from MQTT server -> state changed to unavailable mqtt_mock.connected = False - await hass.async_add_executor_job(mqtt_client_mock.on_disconnect, None, None, 0) + mqtt_client_mock.on_disconnect(None, None, 0) await hass.async_block_till_done() await hass.async_block_till_done() await hass.async_block_till_done() @@ -172,7 +172,7 @@ async def help_test_availability_when_connection_lost( # Reconnected to MQTT server -> state still unavailable mqtt_mock.connected = True - await hass.async_add_executor_job(mqtt_client_mock.on_connect, None, None, None, 0) + mqtt_client_mock.on_connect(None, None, None, 0) await hass.async_block_till_done() await hass.async_block_till_done() await hass.async_block_till_done() @@ -224,7 +224,7 @@ async def 
help_test_deep_sleep_availability_when_connection_lost( # Disconnected from MQTT server -> state changed to unavailable mqtt_mock.connected = False - await hass.async_add_executor_job(mqtt_client_mock.on_disconnect, None, None, 0) + mqtt_client_mock.on_disconnect(None, None, 0) await hass.async_block_till_done() await hass.async_block_till_done() await hass.async_block_till_done() @@ -233,7 +233,7 @@ async def help_test_deep_sleep_availability_when_connection_lost( # Reconnected to MQTT server -> state no longer unavailable mqtt_mock.connected = True - await hass.async_add_executor_job(mqtt_client_mock.on_connect, None, None, None, 0) + mqtt_client_mock.on_connect(None, None, None, 0) await hass.async_block_till_done() await hass.async_block_till_done() await hass.async_block_till_done() @@ -476,7 +476,7 @@ async def help_test_availability_poll_state( # Disconnected from MQTT server mqtt_mock.connected = False - await hass.async_add_executor_job(mqtt_client_mock.on_disconnect, None, None, 0) + mqtt_client_mock.on_disconnect(None, None, 0) await hass.async_block_till_done() await hass.async_block_till_done() await hass.async_block_till_done() @@ -484,7 +484,7 @@ async def help_test_availability_poll_state( # Reconnected to MQTT server mqtt_mock.connected = True - await hass.async_add_executor_job(mqtt_client_mock.on_connect, None, None, None, 0) + mqtt_client_mock.on_connect(None, None, None, 0) await hass.async_block_till_done() await hass.async_block_till_done() await hass.async_block_till_done() diff --git a/tests/conftest.py b/tests/conftest.py index a38da17f44b..3a95e0e58b3 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -904,26 +904,45 @@ def mqtt_client_mock(hass: HomeAssistant) -> Generator[MqttMockPahoClient, None, self.rc = 0 with patch("paho.mqtt.client.Client") as mock_client: + # The below use a call_soon for the on_publish/on_subscribe/on_unsubscribe + # callbacks to simulate the behavior of the real MQTT client which will + # not be 
synchronous. @ha.callback def _async_fire_mqtt_message(topic, payload, qos, retain): async_fire_mqtt_message(hass, topic, payload, qos, retain) mid = get_mid() - mock_client.on_publish(0, 0, mid) + hass.loop.call_soon(mock_client.on_publish, 0, 0, mid) return FakeInfo(mid) def _subscribe(topic, qos=0): mid = get_mid() - mock_client.on_subscribe(0, 0, mid) + hass.loop.call_soon(mock_client.on_subscribe, 0, 0, mid) return (0, mid) def _unsubscribe(topic): mid = get_mid() - mock_client.on_unsubscribe(0, 0, mid) + hass.loop.call_soon(mock_client.on_unsubscribe, 0, 0, mid) return (0, mid) + def _connect(*args, **kwargs): + # Connect always calls reconnect once, but we + # mock it out so we call reconnect to simulate + # the behavior. + mock_client.reconnect() + hass.loop.call_soon_threadsafe( + mock_client.on_connect, mock_client, None, 0, 0, 0 + ) + mock_client.on_socket_open( + mock_client, None, Mock(fileno=Mock(return_value=-1)) + ) + mock_client.on_socket_register_write( + mock_client, None, Mock(fileno=Mock(return_value=-1)) + ) + return 0 + mock_client = mock_client.return_value - mock_client.connect.return_value = 0 + mock_client.connect.side_effect = _connect mock_client.subscribe.side_effect = _subscribe mock_client.unsubscribe.side_effect = _unsubscribe mock_client.publish.side_effect = _async_fire_mqtt_message @@ -985,6 +1004,7 @@ async def _mqtt_mock_entry( # connected set to True to get a more realistic behavior when subscribing mock_mqtt_instance.connected = True + mqtt_client_mock.on_connect(mqtt_client_mock, None, 0, 0, 0) async_dispatcher_send(hass, mqtt.MQTT_CONNECTED) await hass.async_block_till_done() From 8754b12d08bfafbd5075d979868078ab48aaf886 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Sun, 21 Apr 2024 22:57:57 +0200 Subject: [PATCH 227/426] Temporarily pickup mqtt codeowner (#115934) --- CODEOWNERS | 4 ++-- homeassistant/components/mqtt/manifest.json | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/CODEOWNERS b/CODEOWNERS index 0a833a94e4e..ef997cfa896 100644 --- a/CODEOWNERS +++ b/CODEOWNERS @@ -875,8 +875,8 @@ build.json @home-assistant/supervisor /tests/components/motioneye/ @dermotduffy /homeassistant/components/motionmount/ @RJPoelstra /tests/components/motionmount/ @RJPoelstra -/homeassistant/components/mqtt/ @emontnemery @jbouwh -/tests/components/mqtt/ @emontnemery @jbouwh +/homeassistant/components/mqtt/ @emontnemery @jbouwh @bdraco +/tests/components/mqtt/ @emontnemery @jbouwh @bdraco /homeassistant/components/msteams/ @peroyvind /homeassistant/components/mullvad/ @meichthys /tests/components/mullvad/ @meichthys diff --git a/homeassistant/components/mqtt/manifest.json b/homeassistant/components/mqtt/manifest.json index 3a284c6719c..5f923868270 100644 --- a/homeassistant/components/mqtt/manifest.json +++ b/homeassistant/components/mqtt/manifest.json @@ -1,7 +1,7 @@ { "domain": "mqtt", "name": "MQTT", - "codeowners": ["@emontnemery", "@jbouwh"], + "codeowners": ["@emontnemery", "@jbouwh", "@bdraco"], "config_flow": true, "dependencies": ["file_upload", "http"], "documentation": "https://www.home-assistant.io/integrations/mqtt", From 895f73d8e437cbe91a17b05e68f3f5f806357ffe Mon Sep 17 00:00:00 2001 From: Sid <27780930+autinerd@users.noreply.github.com> Date: Sun, 21 Apr 2024 23:25:27 +0200 Subject: [PATCH 228/426] Enable Ruff A001 (#115654) --- .../assist_pipeline/websocket_api.py | 7 +- homeassistant/components/jellyfin/__init__.py | 2 +- .../components/media_extractor/__init__.py | 8 +- homeassistant/components/nest/device_info.py | 2 +- .../components/onewire/binary_sensor.py | 24 ++-- homeassistant/components/onewire/sensor.py | 12 +- homeassistant/components/onewire/switch.py | 48 ++++---- 
homeassistant/components/risco/sensor.py | 4 +- homeassistant/components/roborock/vacuum.py | 6 +- .../components/synology_dsm/__init__.py | 4 +- homeassistant/components/zwave_js/siren.py | 3 +- homeassistant/helpers/template.py | 4 +- pyproject.toml | 3 +- tests/components/alexa/test_common.py | 8 +- .../components/device_automation/test_init.py | 24 ++-- .../devolo_home_control/test_siren.py | 12 +- tests/components/hue/test_config_flow.py | 5 +- tests/components/mqtt/test_init.py | 22 ++-- tests/components/recorder/common.py | 26 ++-- tests/components/recorder/test_util.py | 12 +- .../components/recorder/test_websocket_api.py | 8 +- tests/components/risco/test_binary_sensor.py | 24 ++-- tests/components/risco/test_sensor.py | 12 +- tests/components/sensor/test_recorder.py | 94 +++++++-------- .../components/shelly/test_device_trigger.py | 8 +- tests/components/smartthings/conftest.py | 2 +- tests/components/tautulli/test_config_flow.py | 6 +- tests/components/trace/test_websocket_api.py | 112 +++++++++--------- tests/components/twinkly/test_init.py | 6 +- tests/components/vizio/conftest.py | 2 +- tests/components/zwave_js/test_diagnostics.py | 4 +- tests/helpers/test_condition.py | 8 +- tests/helpers/test_entity.py | 36 +++--- tests/test_core.py | 16 +-- tests/util/test_percentage.py | 84 ++++++------- tests/util/yaml/test_init.py | 10 +- 36 files changed, 347 insertions(+), 321 deletions(-) diff --git a/homeassistant/components/assist_pipeline/websocket_api.py b/homeassistant/components/assist_pipeline/websocket_api.py index 7550f860a9b..3e8cdf6fa42 100644 --- a/homeassistant/components/assist_pipeline/websocket_api.py +++ b/homeassistant/components/assist_pipeline/websocket_api.py @@ -291,8 +291,11 @@ def websocket_list_runs( msg["id"], { "pipeline_runs": [ - {"pipeline_run_id": id, "timestamp": pipeline_run.timestamp} - for id, pipeline_run in pipeline_debug.items() + { + "pipeline_run_id": pipeline_run_id, + "timestamp": pipeline_run.timestamp, + } + for 
pipeline_run_id, pipeline_run in pipeline_debug.items() ] }, ) diff --git a/homeassistant/components/jellyfin/__init__.py b/homeassistant/components/jellyfin/__init__.py index c24f06d7b19..de9fa805f02 100644 --- a/homeassistant/components/jellyfin/__init__.py +++ b/homeassistant/components/jellyfin/__init__.py @@ -73,6 +73,6 @@ async def async_remove_config_entry_device( return not device_entry.identifiers.intersection( ( (DOMAIN, coordinator.server_id), - *((DOMAIN, id) for id in coordinator.device_ids), + *((DOMAIN, device_id) for device_id in coordinator.device_ids), ) ) diff --git a/homeassistant/components/media_extractor/__init__.py b/homeassistant/components/media_extractor/__init__.py index 228a012a04f..139acf06cf6 100644 --- a/homeassistant/components/media_extractor/__init__.py +++ b/homeassistant/components/media_extractor/__init__.py @@ -278,9 +278,9 @@ def get_best_stream_youtube(formats: list[dict[str, Any]]) -> str: return get_best_stream( [ - format - for format in formats - if format.get("acodec", "none") != "none" - and format.get("vcodec", "none") != "none" + stream_format + for stream_format in formats + if stream_format.get("acodec", "none") != "none" + and stream_format.get("vcodec", "none") != "none" ] ) diff --git a/homeassistant/components/nest/device_info.py b/homeassistant/components/nest/device_info.py index f269e3e89d6..33793fe836b 100644 --- a/homeassistant/components/nest/device_info.py +++ b/homeassistant/components/nest/device_info.py @@ -73,7 +73,7 @@ class NestDeviceInfo: """Return device suggested area based on the Google Home room.""" if parent_relations := self._device.parent_relations: items = sorted(parent_relations.items()) - names = [name for id, name in items] + names = [name for _, name in items] return " ".join(names) return None diff --git a/homeassistant/components/onewire/binary_sensor.py b/homeassistant/components/onewire/binary_sensor.py index fea78fd3760..d2e66609103 100644 --- 
a/homeassistant/components/onewire/binary_sensor.py +++ b/homeassistant/components/onewire/binary_sensor.py @@ -36,33 +36,33 @@ class OneWireBinarySensorEntityDescription( DEVICE_BINARY_SENSORS: dict[str, tuple[OneWireBinarySensorEntityDescription, ...]] = { "12": tuple( OneWireBinarySensorEntityDescription( - key=f"sensed.{id}", + key=f"sensed.{device_key}", entity_registry_enabled_default=False, read_mode=READ_MODE_BOOL, translation_key="sensed_id", - translation_placeholders={"id": str(id)}, + translation_placeholders={"id": str(device_key)}, ) - for id in DEVICE_KEYS_A_B + for device_key in DEVICE_KEYS_A_B ), "29": tuple( OneWireBinarySensorEntityDescription( - key=f"sensed.{id}", + key=f"sensed.{device_key}", entity_registry_enabled_default=False, read_mode=READ_MODE_BOOL, translation_key="sensed_id", - translation_placeholders={"id": str(id)}, + translation_placeholders={"id": str(device_key)}, ) - for id in DEVICE_KEYS_0_7 + for device_key in DEVICE_KEYS_0_7 ), "3A": tuple( OneWireBinarySensorEntityDescription( - key=f"sensed.{id}", + key=f"sensed.{device_key}", entity_registry_enabled_default=False, read_mode=READ_MODE_BOOL, translation_key="sensed_id", - translation_placeholders={"id": str(id)}, + translation_placeholders={"id": str(device_key)}, ) - for id in DEVICE_KEYS_A_B + for device_key in DEVICE_KEYS_A_B ), "EF": (), # "HobbyBoard": special } @@ -71,15 +71,15 @@ DEVICE_BINARY_SENSORS: dict[str, tuple[OneWireBinarySensorEntityDescription, ... 
HOBBYBOARD_EF: dict[str, tuple[OneWireBinarySensorEntityDescription, ...]] = { "HB_HUB": tuple( OneWireBinarySensorEntityDescription( - key=f"hub/short.{id}", + key=f"hub/short.{device_key}", entity_registry_enabled_default=False, read_mode=READ_MODE_BOOL, entity_category=EntityCategory.DIAGNOSTIC, device_class=BinarySensorDeviceClass.PROBLEM, translation_key="hub_short_id", - translation_placeholders={"id": str(id)}, + translation_placeholders={"id": str(device_key)}, ) - for id in DEVICE_KEYS_0_3 + for device_key in DEVICE_KEYS_0_3 ), } diff --git a/homeassistant/components/onewire/sensor.py b/homeassistant/components/onewire/sensor.py index d32afce7fa9..46f18842d51 100644 --- a/homeassistant/components/onewire/sensor.py +++ b/homeassistant/components/onewire/sensor.py @@ -233,14 +233,14 @@ DEVICE_SENSORS: dict[str, tuple[OneWireSensorEntityDescription, ...]] = { "42": (SIMPLE_TEMPERATURE_SENSOR_DESCRIPTION,), "1D": tuple( OneWireSensorEntityDescription( - key=f"counter.{id}", + key=f"counter.{device_key}", native_unit_of_measurement="count", read_mode=READ_MODE_INT, state_class=SensorStateClass.TOTAL_INCREASING, translation_key="counter_id", - translation_placeholders={"id": str(id)}, + translation_placeholders={"id": str(device_key)}, ) - for id in DEVICE_KEYS_A_B + for device_key in DEVICE_KEYS_A_B ), } @@ -273,15 +273,15 @@ HOBBYBOARD_EF: dict[str, tuple[OneWireSensorEntityDescription, ...]] = { ), "HB_MOISTURE_METER": tuple( OneWireSensorEntityDescription( - key=f"moisture/sensor.{id}", + key=f"moisture/sensor.{device_key}", device_class=SensorDeviceClass.PRESSURE, native_unit_of_measurement=UnitOfPressure.CBAR, read_mode=READ_MODE_FLOAT, state_class=SensorStateClass.MEASUREMENT, translation_key="moisture_id", - translation_placeholders={"id": str(id)}, + translation_placeholders={"id": str(device_key)}, ) - for id in DEVICE_KEYS_0_3 + for device_key in DEVICE_KEYS_0_3 ), } diff --git a/homeassistant/components/onewire/switch.py 
b/homeassistant/components/onewire/switch.py index cdf1315394e..41276218540 100644 --- a/homeassistant/components/onewire/switch.py +++ b/homeassistant/components/onewire/switch.py @@ -40,23 +40,23 @@ DEVICE_SWITCHES: dict[str, tuple[OneWireEntityDescription, ...]] = { "12": tuple( [ OneWireSwitchEntityDescription( - key=f"PIO.{id}", + key=f"PIO.{device_key}", entity_registry_enabled_default=False, read_mode=READ_MODE_BOOL, translation_key="pio_id", - translation_placeholders={"id": str(id)}, + translation_placeholders={"id": str(device_key)}, ) - for id in DEVICE_KEYS_A_B + for device_key in DEVICE_KEYS_A_B ] + [ OneWireSwitchEntityDescription( - key=f"latch.{id}", + key=f"latch.{device_key}", entity_registry_enabled_default=False, read_mode=READ_MODE_BOOL, translation_key="latch_id", - translation_placeholders={"id": str(id)}, + translation_placeholders={"id": str(device_key)}, ) - for id in DEVICE_KEYS_A_B + for device_key in DEVICE_KEYS_A_B ] ), "26": ( @@ -71,34 +71,34 @@ DEVICE_SWITCHES: dict[str, tuple[OneWireEntityDescription, ...]] = { "29": tuple( [ OneWireSwitchEntityDescription( - key=f"PIO.{id}", + key=f"PIO.{device_key}", entity_registry_enabled_default=False, read_mode=READ_MODE_BOOL, translation_key="pio_id", - translation_placeholders={"id": str(id)}, + translation_placeholders={"id": str(device_key)}, ) - for id in DEVICE_KEYS_0_7 + for device_key in DEVICE_KEYS_0_7 ] + [ OneWireSwitchEntityDescription( - key=f"latch.{id}", + key=f"latch.{device_key}", entity_registry_enabled_default=False, read_mode=READ_MODE_BOOL, translation_key="latch_id", - translation_placeholders={"id": str(id)}, + translation_placeholders={"id": str(device_key)}, ) - for id in DEVICE_KEYS_0_7 + for device_key in DEVICE_KEYS_0_7 ] ), "3A": tuple( OneWireSwitchEntityDescription( - key=f"PIO.{id}", + key=f"PIO.{device_key}", entity_registry_enabled_default=False, read_mode=READ_MODE_BOOL, translation_key="pio_id", - translation_placeholders={"id": str(id)}, + 
translation_placeholders={"id": str(device_key)}, ) - for id in DEVICE_KEYS_A_B + for device_key in DEVICE_KEYS_A_B ), "EF": (), # "HobbyBoard": special } @@ -108,37 +108,37 @@ DEVICE_SWITCHES: dict[str, tuple[OneWireEntityDescription, ...]] = { HOBBYBOARD_EF: dict[str, tuple[OneWireEntityDescription, ...]] = { "HB_HUB": tuple( OneWireSwitchEntityDescription( - key=f"hub/branch.{id}", + key=f"hub/branch.{device_key}", entity_registry_enabled_default=False, read_mode=READ_MODE_BOOL, entity_category=EntityCategory.CONFIG, translation_key="hub_branch_id", - translation_placeholders={"id": str(id)}, + translation_placeholders={"id": str(device_key)}, ) - for id in DEVICE_KEYS_0_3 + for device_key in DEVICE_KEYS_0_3 ), "HB_MOISTURE_METER": tuple( [ OneWireSwitchEntityDescription( - key=f"moisture/is_leaf.{id}", + key=f"moisture/is_leaf.{device_key}", entity_registry_enabled_default=False, read_mode=READ_MODE_BOOL, entity_category=EntityCategory.CONFIG, translation_key="leaf_sensor_id", - translation_placeholders={"id": str(id)}, + translation_placeholders={"id": str(device_key)}, ) - for id in DEVICE_KEYS_0_3 + for device_key in DEVICE_KEYS_0_3 ] + [ OneWireSwitchEntityDescription( - key=f"moisture/is_moisture.{id}", + key=f"moisture/is_moisture.{device_key}", entity_registry_enabled_default=False, read_mode=READ_MODE_BOOL, entity_category=EntityCategory.CONFIG, translation_key="moisture_sensor_id", - translation_placeholders={"id": str(id)}, + translation_placeholders={"id": str(device_key)}, ) - for id in DEVICE_KEYS_0_3 + for device_key in DEVICE_KEYS_0_3 ] ), } diff --git a/homeassistant/components/risco/sensor.py b/homeassistant/components/risco/sensor.py index f4d6ddaf451..8f97c76c879 100644 --- a/homeassistant/components/risco/sensor.py +++ b/homeassistant/components/risco/sensor.py @@ -56,8 +56,8 @@ async def async_setup_entry( config_entry.entry_id ][EVENTS_COORDINATOR] sensors = [ - RiscoSensor(coordinator, id, [], name, config_entry.entry_id) - for id, name 
in CATEGORIES.items() + RiscoSensor(coordinator, category_id, [], name, config_entry.entry_id) + for category_id, name in CATEGORIES.items() ] sensors.append( RiscoSensor( diff --git a/homeassistant/components/roborock/vacuum.py b/homeassistant/components/roborock/vacuum.py index d8108abf78c..16cf518aa02 100644 --- a/homeassistant/components/roborock/vacuum.py +++ b/homeassistant/components/roborock/vacuum.py @@ -178,4 +178,8 @@ class RoborockVacuum(RoborockCoordinatedEntity, StateVacuumEntity): async def get_maps(self) -> ServiceResponse: """Get map information such as map id and room ids.""" - return {"maps": [asdict(map) for map in self.coordinator.maps.values()]} + return { + "maps": [ + asdict(vacuum_map) for vacuum_map in self.coordinator.maps.values() + ] + } diff --git a/homeassistant/components/synology_dsm/__init__.py b/homeassistant/components/synology_dsm/__init__.py index ec13ec929a5..2748b27c93d 100644 --- a/homeassistant/components/synology_dsm/__init__.py +++ b/homeassistant/components/synology_dsm/__init__.py @@ -161,6 +161,8 @@ async def async_remove_config_entry_device( return not device_entry.identifiers.intersection( ( (DOMAIN, serial), # Base device - *((DOMAIN, f"{serial}_{id}") for id in device_ids), # Storage and cameras + *( + (DOMAIN, f"{serial}_{device_id}") for device_id in device_ids + ), # Storage and cameras ) ) diff --git a/homeassistant/components/zwave_js/siren.py b/homeassistant/components/zwave_js/siren.py index b3f54ae9904..413186da9bf 100644 --- a/homeassistant/components/zwave_js/siren.py +++ b/homeassistant/components/zwave_js/siren.py @@ -63,7 +63,8 @@ class ZwaveSirenEntity(ZWaveBaseEntity, SirenEntity): super().__init__(config_entry, driver, info) # Entity class attributes self._attr_available_tones = { - int(id): val for id, val in self.info.primary_value.metadata.states.items() + int(state_id): val + for state_id, val in self.info.primary_value.metadata.states.items() } self._attr_supported_features = ( 
SirenEntityFeature.TURN_ON diff --git a/homeassistant/helpers/template.py b/homeassistant/helpers/template.py index 1f0742e896d..16379c1d05c 100644 --- a/homeassistant/helpers/template.py +++ b/homeassistant/helpers/template.py @@ -1347,8 +1347,8 @@ def device_id(hass: HomeAssistant, entity_id_or_device_name: str) -> str | None: dev_reg = device_registry.async_get(hass) return next( ( - id - for id, device in dev_reg.devices.items() + device_id + for device_id, device in dev_reg.devices.items() if (name := device.name_by_user or device.name) and (str(entity_id_or_device_name) == name) ), diff --git a/pyproject.toml b/pyproject.toml index 91f75c96fd6..d3487d50a17 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -251,7 +251,7 @@ disable = [ "nested-min-max", # PLW3301 "pointless-statement", # B018 "raise-missing-from", # B904 - # "redefined-builtin", # A001, ruff is way more stricter, needs work + "redefined-builtin", # A001 "try-except-raise", # TRY302 "unused-argument", # ARG001, we don't use it "unused-format-string-argument", #F507 @@ -663,6 +663,7 @@ required-version = ">=0.4.1" [tool.ruff.lint] select = [ + "A001", # Variable {name} is shadowing a Python builtin "B002", # Python does not support the unary prefix increment "B005", # Using .strip() with multi-character strings is misleading "B007", # Loop control variable {name} not used within loop body diff --git a/tests/components/alexa/test_common.py b/tests/components/alexa/test_common.py index 0cc4d995efa..9fdcc1c89c1 100644 --- a/tests/components/alexa/test_common.py +++ b/tests/components/alexa/test_common.py @@ -158,14 +158,14 @@ async def assert_power_controller_works( _, response = await assert_request_calls_service( "Alexa.PowerController", "TurnOn", endpoint, on_service, hass ) - for property in response["context"]["properties"]: - assert property["timeOfSample"] == timestamp + for context_property in response["context"]["properties"]: + assert context_property["timeOfSample"] == timestamp _, 
response = await assert_request_calls_service( "Alexa.PowerController", "TurnOff", endpoint, off_service, hass ) - for property in response["context"]["properties"]: - assert property["timeOfSample"] == timestamp + for context_property in response["context"]["properties"]: + assert context_property["timeOfSample"] == timestamp async def assert_scene_controller_works( diff --git a/tests/components/device_automation/test_init.py b/tests/components/device_automation/test_init.py index 4526a9d9b67..3c3101d7a1f 100644 --- a/tests/components/device_automation/test_init.py +++ b/tests/components/device_automation/test_init.py @@ -328,23 +328,23 @@ async def test_websocket_get_action_capabilities( assert msg["success"] actions = msg["result"] - id = 2 + msg_id = 2 assert len(actions) == 3 for action in actions: await client.send_json( { - "id": id, + "id": msg_id, "type": "device_automation/action/capabilities", "action": action, } ) msg = await client.receive_json() - assert msg["id"] == id + assert msg["id"] == msg_id assert msg["type"] == TYPE_RESULT assert msg["success"] capabilities = msg["result"] assert capabilities == expected_capabilities[action["type"]] - id = id + 1 + msg_id = msg_id + 1 async def test_websocket_get_action_capabilities_unknown_domain( @@ -487,23 +487,23 @@ async def test_websocket_get_condition_capabilities( assert msg["success"] conditions = msg["result"] - id = 2 + msg_id = 2 assert len(conditions) == 2 for condition in conditions: await client.send_json( { - "id": id, + "id": msg_id, "type": "device_automation/condition/capabilities", "condition": condition, } ) msg = await client.receive_json() - assert msg["id"] == id + assert msg["id"] == msg_id assert msg["type"] == TYPE_RESULT assert msg["success"] capabilities = msg["result"] assert capabilities == expected_capabilities - id = id + 1 + msg_id = msg_id + 1 async def test_websocket_get_condition_capabilities_unknown_domain( @@ -775,23 +775,23 @@ async def 
test_websocket_get_trigger_capabilities( assert msg["success"] triggers = msg["result"] - id = 2 + msg_id = 2 assert len(triggers) == 3 # toggled, turned_on, turned_off for trigger in triggers: await client.send_json( { - "id": id, + "id": msg_id, "type": "device_automation/trigger/capabilities", "trigger": trigger, } ) msg = await client.receive_json() - assert msg["id"] == id + assert msg["id"] == msg_id assert msg["type"] == TYPE_RESULT assert msg["success"] capabilities = msg["result"] assert capabilities == expected_capabilities - id = id + 1 + msg_id = msg_id + 1 async def test_websocket_get_trigger_capabilities_unknown_domain( diff --git a/tests/components/devolo_home_control/test_siren.py b/tests/components/devolo_home_control/test_siren.py index 037d7b5021f..be662418967 100644 --- a/tests/components/devolo_home_control/test_siren.py +++ b/tests/components/devolo_home_control/test_siren.py @@ -66,7 +66,7 @@ async def test_siren_switching( with patch( "devolo_home_control_api.properties.multi_level_switch_property.MultiLevelSwitchProperty.set" - ) as set: + ) as property_set: await hass.services.async_call( "siren", "turn_on", @@ -78,11 +78,11 @@ async def test_siren_switching( "Test", ("devolo.SirenMultiLevelSwitch:Test", 1) ) await hass.async_block_till_done() - set.assert_called_once_with(1) + property_set.assert_called_once_with(1) with patch( "devolo_home_control_api.properties.multi_level_switch_property.MultiLevelSwitchProperty.set" - ) as set: + ) as property_set: await hass.services.async_call( "siren", "turn_off", @@ -95,7 +95,7 @@ async def test_siren_switching( ) await hass.async_block_till_done() assert hass.states.get(f"{DOMAIN}.test").state == STATE_OFF - set.assert_called_once_with(0) + property_set.assert_called_once_with(0) @pytest.mark.usefixtures("mock_zeroconf") @@ -119,7 +119,7 @@ async def test_siren_change_default_tone( with patch( "devolo_home_control_api.properties.multi_level_switch_property.MultiLevelSwitchProperty.set" - ) as 
set: + ) as property_set: test_gateway.publisher.dispatch("Test", ("mss:Test", 2)) await hass.services.async_call( "siren", @@ -127,7 +127,7 @@ async def test_siren_change_default_tone( {"entity_id": f"{DOMAIN}.test"}, blocking=True, ) - set.assert_called_once_with(2) + property_set.assert_called_once_with(2) @pytest.mark.usefixtures("mock_zeroconf") diff --git a/tests/components/hue/test_config_flow.py b/tests/components/hue/test_config_flow.py index 325c32227e3..692bd1405cf 100644 --- a/tests/components/hue/test_config_flow.py +++ b/tests/components/hue/test_config_flow.py @@ -36,7 +36,10 @@ def create_mock_api_discovery(aioclient_mock, bridges): """Patch aiohttp responses with fake data for bridge discovery.""" aioclient_mock.get( URL_NUPNP, - json=[{"internalipaddress": host, "id": id} for (host, id) in bridges], + json=[ + {"internalipaddress": host, "id": bridge_id} + for (host, bridge_id) in bridges + ], ) for host, bridge_id in bridges: aioclient_mock.get( diff --git a/tests/components/mqtt/test_init.py b/tests/components/mqtt/test_init.py index 37f7e0cf587..7bb43568b30 100644 --- a/tests/components/mqtt/test_init.py +++ b/tests/components/mqtt/test_init.py @@ -3033,14 +3033,16 @@ async def test_debug_info_multiple_devices( for dev in devices: data = json.dumps(dev["config"]) domain = dev["domain"] - id = dev["config"]["device"]["identifiers"][0] - async_fire_mqtt_message(hass, f"homeassistant/{domain}/{id}/config", data) + device_id = dev["config"]["device"]["identifiers"][0] + async_fire_mqtt_message( + hass, f"homeassistant/{domain}/{device_id}/config", data + ) await hass.async_block_till_done() for dev in devices: domain = dev["domain"] - id = dev["config"]["device"]["identifiers"][0] - device = device_registry.async_get_device(identifiers={("mqtt", id)}) + device_id = dev["config"]["device"]["identifiers"][0] + device = device_registry.async_get_device(identifiers={("mqtt", device_id)}) assert device is not None debug_info_data = 
debug_info.info_for_device(hass, device.id) @@ -3058,7 +3060,7 @@ async def test_debug_info_multiple_devices( assert len(debug_info_data["triggers"]) == 1 discovery_data = debug_info_data["triggers"][0]["discovery_data"] - assert discovery_data["topic"] == f"homeassistant/{domain}/{id}/config" + assert discovery_data["topic"] == f"homeassistant/{domain}/{device_id}/config" assert discovery_data["payload"] == dev["config"] @@ -3116,8 +3118,10 @@ async def test_debug_info_multiple_entities_triggers( data = json.dumps(c["config"]) domain = c["domain"] # Use topic as discovery_id - id = c["config"].get("topic", c["config"].get("state_topic")) - async_fire_mqtt_message(hass, f"homeassistant/{domain}/{id}/config", data) + discovery_id = c["config"].get("topic", c["config"].get("state_topic")) + async_fire_mqtt_message( + hass, f"homeassistant/{domain}/{discovery_id}/config", data + ) await hass.async_block_till_done() device_id = config[0]["config"]["device"]["identifiers"][0] @@ -3131,7 +3135,7 @@ async def test_debug_info_multiple_entities_triggers( # Test we get debug info for each entity and trigger domain = c["domain"] # Use topic as discovery_id - id = c["config"].get("topic", c["config"].get("state_topic")) + discovery_id = c["config"].get("topic", c["config"].get("state_topic")) if c["domain"] != "device_automation": discovery_data = [e["discovery_data"] for e in debug_info_data["entities"]] @@ -3143,7 +3147,7 @@ async def test_debug_info_multiple_entities_triggers( discovery_data = [e["discovery_data"] for e in debug_info_data["triggers"]] assert { - "topic": f"homeassistant/{domain}/{id}/config", + "topic": f"homeassistant/{domain}/{discovery_id}/config", "payload": c["config"], } in discovery_data diff --git a/tests/components/recorder/common.py b/tests/components/recorder/common.py index 7a57b226d77..e0f43323f25 100644 --- a/tests/components/recorder/common.py +++ b/tests/components/recorder/common.py @@ -109,7 +109,9 @@ async def 
async_wait_recording_done(hass: HomeAssistant) -> None: await hass.async_block_till_done() -async def async_wait_purge_done(hass: HomeAssistant, max: int | None = None) -> None: +async def async_wait_purge_done( + hass: HomeAssistant, max_number: int | None = None +) -> None: """Wait for max number of purge events. Because a purge may insert another PurgeTask into @@ -117,9 +119,9 @@ async def async_wait_purge_done(hass: HomeAssistant, max: int | None = None) -> a maximum number of WaitTasks that we will put into the queue. """ - if not max: - max = DEFAULT_PURGE_TASKS - for _ in range(max + 1): + if not max_number: + max_number = DEFAULT_PURGE_TASKS + for _ in range(max_number + 1): await async_wait_recording_done(hass) @@ -325,10 +327,10 @@ def convert_pending_states_to_meta(instance: Recorder, session: Session) -> None entity_ids: set[str] = set() states: set[States] = set() states_meta_objects: dict[str, StatesMeta] = {} - for object in session: - if isinstance(object, States): - entity_ids.add(object.entity_id) - states.add(object) + for session_object in session: + if isinstance(session_object, States): + entity_ids.add(session_object.entity_id) + states.add(session_object) entity_id_to_metadata_ids = instance.states_meta_manager.get_many( entity_ids, session, True @@ -352,10 +354,10 @@ def convert_pending_events_to_event_types(instance: Recorder, session: Session) event_types: set[str] = set() events: set[Events] = set() event_types_objects: dict[str, EventTypes] = {} - for object in session: - if isinstance(object, Events): - event_types.add(object.event_type) - events.add(object) + for session_object in session: + if isinstance(session_object, Events): + event_types.add(session_object.event_type) + events.add(session_object) event_type_to_event_type_ids = instance.event_type_manager.get_many( event_types, session, True diff --git a/tests/components/recorder/test_util.py b/tests/components/recorder/test_util.py index 549280efba2..9e32fa2c500 100644 --- 
a/tests/components/recorder/test_util.py +++ b/tests/components/recorder/test_util.py @@ -1040,14 +1040,14 @@ async def test_resolve_period(hass: HomeAssistant) -> None: def test_chunked_or_all(): """Test chunked_or_all can iterate chunk sizes larger than the passed in collection.""" - all = [] + all_items = [] incoming = (1, 2, 3, 4) for chunk in chunked_or_all(incoming, 2): assert len(chunk) == 2 - all.extend(chunk) - assert all == [1, 2, 3, 4] + all_items.extend(chunk) + assert all_items == [1, 2, 3, 4] - all = [] + all_items = [] incoming = (1, 2, 3, 4) for chunk in chunked_or_all(incoming, 5): assert len(chunk) == 4 @@ -1055,5 +1055,5 @@ def test_chunked_or_all(): # collection since we want to avoid copying the collection # if we don't need to assert chunk is incoming - all.extend(chunk) - assert all == [1, 2, 3, 4] + all_items.extend(chunk) + assert all_items == [1, 2, 3, 4] diff --git a/tests/components/recorder/test_websocket_api.py b/tests/components/recorder/test_websocket_api.py index d594218e9d4..4a1410d45a4 100644 --- a/tests/components/recorder/test_websocket_api.py +++ b/tests/components/recorder/test_websocket_api.py @@ -641,12 +641,12 @@ async def test_statistic_during_period_hole( recorder_mock: Recorder, hass: HomeAssistant, hass_ws_client: WebSocketGenerator ) -> None: """Test statistic_during_period when there are holes in the data.""" - id = 1 + stat_id = 1 def next_id(): - nonlocal id - id += 1 - return id + nonlocal stat_id + stat_id += 1 + return stat_id now = dt_util.utcnow() diff --git a/tests/components/risco/test_binary_sensor.py b/tests/components/risco/test_binary_sensor.py index ea18c59e236..b6ea723064e 100644 --- a/tests/components/risco/test_binary_sensor.py +++ b/tests/components/risco/test_binary_sensor.py @@ -122,11 +122,11 @@ async def test_local_setup( async def _check_local_state( - hass, zones, property, value, entity_id, zone_id, callback + hass, zones, entity_property, value, entity_id, zone_id, callback ): with 
patch.object( zones[zone_id], - property, + entity_property, new_callable=PropertyMock(return_value=value), ): await callback(zone_id, zones[zone_id]) @@ -210,19 +210,19 @@ async def test_armed_local_states( ) -async def _check_system_state(hass, system, property, value, callback): +async def _check_system_state(hass, system, entity_property, value, callback): with patch.object( system, - property, + entity_property, new_callable=PropertyMock(return_value=value), ): await callback(system) await hass.async_block_till_done() expected_value = STATE_ON if value else STATE_OFF - if property == "ac_trouble": - property = "a_c_trouble" - entity_id = f"binary_sensor.test_site_name_{property}" + if entity_property == "ac_trouble": + entity_property = "a_c_trouble" + entity_id = f"binary_sensor.test_site_name_{entity_property}" assert hass.states.get(entity_id).state == expected_value @@ -275,6 +275,10 @@ async def test_system_states( "clock_trouble", "box_tamper", ] - for property in properties: - await _check_system_state(hass, system_only_local, property, True, callback) - await _check_system_state(hass, system_only_local, property, False, callback) + for entity_property in properties: + await _check_system_state( + hass, system_only_local, entity_property, True, callback + ) + await _check_system_state( + hass, system_only_local, entity_property, False, callback + ) diff --git a/tests/components/risco/test_sensor.py b/tests/components/risco/test_sensor.py index 157eb3e62b5..a8236ad3d87 100644 --- a/tests/components/risco/test_sensor.py +++ b/tests/components/risco/test_sensor.py @@ -133,8 +133,8 @@ async def test_error_on_login( await hass.async_block_till_done() registry = er.async_get(hass) - for id in ENTITY_IDS.values(): - assert not registry.async_is_registered(id) + for entity_id in ENTITY_IDS.values(): + assert not registry.async_is_registered(entity_id) def _check_state(hass, category, entity_id): @@ -184,8 +184,8 @@ async def test_cloud_setup( ) -> None: """Test 
entity setup.""" registry = er.async_get(hass) - for id in ENTITY_IDS.values(): - assert registry.async_is_registered(id) + for entity_id in ENTITY_IDS.values(): + assert registry.async_is_registered(entity_id) save_mock.assert_awaited_once_with({LAST_EVENT_TIMESTAMP_KEY: TEST_EVENTS[0].time}) for category, entity_id in ENTITY_IDS.items(): @@ -213,5 +213,5 @@ async def test_local_setup( ) -> None: """Test entity setup.""" registry = er.async_get(hass) - for id in ENTITY_IDS.values(): - assert not registry.async_is_registered(id) + for entity_id in ENTITY_IDS.values(): + assert not registry.async_is_registered(entity_id) diff --git a/tests/components/sensor/test_recorder.py b/tests/components/sensor/test_recorder.py index 8084fe69e89..a7aaf938410 100644 --- a/tests/components/sensor/test_recorder.py +++ b/tests/components/sensor/test_recorder.py @@ -560,7 +560,7 @@ def test_compile_hourly_statistics_purged_state_changes( ) assert_dict_of_states_equal_without_context_and_last_changed(states, hist) - mean = min = max = float(hist["sensor.test1"][-1].state) + mean = min_value = max_value = float(hist["sensor.test1"][-1].state) # Purge all states from the database with freeze_time(four): @@ -594,8 +594,8 @@ def test_compile_hourly_statistics_purged_state_changes( "start": process_timestamp(zero).timestamp(), "end": process_timestamp(zero + timedelta(minutes=5)).timestamp(), "mean": pytest.approx(mean), - "min": pytest.approx(min), - "max": pytest.approx(max), + "min": pytest.approx(min_value), + "max": pytest.approx(max_value), "last_reset": None, "state": None, "sum": None, @@ -4113,12 +4113,12 @@ async def test_validate_unit_change_convertible( The test also asserts that the sensor's device class is ignored. 
""" - id = 1 + msg_id = 1 def next_id(): - nonlocal id - id += 1 - return id + nonlocal msg_id + msg_id += 1 + return msg_id async def assert_validation_result(client, expected_result): await client.send_json( @@ -4228,12 +4228,12 @@ async def test_validate_statistics_unit_ignore_device_class( The test asserts that the sensor's device class is ignored. """ - id = 1 + msg_id = 1 def next_id(): - nonlocal id - id += 1 - return id + nonlocal msg_id + msg_id += 1 + return msg_id async def assert_validation_result(client, expected_result): await client.send_json( @@ -4321,14 +4321,14 @@ async def test_validate_statistics_unit_change_no_device_class( conversion, and the unit is then changed to a unit which can and cannot be converted to the original unit. """ - id = 1 + msg_id = 1 attributes = dict(attributes) attributes.pop("device_class") def next_id(): - nonlocal id - id += 1 - return id + nonlocal msg_id + msg_id += 1 + return msg_id async def assert_validation_result(client, expected_result): await client.send_json( @@ -4436,12 +4436,12 @@ async def test_validate_statistics_unsupported_state_class( unit, ) -> None: """Test validate_statistics.""" - id = 1 + msg_id = 1 def next_id(): - nonlocal id - id += 1 - return id + nonlocal msg_id + msg_id += 1 + return msg_id async def assert_validation_result(client, expected_result): await client.send_json( @@ -4505,12 +4505,12 @@ async def test_validate_statistics_sensor_no_longer_recorded( unit, ) -> None: """Test validate_statistics.""" - id = 1 + msg_id = 1 def next_id(): - nonlocal id - id += 1 - return id + nonlocal msg_id + msg_id += 1 + return msg_id async def assert_validation_result(client, expected_result): await client.send_json( @@ -4573,12 +4573,12 @@ async def test_validate_statistics_sensor_not_recorded( unit, ) -> None: """Test validate_statistics.""" - id = 1 + msg_id = 1 def next_id(): - nonlocal id - id += 1 - return id + nonlocal msg_id + msg_id += 1 + return msg_id async def 
assert_validation_result(client, expected_result): await client.send_json( @@ -4638,12 +4638,12 @@ async def test_validate_statistics_sensor_removed( unit, ) -> None: """Test validate_statistics.""" - id = 1 + msg_id = 1 def next_id(): - nonlocal id - id += 1 - return id + nonlocal msg_id + msg_id += 1 + return msg_id async def assert_validation_result(client, expected_result): await client.send_json( @@ -4702,12 +4702,12 @@ async def test_validate_statistics_unit_change_no_conversion( unit2, ) -> None: """Test validate_statistics.""" - id = 1 + msg_id = 1 def next_id(): - nonlocal id - id += 1 - return id + nonlocal msg_id + msg_id += 1 + return msg_id async def assert_validation_result(client, expected_result): await client.send_json( @@ -4837,12 +4837,12 @@ async def test_validate_statistics_unit_change_equivalent_units( This tests no validation issue is created when a sensor's unit changes to an equivalent unit. """ - id = 1 + msg_id = 1 def next_id(): - nonlocal id - id += 1 - return id + nonlocal msg_id + msg_id += 1 + return msg_id async def assert_validation_result(client, expected_result): await client.send_json( @@ -4923,12 +4923,12 @@ async def test_validate_statistics_unit_change_equivalent_units_2( equivalent unit which is not known to the unit converters. 
""" - id = 1 + msg_id = 1 def next_id(): - nonlocal id - id += 1 - return id + nonlocal msg_id + msg_id += 1 + return msg_id async def assert_validation_result(client, expected_result): await client.send_json( @@ -5005,12 +5005,12 @@ async def test_validate_statistics_other_domain( recorder_mock: Recorder, hass: HomeAssistant, hass_ws_client: WebSocketGenerator ) -> None: """Test sensor does not raise issues for statistics for other domains.""" - id = 1 + msg_id = 1 def next_id(): - nonlocal id - id += 1 - return id + nonlocal msg_id + msg_id += 1 + return msg_id async def assert_validation_result(client, expected_result): await client.send_json( diff --git a/tests/components/shelly/test_device_trigger.py b/tests/components/shelly/test_device_trigger.py index c4db8acaf6d..39238f1674a 100644 --- a/tests/components/shelly/test_device_trigger.py +++ b/tests/components/shelly/test_device_trigger.py @@ -96,11 +96,11 @@ async def test_get_triggers_rpc_device( CONF_PLATFORM: "device", CONF_DEVICE_ID: device.id, CONF_DOMAIN: DOMAIN, - CONF_TYPE: type, + CONF_TYPE: trigger_type, CONF_SUBTYPE: "button1", "metadata": {}, } - for type in [ + for trigger_type in [ "btn_down", "btn_up", "single_push", @@ -130,11 +130,11 @@ async def test_get_triggers_button( CONF_PLATFORM: "device", CONF_DEVICE_ID: device.id, CONF_DOMAIN: DOMAIN, - CONF_TYPE: type, + CONF_TYPE: trigger_type, CONF_SUBTYPE: "button", "metadata": {}, } - for type in ["single", "double", "triple", "long"] + for trigger_type in ["single", "double", "triple", "long"] ] triggers = await async_get_device_automations( diff --git a/tests/components/smartthings/conftest.py b/tests/components/smartthings/conftest.py index b6d34b9d98a..d25cc8849e5 100644 --- a/tests/components/smartthings/conftest.py +++ b/tests/components/smartthings/conftest.py @@ -342,7 +342,7 @@ def event_request_factory_fixture(event_factory): if events is None: events = [] if device_ids: - events.extend([event_factory(id) for id in device_ids]) + 
events.extend([event_factory(device_id) for device_id in device_ids]) events.append(event_factory(uuid4())) events.append(event_factory(device_ids[0], event_type="OTHER")) request.events = events diff --git a/tests/components/tautulli/test_config_flow.py b/tests/components/tautulli/test_config_flow.py index b731067cd72..ca563cfad77 100644 --- a/tests/components/tautulli/test_config_flow.py +++ b/tests/components/tautulli/test_config_flow.py @@ -133,7 +133,7 @@ async def test_flow_user_multiple_entries_allowed(hass: HomeAssistant) -> None: assert result["step_id"] == "user" assert result["errors"] == {} - input = { + user_input = { CONF_URL: "http://1.2.3.5:8181/test", CONF_API_KEY: "efgh", CONF_VERIFY_SSL: True, @@ -141,13 +141,13 @@ async def test_flow_user_multiple_entries_allowed(hass: HomeAssistant) -> None: with patch_config_flow_tautulli(AsyncMock()): result2 = await hass.config_entries.flow.async_configure( result["flow_id"], - user_input=input, + user_input=user_input, ) await hass.async_block_till_done() assert result2["type"] is FlowResultType.CREATE_ENTRY assert result2["title"] == NAME - assert result2["data"] == input + assert result2["data"] == user_input async def test_flow_reauth( diff --git a/tests/components/trace/test_websocket_api.py b/tests/components/trace/test_websocket_api.py index 5c5d882b721..f2cfb6a109f 100644 --- a/tests/components/trace/test_websocket_api.py +++ b/tests/components/trace/test_websocket_api.py @@ -133,12 +133,12 @@ async def test_get_trace( ) -> None: """Test tracing a script or automation.""" await async_setup_component(hass, "homeassistant", {}) - id = 1 + msg_id = 1 def next_id(): - nonlocal id - id += 1 - return id + nonlocal msg_id + msg_id += 1 + return msg_id sun_config = { "id": "sun", @@ -429,12 +429,12 @@ async def test_restore_traces( ) -> None: """Test restored traces.""" hass.set_state(CoreState.not_running) - id = 1 + msg_id = 1 def next_id(): - nonlocal id - id += 1 - return id + nonlocal msg_id + msg_id += 
1 + return msg_id saved_traces = json.loads(load_fixture(f"trace/{domain}_saved_traces.json")) hass_storage["trace.saved_traces"] = saved_traces @@ -522,7 +522,7 @@ async def test_trace_overflow( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, domain, stored_traces ) -> None: """Test the number of stored traces per script or automation is limited.""" - id = 1 + msg_id = 1 trace_uuids = [] @@ -532,9 +532,9 @@ async def test_trace_overflow( return trace_uuids[-1] def next_id(): - nonlocal id - id += 1 - return id + nonlocal msg_id + msg_id += 1 + return msg_id sun_config = { "id": "sun", @@ -601,7 +601,7 @@ async def test_restore_traces_overflow( ) -> None: """Test restored traces are evicted first.""" hass.set_state(CoreState.not_running) - id = 1 + msg_id = 1 trace_uuids = [] @@ -611,9 +611,9 @@ async def test_restore_traces_overflow( return trace_uuids[-1] def next_id(): - nonlocal id - id += 1 - return id + nonlocal msg_id + msg_id += 1 + return msg_id saved_traces = json.loads(load_fixture(f"trace/{domain}_saved_traces.json")) hass_storage["trace.saved_traces"] = saved_traces @@ -682,7 +682,7 @@ async def test_restore_traces_late_overflow( ) -> None: """Test restored traces are evicted first.""" hass.set_state(CoreState.not_running) - id = 1 + msg_id = 1 trace_uuids = [] @@ -692,9 +692,9 @@ async def test_restore_traces_late_overflow( return trace_uuids[-1] def next_id(): - nonlocal id - id += 1 - return id + nonlocal msg_id + msg_id += 1 + return msg_id saved_traces = json.loads(load_fixture(f"trace/{domain}_saved_traces.json")) hass_storage["trace.saved_traces"] = saved_traces @@ -743,12 +743,12 @@ async def test_trace_no_traces( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, domain ) -> None: """Test the storing traces for a script or automation can be disabled.""" - id = 1 + msg_id = 1 def next_id(): - nonlocal id - id += 1 - return id + nonlocal msg_id + msg_id += 1 + return msg_id sun_config = { "id": "sun", @@ -810,12 +810,12 @@ async 
def test_list_traces( ) -> None: """Test listing script and automation traces.""" await async_setup_component(hass, "homeassistant", {}) - id = 1 + msg_id = 1 def next_id(): - nonlocal id - id += 1 - return id + nonlocal msg_id + msg_id += 1 + return msg_id sun_config = { "id": "sun", @@ -943,12 +943,12 @@ async def test_nested_traces( extra_trace_keys, ) -> None: """Test nested automation and script traces.""" - id = 1 + msg_id = 1 def next_id(): - nonlocal id - id += 1 - return id + nonlocal msg_id + msg_id += 1 + return msg_id sun_config = { "id": "sun", @@ -1003,12 +1003,12 @@ async def test_breakpoints( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, domain, prefix ) -> None: """Test script and automation breakpoints.""" - id = 1 + msg_id = 1 def next_id(): - nonlocal id - id += 1 - return id + nonlocal msg_id + msg_id += 1 + return msg_id async def assert_last_step(item_id, expected_action, expected_state): await client.send_json( @@ -1173,12 +1173,12 @@ async def test_breakpoints_2( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, domain, prefix ) -> None: """Test execution resumes and breakpoints are removed after subscription removed.""" - id = 1 + msg_id = 1 def next_id(): - nonlocal id - id += 1 - return id + nonlocal msg_id + msg_id += 1 + return msg_id async def assert_last_step(item_id, expected_action, expected_state): await client.send_json( @@ -1278,12 +1278,12 @@ async def test_breakpoints_3( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, domain, prefix ) -> None: """Test breakpoints can be cleared.""" - id = 1 + msg_id = 1 def next_id(): - nonlocal id - id += 1 - return id + nonlocal msg_id + msg_id += 1 + return msg_id async def assert_last_step(item_id, expected_action, expected_state): await client.send_json( @@ -1434,12 +1434,12 @@ async def test_script_mode( script_execution, ) -> None: """Test overlapping runs with max_runs > 1.""" - id = 1 + msg_id = 1 def next_id(): - nonlocal id - id += 1 - return id + nonlocal 
msg_id + msg_id += 1 + return msg_id flag = asyncio.Event() @@ -1502,12 +1502,12 @@ async def test_script_mode_2( script_execution, ) -> None: """Test overlapping runs with max_runs > 1.""" - id = 1 + msg_id = 1 def next_id(): - nonlocal id - id += 1 - return id + nonlocal msg_id + msg_id += 1 + return msg_id flag = asyncio.Event() @@ -1577,12 +1577,12 @@ async def test_trace_blueprint_automation( ) -> None: """Test trace of blueprint automation.""" await async_setup_component(hass, "homeassistant", {}) - id = 1 + msg_id = 1 def next_id(): - nonlocal id - id += 1 - return id + nonlocal msg_id + msg_id += 1 + return msg_id domain = "automation" sun_config = { diff --git a/tests/components/twinkly/test_init.py b/tests/components/twinkly/test_init.py index 794d4d5e773..6642807ac3f 100644 --- a/tests/components/twinkly/test_init.py +++ b/tests/components/twinkly/test_init.py @@ -17,16 +17,16 @@ async def test_load_unload_entry(hass: HomeAssistant) -> None: """Validate that setup entry also configure the client.""" client = ClientMock() - id = str(uuid4()) + device_id = str(uuid4()) config_entry = MockConfigEntry( domain=TWINKLY_DOMAIN, data={ CONF_HOST: TEST_HOST, - CONF_ID: id, + CONF_ID: device_id, CONF_NAME: TEST_NAME_ORIGINAL, CONF_MODEL: TEST_MODEL, }, - entry_id=id, + entry_id=device_id, ) config_entry.add_to_hass(hass) diff --git a/tests/components/vizio/conftest.py b/tests/components/vizio/conftest.py index 6ce36b38c8f..783ed8b4585 100644 --- a/tests/components/vizio/conftest.py +++ b/tests/components/vizio/conftest.py @@ -37,7 +37,7 @@ class MockInput: def get_mock_inputs(input_list): """Return list of MockInput.""" - return [MockInput(input) for input in input_list] + return [MockInput(device_input) for device_input in input_list] @pytest.fixture(name="vizio_get_unique_id", autouse=True) diff --git a/tests/components/zwave_js/test_diagnostics.py b/tests/components/zwave_js/test_diagnostics.py index 054906cd0f6..ea354ab80d3 100644 --- 
a/tests/components/zwave_js/test_diagnostics.py +++ b/tests/components/zwave_js/test_diagnostics.py @@ -128,7 +128,9 @@ async def test_device_diagnostics( ) assert diagnostics_data["state"] == { **multisensor_6.data, - "values": {id: val.data for id, val in multisensor_6.values.items()}, + "values": { + value_id: val.data for value_id, val in multisensor_6.values.items() + }, "endpoints": { str(idx): endpoint.data for idx, endpoint in multisensor_6.endpoints.items() }, diff --git a/tests/helpers/test_condition.py b/tests/helpers/test_condition.py index 701bc342760..20dea85c3e4 100644 --- a/tests/helpers/test_condition.py +++ b/tests/helpers/test_condition.py @@ -2178,12 +2178,12 @@ def _find_run_id(traces, trace_type, item_id): async def assert_automation_condition_trace(hass_ws_client, automation_id, expected): """Test the result of automation condition.""" - id = 1 + msg_id = 1 def next_id(): - nonlocal id - id += 1 - return id + nonlocal msg_id + msg_id += 1 + return msg_id client = await hass_ws_client() diff --git a/tests/helpers/test_entity.py b/tests/helpers/test_entity.py index fb2793a75c7..690592a850b 100644 --- a/tests/helpers/test_entity.py +++ b/tests/helpers/test_entity.py @@ -2330,30 +2330,30 @@ async def test_cached_entity_properties( async def test_cached_entity_property_delete_attr(hass: HomeAssistant) -> None: """Test deleting an _attr corresponding to a cached property.""" - property = "has_entity_name" + property_name = "has_entity_name" ent = entity.Entity() - assert not hasattr(ent, f"_attr_{property}") + assert not hasattr(ent, f"_attr_{property_name}") with pytest.raises(AttributeError): - delattr(ent, f"_attr_{property}") - assert getattr(ent, property) is False + delattr(ent, f"_attr_{property_name}") + assert getattr(ent, property_name) is False with pytest.raises(AttributeError): - delattr(ent, f"_attr_{property}") - assert not hasattr(ent, f"_attr_{property}") - assert getattr(ent, property) is False + delattr(ent, 
f"_attr_{property_name}") + assert not hasattr(ent, f"_attr_{property_name}") + assert getattr(ent, property_name) is False - setattr(ent, f"_attr_{property}", True) - assert getattr(ent, property) is True + setattr(ent, f"_attr_{property_name}", True) + assert getattr(ent, property_name) is True - delattr(ent, f"_attr_{property}") - assert not hasattr(ent, f"_attr_{property}") - assert getattr(ent, property) is False + delattr(ent, f"_attr_{property_name}") + assert not hasattr(ent, f"_attr_{property_name}") + assert getattr(ent, property_name) is False async def test_cached_entity_property_class_attribute(hass: HomeAssistant) -> None: """Test entity properties on class level work in derived classes.""" - property = "attribution" + property_name = "attribution" values = ["abcd", "efgh"] class EntityWithClassAttribute1(entity.Entity): @@ -2408,15 +2408,15 @@ async def test_cached_entity_property_class_attribute(hass: HomeAssistant) -> No ] for ent in entities: - assert getattr(ent[0], property) == values[0] - assert getattr(ent[1], property) == values[0] + assert getattr(ent[0], property_name) == values[0] + assert getattr(ent[1], property_name) == values[0] # Test update for ent in entities: - setattr(ent[0], f"_attr_{property}", values[1]) + setattr(ent[0], f"_attr_{property_name}", values[1]) for ent in entities: - assert getattr(ent[0], property) == values[1] - assert getattr(ent[1], property) == values[0] + assert getattr(ent[0], property_name) == values[1] + assert getattr(ent[1], property_name) == values[0] async def test_cached_entity_property_override(hass: HomeAssistant) -> None: diff --git a/tests/test_core.py b/tests/test_core.py index ce71fcd42e5..30665619fcd 100644 --- a/tests/test_core.py +++ b/tests/test_core.py @@ -1148,11 +1148,11 @@ async def test_eventbus_filtered_listener(hass: HomeAssistant) -> None: calls.append(event) @ha.callback - def filter(event_data): + def mock_filter(event_data): """Mock filter.""" return not event_data["filtered"] - 
unsub = hass.bus.async_listen("test", listener, event_filter=filter) + unsub = hass.bus.async_listen("test", listener, event_filter=mock_filter) hass.bus.async_fire("test", {"filtered": True}) await hass.async_block_till_done() @@ -3274,11 +3274,11 @@ async def test_eventbus_lazy_object_creation(hass: HomeAssistant) -> None: calls.append(event) @ha.callback - def filter(event_data): + def mock_filter(event_data): """Mock filter.""" return not event_data["filtered"] - unsub = hass.bus.async_listen("test_1", listener, event_filter=filter) + unsub = hass.bus.async_listen("test_1", listener, event_filter=mock_filter) # Test lazy creation of Event objects with patch("homeassistant.core.Event") as mock_event: @@ -3343,7 +3343,7 @@ async def test_statemachine_report_state(hass: HomeAssistant) -> None: """Test report state event.""" @ha.callback - def filter(event_data): + def mock_filter(event_data): """Mock filter.""" return True @@ -3354,7 +3354,7 @@ async def test_statemachine_report_state(hass: HomeAssistant) -> None: hass.states.async_set("light.bowl", "on", {}) state_changed_events = async_capture_events(hass, EVENT_STATE_CHANGED) state_reported_events = [] - hass.bus.async_listen(EVENT_STATE_REPORTED, listener, event_filter=filter) + hass.bus.async_listen(EVENT_STATE_REPORTED, listener, event_filter=mock_filter) hass.states.async_set("light.bowl", "on") await hass.async_block_till_done() @@ -3385,7 +3385,7 @@ async def test_report_state_listener_restrictions(hass: HomeAssistant) -> None: """Mock listener.""" @ha.callback - def filter(event_data): + def mock_filter(event_data): """Mock filter.""" return False @@ -3394,7 +3394,7 @@ async def test_report_state_listener_restrictions(hass: HomeAssistant) -> None: hass.bus.async_listen(EVENT_STATE_REPORTED, listener) # Both filter and run_immediately - hass.bus.async_listen(EVENT_STATE_REPORTED, listener, event_filter=filter) + hass.bus.async_listen(EVENT_STATE_REPORTED, listener, event_filter=mock_filter) 
@pytest.mark.parametrize( diff --git a/tests/util/test_percentage.py b/tests/util/test_percentage.py index 2fc054fb4f1..3af42310e94 100644 --- a/tests/util/test_percentage.py +++ b/tests/util/test_percentage.py @@ -104,77 +104,77 @@ async def test_percentage_to_ordered_list_item() -> None: async def test_ranged_value_to_percentage_large() -> None: """Test a large range of low and high values convert a single value to a percentage.""" - range = (1, 255) + value_range = (1, 255) - assert ranged_value_to_percentage(range, 255) == 100 - assert ranged_value_to_percentage(range, 127) == 49 - assert ranged_value_to_percentage(range, 10) == 3 - assert ranged_value_to_percentage(range, 1) == 0 + assert ranged_value_to_percentage(value_range, 255) == 100 + assert ranged_value_to_percentage(value_range, 127) == 49 + assert ranged_value_to_percentage(value_range, 10) == 3 + assert ranged_value_to_percentage(value_range, 1) == 0 async def test_percentage_to_ranged_value_large() -> None: """Test a large range of low and high values convert a percentage to a single value.""" - range = (1, 255) + value_range = (1, 255) - assert percentage_to_ranged_value(range, 100) == 255 - assert percentage_to_ranged_value(range, 50) == 127.5 - assert percentage_to_ranged_value(range, 4) == 10.2 + assert percentage_to_ranged_value(value_range, 100) == 255 + assert percentage_to_ranged_value(value_range, 50) == 127.5 + assert percentage_to_ranged_value(value_range, 4) == 10.2 - assert math.ceil(percentage_to_ranged_value(range, 100)) == 255 - assert math.ceil(percentage_to_ranged_value(range, 50)) == 128 - assert math.ceil(percentage_to_ranged_value(range, 4)) == 11 + assert math.ceil(percentage_to_ranged_value(value_range, 100)) == 255 + assert math.ceil(percentage_to_ranged_value(value_range, 50)) == 128 + assert math.ceil(percentage_to_ranged_value(value_range, 4)) == 11 async def test_ranged_value_to_percentage_small() -> None: """Test a small range of low and high values convert a single 
value to a percentage.""" - range = (1, 6) + value_range = (1, 6) - assert ranged_value_to_percentage(range, 1) == 16 - assert ranged_value_to_percentage(range, 2) == 33 - assert ranged_value_to_percentage(range, 3) == 50 - assert ranged_value_to_percentage(range, 4) == 66 - assert ranged_value_to_percentage(range, 5) == 83 - assert ranged_value_to_percentage(range, 6) == 100 + assert ranged_value_to_percentage(value_range, 1) == 16 + assert ranged_value_to_percentage(value_range, 2) == 33 + assert ranged_value_to_percentage(value_range, 3) == 50 + assert ranged_value_to_percentage(value_range, 4) == 66 + assert ranged_value_to_percentage(value_range, 5) == 83 + assert ranged_value_to_percentage(value_range, 6) == 100 async def test_percentage_to_ranged_value_small() -> None: """Test a small range of low and high values convert a percentage to a single value.""" - range = (1, 6) + value_range = (1, 6) - assert math.ceil(percentage_to_ranged_value(range, 16)) == 1 - assert math.ceil(percentage_to_ranged_value(range, 33)) == 2 - assert math.ceil(percentage_to_ranged_value(range, 50)) == 3 - assert math.ceil(percentage_to_ranged_value(range, 66)) == 4 - assert math.ceil(percentage_to_ranged_value(range, 83)) == 5 - assert math.ceil(percentage_to_ranged_value(range, 100)) == 6 + assert math.ceil(percentage_to_ranged_value(value_range, 16)) == 1 + assert math.ceil(percentage_to_ranged_value(value_range, 33)) == 2 + assert math.ceil(percentage_to_ranged_value(value_range, 50)) == 3 + assert math.ceil(percentage_to_ranged_value(value_range, 66)) == 4 + assert math.ceil(percentage_to_ranged_value(value_range, 83)) == 5 + assert math.ceil(percentage_to_ranged_value(value_range, 100)) == 6 async def test_ranged_value_to_percentage_starting_at_one() -> None: """Test a range that starts with 1.""" - range = (1, 4) + value_range = (1, 4) - assert ranged_value_to_percentage(range, 1) == 25 - assert ranged_value_to_percentage(range, 2) == 50 - assert 
ranged_value_to_percentage(range, 3) == 75 - assert ranged_value_to_percentage(range, 4) == 100 + assert ranged_value_to_percentage(value_range, 1) == 25 + assert ranged_value_to_percentage(value_range, 2) == 50 + assert ranged_value_to_percentage(value_range, 3) == 75 + assert ranged_value_to_percentage(value_range, 4) == 100 async def test_ranged_value_to_percentage_starting_high() -> None: """Test a range that does not start with 1.""" - range = (101, 255) + value_range = (101, 255) - assert ranged_value_to_percentage(range, 101) == 0 - assert ranged_value_to_percentage(range, 139) == 25 - assert ranged_value_to_percentage(range, 178) == 50 - assert ranged_value_to_percentage(range, 217) == 75 - assert ranged_value_to_percentage(range, 255) == 100 + assert ranged_value_to_percentage(value_range, 101) == 0 + assert ranged_value_to_percentage(value_range, 139) == 25 + assert ranged_value_to_percentage(value_range, 178) == 50 + assert ranged_value_to_percentage(value_range, 217) == 75 + assert ranged_value_to_percentage(value_range, 255) == 100 async def test_ranged_value_to_percentage_starting_zero() -> None: """Test a range that starts with 0.""" - range = (0, 3) + value_range = (0, 3) - assert ranged_value_to_percentage(range, 0) == 25 - assert ranged_value_to_percentage(range, 1) == 50 - assert ranged_value_to_percentage(range, 2) == 75 - assert ranged_value_to_percentage(range, 3) == 100 + assert ranged_value_to_percentage(value_range, 0) == 25 + assert ranged_value_to_percentage(value_range, 1) == 50 + assert ranged_value_to_percentage(value_range, 2) == 75 + assert ranged_value_to_percentage(value_range, 3) == 100 diff --git a/tests/util/yaml/test_init.py b/tests/util/yaml/test_init.py index 113a348c1d1..f17489e1488 100644 --- a/tests/util/yaml/test_init.py +++ b/tests/util/yaml/test_init.py @@ -568,13 +568,13 @@ def test_no_recursive_secrets( def test_input_class() -> None: """Test input class.""" - input = yaml_loader.Input("hello") - input2 = 
yaml_loader.Input("hello") + yaml_input = yaml_loader.Input("hello") + yaml_input2 = yaml_loader.Input("hello") - assert input.name == "hello" - assert input == input2 + assert yaml_input.name == "hello" + assert yaml_input == yaml_input2 - assert len({input, input2}) == 1 + assert len({yaml_input, yaml_input2}) == 1 def test_input(try_both_loaders, try_both_dumpers) -> None: From f26ac465b5ce0e4ca36caf770c38d31c078fb2d0 Mon Sep 17 00:00:00 2001 From: Joost Lekkerkerker Date: Sun, 21 Apr 2024 23:38:32 +0200 Subject: [PATCH 229/426] Introduce base entity for totalconnect (#115936) --- .../totalconnect/alarm_control_panel.py | 6 ++-- .../components/totalconnect/binary_sensor.py | 33 +++++------------ .../components/totalconnect/entity.py | 35 +++++++++++++++++++ 3 files changed, 46 insertions(+), 28 deletions(-) create mode 100644 homeassistant/components/totalconnect/entity.py diff --git a/homeassistant/components/totalconnect/alarm_control_panel.py b/homeassistant/components/totalconnect/alarm_control_panel.py index 436e3198650..fcafd47037d 100644 --- a/homeassistant/components/totalconnect/alarm_control_panel.py +++ b/homeassistant/components/totalconnect/alarm_control_panel.py @@ -23,10 +23,10 @@ from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import entity_platform from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.update_coordinator import CoordinatorEntity from . 
import TotalConnectDataUpdateCoordinator from .const import DOMAIN +from .entity import TotalConnectEntity SERVICE_ALARM_ARM_AWAY_INSTANT = "arm_away_instant" SERVICE_ALARM_ARM_HOME_INSTANT = "arm_home_instant" @@ -70,9 +70,7 @@ async def async_setup_entry( ) -class TotalConnectAlarm( - CoordinatorEntity[TotalConnectDataUpdateCoordinator], alarm.AlarmControlPanelEntity -): +class TotalConnectAlarm(TotalConnectEntity, alarm.AlarmControlPanelEntity): """Represent an TotalConnect status.""" _attr_supported_features = ( diff --git a/homeassistant/components/totalconnect/binary_sensor.py b/homeassistant/components/totalconnect/binary_sensor.py index 696f0dbcf6f..18340d5d6d3 100644 --- a/homeassistant/components/totalconnect/binary_sensor.py +++ b/homeassistant/components/totalconnect/binary_sensor.py @@ -15,12 +15,11 @@ from homeassistant.components.binary_sensor import ( from homeassistant.config_entries import ConfigEntry from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant -from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.update_coordinator import CoordinatorEntity from . 
import TotalConnectDataUpdateCoordinator from .const import DOMAIN +from .entity import TotalConnectEntity, TotalConnectZoneEntity LOW_BATTERY = "low_battery" TAMPER = "tamper" @@ -129,7 +128,7 @@ async def async_setup_entry( for zone in location.zones.values(): sensors.append( TotalConnectZoneBinarySensor( - coordinator, SECURITY_BINARY_SENSOR, location_id, zone + coordinator, SECURITY_BINARY_SENSOR, zone, location_id ) ) @@ -138,8 +137,8 @@ async def async_setup_entry( TotalConnectZoneBinarySensor( coordinator, description, - location_id, zone, + location_id, ) for description in NO_BUTTON_BINARY_SENSORS ) @@ -147,10 +146,8 @@ async def async_setup_entry( async_add_entities(sensors) -class TotalConnectZoneBinarySensor( - CoordinatorEntity[TotalConnectDataUpdateCoordinator], BinarySensorEntity -): - """Represent an TotalConnect zone.""" +class TotalConnectZoneBinarySensor(TotalConnectZoneEntity, BinarySensorEntity): + """Represent a TotalConnect zone.""" entity_description: TotalConnectZoneBinarySensorEntityDescription @@ -158,28 +155,18 @@ class TotalConnectZoneBinarySensor( self, coordinator: TotalConnectDataUpdateCoordinator, entity_description: TotalConnectZoneBinarySensorEntityDescription, - location_id: str, zone: TotalConnectZone, + location_id: str, ) -> None: """Initialize the TotalConnect status.""" - super().__init__(coordinator) + super().__init__(coordinator, zone, location_id, entity_description.key) self.entity_description = entity_description - self._location_id = location_id - self._zone = zone self._attr_name = f"{zone.description}{entity_description.name}" - self._attr_unique_id = f"{location_id}_{zone.zoneid}_{entity_description.key}" - self._attr_is_on = None self._attr_extra_state_attributes = { "zone_id": zone.zoneid, - "location_id": self._location_id, + "location_id": location_id, "partition": zone.partition, } - identifier = zone.sensor_serial_number or f"zone_{zone.zoneid}" - self._attr_device_info = DeviceInfo( - name=zone.description, 
- identifiers={(DOMAIN, identifier)}, - serial_number=zone.sensor_serial_number, - ) @property def is_on(self) -> bool: @@ -194,9 +181,7 @@ class TotalConnectZoneBinarySensor( return super().device_class -class TotalConnectAlarmBinarySensor( - CoordinatorEntity[TotalConnectDataUpdateCoordinator], BinarySensorEntity -): +class TotalConnectAlarmBinarySensor(TotalConnectEntity, BinarySensorEntity): """Represent a TotalConnect alarm device binary sensors.""" entity_description: TotalConnectAlarmBinarySensorEntityDescription diff --git a/homeassistant/components/totalconnect/entity.py b/homeassistant/components/totalconnect/entity.py new file mode 100644 index 00000000000..e7ab4b3575c --- /dev/null +++ b/homeassistant/components/totalconnect/entity.py @@ -0,0 +1,35 @@ +"""Base class for TotalConnect entities.""" + +from total_connect_client.zone import TotalConnectZone + +from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.update_coordinator import CoordinatorEntity + +from . 
import DOMAIN, TotalConnectDataUpdateCoordinator + + +class TotalConnectEntity(CoordinatorEntity[TotalConnectDataUpdateCoordinator]): + """Represent a TotalConnect entity.""" + + +class TotalConnectZoneEntity(TotalConnectEntity): + """Represent a TotalConnect zone.""" + + def __init__( + self, + coordinator: TotalConnectDataUpdateCoordinator, + zone: TotalConnectZone, + location_id: str, + key: str, + ) -> None: + """Initialize the TotalConnect zone.""" + super().__init__(coordinator) + self._location_id = location_id + self._zone = zone + self._attr_unique_id = f"{location_id}_{zone.zoneid}_{key}" + identifier = zone.sensor_serial_number or f"zone_{zone.zoneid}" + self._attr_device_info = DeviceInfo( + identifiers={(DOMAIN, identifier)}, + name=zone.description, + serial_number=zone.sensor_serial_number, + ) From e29b301dd1dc0b94baf0001677e20218333a8b3d Mon Sep 17 00:00:00 2001 From: Allen Porter Date: Sun, 21 Apr 2024 15:52:47 -0700 Subject: [PATCH 230/426] Bump ical to 8.0.0 (#115907) Co-authored-by: J. 
Nick Koston --- homeassistant/components/google/manifest.json | 2 +- homeassistant/components/local_calendar/manifest.json | 2 +- homeassistant/components/local_todo/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/homeassistant/components/google/manifest.json b/homeassistant/components/google/manifest.json index 00561cb5fd6..ac43dc58953 100644 --- a/homeassistant/components/google/manifest.json +++ b/homeassistant/components/google/manifest.json @@ -7,5 +7,5 @@ "documentation": "https://www.home-assistant.io/integrations/calendar.google", "iot_class": "cloud_polling", "loggers": ["googleapiclient"], - "requirements": ["gcal-sync==6.0.4", "oauth2client==4.1.3", "ical==7.0.3"] + "requirements": ["gcal-sync==6.0.4", "oauth2client==4.1.3", "ical==8.0.0"] } diff --git a/homeassistant/components/local_calendar/manifest.json b/homeassistant/components/local_calendar/manifest.json index 1c13970503d..b1c7d6a3a34 100644 --- a/homeassistant/components/local_calendar/manifest.json +++ b/homeassistant/components/local_calendar/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/local_calendar", "iot_class": "local_polling", "loggers": ["ical"], - "requirements": ["ical==7.0.3"] + "requirements": ["ical==8.0.0"] } diff --git a/homeassistant/components/local_todo/manifest.json b/homeassistant/components/local_todo/manifest.json index 3bcb8af9f43..44c76a56a8f 100644 --- a/homeassistant/components/local_todo/manifest.json +++ b/homeassistant/components/local_todo/manifest.json @@ -5,5 +5,5 @@ "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/local_todo", "iot_class": "local_polling", - "requirements": ["ical==7.0.3"] + "requirements": ["ical==8.0.0"] } diff --git a/requirements_all.txt b/requirements_all.txt index 4a4ef23b583..dade5079fbd 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1113,7 +1113,7 
@@ ibmiotf==0.3.4 # homeassistant.components.google # homeassistant.components.local_calendar # homeassistant.components.local_todo -ical==7.0.3 +ical==8.0.0 # homeassistant.components.ping icmplib==3.0 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index b935fcbaf42..d1bfeff488f 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -906,7 +906,7 @@ ibeacon-ble==1.2.0 # homeassistant.components.google # homeassistant.components.local_calendar # homeassistant.components.local_todo -ical==7.0.3 +ical==8.0.0 # homeassistant.components.ping icmplib==3.0 From 70d4b4d20d950c7bfcef261f3d24656e30d8ae4b Mon Sep 17 00:00:00 2001 From: andarotajo <55669170+andarotajo@users.noreply.github.com> Date: Mon, 22 Apr 2024 09:13:09 +0200 Subject: [PATCH 231/426] Add optional location based region to dwd_weather_warnings (#96027) * Add device tracker option * Update const name to be more understandable * Clean up sensor code * Clean up init and coordinator * Add tests and update util function and it's usage * Switch to using the registry entry and add tests * Clean up code * Consolidate duplicate code and adjust tests * Fix runtime error * Fix blocking of the event loop * Adjust API object handling * Update homeassistant/components/dwd_weather_warnings/exceptions.py * Optimize coordinator data update --------- Co-authored-by: Erik Montnemery --- .../dwd_weather_warnings/__init__.py | 11 +- .../dwd_weather_warnings/config_flow.py | 75 +++++++-- .../components/dwd_weather_warnings/const.py | 1 + .../dwd_weather_warnings/coordinator.py | 66 +++++++- .../dwd_weather_warnings/exceptions.py | 7 + .../components/dwd_weather_warnings/sensor.py | 24 +-- .../dwd_weather_warnings/strings.json | 13 +- .../components/dwd_weather_warnings/util.py | 39 +++++ .../dwd_weather_warnings/test_config_flow.py | 143 ++++++++++++++++-- .../dwd_weather_warnings/test_init.py | 87 ++++++++++- 10 files changed, 403 insertions(+), 63 deletions(-) create mode 100644 
homeassistant/components/dwd_weather_warnings/exceptions.py create mode 100644 homeassistant/components/dwd_weather_warnings/util.py diff --git a/homeassistant/components/dwd_weather_warnings/__init__.py b/homeassistant/components/dwd_weather_warnings/__init__.py index 275d47d15ca..9cf73a90a73 100644 --- a/homeassistant/components/dwd_weather_warnings/__init__.py +++ b/homeassistant/components/dwd_weather_warnings/__init__.py @@ -2,23 +2,16 @@ from __future__ import annotations -from dwdwfsapi import DwdWeatherWarningsAPI - from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant -from .const import CONF_REGION_IDENTIFIER, DOMAIN, PLATFORMS +from .const import DOMAIN, PLATFORMS from .coordinator import DwdWeatherWarningsCoordinator async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Set up a config entry.""" - region_identifier: str = entry.data[CONF_REGION_IDENTIFIER] - - # Initialize the API and coordinator. - api = await hass.async_add_executor_job(DwdWeatherWarningsAPI, region_identifier) - coordinator = DwdWeatherWarningsCoordinator(hass, api) - + coordinator = DwdWeatherWarningsCoordinator(hass, entry) await coordinator.async_config_entry_first_refresh() hass.data.setdefault(DOMAIN, {})[entry.entry_id] = coordinator diff --git a/homeassistant/components/dwd_weather_warnings/config_flow.py b/homeassistant/components/dwd_weather_warnings/config_flow.py index 5076dbae187..f148f4e05ac 100644 --- a/homeassistant/components/dwd_weather_warnings/config_flow.py +++ b/homeassistant/components/dwd_weather_warnings/config_flow.py @@ -8,9 +8,15 @@ from dwdwfsapi import DwdWeatherWarningsAPI import voluptuous as vol from homeassistant.config_entries import ConfigFlow, ConfigFlowResult +from homeassistant.helpers import entity_registry as er import homeassistant.helpers.config_validation as cv +from homeassistant.helpers.selector import EntitySelector, EntitySelectorConfig -from .const import 
CONF_REGION_IDENTIFIER, DOMAIN +from .const import CONF_REGION_DEVICE_TRACKER, CONF_REGION_IDENTIFIER, DOMAIN +from .exceptions import EntityNotFoundError +from .util import get_position_data + +EXCLUSIVE_OPTIONS = (CONF_REGION_IDENTIFIER, CONF_REGION_DEVICE_TRACKER) class DwdWeatherWarningsConfigFlow(ConfigFlow, domain=DOMAIN): @@ -25,27 +31,70 @@ class DwdWeatherWarningsConfigFlow(ConfigFlow, domain=DOMAIN): errors: dict = {} if user_input is not None: - region_identifier = user_input[CONF_REGION_IDENTIFIER] + # Check, if either CONF_REGION_IDENTIFIER or CONF_GPS_TRACKER has been set. + if all(k not in user_input for k in EXCLUSIVE_OPTIONS): + errors["base"] = "no_identifier" + elif all(k in user_input for k in EXCLUSIVE_OPTIONS): + errors["base"] = "ambiguous_identifier" + elif CONF_REGION_IDENTIFIER in user_input: + # Validate region identifier using the API + identifier = user_input[CONF_REGION_IDENTIFIER] - # Validate region identifier using the API - if not await self.hass.async_add_executor_job( - DwdWeatherWarningsAPI, region_identifier - ): - errors["base"] = "invalid_identifier" + if not await self.hass.async_add_executor_job( + DwdWeatherWarningsAPI, identifier + ): + errors["base"] = "invalid_identifier" - if not errors: - # Set the unique ID for this config entry. - await self.async_set_unique_id(region_identifier) - self._abort_if_unique_id_configured() + if not errors: + # Set the unique ID for this config entry. 
+ await self.async_set_unique_id(identifier) + self._abort_if_unique_id_configured() - return self.async_create_entry(title=region_identifier, data=user_input) + return self.async_create_entry(title=identifier, data=user_input) + else: # CONF_REGION_DEVICE_TRACKER + device_tracker = user_input[CONF_REGION_DEVICE_TRACKER] + registry = er.async_get(self.hass) + entity_entry = registry.async_get(device_tracker) + + if entity_entry is None: + errors["base"] = "entity_not_found" + else: + try: + position = get_position_data(self.hass, entity_entry.id) + except EntityNotFoundError: + errors["base"] = "entity_not_found" + except AttributeError: + errors["base"] = "attribute_not_found" + else: + # Validate position using the API + if not await self.hass.async_add_executor_job( + DwdWeatherWarningsAPI, position + ): + errors["base"] = "invalid_identifier" + + # Position is valid here, because the API call was successful. + if not errors and position is not None and entity_entry is not None: + # Set the unique ID for this config entry. + await self.async_set_unique_id(entity_entry.id) + self._abort_if_unique_id_configured() + + # Replace entity ID with registry ID for more stability. 
+ user_input[CONF_REGION_DEVICE_TRACKER] = entity_entry.id + + return self.async_create_entry( + title=device_tracker.removeprefix("device_tracker."), + data=user_input, + ) return self.async_show_form( step_id="user", errors=errors, data_schema=vol.Schema( { - vol.Required(CONF_REGION_IDENTIFIER): cv.string, + vol.Optional(CONF_REGION_IDENTIFIER): cv.string, + vol.Optional(CONF_REGION_DEVICE_TRACKER): EntitySelector( + EntitySelectorConfig(domain="device_tracker") + ), } ), ) diff --git a/homeassistant/components/dwd_weather_warnings/const.py b/homeassistant/components/dwd_weather_warnings/const.py index 75969dee119..4f0a6767660 100644 --- a/homeassistant/components/dwd_weather_warnings/const.py +++ b/homeassistant/components/dwd_weather_warnings/const.py @@ -14,6 +14,7 @@ DOMAIN: Final = "dwd_weather_warnings" CONF_REGION_NAME: Final = "region_name" CONF_REGION_IDENTIFIER: Final = "region_identifier" +CONF_REGION_DEVICE_TRACKER: Final = "region_device_tracker" ATTR_REGION_NAME: Final = "region_name" ATTR_REGION_ID: Final = "region_id" diff --git a/homeassistant/components/dwd_weather_warnings/coordinator.py b/homeassistant/components/dwd_weather_warnings/coordinator.py index a1232697130..465a7c09750 100644 --- a/homeassistant/components/dwd_weather_warnings/coordinator.py +++ b/homeassistant/components/dwd_weather_warnings/coordinator.py @@ -4,23 +4,79 @@ from __future__ import annotations from dwdwfsapi import DwdWeatherWarningsAPI +from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant -from homeassistant.helpers.update_coordinator import DataUpdateCoordinator +from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed +from homeassistant.util import location -from .const import DEFAULT_SCAN_INTERVAL, DOMAIN, LOGGER +from .const import ( + CONF_REGION_DEVICE_TRACKER, + CONF_REGION_IDENTIFIER, + DEFAULT_SCAN_INTERVAL, + DOMAIN, + LOGGER, +) +from .exceptions import EntityNotFoundError +from 
.util import get_position_data class DwdWeatherWarningsCoordinator(DataUpdateCoordinator[None]): """Custom coordinator for the dwd_weather_warnings integration.""" - def __init__(self, hass: HomeAssistant, api: DwdWeatherWarningsAPI) -> None: + config_entry: ConfigEntry + api: DwdWeatherWarningsAPI + + def __init__(self, hass: HomeAssistant, entry: ConfigEntry) -> None: """Initialize the dwd_weather_warnings coordinator.""" super().__init__( hass, LOGGER, name=DOMAIN, update_interval=DEFAULT_SCAN_INTERVAL ) - self.api = api + self._device_tracker = None + self._previous_position = None + + async def async_config_entry_first_refresh(self) -> None: + """Perform first refresh.""" + if region_identifier := self.config_entry.data.get(CONF_REGION_IDENTIFIER): + self.api = await self.hass.async_add_executor_job( + DwdWeatherWarningsAPI, region_identifier + ) + else: + self._device_tracker = self.config_entry.data.get( + CONF_REGION_DEVICE_TRACKER + ) + + await super().async_config_entry_first_refresh() async def _async_update_data(self) -> None: """Get the latest data from the DWD Weather Warnings API.""" - await self.hass.async_add_executor_job(self.api.update) + if self._device_tracker: + try: + position = get_position_data(self.hass, self._device_tracker) + except (EntityNotFoundError, AttributeError) as err: + raise UpdateFailed(f"Error fetching position: {repr(err)}") from err + + distance = None + if self._previous_position is not None: + distance = location.distance( + self._previous_position[0], + self._previous_position[1], + position[0], + position[1], + ) + + if distance is None or distance > 50: + # Only create a new object on the first update + # or when the distance to the previous position + # changes by more than 50 meters (to take GPS + # inaccuracy into account). + self.api = await self.hass.async_add_executor_job( + DwdWeatherWarningsAPI, position + ) + else: + # Otherwise update the API to check for new warnings. 
+ await self.hass.async_add_executor_job(self.api.update) + + self._previous_position = position + else: + await self.hass.async_add_executor_job(self.api.update) diff --git a/homeassistant/components/dwd_weather_warnings/exceptions.py b/homeassistant/components/dwd_weather_warnings/exceptions.py new file mode 100644 index 00000000000..cd61cfa6bae --- /dev/null +++ b/homeassistant/components/dwd_weather_warnings/exceptions.py @@ -0,0 +1,7 @@ +"""Exceptions for the dwd_weather_warnings integration.""" + +from homeassistant.exceptions import HomeAssistantError + + +class EntityNotFoundError(HomeAssistantError): + """When a referenced entity was not found.""" diff --git a/homeassistant/components/dwd_weather_warnings/sensor.py b/homeassistant/components/dwd_weather_warnings/sensor.py index d3e3b4a3772..d62c0f4f192 100644 --- a/homeassistant/components/dwd_weather_warnings/sensor.py +++ b/homeassistant/components/dwd_weather_warnings/sensor.py @@ -11,6 +11,8 @@ Wetterwarnungen (Stufe 1) from __future__ import annotations +from typing import Any + from homeassistant.components.sensor import SensorEntity, SensorEntityDescription from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant @@ -93,29 +95,27 @@ class DwdWeatherWarningsSensor( entry_type=DeviceEntryType.SERVICE, ) - self.api = coordinator.api - @property - def native_value(self): + def native_value(self) -> int | None: """Return the state of the sensor.""" if self.entity_description.key == CURRENT_WARNING_SENSOR: - return self.api.current_warning_level + return self.coordinator.api.current_warning_level - return self.api.expected_warning_level + return self.coordinator.api.expected_warning_level @property - def extra_state_attributes(self): + def extra_state_attributes(self) -> dict[str, Any]: """Return the state attributes of the sensor.""" data = { - ATTR_REGION_NAME: self.api.warncell_name, - ATTR_REGION_ID: self.api.warncell_id, - ATTR_LAST_UPDATE: 
self.api.last_update, + ATTR_REGION_NAME: self.coordinator.api.warncell_name, + ATTR_REGION_ID: self.coordinator.api.warncell_id, + ATTR_LAST_UPDATE: self.coordinator.api.last_update, } if self.entity_description.key == CURRENT_WARNING_SENSOR: - searched_warnings = self.api.current_warnings + searched_warnings = self.coordinator.api.current_warnings else: - searched_warnings = self.api.expected_warnings + searched_warnings = self.coordinator.api.expected_warnings data[ATTR_WARNING_COUNT] = len(searched_warnings) @@ -142,4 +142,4 @@ class DwdWeatherWarningsSensor( @property def available(self) -> bool: """Could the device be accessed during the last update call.""" - return self.api.data_valid + return self.coordinator.api.data_valid diff --git a/homeassistant/components/dwd_weather_warnings/strings.json b/homeassistant/components/dwd_weather_warnings/strings.json index aa460dcc6d5..3f421d338a7 100644 --- a/homeassistant/components/dwd_weather_warnings/strings.json +++ b/homeassistant/components/dwd_weather_warnings/strings.json @@ -2,17 +2,22 @@ "config": { "step": { "user": { - "description": "To identify the desired region, the warncell ID / name is required.", + "description": "To identify the desired region, either the warncell ID / name or device tracker is required. The provided device tracker has to contain the attributes 'latitude' and 'longitude'.", "data": { - "region_identifier": "Warncell ID or name" + "region_identifier": "Warncell ID or name", + "region_device_tracker": "Device tracker entity" } } }, "error": { - "invalid_identifier": "The specified region identifier is invalid." 
+ "no_identifier": "Either the region identifier or device tracker is required.", + "ambiguous_identifier": "The region identifier and device tracker can not be specified together.", + "invalid_identifier": "The specified region identifier / device tracker is invalid.", + "entity_not_found": "The specified device tracker entity was not found.", + "attribute_not_found": "The required `latitude` or `longitude` attribute was not found in the specified device tracker." }, "abort": { - "already_configured": "Warncell ID / name is already configured.", + "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", "invalid_identifier": "[%key:component::dwd_weather_warnings::config::error::invalid_identifier%]" } }, diff --git a/homeassistant/components/dwd_weather_warnings/util.py b/homeassistant/components/dwd_weather_warnings/util.py new file mode 100644 index 00000000000..730ebf4b71e --- /dev/null +++ b/homeassistant/components/dwd_weather_warnings/util.py @@ -0,0 +1,39 @@ +"""Util functions for the dwd_weather_warnings integration.""" + +from __future__ import annotations + +from homeassistant.const import ATTR_LATITUDE, ATTR_LONGITUDE +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from .exceptions import EntityNotFoundError + + +def get_position_data( + hass: HomeAssistant, registry_id: str +) -> tuple[float, float] | None: + """Extract longitude and latitude from a device tracker.""" + registry = er.async_get(hass) + registry_entry = registry.async_get(registry_id) + if registry_entry is None: + raise EntityNotFoundError(f"Failed to find registry entry {registry_id}") + + entity = hass.states.get(registry_entry.entity_id) + if entity is None: + raise EntityNotFoundError(f"Failed to find entity {registry_entry.entity_id}") + + latitude = entity.attributes.get(ATTR_LATITUDE) + if not latitude: + raise AttributeError( + f"Failed to find attribute '{ATTR_LATITUDE}' in 
{registry_entry.entity_id}", + ATTR_LATITUDE, + ) + + longitude = entity.attributes.get(ATTR_LONGITUDE) + if not longitude: + raise AttributeError( + f"Failed to find attribute '{ATTR_LONGITUDE}' in {registry_entry.entity_id}", + ATTR_LONGITUDE, + ) + + return (latitude, longitude) diff --git a/tests/components/dwd_weather_warnings/test_config_flow.py b/tests/components/dwd_weather_warnings/test_config_flow.py index 3558ff5ed93..119c029767a 100644 --- a/tests/components/dwd_weather_warnings/test_config_flow.py +++ b/tests/components/dwd_weather_warnings/test_config_flow.py @@ -6,34 +6,31 @@ from unittest.mock import patch import pytest from homeassistant.components.dwd_weather_warnings.const import ( - ADVANCE_WARNING_SENSOR, + CONF_REGION_DEVICE_TRACKER, CONF_REGION_IDENTIFIER, - CONF_REGION_NAME, - CURRENT_WARNING_SENSOR, DOMAIN, ) from homeassistant.config_entries import SOURCE_USER -from homeassistant.const import CONF_MONITORED_CONDITIONS, CONF_NAME +from homeassistant.const import ATTR_LATITUDE, ATTR_LONGITUDE, STATE_HOME from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType +from homeassistant.helpers import entity_registry as er from tests.common import MockConfigEntry -DEMO_CONFIG_ENTRY: Final = { +DEMO_CONFIG_ENTRY_REGION: Final = { CONF_REGION_IDENTIFIER: "807111000", } -DEMO_YAML_CONFIGURATION: Final = { - CONF_NAME: "Unit Test", - CONF_REGION_NAME: "807111000", - CONF_MONITORED_CONDITIONS: [CURRENT_WARNING_SENSOR, ADVANCE_WARNING_SENSOR], +DEMO_CONFIG_ENTRY_GPS: Final = { + CONF_REGION_DEVICE_TRACKER: "device_tracker.test_gps", } pytestmark = pytest.mark.usefixtures("mock_setup_entry") -async def test_create_entry(hass: HomeAssistant) -> None: - """Test that the full config flow works.""" +async def test_create_entry_region(hass: HomeAssistant) -> None: + """Test that the full config flow works for a region identifier.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": 
SOURCE_USER} ) @@ -45,7 +42,7 @@ async def test_create_entry(hass: HomeAssistant) -> None: return_value=False, ): result = await hass.config_entries.flow.async_configure( - result["flow_id"], user_input=DEMO_CONFIG_ENTRY + result["flow_id"], user_input=DEMO_CONFIG_ENTRY_REGION ) # Test for invalid region identifier. @@ -58,7 +55,7 @@ async def test_create_entry(hass: HomeAssistant) -> None: return_value=True, ): result = await hass.config_entries.flow.async_configure( - result["flow_id"], user_input=DEMO_CONFIG_ENTRY + result["flow_id"], user_input=DEMO_CONFIG_ENTRY_REGION ) # Test for successfully created entry. @@ -70,12 +67,95 @@ async def test_create_entry(hass: HomeAssistant) -> None: } +async def test_create_entry_gps( + hass: HomeAssistant, entity_registry: er.EntityRegistry +) -> None: + """Test that the full config flow works for a device tracker.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + await hass.async_block_till_done() + assert result["type"] == FlowResultType.FORM + + # Test for missing registry entry error. + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input=DEMO_CONFIG_ENTRY_GPS + ) + + await hass.async_block_till_done() + assert result["type"] == FlowResultType.FORM + assert result["errors"] == {"base": "entity_not_found"} + + # Test for missing device tracker error. + registry_entry = entity_registry.async_get_or_create( + "device_tracker", DOMAIN, "uuid", suggested_object_id="test_gps" + ) + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input=DEMO_CONFIG_ENTRY_GPS + ) + + await hass.async_block_till_done() + assert result["type"] == FlowResultType.FORM + assert result["errors"] == {"base": "entity_not_found"} + + # Test for missing attribute error. 
+ hass.states.async_set( + DEMO_CONFIG_ENTRY_GPS[CONF_REGION_DEVICE_TRACKER], + STATE_HOME, + {ATTR_LONGITUDE: "7.610263"}, + ) + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input=DEMO_CONFIG_ENTRY_GPS + ) + + await hass.async_block_till_done() + assert result["type"] == FlowResultType.FORM + assert result["errors"] == {"base": "attribute_not_found"} + + # Test for invalid provided identifier. + hass.states.async_set( + DEMO_CONFIG_ENTRY_GPS[CONF_REGION_DEVICE_TRACKER], + STATE_HOME, + {ATTR_LATITUDE: "50.180454", ATTR_LONGITUDE: "7.610263"}, + ) + + with patch( + "homeassistant.components.dwd_weather_warnings.config_flow.DwdWeatherWarningsAPI", + return_value=False, + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input=DEMO_CONFIG_ENTRY_GPS + ) + + await hass.async_block_till_done() + assert result["type"] == FlowResultType.FORM + assert result["errors"] == {"base": "invalid_identifier"} + + # Test for successfully created entry. 
+ with patch( + "homeassistant.components.dwd_weather_warnings.config_flow.DwdWeatherWarningsAPI", + return_value=True, + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input=DEMO_CONFIG_ENTRY_GPS + ) + + await hass.async_block_till_done() + assert result["type"] == FlowResultType.CREATE_ENTRY + assert result["title"] == "test_gps" + assert result["data"] == { + CONF_REGION_DEVICE_TRACKER: registry_entry.id, + } + + async def test_config_flow_already_configured(hass: HomeAssistant) -> None: """Test aborting, if the warncell ID / name is already configured during the config.""" entry = MockConfigEntry( domain=DOMAIN, - data=DEMO_CONFIG_ENTRY.copy(), - unique_id=DEMO_CONFIG_ENTRY[CONF_REGION_IDENTIFIER], + data=DEMO_CONFIG_ENTRY_REGION.copy(), + unique_id=DEMO_CONFIG_ENTRY_REGION[CONF_REGION_IDENTIFIER], ) entry.add_to_hass(hass) @@ -92,9 +172,40 @@ async def test_config_flow_already_configured(hass: HomeAssistant) -> None: return_value=True, ): result = await hass.config_entries.flow.async_configure( - result["flow_id"], user_input=DEMO_CONFIG_ENTRY + result["flow_id"], user_input=DEMO_CONFIG_ENTRY_REGION ) await hass.async_block_till_done() assert result["type"] is FlowResultType.ABORT assert result["reason"] == "already_configured" + + +async def test_config_flow_with_errors(hass: HomeAssistant) -> None: + """Test error scenarios during the configuration.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + + await hass.async_block_till_done() + assert result["type"] == FlowResultType.FORM + + # Test error for empty input data. + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={} + ) + + await hass.async_block_till_done() + assert result["type"] == FlowResultType.FORM + assert result["errors"] == {"base": "no_identifier"} + + # Test error for setting both options during configuration. 
+ demo_input = DEMO_CONFIG_ENTRY_REGION.copy() + demo_input.update(DEMO_CONFIG_ENTRY_GPS.copy()) + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input=demo_input, + ) + + await hass.async_block_till_done() + assert result["type"] == FlowResultType.FORM + assert result["errors"] == {"base": "ambiguous_identifier"} diff --git a/tests/components/dwd_weather_warnings/test_init.py b/tests/components/dwd_weather_warnings/test_init.py index db7afaadec9..bfd03b2fdd4 100644 --- a/tests/components/dwd_weather_warnings/test_init.py +++ b/tests/components/dwd_weather_warnings/test_init.py @@ -4,26 +4,40 @@ from typing import Final from homeassistant.components.dwd_weather_warnings.const import ( ADVANCE_WARNING_SENSOR, + CONF_REGION_DEVICE_TRACKER, CONF_REGION_IDENTIFIER, CURRENT_WARNING_SENSOR, DOMAIN, ) from homeassistant.config_entries import ConfigEntryState -from homeassistant.const import CONF_MONITORED_CONDITIONS, CONF_NAME +from homeassistant.const import ( + ATTR_LATITUDE, + ATTR_LONGITUDE, + CONF_MONITORED_CONDITIONS, + CONF_NAME, + STATE_HOME, +) from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er from tests.common import MockConfigEntry -DEMO_CONFIG_ENTRY: Final = { +DEMO_IDENTIFIER_CONFIG_ENTRY: Final = { CONF_NAME: "Unit Test", CONF_REGION_IDENTIFIER: "807111000", CONF_MONITORED_CONDITIONS: [CURRENT_WARNING_SENSOR, ADVANCE_WARNING_SENSOR], } +DEMO_TRACKER_CONFIG_ENTRY: Final = { + CONF_NAME: "Unit Test", + CONF_REGION_DEVICE_TRACKER: "device_tracker.test_gps", + CONF_MONITORED_CONDITIONS: [CURRENT_WARNING_SENSOR, ADVANCE_WARNING_SENSOR], +} + async def test_load_unload_entry(hass: HomeAssistant) -> None: - """Test loading and unloading the integration.""" - entry = MockConfigEntry(domain=DOMAIN, data=DEMO_CONFIG_ENTRY) + """Test loading and unloading the integration with a region identifier based entry.""" + entry = MockConfigEntry(domain=DOMAIN, 
data=DEMO_IDENTIFIER_CONFIG_ENTRY) entry.add_to_hass(hass) await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() @@ -36,3 +50,68 @@ async def test_load_unload_entry(hass: HomeAssistant) -> None: assert entry.state is ConfigEntryState.NOT_LOADED assert entry.entry_id not in hass.data[DOMAIN] + + +async def test_load_invalid_registry_entry(hass: HomeAssistant) -> None: + """Test loading the integration with an invalid registry entry ID.""" + INVALID_DATA = DEMO_TRACKER_CONFIG_ENTRY.copy() + INVALID_DATA[CONF_REGION_DEVICE_TRACKER] = "invalid_registry_id" + entry = MockConfigEntry(domain=DOMAIN, data=INVALID_DATA) + entry.add_to_hass(hass) + + await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + assert entry.state == ConfigEntryState.SETUP_RETRY + + +async def test_load_missing_device_tracker(hass: HomeAssistant) -> None: + """Test loading the integration with a missing device tracker.""" + entry = MockConfigEntry(domain=DOMAIN, data=DEMO_TRACKER_CONFIG_ENTRY) + entry.add_to_hass(hass) + + await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + assert entry.state == ConfigEntryState.SETUP_RETRY + + +async def test_load_missing_required_attribute(hass: HomeAssistant) -> None: + """Test loading the integration with a device tracker missing a required attribute.""" + entry = MockConfigEntry(domain=DOMAIN, data=DEMO_TRACKER_CONFIG_ENTRY) + entry.add_to_hass(hass) + + hass.states.async_set( + DEMO_TRACKER_CONFIG_ENTRY[CONF_REGION_DEVICE_TRACKER], + STATE_HOME, + {ATTR_LONGITUDE: "7.610263"}, + ) + + await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + assert entry.state == ConfigEntryState.SETUP_RETRY + + +async def test_load_valid_device_tracker( + hass: HomeAssistant, entity_registry: er.EntityRegistry +) -> None: + """Test loading the integration with a valid device tracker based entry.""" + entry = 
MockConfigEntry(domain=DOMAIN, data=DEMO_TRACKER_CONFIG_ENTRY) + entry.add_to_hass(hass) + entity_registry.async_get_or_create( + "device_tracker", + entry.domain, + "uuid", + suggested_object_id="test_gps", + config_entry=entry, + ) + + hass.states.async_set( + DEMO_TRACKER_CONFIG_ENTRY[CONF_REGION_DEVICE_TRACKER], + STATE_HOME, + {ATTR_LATITUDE: "50.180454", ATTR_LONGITUDE: "7.610263"}, + ) + + await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + + assert entry.state == ConfigEntryState.LOADED + assert entry.entry_id in hass.data[DOMAIN] From aeaa1f84c06f5ff027fe51439e973195e469a0aa Mon Sep 17 00:00:00 2001 From: rappenze Date: Mon, 22 Apr 2024 09:29:10 +0200 Subject: [PATCH 232/426] Add type hints in fibaro device (#106874) * Add typings in fibaro device * Fix type hints * Fix type hints * Remove unused method parameter * Improve log message --------- Co-authored-by: Erik Montnemery --- homeassistant/components/fibaro/__init__.py | 29 ++++++++++--------- .../components/fibaro/binary_sensor.py | 4 +-- homeassistant/components/fibaro/cover.py | 6 ++-- 3 files changed, 20 insertions(+), 19 deletions(-) diff --git a/homeassistant/components/fibaro/__init__.py b/homeassistant/components/fibaro/__init__.py index 2c1405130b4..4c1feb27629 100644 --- a/homeassistant/components/fibaro/__init__.py +++ b/homeassistant/components/fibaro/__init__.py @@ -454,37 +454,38 @@ class FibaroDevice(Entity): if not fibaro_device.visible: self._attr_entity_registry_visible_default = False - async def async_added_to_hass(self): + async def async_added_to_hass(self) -> None: """Call when entity is added to hass.""" self.controller.register(self.fibaro_device.fibaro_id, self._update_callback) - def _update_callback(self): + def _update_callback(self) -> None: """Update the state.""" self.schedule_update_ha_state(True) @property - def level(self): + def level(self) -> int | None: """Get the level of Fibaro device.""" if 
self.fibaro_device.value.has_value: return self.fibaro_device.value.int_value() return None @property - def level2(self): + def level2(self) -> int | None: """Get the tilt level of Fibaro device.""" if self.fibaro_device.value_2.has_value: return self.fibaro_device.value_2.int_value() return None - def dont_know_message(self, action): + def dont_know_message(self, cmd: str) -> None: """Make a warning in case we don't know how to perform an action.""" _LOGGER.warning( - "Not sure how to setValue: %s (available actions: %s)", + "Not sure how to %s: %s (available actions: %s)", + cmd, str(self.ha_id), str(self.fibaro_device.actions), ) - def set_level(self, level): + def set_level(self, level: int) -> None: """Set the level of Fibaro device.""" self.action("setValue", level) if self.fibaro_device.value.has_value: @@ -492,21 +493,21 @@ class FibaroDevice(Entity): if self.fibaro_device.has_brightness: self.fibaro_device.properties["brightness"] = level - def set_level2(self, level): + def set_level2(self, level: int) -> None: """Set the level2 of Fibaro device.""" self.action("setValue2", level) if self.fibaro_device.value_2.has_value: self.fibaro_device.properties["value2"] = level - def call_turn_on(self): + def call_turn_on(self) -> None: """Turn on the Fibaro device.""" self.action("turnOn") - def call_turn_off(self): + def call_turn_off(self) -> None: """Turn off the Fibaro device.""" self.action("turnOff") - def call_set_color(self, red, green, blue, white): + def call_set_color(self, red: int, green: int, blue: int, white: int) -> None: """Set the color of Fibaro device.""" red = int(max(0, min(255, red))) green = int(max(0, min(255, green))) @@ -516,7 +517,7 @@ class FibaroDevice(Entity): self.fibaro_device.properties["color"] = color_str self.action("setColor", str(red), str(green), str(blue), str(white)) - def action(self, cmd, *args): + def action(self, cmd: str, *args: Any) -> None: """Perform an action on the Fibaro HC.""" if cmd in 
self.fibaro_device.actions: self.fibaro_device.execute_action(cmd, args) @@ -525,12 +526,12 @@ class FibaroDevice(Entity): self.dont_know_message(cmd) @property - def current_binary_state(self): + def current_binary_state(self) -> bool: """Return the current binary state.""" return self.fibaro_device.value.bool_value(False) @property - def extra_state_attributes(self): + def extra_state_attributes(self) -> Mapping[str, Any]: """Return the state attributes of the device.""" attr = {"fibaro_id": self.fibaro_device.fibaro_id} diff --git a/homeassistant/components/fibaro/binary_sensor.py b/homeassistant/components/fibaro/binary_sensor.py index c0980025555..3c965c11b34 100644 --- a/homeassistant/components/fibaro/binary_sensor.py +++ b/homeassistant/components/fibaro/binary_sensor.py @@ -76,9 +76,9 @@ class FibaroBinarySensor(FibaroDevice, BinarySensorEntity): self._attr_icon = SENSOR_TYPES[self._fibaro_sensor_type][1] @property - def extra_state_attributes(self) -> Mapping[str, Any] | None: + def extra_state_attributes(self) -> Mapping[str, Any]: """Return the extra state attributes of the device.""" - return super().extra_state_attributes | self._own_extra_state_attributes + return {**super().extra_state_attributes, **self._own_extra_state_attributes} def update(self) -> None: """Get the latest data and update the state.""" diff --git a/homeassistant/components/fibaro/cover.py b/homeassistant/components/fibaro/cover.py index 16be6e98ae1..e71ae8982e7 100644 --- a/homeassistant/components/fibaro/cover.py +++ b/homeassistant/components/fibaro/cover.py @@ -2,7 +2,7 @@ from __future__ import annotations -from typing import Any +from typing import Any, cast from pyfibaro.fibaro_device import DeviceModel @@ -80,11 +80,11 @@ class FibaroCover(FibaroDevice, CoverEntity): def set_cover_position(self, **kwargs: Any) -> None: """Move the cover to a specific position.""" - self.set_level(kwargs.get(ATTR_POSITION)) + self.set_level(cast(int, kwargs.get(ATTR_POSITION))) def 
set_cover_tilt_position(self, **kwargs: Any) -> None: """Move the cover to a specific position.""" - self.set_level2(kwargs.get(ATTR_TILT_POSITION)) + self.set_level2(cast(int, kwargs.get(ATTR_TILT_POSITION))) @property def is_closed(self) -> bool | None: From de75f8223517029abc1fe94af731a15a55e08df1 Mon Sep 17 00:00:00 2001 From: rappenze Date: Mon, 22 Apr 2024 09:29:58 +0200 Subject: [PATCH 233/426] Refactor fibaro connect (#106875) * Refactor fibaro connect * Remove obsolete test * Add comment about ignored return value --- homeassistant/components/fibaro/__init__.py | 21 +++++-------- .../components/fibaro/config_flow.py | 11 ++----- tests/components/fibaro/test_config_flow.py | 30 ------------------- 3 files changed, 9 insertions(+), 53 deletions(-) diff --git a/homeassistant/components/fibaro/__init__.py b/homeassistant/components/fibaro/__init__.py index 4c1feb27629..5b7908ddf08 100644 --- a/homeassistant/components/fibaro/__init__.py +++ b/homeassistant/components/fibaro/__init__.py @@ -108,26 +108,21 @@ class FibaroController: # Device infos by fibaro device id self._device_infos: dict[int, DeviceInfo] = {} - def connect(self) -> bool: + def connect(self) -> None: """Start the communication with the Fibaro controller.""" - connected = self._client.connect() + # Return value doesn't need to be checked, + # it is only relevant when connecting without credentials + self._client.connect() info = self._client.read_info() self.hub_serial = info.serial_number self.hub_name = info.hc_name self.hub_model = info.platform self.hub_software_version = info.current_version - if connected is False: - _LOGGER.error( - "Invalid login for Fibaro HC. 
Please check username and password" - ) - return False - self._room_map = {room.fibaro_id: room for room in self._client.read_rooms()} self._read_devices() self._scenes = self._client.read_scenes() - return True def connect_with_error_handling(self) -> None: """Translate connect errors to easily differentiate auth and connect failures. @@ -135,9 +130,7 @@ class FibaroController: When there is a better error handling in the used library this can be improved. """ try: - connected = self.connect() - if not connected: - raise FibaroConnectFailed("Connect status is false") + self.connect() except HTTPError as http_ex: if http_ex.response.status_code == 403: raise FibaroAuthFailed from http_ex @@ -382,7 +375,7 @@ class FibaroController: pass -def _init_controller(data: Mapping[str, Any]) -> FibaroController: +def init_controller(data: Mapping[str, Any]) -> FibaroController: """Validate the user input allows us to connect to fibaro.""" controller = FibaroController(data) controller.connect_with_error_handling() @@ -395,7 +388,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: The unique id of the config entry is the serial number of the home center. """ try: - controller = await hass.async_add_executor_job(_init_controller, entry.data) + controller = await hass.async_add_executor_job(init_controller, entry.data) except FibaroConnectFailed as connect_ex: raise ConfigEntryNotReady( f"Could not connect to controller at {entry.data[CONF_URL]}" diff --git a/homeassistant/components/fibaro/config_flow.py b/homeassistant/components/fibaro/config_flow.py index 8c2fb502488..9003704348d 100644 --- a/homeassistant/components/fibaro/config_flow.py +++ b/homeassistant/components/fibaro/config_flow.py @@ -13,7 +13,7 @@ from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResu from homeassistant.const import CONF_PASSWORD, CONF_URL, CONF_USERNAME from homeassistant.core import HomeAssistant -from . 
import FibaroAuthFailed, FibaroConnectFailed, FibaroController +from . import FibaroAuthFailed, FibaroConnectFailed, init_controller from .const import CONF_IMPORT_PLUGINS, DOMAIN _LOGGER = logging.getLogger(__name__) @@ -28,19 +28,12 @@ STEP_USER_DATA_SCHEMA = vol.Schema( ) -def _connect_to_fibaro(data: dict[str, Any]) -> FibaroController: - """Validate the user input allows us to connect to fibaro.""" - controller = FibaroController(data) - controller.connect_with_error_handling() - return controller - - async def _validate_input(hass: HomeAssistant, data: dict[str, Any]) -> dict[str, Any]: """Validate the user input allows us to connect. Data has the keys from STEP_USER_DATA_SCHEMA with values provided by the user. """ - controller = await hass.async_add_executor_job(_connect_to_fibaro, data) + controller = await hass.async_add_executor_job(init_controller, data) _LOGGER.debug( "Successfully connected to fibaro home center %s with name %s", diff --git a/tests/components/fibaro/test_config_flow.py b/tests/components/fibaro/test_config_flow.py index dcf5f12a24a..b6b4e3992cd 100644 --- a/tests/components/fibaro/test_config_flow.py +++ b/tests/components/fibaro/test_config_flow.py @@ -89,36 +89,6 @@ async def test_config_flow_user_initiated_success(hass: HomeAssistant) -> None: } -async def test_config_flow_user_initiated_connect_failure( - hass: HomeAssistant, mock_fibaro_client: Mock -) -> None: - """Connect failure in flow manually initialized by the user.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} - ) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "user" - assert result["errors"] == {} - - mock_fibaro_client.connect.return_value = False - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_URL: TEST_URL, - CONF_USERNAME: TEST_USERNAME, - CONF_PASSWORD: TEST_PASSWORD, - }, - ) - - assert result["type"] is 
FlowResultType.FORM - assert result["step_id"] == "user" - assert result["errors"] == {"base": "cannot_connect"} - - await _recovery_after_failure_works(hass, mock_fibaro_client, result) - - async def test_config_flow_user_initiated_auth_failure( hass: HomeAssistant, mock_fibaro_client: Mock ) -> None: From 66ea528e94222f33d081cf60919acf764827d45c Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 22 Apr 2024 09:43:19 +0200 Subject: [PATCH 234/426] Bump actions/checkout from 4.1.2 to 4.1.3 (#115945) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/builder.yml | 12 +++++------ .github/workflows/ci.yaml | 34 +++++++++++++++--------------- .github/workflows/codeql.yml | 2 +- .github/workflows/translations.yml | 2 +- .github/workflows/wheels.yml | 6 +++--- 5 files changed, 28 insertions(+), 28 deletions(-) diff --git a/.github/workflows/builder.yml b/.github/workflows/builder.yml index 9d992608317..a440de225be 100644 --- a/.github/workflows/builder.yml +++ b/.github/workflows/builder.yml @@ -27,7 +27,7 @@ jobs: publish: ${{ steps.version.outputs.publish }} steps: - name: Checkout the repository - uses: actions/checkout@v4.1.2 + uses: actions/checkout@v4.1.3 with: fetch-depth: 0 @@ -90,7 +90,7 @@ jobs: arch: ${{ fromJson(needs.init.outputs.architectures) }} steps: - name: Checkout the repository - uses: actions/checkout@v4.1.2 + uses: actions/checkout@v4.1.3 - name: Download nightly wheels of frontend if: needs.init.outputs.channel == 'dev' @@ -242,7 +242,7 @@ jobs: - green steps: - name: Checkout the repository - uses: actions/checkout@v4.1.2 + uses: actions/checkout@v4.1.3 - name: Set build additional args run: | @@ -279,7 +279,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout the repository - uses: actions/checkout@v4.1.2 + uses: actions/checkout@v4.1.3 - name: Initialize git uses: home-assistant/actions/helpers/git-init@master @@ -320,7 
+320,7 @@ jobs: registry: ["ghcr.io/home-assistant", "docker.io/homeassistant"] steps: - name: Checkout the repository - uses: actions/checkout@v4.1.2 + uses: actions/checkout@v4.1.3 - name: Install Cosign uses: sigstore/cosign-installer@v3.4.0 @@ -450,7 +450,7 @@ jobs: if: github.repository_owner == 'home-assistant' && needs.init.outputs.publish == 'true' steps: - name: Checkout the repository - uses: actions/checkout@v4.1.2 + uses: actions/checkout@v4.1.3 - name: Set up Python ${{ env.DEFAULT_PYTHON }} uses: actions/setup-python@v5.1.0 diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 5f186c32e9a..581a36be953 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -89,7 +89,7 @@ jobs: runs-on: ubuntu-22.04 steps: - name: Check out code from GitHub - uses: actions/checkout@v4.1.2 + uses: actions/checkout@v4.1.3 - name: Generate partial Python venv restore key id: generate_python_cache_key run: >- @@ -223,7 +223,7 @@ jobs: - info steps: - name: Check out code from GitHub - uses: actions/checkout@v4.1.2 + uses: actions/checkout@v4.1.3 - name: Set up Python ${{ env.DEFAULT_PYTHON }} id: python uses: actions/setup-python@v5.1.0 @@ -269,7 +269,7 @@ jobs: - pre-commit steps: - name: Check out code from GitHub - uses: actions/checkout@v4.1.2 + uses: actions/checkout@v4.1.3 - name: Set up Python ${{ env.DEFAULT_PYTHON }} uses: actions/setup-python@v5.1.0 id: python @@ -309,7 +309,7 @@ jobs: - pre-commit steps: - name: Check out code from GitHub - uses: actions/checkout@v4.1.2 + uses: actions/checkout@v4.1.3 - name: Set up Python ${{ env.DEFAULT_PYTHON }} uses: actions/setup-python@v5.1.0 id: python @@ -348,7 +348,7 @@ jobs: - pre-commit steps: - name: Check out code from GitHub - uses: actions/checkout@v4.1.2 + uses: actions/checkout@v4.1.3 - name: Set up Python ${{ env.DEFAULT_PYTHON }} uses: actions/setup-python@v5.1.0 id: python @@ -442,7 +442,7 @@ jobs: python-version: ${{ fromJSON(needs.info.outputs.python_versions) }} steps: 
- name: Check out code from GitHub - uses: actions/checkout@v4.1.2 + uses: actions/checkout@v4.1.3 - name: Set up Python ${{ matrix.python-version }} id: python uses: actions/setup-python@v5.1.0 @@ -513,7 +513,7 @@ jobs: - base steps: - name: Check out code from GitHub - uses: actions/checkout@v4.1.2 + uses: actions/checkout@v4.1.3 - name: Set up Python ${{ env.DEFAULT_PYTHON }} id: python uses: actions/setup-python@v5.1.0 @@ -545,7 +545,7 @@ jobs: - base steps: - name: Check out code from GitHub - uses: actions/checkout@v4.1.2 + uses: actions/checkout@v4.1.3 - name: Set up Python ${{ env.DEFAULT_PYTHON }} id: python uses: actions/setup-python@v5.1.0 @@ -578,7 +578,7 @@ jobs: - base steps: - name: Check out code from GitHub - uses: actions/checkout@v4.1.2 + uses: actions/checkout@v4.1.3 - name: Set up Python ${{ env.DEFAULT_PYTHON }} id: python uses: actions/setup-python@v5.1.0 @@ -622,7 +622,7 @@ jobs: - base steps: - name: Check out code from GitHub - uses: actions/checkout@v4.1.2 + uses: actions/checkout@v4.1.3 - name: Set up Python ${{ env.DEFAULT_PYTHON }} id: python uses: actions/setup-python@v5.1.0 @@ -694,7 +694,7 @@ jobs: ffmpeg \ libgammu-dev - name: Check out code from GitHub - uses: actions/checkout@v4.1.2 + uses: actions/checkout@v4.1.3 - name: Set up Python ${{ env.DEFAULT_PYTHON }} id: python uses: actions/setup-python@v5.1.0 @@ -754,7 +754,7 @@ jobs: ffmpeg \ libgammu-dev - name: Check out code from GitHub - uses: actions/checkout@v4.1.2 + uses: actions/checkout@v4.1.3 - name: Set up Python ${{ matrix.python-version }} id: python uses: actions/setup-python@v5.1.0 @@ -869,7 +869,7 @@ jobs: ffmpeg \ libmariadb-dev-compat - name: Check out code from GitHub - uses: actions/checkout@v4.1.2 + uses: actions/checkout@v4.1.3 - name: Set up Python ${{ matrix.python-version }} id: python uses: actions/setup-python@v5.1.0 @@ -991,7 +991,7 @@ jobs: ffmpeg \ postgresql-server-dev-14 - name: Check out code from GitHub - uses: actions/checkout@v4.1.2 + uses: 
actions/checkout@v4.1.3 - name: Set up Python ${{ matrix.python-version }} id: python uses: actions/setup-python@v5.1.0 @@ -1086,7 +1086,7 @@ jobs: timeout-minutes: 10 steps: - name: Check out code from GitHub - uses: actions/checkout@v4.1.2 + uses: actions/checkout@v4.1.3 - name: Download all coverage artifacts uses: actions/download-artifact@v4.1.5 with: @@ -1132,7 +1132,7 @@ jobs: ffmpeg \ libgammu-dev - name: Check out code from GitHub - uses: actions/checkout@v4.1.2 + uses: actions/checkout@v4.1.3 - name: Set up Python ${{ matrix.python-version }} id: python uses: actions/setup-python@v5.1.0 @@ -1219,7 +1219,7 @@ jobs: timeout-minutes: 10 steps: - name: Check out code from GitHub - uses: actions/checkout@v4.1.2 + uses: actions/checkout@v4.1.3 - name: Download all coverage artifacts uses: actions/download-artifact@v4.1.5 with: diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml index 2b9a2af127f..6a366a7ab8d 100644 --- a/.github/workflows/codeql.yml +++ b/.github/workflows/codeql.yml @@ -21,7 +21,7 @@ jobs: steps: - name: Check out code from GitHub - uses: actions/checkout@v4.1.2 + uses: actions/checkout@v4.1.3 - name: Initialize CodeQL uses: github/codeql-action/init@v3.25.1 diff --git a/.github/workflows/translations.yml b/.github/workflows/translations.yml index e61eef36f0b..3f0559de541 100644 --- a/.github/workflows/translations.yml +++ b/.github/workflows/translations.yml @@ -19,7 +19,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout the repository - uses: actions/checkout@v4.1.2 + uses: actions/checkout@v4.1.3 - name: Set up Python ${{ env.DEFAULT_PYTHON }} uses: actions/setup-python@v5.1.0 diff --git a/.github/workflows/wheels.yml b/.github/workflows/wheels.yml index 36a9fa1f839..24033a92fd5 100644 --- a/.github/workflows/wheels.yml +++ b/.github/workflows/wheels.yml @@ -28,7 +28,7 @@ jobs: architectures: ${{ steps.info.outputs.architectures }} steps: - name: Checkout the repository - uses: actions/checkout@v4.1.2 + uses: 
actions/checkout@v4.1.3 - name: Get information id: info @@ -88,7 +88,7 @@ jobs: arch: ${{ fromJson(needs.init.outputs.architectures) }} steps: - name: Checkout the repository - uses: actions/checkout@v4.1.2 + uses: actions/checkout@v4.1.3 - name: Download env_file uses: actions/download-artifact@v4.1.5 @@ -126,7 +126,7 @@ jobs: arch: ${{ fromJson(needs.init.outputs.architectures) }} steps: - name: Checkout the repository - uses: actions/checkout@v4.1.2 + uses: actions/checkout@v4.1.3 - name: Download env_file uses: actions/download-artifact@v4.1.5 From f927b27ed4ddfa497b47c11b989e325919e93512 Mon Sep 17 00:00:00 2001 From: Quentame Date: Mon, 22 Apr 2024 09:54:47 +0200 Subject: [PATCH 235/426] Add Epic Games Store integration (#104725) * Add Epic Games Store integration Squashed commit of the following PR: #81167 * Bump epicstore-api to 0.1.7 as it handle better error 1004 Thanks to https://github.com/SD4RK/epicstore_api/commit/d7469f7c99508c06b3867fecbcf291ebf86c4c72 * Use extra_state_attributes instead of overriding state_attributes * Review: change how config_flow.validate_input is handled * Use LanguageSelector and rename locale to language * Review: init-better use of hass.data.setdefault Co-authored-by: Joost Lekkerkerker * Review: don't need to update at init Co-authored-by: Joost Lekkerkerker * Revert "Review: don't need to update at init" --> not working otherwise This reverts commit 1445a87c8e9b7247f1c9835bf2e2d7297dd02586. 
* Review: fix config_flow.validate_input/retactor following lib bump * review: merge async_update function with event property Co-authored-by: Joost Lekkerkerker * hassfest * Fix duplicates data from applied comment review 5035055 * review: thanks to 5035055 async_add_entities update_before_add param is not required anymore Co-authored-by: Joost Lekkerkerker * Fix Christmas special "Holiday sale" case * gen_requirements_all * Use CONF_LANGUAGE from HA const * Move CalendarType to const * manifest: integration_type -> service Co-authored-by: Sid <27780930+autinerd@users.noreply.github.com> * calendar: remove date start/end assert Co-authored-by: Erik Montnemery * const: rename SUPPORTED_LANGUAGES * hassfest * config: Move to ConfigFlowResult * coordinator: main file comment Co-authored-by: Erik Montnemery * ruff & hassfest * review: do not guess country * Add @hacf-fr as codeowner * review: remove games extra_attrs Was dropped somehow: - 73c20f34803b0a0ec242bf0740494f17a68f6f59 review: move games extra_attrs to data service - other commit that removed the service part * review: remove unused error class was removed: - 040cf945bb5346b6d42b3782b5061a13fb7b1f6b --------- Co-authored-by: Joost Lekkerkerker Co-authored-by: Sid <27780930+autinerd@users.noreply.github.com> Co-authored-by: Erik Montnemery --- .coveragerc | 2 + CODEOWNERS | 2 + .../components/epic_games_store/__init__.py | 35 + .../components/epic_games_store/calendar.py | 97 + .../epic_games_store/config_flow.py | 96 + .../components/epic_games_store/const.py | 31 + .../epic_games_store/coordinator.py | 81 + .../components/epic_games_store/helper.py | 92 + .../components/epic_games_store/manifest.json | 10 + .../components/epic_games_store/strings.json | 38 + homeassistant/generated/config_flows.py | 1 + homeassistant/generated/integrations.json | 6 + requirements_all.txt | 3 + requirements_test_all.txt | 3 + tests/components/epic_games_store/__init__.py | 1 + tests/components/epic_games_store/common.py | 
31 + tests/components/epic_games_store/conftest.py | 44 + tests/components/epic_games_store/const.py | 25 + .../error_1004_attribute_not_found.json | 1026 ++++++++ .../fixtures/error_5222_wrong_country.json | 23 + .../epic_games_store/fixtures/free_games.json | 2189 +++++++++++++++++ .../free_games_christmas_special.json | 253 ++ .../fixtures/free_games_one.json | 658 +++++ .../epic_games_store/test_calendar.py | 162 ++ .../epic_games_store/test_config_flow.py | 142 ++ .../epic_games_store/test_helper.py | 74 + 26 files changed, 5125 insertions(+) create mode 100644 homeassistant/components/epic_games_store/__init__.py create mode 100644 homeassistant/components/epic_games_store/calendar.py create mode 100644 homeassistant/components/epic_games_store/config_flow.py create mode 100644 homeassistant/components/epic_games_store/const.py create mode 100644 homeassistant/components/epic_games_store/coordinator.py create mode 100644 homeassistant/components/epic_games_store/helper.py create mode 100644 homeassistant/components/epic_games_store/manifest.json create mode 100644 homeassistant/components/epic_games_store/strings.json create mode 100644 tests/components/epic_games_store/__init__.py create mode 100644 tests/components/epic_games_store/common.py create mode 100644 tests/components/epic_games_store/conftest.py create mode 100644 tests/components/epic_games_store/const.py create mode 100644 tests/components/epic_games_store/fixtures/error_1004_attribute_not_found.json create mode 100644 tests/components/epic_games_store/fixtures/error_5222_wrong_country.json create mode 100644 tests/components/epic_games_store/fixtures/free_games.json create mode 100644 tests/components/epic_games_store/fixtures/free_games_christmas_special.json create mode 100644 tests/components/epic_games_store/fixtures/free_games_one.json create mode 100644 tests/components/epic_games_store/test_calendar.py create mode 100644 tests/components/epic_games_store/test_config_flow.py create mode 
100644 tests/components/epic_games_store/test_helper.py diff --git a/.coveragerc b/.coveragerc index ceff3384202..f6368de7d89 100644 --- a/.coveragerc +++ b/.coveragerc @@ -361,6 +361,8 @@ omit = homeassistant/components/environment_canada/weather.py homeassistant/components/envisalink/* homeassistant/components/ephember/climate.py + homeassistant/components/epic_games_store/__init__.py + homeassistant/components/epic_games_store/coordinator.py homeassistant/components/epion/__init__.py homeassistant/components/epion/coordinator.py homeassistant/components/epion/sensor.py diff --git a/CODEOWNERS b/CODEOWNERS index ef997cfa896..5dcf4b3df81 100644 --- a/CODEOWNERS +++ b/CODEOWNERS @@ -398,6 +398,8 @@ build.json @home-assistant/supervisor /homeassistant/components/environment_canada/ @gwww @michaeldavie /tests/components/environment_canada/ @gwww @michaeldavie /homeassistant/components/ephember/ @ttroy50 +/homeassistant/components/epic_games_store/ @hacf-fr @Quentame +/tests/components/epic_games_store/ @hacf-fr @Quentame /homeassistant/components/epion/ @lhgravendeel /tests/components/epion/ @lhgravendeel /homeassistant/components/epson/ @pszafer diff --git a/homeassistant/components/epic_games_store/__init__.py b/homeassistant/components/epic_games_store/__init__.py new file mode 100644 index 00000000000..af25eb98137 --- /dev/null +++ b/homeassistant/components/epic_games_store/__init__.py @@ -0,0 +1,35 @@ +"""The Epic Games Store integration.""" + +from __future__ import annotations + +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant + +from .const import DOMAIN +from .coordinator import EGSCalendarUpdateCoordinator + +PLATFORMS: list[Platform] = [ + Platform.CALENDAR, +] + + +async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: + """Set up Epic Games Store from a config entry.""" + + coordinator = EGSCalendarUpdateCoordinator(hass, entry) + await 
coordinator.async_config_entry_first_refresh() + + hass.data.setdefault(DOMAIN, {})[entry.entry_id] = coordinator + + await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) + + return True + + +async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: + """Unload a config entry.""" + if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS): + hass.data[DOMAIN].pop(entry.entry_id) + + return unload_ok diff --git a/homeassistant/components/epic_games_store/calendar.py b/homeassistant/components/epic_games_store/calendar.py new file mode 100644 index 00000000000..75c448e8467 --- /dev/null +++ b/homeassistant/components/epic_games_store/calendar.py @@ -0,0 +1,97 @@ +"""Calendar platform for a Epic Games Store.""" + +from __future__ import annotations + +from collections import namedtuple +from datetime import datetime +from typing import Any + +from homeassistant.components.calendar import CalendarEntity, CalendarEvent +from homeassistant.config_entries import ConfigEntry +from homeassistant.core import HomeAssistant +from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo +from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.update_coordinator import CoordinatorEntity + +from .const import DOMAIN, CalendarType +from .coordinator import EGSCalendarUpdateCoordinator + +DateRange = namedtuple("DateRange", ["start", "end"]) + + +async def async_setup_entry( + hass: HomeAssistant, + entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up the local calendar platform.""" + coordinator: EGSCalendarUpdateCoordinator = hass.data[DOMAIN][entry.entry_id] + + entities = [ + EGSCalendar(coordinator, entry.entry_id, CalendarType.FREE), + EGSCalendar(coordinator, entry.entry_id, CalendarType.DISCOUNT), + ] + async_add_entities(entities) + + +class EGSCalendar(CoordinatorEntity[EGSCalendarUpdateCoordinator], CalendarEntity): + 
"""A calendar entity by Epic Games Store.""" + + _attr_has_entity_name = True + + def __init__( + self, + coordinator: EGSCalendarUpdateCoordinator, + config_entry_id: str, + cal_type: CalendarType, + ) -> None: + """Initialize EGSCalendar.""" + super().__init__(coordinator) + self._cal_type = cal_type + self._attr_translation_key = f"{cal_type}_games" + self._attr_unique_id = f"{config_entry_id}-{cal_type}" + self._attr_device_info = DeviceInfo( + entry_type=DeviceEntryType.SERVICE, + identifiers={(DOMAIN, config_entry_id)}, + manufacturer="Epic Games Store", + name="Epic Games Store", + ) + + @property + def event(self) -> CalendarEvent | None: + """Return the next upcoming event.""" + if event := self.coordinator.data[self._cal_type]: + return _get_calendar_event(event[0]) + return None + + async def async_get_events( + self, hass: HomeAssistant, start_date: datetime, end_date: datetime + ) -> list[CalendarEvent]: + """Get all events in a specific time frame.""" + events = filter( + lambda game: _are_date_range_overlapping( + DateRange(start=game["discount_start_at"], end=game["discount_end_at"]), + DateRange(start=start_date, end=end_date), + ), + self.coordinator.data[self._cal_type], + ) + return [_get_calendar_event(event) for event in events] + + +def _get_calendar_event(event: dict[str, Any]) -> CalendarEvent: + """Return a CalendarEvent from an API event.""" + return CalendarEvent( + summary=event["title"], + start=event["discount_start_at"], + end=event["discount_end_at"], + description=f"{event['description']}\n\n{event['url']}", + ) + + +def _are_date_range_overlapping(range1: DateRange, range2: DateRange) -> bool: + """Return a CalendarEvent from an API event.""" + latest_start = max(range1.start, range2.start) + earliest_end = min(range1.end, range2.end) + delta = (earliest_end - latest_start).days + 1 + overlap = max(0, delta) + return overlap > 0 diff --git a/homeassistant/components/epic_games_store/config_flow.py 
b/homeassistant/components/epic_games_store/config_flow.py new file mode 100644 index 00000000000..2ae86060ba2 --- /dev/null +++ b/homeassistant/components/epic_games_store/config_flow.py @@ -0,0 +1,96 @@ +"""Config flow for Epic Games Store integration.""" + +from __future__ import annotations + +import logging +from typing import Any + +from epicstore_api import EpicGamesStoreAPI +import voluptuous as vol + +from homeassistant import config_entries +from homeassistant.config_entries import ConfigFlowResult +from homeassistant.const import CONF_COUNTRY, CONF_LANGUAGE +from homeassistant.core import HomeAssistant +from homeassistant.helpers.selector import ( + CountrySelector, + LanguageSelector, + LanguageSelectorConfig, +) + +from .const import DOMAIN, SUPPORTED_LANGUAGES + +_LOGGER = logging.getLogger(__name__) + +STEP_USER_DATA_SCHEMA = vol.Schema( + { + vol.Required(CONF_LANGUAGE): LanguageSelector( + LanguageSelectorConfig(languages=SUPPORTED_LANGUAGES) + ), + vol.Required(CONF_COUNTRY): CountrySelector(), + } +) + + +def get_default_language(hass: HomeAssistant) -> str | None: + """Get default language code based on Home Assistant config.""" + language_code = f"{hass.config.language}-{hass.config.country}" + if language_code in SUPPORTED_LANGUAGES: + return language_code + if hass.config.language in SUPPORTED_LANGUAGES: + return hass.config.language + return None + + +async def validate_input(hass: HomeAssistant, user_input: dict[str, Any]) -> None: + """Validate the user input allows us to connect.""" + api = EpicGamesStoreAPI(user_input[CONF_LANGUAGE], user_input[CONF_COUNTRY]) + data = await hass.async_add_executor_job(api.get_free_games) + + if data.get("errors"): + _LOGGER.warning(data["errors"]) + + assert data["data"]["Catalog"]["searchStore"]["elements"] + + +class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN): + """Handle a config flow for Epic Games Store.""" + + VERSION = 1 + + async def async_step_user( + self, user_input: dict[str, Any] | 
None = None + ) -> ConfigFlowResult: + """Handle the initial step.""" + data_schema = self.add_suggested_values_to_schema( + STEP_USER_DATA_SCHEMA, + user_input + or { + CONF_LANGUAGE: get_default_language(self.hass), + CONF_COUNTRY: self.hass.config.country, + }, + ) + if user_input is None: + return self.async_show_form(step_id="user", data_schema=data_schema) + + await self.async_set_unique_id( + f"freegames-{user_input[CONF_LANGUAGE]}-{user_input[CONF_COUNTRY]}" + ) + self._abort_if_unique_id_configured() + + errors = {} + + try: + await validate_input(self.hass, user_input) + except Exception: # pylint: disable=broad-except + _LOGGER.exception("Unexpected exception") + errors["base"] = "unknown" + else: + return self.async_create_entry( + title=f"Epic Games Store - Free Games ({user_input[CONF_LANGUAGE]}-{user_input[CONF_COUNTRY]})", + data=user_input, + ) + + return self.async_show_form( + step_id="user", data_schema=data_schema, errors=errors + ) diff --git a/homeassistant/components/epic_games_store/const.py b/homeassistant/components/epic_games_store/const.py new file mode 100644 index 00000000000..c397698fd0c --- /dev/null +++ b/homeassistant/components/epic_games_store/const.py @@ -0,0 +1,31 @@ +"""Constants for the Epic Games Store integration.""" + +from enum import StrEnum + +DOMAIN = "epic_games_store" + +SUPPORTED_LANGUAGES = [ + "ar", + "de", + "en-US", + "es-ES", + "es-MX", + "fr", + "it", + "ja", + "ko", + "pl", + "pt-BR", + "ru", + "th", + "tr", + "zh-CN", + "zh-Hant", +] + + +class CalendarType(StrEnum): + """Calendar types.""" + + FREE = "free" + DISCOUNT = "discount" diff --git a/homeassistant/components/epic_games_store/coordinator.py b/homeassistant/components/epic_games_store/coordinator.py new file mode 100644 index 00000000000..d9c48f5da02 --- /dev/null +++ b/homeassistant/components/epic_games_store/coordinator.py @@ -0,0 +1,81 @@ +"""The Epic Games Store integration data coordinator.""" + +from __future__ import annotations + +from 
datetime import timedelta +import logging +from typing import Any + +from epicstore_api import EpicGamesStoreAPI + +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import CONF_COUNTRY, CONF_LANGUAGE +from homeassistant.core import HomeAssistant +from homeassistant.helpers.update_coordinator import DataUpdateCoordinator + +from .const import DOMAIN, CalendarType +from .helper import format_game_data + +SCAN_INTERVAL = timedelta(days=1) + +_LOGGER = logging.getLogger(__name__) + + +class EGSCalendarUpdateCoordinator( + DataUpdateCoordinator[dict[str, list[dict[str, Any]]]] +): + """Class to manage fetching data from the Epic Game Store.""" + + def __init__(self, hass: HomeAssistant, entry: ConfigEntry) -> None: + """Initialize.""" + self._api = EpicGamesStoreAPI( + entry.data[CONF_LANGUAGE], + entry.data[CONF_COUNTRY], + ) + self.language = entry.data[CONF_LANGUAGE] + + super().__init__( + hass, + _LOGGER, + name=DOMAIN, + update_interval=SCAN_INTERVAL, + ) + + async def _async_update_data(self) -> dict[str, list[dict[str, Any]]]: + """Update data via library.""" + raw_data = await self.hass.async_add_executor_job(self._api.get_free_games) + _LOGGER.debug(raw_data) + data = raw_data["data"]["Catalog"]["searchStore"]["elements"] + + discount_games = filter( + lambda game: game.get("promotions") + and ( + # Current discount(s) + game["promotions"]["promotionalOffers"] + or + # Upcoming discount(s) + game["promotions"]["upcomingPromotionalOffers"] + ), + data, + ) + + return_data: dict[str, list[dict[str, Any]]] = { + CalendarType.DISCOUNT: [], + CalendarType.FREE: [], + } + for discount_game in discount_games: + game = format_game_data(discount_game, self.language) + + if game["discount_type"]: + return_data[game["discount_type"]].append(game) + + return_data[CalendarType.DISCOUNT] = sorted( + return_data[CalendarType.DISCOUNT], + key=lambda game: game["discount_start_at"], + ) + return_data[CalendarType.FREE] = sorted( + 
return_data[CalendarType.FREE], key=lambda game: game["discount_start_at"] + ) + + _LOGGER.debug(return_data) + return return_data diff --git a/homeassistant/components/epic_games_store/helper.py b/homeassistant/components/epic_games_store/helper.py new file mode 100644 index 00000000000..2510c7699e5 --- /dev/null +++ b/homeassistant/components/epic_games_store/helper.py @@ -0,0 +1,92 @@ +"""Helper for Epic Games Store.""" + +import contextlib +from typing import Any + +from homeassistant.util import dt as dt_util + + +def format_game_data(raw_game_data: dict[str, Any], language: str) -> dict[str, Any]: + """Format raw API game data for Home Assistant users.""" + img_portrait = None + img_landscape = None + + for image in raw_game_data["keyImages"]: + if image["type"] == "OfferImageTall": + img_portrait = image["url"] + if image["type"] == "OfferImageWide": + img_landscape = image["url"] + + current_promotions = raw_game_data["promotions"]["promotionalOffers"] + upcoming_promotions = raw_game_data["promotions"]["upcomingPromotionalOffers"] + + promotion_data = {} + if ( + current_promotions + and raw_game_data["price"]["totalPrice"]["discountPrice"] == 0 + ): + promotion_data = current_promotions[0]["promotionalOffers"][0] + else: + promotion_data = (current_promotions or upcoming_promotions)[0][ + "promotionalOffers" + ][0] + + return { + "title": raw_game_data["title"].replace("\xa0", " "), + "description": raw_game_data["description"].strip().replace("\xa0", " "), + "released_at": dt_util.parse_datetime(raw_game_data["effectiveDate"]), + "original_price": raw_game_data["price"]["totalPrice"]["fmtPrice"][ + "originalPrice" + ].replace("\xa0", " "), + "publisher": raw_game_data["seller"]["name"], + "url": get_game_url(raw_game_data, language), + "img_portrait": img_portrait, + "img_landscape": img_landscape, + "discount_type": ("free" if is_free_game(raw_game_data) else "discount") + if promotion_data + else None, + "discount_start_at": 
dt_util.parse_datetime(promotion_data["startDate"]) + if promotion_data + else None, + "discount_end_at": dt_util.parse_datetime(promotion_data["endDate"]) + if promotion_data + else None, + } + + +def get_game_url(raw_game_data: dict[str, Any], language: str) -> str: + """Format raw API game data for Home Assistant users.""" + url_bundle_or_product = "bundles" if raw_game_data["offerType"] == "BUNDLE" else "p" + url_slug: str | None = None + try: + url_slug = raw_game_data["offerMappings"][0]["pageSlug"] + except Exception: # pylint: disable=broad-except + with contextlib.suppress(Exception): + url_slug = raw_game_data["catalogNs"]["mappings"][0]["pageSlug"] + + if not url_slug: + url_slug = raw_game_data["urlSlug"] + + return f"https://store.epicgames.com/{language}/{url_bundle_or_product}/{url_slug}" + + +def is_free_game(game: dict[str, Any]) -> bool: + """Return if the game is free or will be free.""" + return ( + # Current free game(s) + game["promotions"]["promotionalOffers"] + and game["promotions"]["promotionalOffers"][0]["promotionalOffers"][0][ + "discountSetting" + ]["discountPercentage"] + == 0 + and + # Checking current price, maybe not necessary + game["price"]["totalPrice"]["discountPrice"] == 0 + ) or ( + # Upcoming free game(s) + game["promotions"]["upcomingPromotionalOffers"] + and game["promotions"]["upcomingPromotionalOffers"][0]["promotionalOffers"][0][ + "discountSetting" + ]["discountPercentage"] + == 0 + ) diff --git a/homeassistant/components/epic_games_store/manifest.json b/homeassistant/components/epic_games_store/manifest.json new file mode 100644 index 00000000000..665eaec6668 --- /dev/null +++ b/homeassistant/components/epic_games_store/manifest.json @@ -0,0 +1,10 @@ +{ + "domain": "epic_games_store", + "name": "Epic Games Store", + "codeowners": ["@hacf-fr", "@Quentame"], + "config_flow": true, + "documentation": "https://www.home-assistant.io/integrations/epic_games_store", + "integration_type": "service", + "iot_class": 
"cloud_polling", + "requirements": ["epicstore-api==0.1.7"] +} diff --git a/homeassistant/components/epic_games_store/strings.json b/homeassistant/components/epic_games_store/strings.json new file mode 100644 index 00000000000..58a87a55f81 --- /dev/null +++ b/homeassistant/components/epic_games_store/strings.json @@ -0,0 +1,38 @@ +{ + "config": { + "step": { + "user": { + "data": { + "language": "Language", + "country": "Country" + } + } + }, + "error": { + "unknown": "[%key:common::config_flow::error::unknown%]" + }, + "abort": { + "already_configured": "[%key:common::config_flow::abort::already_configured_service%]" + } + }, + "entity": { + "calendar": { + "free_games": { + "name": "Free games", + "state_attributes": { + "games": { + "name": "Games" + } + } + }, + "discount_games": { + "name": "Discount games", + "state_attributes": { + "games": { + "name": "[%key:component::epic_games_store::entity::calendar::free_games::state_attributes::games::name%]" + } + } + } + } + } +} diff --git a/homeassistant/generated/config_flows.py b/homeassistant/generated/config_flows.py index c02d8a2987e..e5d5f37ad5a 100644 --- a/homeassistant/generated/config_flows.py +++ b/homeassistant/generated/config_flows.py @@ -152,6 +152,7 @@ FLOWS = { "enocean", "enphase_envoy", "environment_canada", + "epic_games_store", "epion", "epson", "eq3btsmart", diff --git a/homeassistant/generated/integrations.json b/homeassistant/generated/integrations.json index 2b1e5b4fb91..0ee796d5376 100644 --- a/homeassistant/generated/integrations.json +++ b/homeassistant/generated/integrations.json @@ -1649,6 +1649,12 @@ "config_flow": false, "iot_class": "local_polling" }, + "epic_games_store": { + "name": "Epic Games Store", + "integration_type": "service", + "config_flow": true, + "iot_class": "cloud_polling" + }, "epion": { "name": "Epion", "integration_type": "hub", diff --git a/requirements_all.txt b/requirements_all.txt index dade5079fbd..055db11d63a 100644 --- a/requirements_all.txt +++ 
b/requirements_all.txt @@ -806,6 +806,9 @@ env-canada==0.6.0 # homeassistant.components.season ephem==4.1.5 +# homeassistant.components.epic_games_store +epicstore-api==0.1.7 + # homeassistant.components.epion epion==0.0.3 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index d1bfeff488f..ff19a6a5c89 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -660,6 +660,9 @@ env-canada==0.6.0 # homeassistant.components.season ephem==4.1.5 +# homeassistant.components.epic_games_store +epicstore-api==0.1.7 + # homeassistant.components.epion epion==0.0.3 diff --git a/tests/components/epic_games_store/__init__.py b/tests/components/epic_games_store/__init__.py new file mode 100644 index 00000000000..1c5baf3704f --- /dev/null +++ b/tests/components/epic_games_store/__init__.py @@ -0,0 +1 @@ +"""Tests for the Epic Games Store integration.""" diff --git a/tests/components/epic_games_store/common.py b/tests/components/epic_games_store/common.py new file mode 100644 index 00000000000..95191ad97f9 --- /dev/null +++ b/tests/components/epic_games_store/common.py @@ -0,0 +1,31 @@ +"""Common methods used across tests for Epic Games Store.""" + +from unittest.mock import patch + +from homeassistant.components.epic_games_store.const import DOMAIN +from homeassistant.const import CONF_COUNTRY, CONF_LANGUAGE +from homeassistant.core import HomeAssistant +from homeassistant.setup import async_setup_component + +from .const import MOCK_COUNTRY, MOCK_LANGUAGE + +from tests.common import MockConfigEntry + + +async def setup_platform(hass: HomeAssistant, platform: str) -> MockConfigEntry: + """Set up the Epic Games Store platform.""" + mock_entry = MockConfigEntry( + domain=DOMAIN, + data={ + CONF_LANGUAGE: MOCK_LANGUAGE, + CONF_COUNTRY: MOCK_COUNTRY, + }, + unique_id=f"freegames-{MOCK_LANGUAGE}-{MOCK_COUNTRY}", + ) + mock_entry.add_to_hass(hass) + + with patch("homeassistant.components.epic_games_store.PLATFORMS", [platform]): + assert await 
async_setup_component(hass, DOMAIN, {}) + await hass.async_block_till_done() + + return mock_entry diff --git a/tests/components/epic_games_store/conftest.py b/tests/components/epic_games_store/conftest.py new file mode 100644 index 00000000000..e02997a429e --- /dev/null +++ b/tests/components/epic_games_store/conftest.py @@ -0,0 +1,44 @@ +"""Define fixtures for Epic Games Store tests.""" + +from unittest.mock import Mock, patch + +import pytest + +from .const import ( + DATA_ERROR_ATTRIBUTE_NOT_FOUND, + DATA_FREE_GAMES, + DATA_FREE_GAMES_CHRISTMAS_SPECIAL, +) + + +@pytest.fixture(name="service_multiple") +def mock_service_multiple(): + """Mock a successful service with multiple free & discount games.""" + with patch( + "homeassistant.components.epic_games_store.coordinator.EpicGamesStoreAPI" + ) as service_mock: + instance = service_mock.return_value + instance.get_free_games = Mock(return_value=DATA_FREE_GAMES) + yield service_mock + + +@pytest.fixture(name="service_christmas_special") +def mock_service_christmas_special(): + """Mock a successful service with Christmas special case.""" + with patch( + "homeassistant.components.epic_games_store.coordinator.EpicGamesStoreAPI" + ) as service_mock: + instance = service_mock.return_value + instance.get_free_games = Mock(return_value=DATA_FREE_GAMES_CHRISTMAS_SPECIAL) + yield service_mock + + +@pytest.fixture(name="service_attribute_not_found") +def mock_service_attribute_not_found(): + """Mock a successful service returning a not found attribute error with free & discount games.""" + with patch( + "homeassistant.components.epic_games_store.coordinator.EpicGamesStoreAPI" + ) as service_mock: + instance = service_mock.return_value + instance.get_free_games = Mock(return_value=DATA_ERROR_ATTRIBUTE_NOT_FOUND) + yield service_mock diff --git a/tests/components/epic_games_store/const.py b/tests/components/epic_games_store/const.py new file mode 100644 index 00000000000..dcd82c7e03e --- /dev/null +++ 
b/tests/components/epic_games_store/const.py @@ -0,0 +1,25 @@ +"""Test constants.""" + +from homeassistant.components.epic_games_store.const import DOMAIN + +from tests.common import load_json_object_fixture + +MOCK_LANGUAGE = "fr" +MOCK_COUNTRY = "FR" + +DATA_ERROR_ATTRIBUTE_NOT_FOUND = load_json_object_fixture( + "error_1004_attribute_not_found.json", DOMAIN +) + +DATA_ERROR_WRONG_COUNTRY = load_json_object_fixture( + "error_5222_wrong_country.json", DOMAIN +) + +# free games +DATA_FREE_GAMES = load_json_object_fixture("free_games.json", DOMAIN) + +DATA_FREE_GAMES_ONE = load_json_object_fixture("free_games_one.json", DOMAIN) + +DATA_FREE_GAMES_CHRISTMAS_SPECIAL = load_json_object_fixture( + "free_games_christmas_special.json", DOMAIN +) diff --git a/tests/components/epic_games_store/fixtures/error_1004_attribute_not_found.json b/tests/components/epic_games_store/fixtures/error_1004_attribute_not_found.json new file mode 100644 index 00000000000..6cb14608c2b --- /dev/null +++ b/tests/components/epic_games_store/fixtures/error_1004_attribute_not_found.json @@ -0,0 +1,1026 @@ +{ + "errors": [ + { + "message": "CatalogOffer/offerMappings: Request failed with status code 404", + "locations": [ + { + "line": 73, + "column": 17 + } + ], + "correlationId": "0451aa13-b1d6-4f90-8ca5-d12bf917675a", + "serviceResponse": "{\"errorMessage\":\"The item or resource being requested could not be found.\",\"errorCode\":\"errors.com.epicgames.not_found\",\"numericErrorCode\":1004,\"errorStatus\":404}", + "stack": null, + "path": ["Catalog", "searchStore", "elements", 4, "offerMappings"] + }, + { + "message": "CatalogNamespace/mappings: Request failed with status code 404", + "locations": [ + { + "line": 68, + "column": 19 + } + ], + "correlationId": "0451aa13-b1d6-4f90-8ca5-d12bf917675a", + "serviceResponse": "{\"errorMessage\":\"The item or resource being requested could not be 
found.\",\"errorCode\":\"errors.com.epicgames.not_found\",\"numericErrorCode\":1004,\"errorStatus\":404}", + "stack": null, + "path": ["Catalog", "searchStore", "elements", 4, "catalogNs", "mappings"] + } + ], + "data": { + "Catalog": { + "searchStore": { + "elements": [ + { + "title": "Godlike Burger", + "id": "d9300ace164b41ac90a7b54e59d47953", + "namespace": "beb7e64d3da74ae780405da48cccb581", + "description": "Dans Godlike Burger, vous g\u00e9rez le restaurant le plus d\u00e9ment de la galaxie\u00a0! Assommez, empoisonnez et tuez les clients... pour les transformer en steaks\u00a0! Mais nulle crainte\u00a0: la client\u00e8le alien reviendra si vous la jouez fine, car c'est trop bon de s'adonner au cannibalisme.", + "effectiveDate": "2022-04-21T17:00:00.000Z", + "offerType": "BASE_GAME", + "expiryDate": null, + "viewableDate": "2022-03-28T18:00:00.000Z", + "status": "ACTIVE", + "isCodeRedemptionOnly": false, + "keyImages": [ + { + "type": "OfferImageWide", + "url": "https://cdn1.epicgames.com/spt-assets/f42598038b9343e58d27e0a8c0b831b6/godlike-burger-offer-1trpc.jpg" + }, + { + "type": "OfferImageTall", + "url": "https://cdn1.epicgames.com/spt-assets/f42598038b9343e58d27e0a8c0b831b6/download-godlike-burger-offer-8u2uh.jpg" + }, + { + "type": "Thumbnail", + "url": "https://cdn1.epicgames.com/spt-assets/f42598038b9343e58d27e0a8c0b831b6/download-godlike-burger-offer-8u2uh.jpg" + } + ], + "seller": { + "id": "o-d2ygr9bjcjfebgt8842wvvbmswympz", + "name": "Daedalic Entertainment" + }, + "productSlug": null, + "urlSlug": "37b001690e2a4d6f872567cdd06f0c6f", + "url": null, + "items": [ + { + "id": "c027f1bc9db54f189ad938634500e542", + "namespace": "beb7e64d3da74ae780405da48cccb581" + } + ], + "customAttributes": [ + { + "key": "autoGeneratedPrice", + "value": "false" + }, + { + "key": "isManuallySetPCReleaseDate", + "value": "false" + } + ], + "categories": [ + { + "path": "freegames" + }, + { + "path": "games" + }, + { + "path": "games/edition" + }, + { + "path": 
"games/edition/base" + } + ], + "tags": [ + { + "id": "1216" + }, + { + "id": "21894" + }, + { + "id": "19847" + }, + { + "id": "1083" + }, + { + "id": "9547" + }, + { + "id": "9549" + }, + { + "id": "1263" + }, + { + "id": "10719" + } + ], + "catalogNs": { + "mappings": [ + { + "pageSlug": "godlike-burger-4150a0", + "pageType": "productHome" + } + ] + }, + "offerMappings": [ + { + "pageSlug": "godlike-burger-4150a0", + "pageType": "productHome" + } + ], + "price": { + "totalPrice": { + "discountPrice": 0, + "originalPrice": 1999, + "voucherDiscount": 0, + "discount": 1999, + "currencyCode": "EUR", + "currencyInfo": { + "decimals": 2 + }, + "fmtPrice": { + "originalPrice": "19,99\u00a0\u20ac", + "discountPrice": "0", + "intermediatePrice": "0" + } + }, + "lineOffers": [ + { + "appliedRules": [ + { + "id": "1c2dc8194022428da305eedb42ed574d", + "endDate": "2023-10-12T15:00:00.000Z", + "discountSetting": { + "discountType": "PERCENTAGE" + } + } + ] + } + ] + }, + "promotions": { + "promotionalOffers": [ + { + "promotionalOffers": [ + { + "startDate": "2023-10-05T15:00:00.000Z", + "endDate": "2023-10-12T15:00:00.000Z", + "discountSetting": { + "discountType": "PERCENTAGE", + "discountPercentage": 0 + } + } + ] + } + ], + "upcomingPromotionalOffers": [] + } + }, + { + "title": "Destiny\u00a02\u00a0: Pack 30e anniversaire Bungie", + "id": "e7b9e222c7274dd28714aba2e06d2a01", + "namespace": "428115def4ca4deea9d69c99c5a5a99e", + "description": "Le Pack 30e anniversaire inclut un nouveau donjon, le lance-roquettes exotique Gjallarhorn, de nouvelles armes et armures, et plus encore. 
", + "effectiveDate": "2022-08-23T13:00:00.000Z", + "offerType": "DLC", + "expiryDate": null, + "viewableDate": "2022-08-08T15:00:00.000Z", + "status": "ACTIVE", + "isCodeRedemptionOnly": false, + "keyImages": [ + { + "type": "OfferImageTall", + "url": "https://cdn1.epicgames.com/offer/428115def4ca4deea9d69c99c5a5a99e/FR_Bungie_Bungie_30th_Anniversary_Pack_S4_1200x1600_1200x1600-04ebd49752c682d003014680f3d5be18" + }, + { + "type": "OfferImageWide", + "url": "https://cdn1.epicgames.com/offer/428115def4ca4deea9d69c99c5a5a99e/FR_Bungie_Bungie_30th_Anniversary_Pack_S3_2560x1440_2560x1440-b2f882323923927c414ab23faf1022ca" + }, + { + "type": "ProductLogo", + "url": "https://cdn1.epicgames.com/offer/428115def4ca4deea9d69c99c5a5a99e/FR_Bungie_Bungie_30th_Anniversary_Pack_OfferLogo_200x200_200x200-234225abe0aca2bfa7f5c5bc6e6fe348" + }, + { + "type": "Thumbnail", + "url": "https://cdn1.epicgames.com/offer/428115def4ca4deea9d69c99c5a5a99e/FR_Bungie_Bungie_30th_Anniversary_Pack_S4_1200x1600_1200x1600-04ebd49752c682d003014680f3d5be18" + }, + { + "type": "featuredMedia", + "url": "https://cdn1.epicgames.com/offer/428115def4ca4deea9d69c99c5a5a99e/FR_Bungie_Bungie_30th_Anniversary_Pack_S3_2560x1440_2560x1440-b2f882323923927c414ab23faf1022ca" + }, + { + "type": "featuredMedia", + "url": "https://cdn1.epicgames.com/offer/428115def4ca4deea9d69c99c5a5a99e/Screenshot1_1920x1080-37c070caa0106b08910518150bf96e94" + }, + { + "type": "featuredMedia", + "url": "https://cdn1.epicgames.com/offer/428115def4ca4deea9d69c99c5a5a99e/Screenshot2_1920x1080-14490e3ec01dceedce23d870774b2393" + }, + { + "type": "featuredMedia", + "url": "https://cdn1.epicgames.com/offer/428115def4ca4deea9d69c99c5a5a99e/Screenshot3_1920x1080-fdf882ad2cc98be7e63516b4ad28d6e9" + }, + { + "type": "featuredMedia", + "url": "https://cdn1.epicgames.com/offer/428115def4ca4deea9d69c99c5a5a99e/Screenshot4_1920x1080-079d4e12a8a04b31f7d4def7f4b745e7" + }, + { + "type": "featuredMedia", + "url": 
"https://cdn1.epicgames.com/offer/428115def4ca4deea9d69c99c5a5a99e/Screenshot5_1920x1080-f3c958c685629b6678544cba8bffc483" + }, + { + "type": "featuredMedia", + "url": "https://cdn1.epicgames.com/offer/428115def4ca4deea9d69c99c5a5a99e/Screenshot6_1920x1080-f13bb310baf9c158d15d473474c11586" + }, + { + "type": "featuredMedia", + "url": "https://cdn1.epicgames.com/offer/428115def4ca4deea9d69c99c5a5a99e/Screenshot7_1920x1080-6d2b714d2cfd64623cdcc39487d0b429" + }, + { + "type": "featuredMedia", + "url": "https://cdn1.epicgames.com/offer/428115def4ca4deea9d69c99c5a5a99e/Screenshot8_1920x1080-0956ff1a3a4969d9a3f2b96d87bdc19d" + } + ], + "seller": { + "id": "o-49lqsefbl6zr5sy3ztak77ej97cuvh", + "name": "Bungie" + }, + "productSlug": null, + "urlSlug": "destiny-2--bungie-30th-anniversary-pack", + "url": null, + "items": [ + { + "id": "904b57fb8bcd41a6be6c690a92ab3c15", + "namespace": "428115def4ca4deea9d69c99c5a5a99e" + } + ], + "customAttributes": [], + "categories": [ + { + "path": "addons" + }, + { + "path": "freegames" + }, + { + "path": "addons/durable" + }, + { + "path": "applications" + } + ], + "tags": [ + { + "id": "1203" + }, + { + "id": "1210" + }, + { + "id": "1370" + } + ], + "catalogNs": { + "mappings": [ + { + "pageSlug": "destiny-2", + "pageType": "productHome" + } + ] + }, + "offerMappings": [ + { + "pageSlug": "destiny-2--bungie-30th-anniversary-pack", + "pageType": "addon--cms-hybrid" + } + ], + "price": { + "totalPrice": { + "discountPrice": 2499, + "originalPrice": 2499, + "voucherDiscount": 0, + "discount": 0, + "currencyCode": "EUR", + "currencyInfo": { + "decimals": 2 + }, + "fmtPrice": { + "originalPrice": "24,99\u00a0\u20ac", + "discountPrice": "24,99\u00a0\u20ac", + "intermediatePrice": "24,99\u00a0\u20ac" + } + }, + "lineOffers": [ + { + "appliedRules": [] + } + ] + }, + "promotions": { + "promotionalOffers": [], + "upcomingPromotionalOffers": [ + { + "promotionalOffers": [ + { + "startDate": "2023-10-11T16:00:00.000Z", + "endDate": 
"2023-10-25T16:00:00.000Z", + "discountSetting": { + "discountType": "PERCENTAGE", + "discountPercentage": 60 + } + } + ] + } + ] + } + }, + { + "title": "Gloomhaven", + "id": "9232fdbc352445cc820a54bdc97ed2bb", + "namespace": "bc079f73f020432fac896d30c8e2c330", + "description": "Que vous soyez arriv\u00e9s \u00e0 Gloomhaven en r\u00e9pondant \u00e0 l'appel de l'aventure ou au d\u00e9sir cupide de l'\u00e9clat de l'or, votre destin n'en sera pas chang\u00e9...", + "effectiveDate": "2022-09-22T15:00:00.000Z", + "offerType": "BASE_GAME", + "expiryDate": null, + "viewableDate": "2022-09-22T15:00:00.000Z", + "status": "ACTIVE", + "isCodeRedemptionOnly": false, + "keyImages": [ + { + "type": "OfferImageWide", + "url": "https://cdn1.epicgames.com/spt-assets/ef2777467a3c49059a076e42fd9b41f0/gloomhaven-offer-1j9mc.jpg" + }, + { + "type": "OfferImageTall", + "url": "https://cdn1.epicgames.com/spt-assets/ef2777467a3c49059a076e42fd9b41f0/download-gloomhaven-offer-1ho2x.jpg" + }, + { + "type": "Thumbnail", + "url": "https://cdn1.epicgames.com/spt-assets/ef2777467a3c49059a076e42fd9b41f0/download-gloomhaven-offer-1ho2x.jpg" + } + ], + "seller": { + "id": "o-4x4bpaww55p5g3f6xpyqe2cneqxd5d", + "name": "Asmodee" + }, + "productSlug": null, + "urlSlug": "0d48da287df14493a7415b560ec1bbb3", + "url": null, + "items": [ + { + "id": "6047532dd78a456593d0ffd6602a7218", + "namespace": "bc079f73f020432fac896d30c8e2c330" + } + ], + "customAttributes": [ + { + "key": "autoGeneratedPrice", + "value": "false" + }, + { + "key": "isManuallySetViewableDate", + "value": "true" + }, + { + "key": "isManuallySetPCReleaseDate", + "value": "true" + }, + { + "key": "isBlockchainUsed", + "value": "false" + } + ], + "categories": [ + { + "path": "freegames" + }, + { + "path": "games/edition/base" + }, + { + "path": "games/edition" + }, + { + "path": "games" + } + ], + "tags": [ + { + "id": "29088" + }, + { + "id": "21122" + }, + { + "id": "1188" + }, + { + "id": "21127" + }, + { + "id": "19847" + }, + { + 
"id": "21129" + }, + { + "id": "1386" + }, + { + "id": "9547" + }, + { + "id": "9549" + }, + { + "id": "1264" + }, + { + "id": "21137" + }, + { + "id": "21138" + }, + { + "id": "21139" + }, + { + "id": "16979" + }, + { + "id": "21140" + }, + { + "id": "21141" + }, + { + "id": "1367" + }, + { + "id": "22776" + }, + { + "id": "1370" + }, + { + "id": "1115" + }, + { + "id": "21147" + }, + { + "id": "21149" + } + ], + "catalogNs": { + "mappings": [ + { + "pageSlug": "gloomhaven-92f741", + "pageType": "productHome" + } + ] + }, + "offerMappings": [ + { + "pageSlug": "gloomhaven-92f741", + "pageType": "productHome" + } + ], + "price": { + "totalPrice": { + "discountPrice": 3499, + "originalPrice": 3499, + "voucherDiscount": 0, + "discount": 0, + "currencyCode": "EUR", + "currencyInfo": { + "decimals": 2 + }, + "fmtPrice": { + "originalPrice": "34,99\u00a0\u20ac", + "discountPrice": "34,99\u00a0\u20ac", + "intermediatePrice": "34,99\u00a0\u20ac" + } + }, + "lineOffers": [ + { + "appliedRules": [] + } + ] + }, + "promotions": null + }, + { + "title": "911 Operator", + "id": "268fd6ea355740d6ba4c76c3ffd4cbe0", + "namespace": "d923c737f0d243ccab407605ea40d39e", + "description": "911 OPERATOR est un jeu o\u00f9 tu deviens op\u00e9rateur de la ligne des urgences et o\u00f9 tu r\u00e9sous des incidents en fournissant des instruction et en g\u00e9rant des \u00e9quipes de secours. 
Tu peux jouer sur la carte de n\u2019importe quelle ville* du monde!", + "effectiveDate": "2023-09-14T15:00:00.000Z", + "offerType": "BASE_GAME", + "expiryDate": null, + "viewableDate": "2023-09-07T15:00:00.000Z", + "status": "ACTIVE", + "isCodeRedemptionOnly": false, + "keyImages": [ + { + "type": "OfferImageWide", + "url": "https://cdn1.epicgames.com/spt-assets/c06cc46c27954f55974e9e7a4f3b3849/911-operator-omkv7.jpg" + }, + { + "type": "OfferImageTall", + "url": "https://cdn1.epicgames.com/spt-assets/c06cc46c27954f55974e9e7a4f3b3849/911-operator-8dcp7.jpg" + }, + { + "type": "Thumbnail", + "url": "https://cdn1.epicgames.com/spt-assets/c06cc46c27954f55974e9e7a4f3b3849/911-operator-8dcp7.jpg" + } + ], + "seller": { + "id": "o-8dv8wz77w8tqnymmm8e99p28eny7kg", + "name": "Games Operators S.A." + }, + "productSlug": null, + "urlSlug": "ecb09cc5f55345e6bf6d3d9354c12876", + "url": null, + "items": [ + { + "id": "07499df5530b45c3ad8464a96cbe26c7", + "namespace": "d923c737f0d243ccab407605ea40d39e" + } + ], + "customAttributes": [ + { + "key": "autoGeneratedPrice", + "value": "false" + }, + { + "key": "isManuallySetViewableDate", + "value": "true" + }, + { + "key": "isManuallySetPCReleaseDate", + "value": "true" + }, + { + "key": "isBlockchainUsed", + "value": "false" + } + ], + "categories": [ + { + "path": "freegames" + }, + { + "path": "games" + }, + { + "path": "games/edition" + }, + { + "path": "games/edition/base" + } + ], + "tags": [ + { + "id": "1393" + }, + { + "id": "19847" + }, + { + "id": "1370" + }, + { + "id": "1115" + }, + { + "id": "9547" + }, + { + "id": "1263" + } + ], + "catalogNs": { + "mappings": [ + { + "pageSlug": "911-operator-585edd", + "pageType": "productHome" + } + ] + }, + "offerMappings": [ + { + "pageSlug": "911-operator-585edd", + "pageType": "productHome" + } + ], + "price": { + "totalPrice": { + "discountPrice": 1349, + "originalPrice": 1349, + "voucherDiscount": 0, + "discount": 0, + "currencyCode": "EUR", + "currencyInfo": { + "decimals": 
2 + }, + "fmtPrice": { + "originalPrice": "13,49\u00a0\u20ac", + "discountPrice": "13,49\u00a0\u20ac", + "intermediatePrice": "13,49\u00a0\u20ac" + } + }, + "lineOffers": [ + { + "appliedRules": [] + } + ] + }, + "promotions": { + "promotionalOffers": [], + "upcomingPromotionalOffers": [ + { + "promotionalOffers": [ + { + "startDate": "2023-10-23T14:00:00.000Z", + "endDate": "2023-10-30T15:00:00.000Z", + "discountSetting": { + "discountType": "PERCENTAGE", + "discountPercentage": 50 + } + } + ] + } + ] + } + }, + { + "title": "Q.U.B.E. ULTIMATE BUNDLE", + "id": "f18f14a76a874aa883a651fcc8c513d0", + "namespace": "0712c5eca64b47bbbced82cabba9f0d7", + "description": "Q.U.B.E. ULTIMATE BUNDLE", + "effectiveDate": "2023-10-12T15:00:00.000Z", + "offerType": "BUNDLE", + "expiryDate": null, + "viewableDate": "2023-10-05T14:25:00.000Z", + "status": "ACTIVE", + "isCodeRedemptionOnly": false, + "keyImages": [ + { + "type": "OfferImageTall", + "url": "https://cdn1.epicgames.com/offer/0712c5eca64b47bbbced82cabba9f0d7/EGSBundle_Portrait_V2_1200x1600-981ac683de50fd5afed2c87dbc26494a" + }, + { + "type": "OfferImageWide", + "url": "https://cdn1.epicgames.com/offer/0712c5eca64b47bbbced82cabba9f0d7/EGSBundle_Landscape_V2_2560x1440-50dbecaa32e134e246717f8a5e60ad25" + }, + { + "type": "ProductLogo", + "url": "https://cdn1.epicgames.com/offer/0712c5eca64b47bbbced82cabba9f0d7/EGSBundle_Logo_V2_400x400-99dcb7d141728efbe2b1b4e993ce6339" + }, + { + "type": "Thumbnail", + "url": "https://cdn1.epicgames.com/offer/0712c5eca64b47bbbced82cabba9f0d7/EGSBundle_Portrait_V2_1200x1600-981ac683de50fd5afed2c87dbc26494a" + } + ], + "seller": { + "id": "o-kk34ewvmscclj5a2ukx49ff6qknn7a", + "name": "Ten Hut Games" + }, + "productSlug": "qube-ultimate-bundle", + "urlSlug": "qube-ultimate-bundle", + "url": null, + "items": [ + { + "id": "11d229f51ac1445a8925b8d14da82b9b", + "namespace": "ad43401ad02840c2b2bee5f1f1a59988" + }, + { + "id": "0e7ec1d579ab481c93dff6056c19299f", + "namespace": 
"4b5f1eb366dc45f0920d397c01b291ba" + } + ], + "customAttributes": [ + { + "key": "com.epicgames.app.productSlug", + "value": "qube-ultimate-bundle" + } + ], + "categories": [ + { + "path": "bundles" + }, + { + "path": "freegames" + }, + { + "path": "bundles/games" + } + ], + "tags": [ + { + "id": "1216" + }, + { + "id": "1298" + }, + { + "id": "1203" + }, + { + "id": "1117" + }, + { + "id": "1294" + } + ], + "catalogNs": { + "mappings": null + }, + "offerMappings": null, + "price": { + "totalPrice": { + "discountPrice": 4499, + "originalPrice": 4499, + "voucherDiscount": 0, + "discount": 0, + "currencyCode": "EUR", + "currencyInfo": { + "decimals": 2 + }, + "fmtPrice": { + "originalPrice": "44,99\u00a0\u20ac", + "discountPrice": "44,99\u00a0\u20ac", + "intermediatePrice": "44,99\u00a0\u20ac" + } + }, + "lineOffers": [ + { + "appliedRules": [] + } + ] + }, + "promotions": { + "promotionalOffers": [], + "upcomingPromotionalOffers": [ + { + "promotionalOffers": [ + { + "startDate": "2023-10-12T15:00:00.000Z", + "endDate": "2023-10-19T15:00:00.000Z", + "discountSetting": { + "discountType": "PERCENTAGE", + "discountPercentage": 0 + } + } + ] + } + ] + } + }, + { + "title": "PAYDAY 2", + "id": "de434b7be57940d98ede93b50cdacfc2", + "namespace": "d5241c76f178492ea1540fce45616757", + "description": "PAYDAY 2 is an action-packed, four-player co-op shooter that once again lets gamers don the masks of the original PAYDAY crew - Dallas, Hoxton, Wolf and Chains - as they descend on Washington DC for an epic crime spree.", + "effectiveDate": "2099-01-01T00:00:00.000Z", + "offerType": "OTHERS", + "expiryDate": null, + "viewableDate": "2023-06-01T14:25:00.000Z", + "status": "ACTIVE", + "isCodeRedemptionOnly": true, + "keyImages": [ + { + "type": "OfferImageWide", + "url": "https://cdn1.epicgames.com/offer/d5241c76f178492ea1540fce45616757/mammoth-h1nvv_2560x1440-ac346d6ece5ec356561e112fbddb2dc1" + }, + { + "type": "VaultClosed", + "url": 
"https://cdn1.epicgames.com/offer/d5241c76f178492ea1540fce45616757/EN-mega-sale-vault-16x9-asset_1920x1080-a27cf3919dde320a72936374a1d47813" + } + ], + "seller": { + "id": "o-ufmrk5furrrxgsp5tdngefzt5rxdcn", + "name": "Epic Dev Test Account" + }, + "productSlug": "payday-2-c66369", + "urlSlug": "mystery-game-7", + "url": null, + "items": [ + { + "id": "8341d7c7e4534db7848cc428aa4cbe5a", + "namespace": "d5241c76f178492ea1540fce45616757" + } + ], + "customAttributes": [ + { + "key": "com.epicgames.app.freegames.vault.close", + "value": "[]" + }, + { + "key": "com.epicgames.app.blacklist", + "value": "[]" + }, + { + "key": "com.epicgames.app.freegames.vault.slug", + "value": "sales-and-specials/mega-sale" + }, + { + "key": "com.epicgames.app.freegames.vault.open", + "value": "[]" + }, + { + "key": "com.epicgames.app.productSlug", + "value": "payday-2-c66369" + } + ], + "categories": [ + { + "path": "freegames/vaulted" + }, + { + "path": "freegames" + }, + { + "path": "games" + }, + { + "path": "applications" + } + ], + "tags": [], + "catalogNs": { + "mappings": [] + }, + "offerMappings": [], + "price": { + "totalPrice": { + "discountPrice": 0, + "originalPrice": 0, + "voucherDiscount": 0, + "discount": 0, + "currencyCode": "EUR", + "currencyInfo": { + "decimals": 2 + }, + "fmtPrice": { + "originalPrice": "0", + "discountPrice": "0", + "intermediatePrice": "0" + } + }, + "lineOffers": [ + { + "appliedRules": [] + } + ] + }, + "promotions": null + }, + { + "title": "Blazing Sails", + "id": "363d0be3b57d4741a046d38da0e6355e", + "namespace": "aee7dd76aa6746578f476dc47f8d1d7f", + "description": "Survivez \u00e0 Blazing Sails, un jeu de pirate en JcJ tr\u00e9pidant\u00a0! Cr\u00e9ez votre navire et vos pirates uniques. Naviguez en \u00e9quipe avec d'autres joueurs\u00a0! D\u00e9couvrez diff\u00e9rents modes de jeu, cartes, armes, types de navires et bien plus encore. 
Battez les \u00e9quipages adverses dans d'\u00e9piques combats sur terre et en mer\u00a0!", + "effectiveDate": "2099-04-06T17:35:00.000Z", + "offerType": "BASE_GAME", + "expiryDate": null, + "viewableDate": "2023-03-30T15:00:00.000Z", + "status": "ACTIVE", + "isCodeRedemptionOnly": false, + "keyImages": [ + { + "type": "OfferImageTall", + "url": "https://cdn1.epicgames.com/offer/aee7dd76aa6746578f476dc47f8d1d7f/EGS_BlazingSails_GetUpGames_S2_1200x1600-bae3831e97b560958dc785e830ebed8c" + }, + { + "type": "OfferImageWide", + "url": "https://cdn1.epicgames.com/offer/aee7dd76aa6746578f476dc47f8d1d7f/EGS_BlazingSails_GetUpGames_S1_2560x1440-fd7a7b3d357555880cb7969634553c5b" + }, + { + "type": "ProductLogo", + "url": "https://cdn1.epicgames.com/offer/aee7dd76aa6746578f476dc47f8d1d7f/EGS_BlazingSails_GetUpGames_IC1_400x400-a7b91f257fcbd9ced825d3da95298170" + }, + { + "type": "Thumbnail", + "url": "https://cdn1.epicgames.com/offer/aee7dd76aa6746578f476dc47f8d1d7f/EGS_BlazingSails_GetUpGames_S2_1200x1600-bae3831e97b560958dc785e830ebed8c" + } + ], + "seller": { + "id": "o-ftmts7pjfvdywkby885rdzl4hdbtys", + "name": "Iceberg Interactive" + }, + "productSlug": "blazing-sails", + "urlSlug": "blazing-sails", + "url": null, + "items": [ + { + "id": "30aec28f450a41499dd27e0d27294b56", + "namespace": "aee7dd76aa6746578f476dc47f8d1d7f" + } + ], + "customAttributes": [ + { + "key": "com.epicgames.app.blacklist", + "value": "KR" + }, + { + "key": "com.epicgames.app.productSlug", + "value": "blazing-sails" + } + ], + "categories": [ + { + "path": "freegames" + }, + { + "path": "games" + }, + { + "path": "games/edition" + }, + { + "path": "games/edition/base" + }, + { + "path": "applications" + } + ], + "tags": [ + { + "id": "1216" + }, + { + "id": "1264" + }, + { + "id": "1203" + }, + { + "id": "9547" + } + ], + "catalogNs": { + "mappings": [ + { + "pageSlug": "blazing-sails", + "pageType": "productHome" + } + ] + }, + "offerMappings": [], + "price": { + "totalPrice": { + 
"discountPrice": 1479, + "originalPrice": 1479, + "voucherDiscount": 0, + "discount": 0, + "currencyCode": "EUR", + "currencyInfo": { + "decimals": 2 + }, + "fmtPrice": { + "originalPrice": "14,79\u00a0\u20ac", + "discountPrice": "14,79\u00a0\u20ac", + "intermediatePrice": "14,79\u00a0\u20ac" + } + }, + "lineOffers": [ + { + "appliedRules": [] + } + ] + }, + "promotions": { + "promotionalOffers": [], + "upcomingPromotionalOffers": [ + { + "promotionalOffers": [ + { + "startDate": "2023-10-12T15:00:00.000Z", + "endDate": "2023-10-19T15:00:00.000Z", + "discountSetting": { + "discountType": "PERCENTAGE", + "discountPercentage": 0 + } + } + ] + } + ] + } + } + ], + "paging": { + "count": 1000, + "total": 7 + } + } + } + }, + "extensions": {} +} diff --git a/tests/components/epic_games_store/fixtures/error_5222_wrong_country.json b/tests/components/epic_games_store/fixtures/error_5222_wrong_country.json new file mode 100644 index 00000000000..c91d5551ff9 --- /dev/null +++ b/tests/components/epic_games_store/fixtures/error_5222_wrong_country.json @@ -0,0 +1,23 @@ +{ + "errors": [ + { + "message": "CatalogQuery/searchStore: Request failed with status code 400", + "locations": [ + { + "line": 18, + "column": 9 + } + ], + "correlationId": "e10ad58e-a4f9-4097-af5d-cafdbe0d8bbd", + "serviceResponse": "{\"errorCode\":\"errors.com.epicgames.catalog.invalid_country_code\",\"errorMessage\":\"Sorry the value you entered: en-US, does not appear to be a valid ISO country code.\",\"messageVars\":[\"en-US\"],\"numericErrorCode\":5222,\"originatingService\":\"com.epicgames.catalog.public\",\"intent\":\"prod\",\"errorStatus\":400}", + "stack": null, + "path": ["Catalog", "searchStore"] + } + ], + "data": { + "Catalog": { + "searchStore": null + } + }, + "extensions": {} +} diff --git a/tests/components/epic_games_store/fixtures/free_games.json b/tests/components/epic_games_store/fixtures/free_games.json new file mode 100644 index 00000000000..29ff43f32a0 --- /dev/null +++ 
b/tests/components/epic_games_store/fixtures/free_games.json @@ -0,0 +1,2189 @@ +{ + "data": { + "Catalog": { + "searchStore": { + "elements": [ + { + "title": "Rising Storm 2: Vietnam", + "id": "b19d810d322240e7b37bcf84ffac60ce", + "namespace": "3542a1df211e492bb2abecb7c734f7f9", + "description": "Red Orchestra Series' take on Vietnam: 64-player MP matches; 20+ maps; US Army & Marines, PAVN/NVA, NLF/VC; Australians and ARVN forces; 50+ weapons; 4 flyable helicopters; mines, traps and tunnels; Brutal. Authentic. Gritty. Character customization.", + "effectiveDate": "2020-10-08T15:00:00.000Z", + "offerType": "BASE_GAME", + "expiryDate": null, + "status": "ACTIVE", + "isCodeRedemptionOnly": false, + "keyImages": [ + { + "type": "OfferImageWide", + "url": "https://cdn1.epicgames.com/3542a1df211e492bb2abecb7c734f7f9/offer/EGS_RisingStorm2Vietnam_AntimatterGamesTripwireInteractive_S3-2560x1440-e08edd93cb71bf15b50a74f3de2d17b0.jpg" + }, + { + "type": "OfferImageTall", + "url": "https://cdn1.epicgames.com/3542a1df211e492bb2abecb7c734f7f9/offer/EGS_RisingStorm2Vietnam_AntimatterGamesTripwireInteractive_S4-1200x1600-5e3b2f8107e17cc008237e52761d67e5.jpg" + }, + { + "type": "DieselStoreFrontWide", + "url": "https://cdn1.epicgames.com/3542a1df211e492bb2abecb7c734f7f9/offer/EGS_RisingStorm2Vietnam_AntimatterGamesTripwireInteractive_S3-2560x1440-e08edd93cb71bf15b50a74f3de2d17b0.jpg" + }, + { + "type": "DieselStoreFrontTall", + "url": "https://cdn1.epicgames.com/3542a1df211e492bb2abecb7c734f7f9/offer/EGS_RisingStorm2Vietnam_AntimatterGamesTripwireInteractive_S4-1200x1600-5e3b2f8107e17cc008237e52761d67e5.jpg" + }, + { + "type": "Thumbnail", + "url": "https://cdn1.epicgames.com/3542a1df211e492bb2abecb7c734f7f9/offer/EGS_RisingStorm2Vietnam_AntimatterGamesTripwireInteractive_S4-1200x1600-5e3b2f8107e17cc008237e52761d67e5.jpg" + }, + { + "type": "CodeRedemption_340x440", + "url": 
"https://cdn1.epicgames.com/3542a1df211e492bb2abecb7c734f7f9/offer/EGS_RisingStorm2Vietnam_AntimatterGamesTripwireInteractive_S4-1200x1600-5e3b2f8107e17cc008237e52761d67e5.jpg" + } + ], + "seller": { + "id": "o-2baznhy8tfh7fmyb55ul656v7ggt7r", + "name": "Tripwire Interactive" + }, + "productSlug": "rising-storm-2-vietnam/home", + "urlSlug": "risingstorm2vietnam", + "url": null, + "items": [ + { + "id": "685765c3f37049c49b45bea4173725d2", + "namespace": "3542a1df211e492bb2abecb7c734f7f9" + }, + { + "id": "c7c6d65ac4cc4ef0ae12e8e89f134684", + "namespace": "3542a1df211e492bb2abecb7c734f7f9" + } + ], + "customAttributes": [ + { + "key": "com.epicgames.app.blacklist", + "value": "[]" + }, + { + "key": "publisherName", + "value": "Tripwire Interactive" + }, + { + "key": "developerName", + "value": "Antimatter Games" + }, + { + "key": "com.epicgames.app.productSlug", + "value": "rising-storm-2-vietnam/home" + } + ], + "categories": [ + { + "path": "freegames" + }, + { + "path": "games" + }, + { + "path": "games/edition" + }, + { + "path": "games/edition/base" + }, + { + "path": "applications" + } + ], + "tags": [ + { + "id": "1216" + }, + { + "id": "21122" + }, + { + "id": "21125" + }, + { + "id": "21129" + }, + { + "id": "14346" + }, + { + "id": "9547" + }, + { + "id": "16011" + }, + { + "id": "15375" + }, + { + "id": "21135" + }, + { + "id": "21138" + }, + { + "id": "1299" + }, + { + "id": "16979" + }, + { + "id": "21139" + }, + { + "id": "21140" + }, + { + "id": "17493" + }, + { + "id": "21141" + }, + { + "id": "22485" + }, + { + "id": "18777" + }, + { + "id": "18778" + }, + { + "id": "1115" + }, + { + "id": "21148" + }, + { + "id": "21149" + }, + { + "id": "14944" + }, + { + "id": "19242" + }, + { + "id": "18607" + }, + { + "id": "1203" + } + ], + "catalogNs": { + "mappings": [ + { + "pageSlug": "rising-storm-2-vietnam", + "pageType": "productHome" + } + ] + }, + "offerMappings": [], + "price": { + "totalPrice": { + "discountPrice": 2199, + "originalPrice": 2199, + 
"voucherDiscount": 0, + "discount": 0, + "currencyCode": "EUR", + "currencyInfo": { + "decimals": 2 + }, + "fmtPrice": { + "originalPrice": "\u20ac21.99", + "discountPrice": "\u20ac21.99", + "intermediatePrice": "\u20ac21.99" + } + }, + "lineOffers": [ + { + "appliedRules": [] + } + ] + }, + "promotions": { + "promotionalOffers": [], + "upcomingPromotionalOffers": [ + { + "promotionalOffers": [ + { + "startDate": "2022-11-03T15:00:00.000Z", + "endDate": "2022-11-10T16:00:00.000Z", + "discountSetting": { + "discountType": "PERCENTAGE", + "discountPercentage": 0 + } + } + ] + } + ] + } + }, + { + "title": "Idle Champions of the Forgotten Realms", + "id": "a9748abde1c94b66aae5250bb9fc5503", + "namespace": "7e508f543b05465abe3a935960eb70ac", + "description": "Idle Champions is a licensed Dungeons & Dragons strategy management video game uniting iconic characters from novels, campaigns, and shows into one epic adventure.", + "effectiveDate": "2021-02-16T17:00:00.000Z", + "offerType": "BASE_GAME", + "expiryDate": null, + "status": "ACTIVE", + "isCodeRedemptionOnly": false, + "keyImages": [ + { + "type": "OfferImageTall", + "url": "https://cdn1.epicgames.com/offer/7e508f543b05465abe3a935960eb70ac/EGS_IdleChampionsoftheForgottenRealms_CodenameEntertainment_S2_1200x1600-dd9a8f25ad56089231f43cf639bde217" + }, + { + "type": "OfferImageWide", + "url": "https://cdn1.epicgames.com/offer/7e508f543b05465abe3a935960eb70ac/EGS_IdleChampionsoftheForgottenRealms_CodenameEntertainment_S1_2560x1440-e2a1ffd224f443594d5deff3a47a45e2" + }, + { + "type": "Thumbnail", + "url": "https://cdn1.epicgames.com/offer/7e508f543b05465abe3a935960eb70ac/EGS_IdleChampionsoftheForgottenRealms_CodenameEntertainment_S2_1200x1600-dd9a8f25ad56089231f43cf639bde217" + }, + { + "type": "DieselStoreFrontTall", + "url": "https://cdn1.epicgames.com/offer/7e508f543b05465abe3a935960eb70ac/EGS_IdleChampionsoftheForgottenRealms_CodenameEntertainment_S2_1200x1600-dd9a8f25ad56089231f43cf639bde217" + }, + { + "type": 
"DieselStoreFrontWide", + "url": "https://cdn1.epicgames.com/offer/7e508f543b05465abe3a935960eb70ac/EGS_IdleChampionsoftheForgottenRealms_CodenameEntertainment_S1_2560x1440-e2a1ffd224f443594d5deff3a47a45e2" + } + ], + "seller": { + "id": "o-3kpjwtwqwfl2p9wdwvpad7yqz4kt6c", + "name": "Codename Entertainment" + }, + "productSlug": "idle-champions-of-the-forgotten-realms", + "urlSlug": "banegeneralaudience", + "url": null, + "items": [ + { + "id": "9a4e1a1eb6b140f6a9e5e4dcb5a2bf55", + "namespace": "7e508f543b05465abe3a935960eb70ac" + } + ], + "customAttributes": [ + { + "key": "com.epicgames.app.blacklist", + "value": "KR" + }, + { + "key": "publisherName", + "value": "Codename Entertainment" + }, + { + "key": "developerName", + "value": "Codename Entertainment" + }, + { + "key": "com.epicgames.app.productSlug", + "value": "idle-champions-of-the-forgotten-realms" + } + ], + "categories": [ + { + "path": "freegames" + }, + { + "path": "games" + }, + { + "path": "games/edition" + }, + { + "path": "games/edition/base" + }, + { + "path": "applications" + } + ], + "tags": [ + { + "id": "21136" + }, + { + "id": "21122" + }, + { + "id": "21138" + }, + { + "id": "21139" + }, + { + "id": "1188" + }, + { + "id": "1141" + }, + { + "id": "1370" + }, + { + "id": "1115" + }, + { + "id": "9547" + }, + { + "id": "21149" + }, + { + "id": "21119" + } + ], + "catalogNs": { + "mappings": [ + { + "pageSlug": "idle-champions-of-the-forgotten-realms", + "pageType": "productHome" + } + ] + }, + "offerMappings": [], + "price": { + "totalPrice": { + "discountPrice": 0, + "originalPrice": 0, + "voucherDiscount": 0, + "discount": 0, + "currencyCode": "EUR", + "currencyInfo": { + "decimals": 2 + }, + "fmtPrice": { + "originalPrice": "0", + "discountPrice": "0", + "intermediatePrice": "0" + } + }, + "lineOffers": [ + { + "appliedRules": [] + } + ] + }, + "promotions": { + "promotionalOffers": [], + "upcomingPromotionalOffers": [] + } + }, + { + "title": "Hundred Days - Winemaking Simulator", + 
"id": "141eee80fbe041d48e16e7b998829295", + "namespace": "4d8b727a49144090b103f6b6ba471e71", + "description": "Winemaking could be your best adventure. Make the best wine interacting with soil and nature and take your winery to the top. Your beautiful journey into the winemaking tradition starts now.", + "effectiveDate": "2021-05-13T14:00:00.000Z", + "offerType": "BASE_GAME", + "expiryDate": null, + "status": "ACTIVE", + "isCodeRedemptionOnly": false, + "keyImages": [ + { + "type": "OfferImageWide", + "url": "https://cdn1.epicgames.com/4d8b727a49144090b103f6b6ba471e71/offer/EGS_HundredDaysWinemakingSimulatorDEMO_BrokenArmsGames_Demo_G1C_00-1920x1080-0ffeb0645f0badb615627b481b4a913e.jpg" + }, + { + "type": "OfferImageTall", + "url": "https://cdn1.epicgames.com/4d8b727a49144090b103f6b6ba471e71/offer/EGS_HundredDaysWinemakingSimulatorDEMO_BrokenArmsGames_Demo_S2-1200x1600-35531ec1fa868e3876fac76471a24017.jpg" + }, + { + "type": "Thumbnail", + "url": "https://cdn1.epicgames.com/4d8b727a49144090b103f6b6ba471e71/offer/EGS_HundredDaysWinemakingSimulatorDEMO_BrokenArmsGames_Demo_S2-1200x1600-35531ec1fa868e3876fac76471a24017.jpg" + }, + { + "type": "CodeRedemption_340x440", + "url": "https://cdn1.epicgames.com/4d8b727a49144090b103f6b6ba471e71/offer/EGS_HundredDaysWinemakingSimulatorDEMO_BrokenArmsGames_Demo_S2-1200x1600-35531ec1fa868e3876fac76471a24017.jpg" + }, + { + "type": "DieselStoreFrontWide", + "url": "https://cdn1.epicgames.com/4d8b727a49144090b103f6b6ba471e71/offer/EGS_HundredDaysWinemakingSimulatorDEMO_BrokenArmsGames_Demo_S1-2560x1440-8f0dd95b6027cd1243361d430b3bf552.jpg" + }, + { + "type": "DieselStoreFrontTall", + "url": "https://cdn1.epicgames.com/4d8b727a49144090b103f6b6ba471e71/offer/EGS_HundredDaysWinemakingSimulatorDEMO_BrokenArmsGames_Demo_S2-1200x1600-35531ec1fa868e3876fac76471a24017.jpg" + } + ], + "seller": { + "id": "o-ty5rvlnsbgdnfffytsywat86gcedkm", + "name": "Broken Arms Games srls" + }, + "productSlug": "hundred-days-winemaking-simulator", + 
"urlSlug": "hundred-days-winemaking-simulator", + "url": null, + "items": [ + { + "id": "03cacb8754f243bfbc536c9dda0eb32e", + "namespace": "4d8b727a49144090b103f6b6ba471e71" + } + ], + "customAttributes": [ + { + "key": "com.epicgames.app.blacklist", + "value": "[]" + }, + { + "key": "developerName", + "value": "Broken Arms Games" + }, + { + "key": "com.epicgames.app.productSlug", + "value": "hundred-days-winemaking-simulator" + } + ], + "categories": [ + { + "path": "freegames" + }, + { + "path": "games" + }, + { + "path": "games/edition" + }, + { + "path": "games/edition/base" + }, + { + "path": "applications" + } + ], + "tags": [ + { + "id": "1188" + }, + { + "id": "21894" + }, + { + "id": "21127" + }, + { + "id": "19242" + }, + { + "id": "21130" + }, + { + "id": "16011" + }, + { + "id": "9547" + }, + { + "id": "1263" + }, + { + "id": "15375" + }, + { + "id": "18607" + }, + { + "id": "1393" + }, + { + "id": "21138" + }, + { + "id": "16979" + }, + { + "id": "21140" + }, + { + "id": "17493" + }, + { + "id": "21141" + }, + { + "id": "18777" + }, + { + "id": "1370" + }, + { + "id": "18778" + }, + { + "id": "21146" + }, + { + "id": "1115" + }, + { + "id": "21149" + }, + { + "id": "10719" + }, + { + "id": "21119" + } + ], + "catalogNs": { + "mappings": [ + { + "pageSlug": "hundred-days-winemaking-simulator", + "pageType": "productHome" + } + ] + }, + "offerMappings": [], + "price": { + "totalPrice": { + "discountPrice": 1999, + "originalPrice": 1999, + "voucherDiscount": 0, + "discount": 0, + "currencyCode": "EUR", + "currencyInfo": { + "decimals": 2 + }, + "fmtPrice": { + "originalPrice": "\u20ac19.99", + "discountPrice": "\u20ac19.99", + "intermediatePrice": "\u20ac19.99" + } + }, + "lineOffers": [ + { + "appliedRules": [] + } + ] + }, + "promotions": null + }, + { + "title": "Shadow of the Tomb Raider: Definitive Edition", + "id": "ee7f3c6725fd4fd4b8aeab8622cb770e", + "namespace": "4b5461ca8d1c488787b5200b420de066", + "description": "In Shadow of the Tomb Raider 
Definitive Edition experience the final chapter of Lara\u2019s origin as she is forged into the Tomb Raider she is destined to be.", + "effectiveDate": "2021-12-30T16:00:00.000Z", + "offerType": "BASE_GAME", + "expiryDate": null, + "status": "ACTIVE", + "isCodeRedemptionOnly": false, + "keyImages": [ + { + "type": "CodeRedemption_340x440", + "url": "https://cdn1.epicgames.com/offer/4b5461ca8d1c488787b5200b420de066/egs-shadowofthetombraiderdefinitiveedition-eidosmontralcrystaldynamicsnixxessoftware-s4-1200x1600-7ee40d6fa744_1200x1600-950cdb624cc75d04fe3c8c0b62ce98de" + }, + { + "type": "OfferImageTall", + "url": "https://cdn1.epicgames.com/offer/4b5461ca8d1c488787b5200b420de066/egs-shadowofthetombraiderdefinitiveedition-eidosmontralcrystaldynamicsnixxessoftware-s4-1200x1600-7ee40d6fa744_1200x1600-950cdb624cc75d04fe3c8c0b62ce98de" + }, + { + "type": "OfferImageWide", + "url": "https://cdn1.epicgames.com/offer/4b5461ca8d1c488787b5200b420de066/egs-shadowofthetombraiderdefinitiveedition-eidosmontralcrystaldynamicsnixxessoftware-s1-2560x1440-eca6506e95a1_2560x1440-193582a5fd76a593804e0171d6395cf4" + }, + { + "type": "Thumbnail", + "url": "https://cdn1.epicgames.com/offer/4b5461ca8d1c488787b5200b420de066/egs-shadowofthetombraiderdefinitiveedition-eidosmontralcrystaldynamicsnixxessoftware-s4-1200x1600-7ee40d6fa744_1200x1600-950cdb624cc75d04fe3c8c0b62ce98de" + }, + { + "type": "DieselStoreFrontTall", + "url": "https://cdn1.epicgames.com/offer/4b5461ca8d1c488787b5200b420de066/egs-shadowofthetombraiderdefinitiveedition-eidosmontralcrystaldynamicsnixxessoftware-s4-1200x1600-7ee40d6fa744_1200x1600-950cdb624cc75d04fe3c8c0b62ce98de" + }, + { + "type": "DieselStoreFrontWide", + "url": "https://cdn1.epicgames.com/offer/4b5461ca8d1c488787b5200b420de066/egs-shadowofthetombraiderdefinitiveedition-eidosmontralcrystaldynamicsnixxessoftware-s1-2560x1440-eca6506e95a1_2560x1440-193582a5fd76a593804e0171d6395cf4" + } + ], + "seller": { + "id": "o-7petn7mrlk8g86ktqm7uglcr7lfaja", + "name": 
"Square Enix" + }, + "productSlug": "shadow-of-the-tomb-raider", + "urlSlug": "shadow-of-the-tomb-raider", + "url": null, + "items": [ + { + "id": "e7f90759e0544e42be9391d10a5c6000", + "namespace": "4b5461ca8d1c488787b5200b420de066" + } + ], + "customAttributes": [ + { + "key": "com.epicgames.app.blacklist", + "value": "[]" + }, + { + "key": "com.epicgames.app.productSlug", + "value": "shadow-of-the-tomb-raider" + } + ], + "categories": [ + { + "path": "freegames" + }, + { + "path": "games" + }, + { + "path": "games/edition" + }, + { + "path": "games/edition/base" + }, + { + "path": "applications" + } + ], + "tags": [ + { + "id": "1216" + }, + { + "id": "21122" + }, + { + "id": "18051" + }, + { + "id": "1188" + }, + { + "id": "21894" + }, + { + "id": "21127" + }, + { + "id": "9547" + }, + { + "id": "9549" + }, + { + "id": "21138" + }, + { + "id": "21139" + }, + { + "id": "21140" + }, + { + "id": "21109" + }, + { + "id": "21141" + }, + { + "id": "22485" + }, + { + "id": "1370" + }, + { + "id": "21146" + }, + { + "id": "1117" + }, + { + "id": "21149" + }, + { + "id": "21119" + } + ], + "catalogNs": { + "mappings": [ + { + "pageSlug": "shadow-of-the-tomb-raider", + "pageType": "productHome" + } + ] + }, + "offerMappings": [], + "price": { + "totalPrice": { + "discountPrice": 1319, + "originalPrice": 3999, + "voucherDiscount": 0, + "discount": 2680, + "currencyCode": "EUR", + "currencyInfo": { + "decimals": 2 + }, + "fmtPrice": { + "originalPrice": "\u20ac39.99", + "discountPrice": "\u20ac13.19", + "intermediatePrice": "\u20ac13.19" + } + }, + "lineOffers": [ + { + "appliedRules": [ + { + "id": "35111a3c715340d08910a9f6a5b3e846", + "endDate": "2022-11-01T15:00:00.000Z", + "discountSetting": { + "discountType": "PERCENTAGE" + } + } + ] + } + ] + }, + "promotions": { + "promotionalOffers": [ + { + "promotionalOffers": [ + { + "startDate": "2022-10-18T15:00:00.000Z", + "endDate": "2022-11-01T15:00:00.000Z", + "discountSetting": { + "discountType": "PERCENTAGE", + 
"discountPercentage": 33 + } + } + ] + } + ], + "upcomingPromotionalOffers": [] + } + }, + { + "title": "Terraforming Mars", + "id": "f2496286331e405793d69807755b7b23", + "namespace": "25d726130e6c4fe68f88e71933bda955", + "description": "The taming of the Red Planet has begun!\n\nControl your corporation, play project cards, build up production, place your cities and green areas on the map, and race for milestones and awards!\n\nWill your corporation lead the way into humanity's new era?", + "effectiveDate": "2022-05-05T15:00:00.000Z", + "offerType": "BASE_GAME", + "expiryDate": null, + "status": "ACTIVE", + "isCodeRedemptionOnly": false, + "keyImages": [ + { + "type": "OfferImageWide", + "url": "https://cdn1.epicgames.com/spt-assets/5199b206e46947ebad5e5c282e95776f/terraforming-mars-offer-1j70f.jpg" + }, + { + "type": "OfferImageTall", + "url": "https://cdn1.epicgames.com/spt-assets/5199b206e46947ebad5e5c282e95776f/download-terraforming-mars-offer-13t2e.jpg" + }, + { + "type": "Thumbnail", + "url": "https://cdn1.epicgames.com/spt-assets/5199b206e46947ebad5e5c282e95776f/download-terraforming-mars-offer-13t2e.jpg" + } + ], + "seller": { + "id": "o-4x4bpaww55p5g3f6xpyqe2cneqxd5d", + "name": "Asmodee" + }, + "productSlug": null, + "urlSlug": "24cdfcde68bf4a7e8b8618ac2c0c460b", + "url": null, + "items": [ + { + "id": "ee49486d7346465dba1f1dec85725aee", + "namespace": "25d726130e6c4fe68f88e71933bda955" + } + ], + "customAttributes": [ + { + "key": "autoGeneratedPrice", + "value": "false" + }, + { + "key": "isManuallySetPCReleaseDate", + "value": "true" + } + ], + "categories": [ + { + "path": "freegames" + }, + { + "path": "games/edition/base" + }, + { + "path": "games/edition" + }, + { + "path": "games" + } + ], + "tags": [ + { + "id": "18051" + }, + { + "id": "1188" + }, + { + "id": "21125" + }, + { + "id": "1386" + }, + { + "id": "9547" + }, + { + "id": "21138" + }, + { + "id": "1203" + }, + { + "id": "1299" + }, + { + "id": "21139" + }, + { + "id": "21140" + }, + { 
+ "id": "21141" + }, + { + "id": "1370" + }, + { + "id": "1115" + }, + { + "id": "21148" + }, + { + "id": "21149" + }, + { + "id": "10719" + } + ], + "catalogNs": { + "mappings": [ + { + "pageSlug": "terraforming-mars-18c3ad", + "pageType": "productHome" + } + ] + }, + "offerMappings": [ + { + "pageSlug": "terraforming-mars-18c3ad", + "pageType": "productHome" + } + ], + "price": { + "totalPrice": { + "discountPrice": 1399, + "originalPrice": 1999, + "voucherDiscount": 0, + "discount": 600, + "currencyCode": "EUR", + "currencyInfo": { + "decimals": 2 + }, + "fmtPrice": { + "originalPrice": "\u20ac19.99", + "discountPrice": "\u20ac13.99", + "intermediatePrice": "\u20ac13.99" + } + }, + "lineOffers": [ + { + "appliedRules": [ + { + "id": "8e9732952e714f6583416e66fc451cd7", + "endDate": "2022-11-01T15:00:00.000Z", + "discountSetting": { + "discountType": "PERCENTAGE" + } + } + ] + } + ] + }, + "promotions": { + "promotionalOffers": [ + { + "promotionalOffers": [ + { + "startDate": "2022-10-18T15:00:00.000Z", + "endDate": "2022-11-01T15:00:00.000Z", + "discountSetting": { + "discountType": "PERCENTAGE", + "discountPercentage": 70 + } + } + ] + } + ], + "upcomingPromotionalOffers": [] + } + }, + { + "title": "Car Mechanic Simulator 2018", + "id": "5eb27cf1747c40b5a0d4f5492774678d", + "namespace": "226306adde104c9092247dcd4bfa1499", + "description": "Build and expand your repair service empire in this incredibly detailed and highly realistic simulation game, where attention to car detail is astonishing. 
Find classic, unique cars in the new Barn Find module and Junkyard module.", + "effectiveDate": "2022-06-23T15:00:00.000Z", + "offerType": "BASE_GAME", + "expiryDate": null, + "status": "ACTIVE", + "isCodeRedemptionOnly": false, + "keyImages": [ + { + "type": "OfferImageTall", + "url": "https://cdn1.epicgames.com/offer/226306adde104c9092247dcd4bfa1499/EGS_CarMechanicSimulator2018_RedDotGames_S2_1200x1600-f285924f9144353f57ac4631f0c689e6" + }, + { + "type": "OfferImageWide", + "url": "https://cdn1.epicgames.com/offer/226306adde104c9092247dcd4bfa1499/EGS_CarMechanicSimulator2018_RedDotGames_S1_2560x1440-3489ef1499e64c168fdf4b14926d2c23" + }, + { + "type": "Thumbnail", + "url": "https://cdn1.epicgames.com/offer/226306adde104c9092247dcd4bfa1499/EGS_CarMechanicSimulator2018_RedDotGames_S2_1200x1600-f285924f9144353f57ac4631f0c689e6" + } + ], + "seller": { + "id": "o-5n5cbrasl5yzexjc529rypg8eh8lfb", + "name": "PlayWay" + }, + "productSlug": "car-mechanic-simulator-2018", + "urlSlug": "car-mechanic-simulator-2018", + "url": null, + "items": [ + { + "id": "49a3a8597c4240ecaf1f9068106c9869", + "namespace": "226306adde104c9092247dcd4bfa1499" + } + ], + "customAttributes": [ + { + "key": "com.epicgames.app.blacklist", + "value": "[]" + }, + { + "key": "com.epicgames.app.productSlug", + "value": "car-mechanic-simulator-2018" + } + ], + "categories": [ + { + "path": "freegames" + }, + { + "path": "games" + }, + { + "path": "games/edition" + }, + { + "path": "games/edition/base" + }, + { + "path": "applications" + } + ], + "tags": [ + { + "id": "21120" + }, + { + "id": "1188" + }, + { + "id": "21127" + }, + { + "id": "9547" + }, + { + "id": "1393" + }, + { + "id": "21138" + }, + { + "id": "21139" + }, + { + "id": "21140" + }, + { + "id": "21141" + }, + { + "id": "1370" + }, + { + "id": "21146" + }, + { + "id": "21148" + }, + { + "id": "21149" + }, + { + "id": "21119" + } + ], + "catalogNs": { + "mappings": [ + { + "pageSlug": "car-mechanic-simulator-2018", + "pageType": 
"productHome" + } + ] + }, + "offerMappings": [], + "price": { + "totalPrice": { + "discountPrice": 1599, + "originalPrice": 1599, + "voucherDiscount": 0, + "discount": 0, + "currencyCode": "EUR", + "currencyInfo": { + "decimals": 2 + }, + "fmtPrice": { + "originalPrice": "\u20ac15.99", + "discountPrice": "\u20ac15.99", + "intermediatePrice": "\u20ac15.99" + } + }, + "lineOffers": [ + { + "appliedRules": [] + } + ] + }, + "promotions": null + }, + { + "title": "A Game Of Thrones: The Board Game Digital Edition", + "id": "a125d72a47a1490aba78c4e79a40395d", + "namespace": "1b737464d3c441f8956315433be02d3b", + "description": "It is the digital adaptation of the top-selling strategy board game from Fantasy Flight Games.", + "effectiveDate": "2022-06-23T15:00:00.000Z", + "offerType": "BASE_GAME", + "expiryDate": null, + "status": "ACTIVE", + "isCodeRedemptionOnly": false, + "keyImages": [ + { + "type": "OfferImageWide", + "url": "https://cdn1.epicgames.com/spt-assets/61c1413e3db0423f9ddd4a5edbee717e/a-game-of-thrones-offer-11gxu.jpg" + }, + { + "type": "OfferImageTall", + "url": "https://cdn1.epicgames.com/spt-assets/61c1413e3db0423f9ddd4a5edbee717e/download-a-game-of-thrones-offer-1q8ei.jpg" + }, + { + "type": "Thumbnail", + "url": "https://cdn1.epicgames.com/spt-assets/61c1413e3db0423f9ddd4a5edbee717e/download-a-game-of-thrones-offer-1q8ei.jpg" + } + ], + "seller": { + "id": "o-4x4bpaww55p5g3f6xpyqe2cneqxd5d", + "name": "Asmodee" + }, + "productSlug": null, + "urlSlug": "ce6f7ab4edab4cc2aa7e0ff4c19540e2", + "url": null, + "items": [ + { + "id": "dc6ae31efba7401fa72ed93f0bd37c6a", + "namespace": "1b737464d3c441f8956315433be02d3b" + } + ], + "customAttributes": [ + { + "key": "autoGeneratedPrice", + "value": "false" + }, + { + "key": "isManuallySetPCReleaseDate", + "value": "true" + } + ], + "categories": [ + { + "path": "freegames" + }, + { + "path": "games/edition/base" + }, + { + "path": "games/edition" + }, + { + "path": "games" + } + ], + "tags": [ + { + "id": 
"18051" + }, + { + "id": "1188" + }, + { + "id": "21125" + }, + { + "id": "9547" + }, + { + "id": "21138" + }, + { + "id": "1203" + }, + { + "id": "1299" + }, + { + "id": "21139" + }, + { + "id": "21140" + }, + { + "id": "21141" + }, + { + "id": "1370" + }, + { + "id": "1115" + }, + { + "id": "21149" + } + ], + "catalogNs": { + "mappings": [ + { + "pageSlug": "a-game-of-thrones-5858a3", + "pageType": "productHome" + } + ] + }, + "offerMappings": [ + { + "pageSlug": "a-game-of-thrones-5858a3", + "pageType": "productHome" + } + ], + "price": { + "totalPrice": { + "discountPrice": 1399, + "originalPrice": 1999, + "voucherDiscount": 0, + "discount": 600, + "currencyCode": "EUR", + "currencyInfo": { + "decimals": 2 + }, + "fmtPrice": { + "originalPrice": "\u20ac19.99", + "discountPrice": "\u20ac13.99", + "intermediatePrice": "\u20ac13.99" + } + }, + "lineOffers": [ + { + "appliedRules": [ + { + "id": "689de276cf3245a7bffdfa0d20500150", + "endDate": "2022-11-01T15:00:00.000Z", + "discountSetting": { + "discountType": "PERCENTAGE" + } + } + ] + } + ] + }, + "promotions": { + "promotionalOffers": [ + { + "promotionalOffers": [ + { + "startDate": "2022-10-18T15:00:00.000Z", + "endDate": "2022-11-01T15:00:00.000Z", + "discountSetting": { + "discountType": "PERCENTAGE", + "discountPercentage": 70 + } + } + ] + } + ], + "upcomingPromotionalOffers": [] + } + }, + { + "title": "Filament", + "id": "296453e71c884f95aecf4d582cf66915", + "namespace": "89fb09a222a54e53b692e9c36e68d0a1", + "description": "Solve challenging cable-based puzzles and uncover what really happened to the crew of The Alabaster. 
Now with Hint System (for those ultra tricky puzzles).", + "effectiveDate": "2022-08-11T11:00:00.000Z", + "offerType": "BASE_GAME", + "expiryDate": null, + "status": "ACTIVE", + "isCodeRedemptionOnly": false, + "keyImages": [ + { + "type": "OfferImageWide", + "url": "https://cdn1.epicgames.com/spt-assets/5a72e62648d747189d2f5e7abb47444c/filament-offer-qrwye.jpg" + }, + { + "type": "OfferImageTall", + "url": "https://cdn1.epicgames.com/spt-assets/5a72e62648d747189d2f5e7abb47444c/download-filament-offer-mk58q.jpg" + }, + { + "type": "Thumbnail", + "url": "https://cdn1.epicgames.com/spt-assets/5a72e62648d747189d2f5e7abb47444c/download-filament-offer-mk58q.jpg" + } + ], + "seller": { + "id": "o-fnqgc5v2xczx9fgawvcejwj88z2mnx", + "name": "Kasedo Games Ltd" + }, + "productSlug": null, + "urlSlug": "323de464947e4ee5a035c525b6b78021", + "url": null, + "items": [ + { + "id": "d4fa1325ef014725a89cc40e9b99e43d", + "namespace": "89fb09a222a54e53b692e9c36e68d0a1" + } + ], + "customAttributes": [ + { + "key": "autoGeneratedPrice", + "value": "false" + }, + { + "key": "isManuallySetPCReleaseDate", + "value": "true" + } + ], + "categories": [ + { + "path": "freegames" + }, + { + "path": "games/edition/base" + }, + { + "path": "games/edition" + }, + { + "path": "games" + } + ], + "tags": [ + { + "id": "1298" + }, + { + "id": "21894" + }, + { + "id": "19847" + }, + { + "id": "1370" + }, + { + "id": "9547" + }, + { + "id": "9549" + }, + { + "id": "1263" + } + ], + "catalogNs": { + "mappings": [ + { + "pageSlug": "filament-332a92", + "pageType": "productHome" + } + ] + }, + "offerMappings": [ + { + "pageSlug": "filament-332a92", + "pageType": "productHome" + } + ], + "price": { + "totalPrice": { + "discountPrice": 1699, + "originalPrice": 1699, + "voucherDiscount": 0, + "discount": 0, + "currencyCode": "EUR", + "currencyInfo": { + "decimals": 2 + }, + "fmtPrice": { + "originalPrice": "\u20ac16.99", + "discountPrice": "\u20ac16.99", + "intermediatePrice": "\u20ac16.99" + } + }, + 
"lineOffers": [ + { + "appliedRules": [] + } + ] + }, + "promotions": { + "promotionalOffers": [], + "upcomingPromotionalOffers": [ + { + "promotionalOffers": [ + { + "startDate": "2022-11-03T15:00:00.000Z", + "endDate": "2022-11-10T16:00:00.000Z", + "discountSetting": { + "discountType": "PERCENTAGE", + "discountPercentage": 0 + } + } + ] + } + ] + } + }, + { + "title": "Warhammer 40,000: Mechanicus - Standard Edition", + "id": "559b16fa81134dce83b5b8b7cf67b5b3", + "namespace": "144f9e231e2846d1a4381d9bb678f69d", + "description": "Take control of the most technologically advanced army in the Imperium - The Adeptus Mechanicus. Your every decision will weigh heavily on the outcome of the mission, in this turn-based tactical game. Will you be blessed by the Omnissiah?", + "effectiveDate": "2022-08-11T11:00:00.000Z", + "offerType": "BASE_GAME", + "expiryDate": null, + "status": "ACTIVE", + "isCodeRedemptionOnly": false, + "keyImages": [ + { + "type": "OfferImageWide", + "url": "https://cdn1.epicgames.com/spt-assets/d26f2f9ea65c462dbd39040ae8389d36/warhammer-mechanicus-offer-17fnz.jpg" + }, + { + "type": "OfferImageTall", + "url": "https://cdn1.epicgames.com/spt-assets/d26f2f9ea65c462dbd39040ae8389d36/download-warhammer-mechanicus-offer-1f6bv.jpg" + }, + { + "type": "Thumbnail", + "url": "https://cdn1.epicgames.com/spt-assets/d26f2f9ea65c462dbd39040ae8389d36/download-warhammer-mechanicus-offer-1f6bv.jpg" + } + ], + "seller": { + "id": "o-fnqgc5v2xczx9fgawvcejwj88z2mnx", + "name": "Kasedo Games Ltd" + }, + "productSlug": null, + "urlSlug": "f37159d9bd96489ab1b99bdad1ee796c", + "url": null, + "items": [ + { + "id": "f923ad9f3428472ab67baa4618c205a0", + "namespace": "144f9e231e2846d1a4381d9bb678f69d" + } + ], + "customAttributes": [ + { + "key": "autoGeneratedPrice", + "value": "false" + }, + { + "key": "isManuallySetPCReleaseDate", + "value": "true" + } + ], + "categories": [ + { + "path": "freegames" + }, + { + "path": "games/edition/base" + }, + { + "path": 
"games/edition" + }, + { + "path": "games" + } + ], + "tags": [ + { + "id": "21894" + }, + { + "id": "19847" + }, + { + "id": "1386" + }, + { + "id": "1115" + }, + { + "id": "9547" + }, + { + "id": "9549" + } + ], + "catalogNs": { + "mappings": [ + { + "pageSlug": "warhammer-mechanicus-0e4b71", + "pageType": "productHome" + } + ] + }, + "offerMappings": [ + { + "pageSlug": "warhammer-mechanicus-0e4b71", + "pageType": "productHome" + } + ], + "price": { + "totalPrice": { + "discountPrice": 0, + "originalPrice": 2999, + "voucherDiscount": 0, + "discount": 2999, + "currencyCode": "EUR", + "currencyInfo": { + "decimals": 2 + }, + "fmtPrice": { + "originalPrice": "\u20ac29.99", + "discountPrice": "0", + "intermediatePrice": "0" + } + }, + "lineOffers": [ + { + "appliedRules": [ + { + "id": "7a3ee39632f5458990b6a9ad295881b8", + "endDate": "2022-11-03T15:00:00.000Z", + "discountSetting": { + "discountType": "PERCENTAGE" + } + } + ] + } + ] + }, + "promotions": { + "promotionalOffers": [ + { + "promotionalOffers": [ + { + "startDate": "2022-10-27T15:00:00.000Z", + "endDate": "2022-11-03T15:00:00.000Z", + "discountSetting": { + "discountType": "PERCENTAGE", + "discountPercentage": 0 + } + } + ] + } + ], + "upcomingPromotionalOffers": [] + } + }, + { + "title": "Fallout 3: Game of the Year Edition", + "id": "d6f01b1827c64ed388191ae507fe7c1b", + "namespace": "fa702d34a37248ba98fb17f680c085e3", + "description": "Prepare for the Future\u2122\nExperience the most acclaimed game of 2008 like never before with Fallout 3: Game of the Year Edition. 
Create a character of your choosing and descend into a post-apocalyptic world where every minute is a fight for survival", + "effectiveDate": "2022-10-20T15:00:00.000Z", + "offerType": "BASE_GAME", + "expiryDate": null, + "status": "ACTIVE", + "isCodeRedemptionOnly": false, + "keyImages": [ + { + "type": "OfferImageTall", + "url": "https://cdn1.epicgames.com/offer/fa702d34a37248ba98fb17f680c085e3/EGS_Fallout3GameoftheYearEdition_BethesdaGameStudios_S2_1200x1600-e2ba392652a1f57c4feb65d6bbd1f963" + }, + { + "type": "OfferImageWide", + "url": "https://cdn1.epicgames.com/offer/fa702d34a37248ba98fb17f680c085e3/EGS_Fallout3GameoftheYearEdition_BethesdaGameStudios_S1_2560x1440-073f5b4cf358f437a052a3c29806efa0" + }, + { + "type": "ProductLogo", + "url": "https://cdn1.epicgames.com/offer/fa702d34a37248ba98fb17f680c085e3/EGS_Fallout3GameoftheYearEdition_BethesdaGameStudios_IC1_400x400-5e37dfe1d35c9ccf25c8889fe7218613" + }, + { + "type": "Thumbnail", + "url": "https://cdn1.epicgames.com/offer/fa702d34a37248ba98fb17f680c085e3/EGS_Fallout3GameoftheYearEdition_BethesdaGameStudios_S2_1200x1600-e2ba392652a1f57c4feb65d6bbd1f963" + } + ], + "seller": { + "id": "o-bthbhn6wd7fzj73v5p4436ucn3k37u", + "name": "Bethesda Softworks LLC" + }, + "productSlug": "fallout-3-game-of-the-year-edition", + "urlSlug": "fallout-3-game-of-the-year-edition", + "url": null, + "items": [ + { + "id": "6b750e631e414927bde5b3e13b647443", + "namespace": "fa702d34a37248ba98fb17f680c085e3" + } + ], + "customAttributes": [ + { + "key": "com.epicgames.app.blacklist", + "value": "[]" + }, + { + "key": "com.epicgames.app.productSlug", + "value": "fallout-3-game-of-the-year-edition" + } + ], + "categories": [ + { + "path": "freegames" + }, + { + "path": "games" + }, + { + "path": "games/edition" + }, + { + "path": "games/edition/base" + }, + { + "path": "applications" + } + ], + "tags": [ + { + "id": "21122" + }, + { + "id": "1188" + }, + { + "id": "21894" + }, + { + "id": "21127" + }, + { + "id": "9547" + }, + { + 
"id": "21137" + }, + { + "id": "21138" + }, + { + "id": "21139" + }, + { + "id": "21140" + }, + { + "id": "21141" + }, + { + "id": "1367" + }, + { + "id": "1370" + }, + { + "id": "1307" + }, + { + "id": "21147" + }, + { + "id": "21148" + }, + { + "id": "1117" + }, + { + "id": "21149" + } + ], + "catalogNs": { + "mappings": [ + { + "pageSlug": "fallout-3-game-of-the-year-edition", + "pageType": "productHome" + } + ] + }, + "offerMappings": [], + "price": { + "totalPrice": { + "discountPrice": 659, + "originalPrice": 1999, + "voucherDiscount": 0, + "discount": 1340, + "currencyCode": "EUR", + "currencyInfo": { + "decimals": 2 + }, + "fmtPrice": { + "originalPrice": "\u20ac19.99", + "discountPrice": "\u20ac6.59", + "intermediatePrice": "\u20ac6.59" + } + }, + "lineOffers": [ + { + "appliedRules": [ + { + "id": "779554ee7a604b0091a4335a60b6e55a", + "endDate": "2022-11-01T15:00:00.000Z", + "discountSetting": { + "discountType": "PERCENTAGE" + } + } + ] + } + ] + }, + "promotions": { + "promotionalOffers": [ + { + "promotionalOffers": [ + { + "startDate": "2022-10-27T15:00:00.000Z", + "endDate": "2022-11-01T15:00:00.000Z", + "discountSetting": { + "discountType": "PERCENTAGE", + "discountPercentage": 33 + } + } + ] + } + ], + "upcomingPromotionalOffers": [] + } + }, + { + "title": "Evoland Legendary Edition", + "id": "e068e168886a4a90a4e36a310e3bda32", + "namespace": "3f7bd21610f743e598fa8e955500f5b7", + "description": "Evoland Legendary Edition brings you two great and unique RPGs, with their graphic style and gameplay changing as you progress through the game!", + "effectiveDate": "2022-10-20T15:00:00.000Z", + "offerType": "BASE_GAME", + "expiryDate": null, + "status": "ACTIVE", + "isCodeRedemptionOnly": false, + "keyImages": [ + { + "type": "OfferImageWide", + "url": "https://cdn1.epicgames.com/spt-assets/aafde465b31e4bd5a169ff1c8a164a17/evoland-legendary-edition-1y7m0.png" + }, + { + "type": "OfferImageTall", + "url": 
"https://cdn1.epicgames.com/spt-assets/aafde465b31e4bd5a169ff1c8a164a17/evoland-legendary-edition-1j93v.png" + }, + { + "type": "Thumbnail", + "url": "https://cdn1.epicgames.com/spt-assets/aafde465b31e4bd5a169ff1c8a164a17/evoland-legendary-edition-1j93v.png" + } + ], + "seller": { + "id": "o-ealhln64lfep9ww929uq9qcdmbyfn4", + "name": "Shiro Games SAS" + }, + "productSlug": null, + "urlSlug": "224c60bb93864e1c8a1900bcf7d661dd", + "url": null, + "items": [ + { + "id": "c829f27d0ab0406db8edf2b97562ee93", + "namespace": "3f7bd21610f743e598fa8e955500f5b7" + } + ], + "customAttributes": [ + { + "key": "autoGeneratedPrice", + "value": "false" + }, + { + "key": "isManuallySetPCReleaseDate", + "value": "true" + } + ], + "categories": [ + { + "path": "freegames" + }, + { + "path": "games/edition" + }, + { + "path": "games" + }, + { + "path": "games/edition/base" + } + ], + "tags": [ + { + "id": "1216" + }, + { + "id": "21109" + }, + { + "id": "1367" + }, + { + "id": "1370" + }, + { + "id": "9547" + }, + { + "id": "1117" + }, + { + "id": "9549" + } + ], + "catalogNs": { + "mappings": [ + { + "pageSlug": "evoland-legendary-edition-5753ec", + "pageType": "productHome" + } + ] + }, + "offerMappings": [ + { + "pageSlug": "evoland-legendary-edition-5753ec", + "pageType": "productHome" + } + ], + "price": { + "totalPrice": { + "discountPrice": 1999, + "originalPrice": 1999, + "voucherDiscount": 0, + "discount": 0, + "currencyCode": "EUR", + "currencyInfo": { + "decimals": 2 + }, + "fmtPrice": { + "originalPrice": "\u20ac19.99", + "discountPrice": "\u20ac19.99", + "intermediatePrice": "\u20ac19.99" + } + }, + "lineOffers": [ + { + "appliedRules": [] + } + ] + }, + "promotions": null + }, + { + "title": "Saturnalia", + "id": "275d5915ebd2479f983f51025b22a1b8", + "namespace": "c749cd78da34408d8434a46271f4bb79", + "description": "A Survival Horror Adventure: as an ensemble cast, explore an isolated village of ancient ritual \u2013 its labyrinthine roads change each time you lose all 
your characters.", + "effectiveDate": "2022-10-27T15:00:00.000Z", + "offerType": "BASE_GAME", + "expiryDate": null, + "status": "ACTIVE", + "isCodeRedemptionOnly": false, + "keyImages": [ + { + "type": "CodeRedemption_340x440", + "url": "https://cdn1.epicgames.com/offer/c749cd78da34408d8434a46271f4bb79/EGS_Saturnalia_SantaRagione_S4_1200x1600-2216ff4aa6997dfb13d8bd4c6f2fa99e" + }, + { + "type": "DieselStoreFrontTall", + "url": "https://cdn1.epicgames.com/offer/c749cd78da34408d8434a46271f4bb79/EGS_Saturnalia_SantaRagione_S4_1200x1600-2216ff4aa6997dfb13d8bd4c6f2fa99e" + }, + { + "type": "DieselStoreFrontWide", + "url": "https://cdn1.epicgames.com/offer/c749cd78da34408d8434a46271f4bb79/EGS_Saturnalia_SantaRagione_S3_2560x1440-3cd916a7260b77c8488f8f2b0f3a51ab" + }, + { + "type": "OfferImageTall", + "url": "https://cdn1.epicgames.com/offer/c749cd78da34408d8434a46271f4bb79/EGS_Saturnalia_SantaRagione_S4_1200x1600-2216ff4aa6997dfb13d8bd4c6f2fa99e" + }, + { + "type": "OfferImageWide", + "url": "https://cdn1.epicgames.com/offer/c749cd78da34408d8434a46271f4bb79/EGS_Saturnalia_SantaRagione_S3_2560x1440-3cd916a7260b77c8488f8f2b0f3a51ab" + }, + { + "type": "Thumbnail", + "url": "https://cdn1.epicgames.com/offer/c749cd78da34408d8434a46271f4bb79/EGS_Saturnalia_SantaRagione_S4_1200x1600-2216ff4aa6997dfb13d8bd4c6f2fa99e" + } + ], + "seller": { + "id": "o-cjwnkas5rn476tzk72fbh2ftutnc2y", + "name": "Santa Ragione" + }, + "productSlug": "saturnalia", + "urlSlug": "saturnalia", + "url": null, + "items": [ + { + "id": "dbce8ecb6923490c9404529651251216", + "namespace": "c749cd78da34408d8434a46271f4bb79" + } + ], + "customAttributes": [ + { + "key": "com.epicgames.app.productSlug", + "value": "saturnalia" + } + ], + "categories": [ + { + "path": "freegames" + }, + { + "path": "games" + }, + { + "path": "games/edition/base" + }, + { + "path": "games/edition" + }, + { + "path": "applications" + } + ], + "tags": [ + { + "id": "1218" + }, + { + "id": "19847" + }, + { + "id": "1080" + }, + { + 
"id": "1370" + }, + { + "id": "9547" + }, + { + "id": "1117" + }, + { + "id": "10719" + } + ], + "catalogNs": { + "mappings": [ + { + "pageSlug": "saturnalia", + "pageType": "productHome" + } + ] + }, + "offerMappings": [], + "price": { + "totalPrice": { + "discountPrice": 0, + "originalPrice": 1999, + "voucherDiscount": 0, + "discount": 1999, + "currencyCode": "EUR", + "currencyInfo": { + "decimals": 2 + }, + "fmtPrice": { + "originalPrice": "\u20ac19.99", + "discountPrice": "0", + "intermediatePrice": "0" + } + }, + "lineOffers": [ + { + "appliedRules": [ + { + "id": "8fa8f62eac9e4cab9fe242987c0f0988", + "endDate": "2022-11-03T15:00:00.000Z", + "discountSetting": { + "discountType": "PERCENTAGE" + } + } + ] + } + ] + }, + "promotions": { + "promotionalOffers": [ + { + "promotionalOffers": [ + { + "startDate": "2022-10-27T15:00:00.000Z", + "endDate": "2022-11-03T15:00:00.000Z", + "discountSetting": { + "discountType": "PERCENTAGE", + "discountPercentage": 0 + } + } + ] + } + ], + "upcomingPromotionalOffers": [] + } + }, + { + "title": "Maneater", + "id": "a22a7af179c54b86a93f3193ace8f7f4", + "namespace": "d5241c76f178492ea1540fce45616757", + "description": "Maneater", + "effectiveDate": "2099-01-01T00:00:00.000Z", + "offerType": "OTHERS", + "expiryDate": null, + "status": "ACTIVE", + "isCodeRedemptionOnly": true, + "keyImages": [ + { + "type": "VaultClosed", + "url": "https://cdn1.epicgames.com/offer/d5241c76f178492ea1540fce45616757/egs-vault-tease-generic-promo-1920x1080_1920x1080-f7742c265e217510835ed14e04c48b4b" + }, + { + "type": "DieselStoreFrontTall", + "url": "https://cdn1.epicgames.com/offer/d5241c76f178492ea1540fce45616757/egs-vault-tease-generic-promo-1920x1080_1920x1080-f7742c265e217510835ed14e04c48b4b" + }, + { + "type": "OfferImageTall", + "url": "https://cdn1.epicgames.com/offer/d5241c76f178492ea1540fce45616757/egs-vault-carousel-mobile-thumbnail-1200x1600_1200x1600-1f45bf1ceb21c1ca2947f6df5ece5346" + }, + { + "type": "VaultOpened", + "url": 
"https://cdn1.epicgames.com/offer/d5241c76f178492ea1540fce45616757/egs-vault-w4-1920x1080_1920x1080-2df36fe63c18ff6fcb5febf3dd7ed06e" + }, + { + "type": "DieselStoreFrontWide", + "url": "https://cdn1.epicgames.com/offer/d5241c76f178492ea1540fce45616757/egs-vault-w4-1920x1080_1920x1080-2df36fe63c18ff6fcb5febf3dd7ed06e" + }, + { + "type": "OfferImageWide", + "url": "https://cdn1.epicgames.com/offer/d5241c76f178492ea1540fce45616757/egs-vault-w4-1920x1080_1920x1080-2df36fe63c18ff6fcb5febf3dd7ed06e" + } + ], + "seller": { + "id": "o-ufmrk5furrrxgsp5tdngefzt5rxdcn", + "name": "Epic Dev Test Account" + }, + "productSlug": "maneater", + "urlSlug": "game-4", + "url": null, + "items": [ + { + "id": "8341d7c7e4534db7848cc428aa4cbe5a", + "namespace": "d5241c76f178492ea1540fce45616757" + } + ], + "customAttributes": [ + { + "key": "com.epicgames.app.freegames.vault.close", + "value": "[]" + }, + { + "key": "com.epicgames.app.freegames.vault.slug", + "value": "free-games" + }, + { + "key": "com.epicgames.app.freegames.vault.open", + "value": "[]" + }, + { + "key": "com.epicgames.app.productSlug", + "value": "maneater" + } + ], + "categories": [ + { + "path": "freegames/vaulted" + }, + { + "path": "freegames" + }, + { + "path": "games" + }, + { + "path": "applications" + } + ], + "tags": [], + "catalogNs": { + "mappings": [] + }, + "offerMappings": [], + "price": { + "totalPrice": { + "discountPrice": 0, + "originalPrice": 0, + "voucherDiscount": 0, + "discount": 0, + "currencyCode": "EUR", + "currencyInfo": { + "decimals": 2 + }, + "fmtPrice": { + "originalPrice": "0", + "discountPrice": "0", + "intermediatePrice": "0" + } + }, + "lineOffers": [ + { + "appliedRules": [] + } + ] + }, + "promotions": null + }, + { + "title": "Wolfenstein: The New Order", + "id": "1d41b93230e54bdd80c559d72adb7f4f", + "namespace": "d5241c76f178492ea1540fce45616757", + "description": "Wolfenstein: The New Order", + "effectiveDate": "2099-01-01T00:00:00.000Z", + "offerType": "OTHERS", + "expiryDate": 
null, + "status": "ACTIVE", + "isCodeRedemptionOnly": true, + "keyImages": [ + { + "type": "VaultClosed", + "url": "https://cdn1.epicgames.com/offer/d5241c76f178492ea1540fce45616757/egs-vault-tease-generic-promo-1920x1080_1920x1080-f7742c265e217510835ed14e04c48b4b" + }, + { + "type": "OfferImageTall", + "url": "https://cdn1.epicgames.com/offer/d5241c76f178492ea1540fce45616757/egs-vault-carousel-mobile-thumbnail-1200x1600_1200x1600-1f45bf1ceb21c1ca2947f6df5ece5346" + }, + { + "type": "OfferImageWide", + "url": "https://cdn1.epicgames.com/offer/d5241c76f178492ea1540fce45616757/egs-vault-w3-1920x1080_1920x1080-4a501d33fb4ac641e3e1e290dcc0e6c1" + }, + { + "type": "DieselStoreFrontWide", + "url": "https://cdn1.epicgames.com/offer/d5241c76f178492ea1540fce45616757/egs-vault-w3-1920x1080_1920x1080-4a501d33fb4ac641e3e1e290dcc0e6c1" + }, + { + "type": "VaultOpened", + "url": "https://cdn1.epicgames.com/offer/d5241c76f178492ea1540fce45616757/egs-vault-w3-1920x1080_1920x1080-4a501d33fb4ac641e3e1e290dcc0e6c1" + } + ], + "seller": { + "id": "o-ufmrk5furrrxgsp5tdngefzt5rxdcn", + "name": "Epic Dev Test Account" + }, + "productSlug": "wolfenstein-the-new-order", + "urlSlug": "game-3", + "url": null, + "items": [ + { + "id": "8341d7c7e4534db7848cc428aa4cbe5a", + "namespace": "d5241c76f178492ea1540fce45616757" + } + ], + "customAttributes": [ + { + "key": "com.epicgames.app.freegames.vault.close", + "value": "[]" + }, + { + "key": "com.epicgames.app.freegames.vault.slug", + "value": "free-games" + }, + { + "key": "com.epicgames.app.freegames.vault.open", + "value": "[]" + }, + { + "key": "com.epicgames.app.productSlug", + "value": "wolfenstein-the-new-order" + } + ], + "categories": [ + { + "path": "freegames/vaulted" + }, + { + "path": "freegames" + }, + { + "path": "games" + }, + { + "path": "applications" + } + ], + "tags": [], + "catalogNs": { + "mappings": [] + }, + "offerMappings": [], + "price": { + "totalPrice": { + "discountPrice": 0, + "originalPrice": 0, + 
"voucherDiscount": 0, + "discount": 0, + "currencyCode": "EUR", + "currencyInfo": { + "decimals": 2 + }, + "fmtPrice": { + "originalPrice": "0", + "discountPrice": "0", + "intermediatePrice": "0" + } + }, + "lineOffers": [ + { + "appliedRules": [] + } + ] + }, + "promotions": null + } + ], + "paging": { + "count": 1000, + "total": 14 + } + } + } + }, + "extensions": {} +} diff --git a/tests/components/epic_games_store/fixtures/free_games_christmas_special.json b/tests/components/epic_games_store/fixtures/free_games_christmas_special.json new file mode 100644 index 00000000000..0c65f47d3a0 --- /dev/null +++ b/tests/components/epic_games_store/fixtures/free_games_christmas_special.json @@ -0,0 +1,253 @@ +{ + "data": { + "Catalog": { + "searchStore": { + "elements": [ + { + "title": "Cursed to Golf", + "id": "0e4551e4ae65492b88009f8a4e41d778", + "namespace": "d5241c76f178492ea1540fce45616757", + "description": "Cursed to Golf", + "effectiveDate": "2023-12-27T16:00:00.000Z", + "offerType": "OTHERS", + "expiryDate": "2023-12-28T16:00:00.000Z", + "viewableDate": "2023-12-26T15:25:00.000Z", + "status": "ACTIVE", + "isCodeRedemptionOnly": true, + "keyImages": [ + { + "type": "DieselStoreFrontWide", + "url": "https://cdn1.epicgames.com/offer/d5241c76f178492ea1540fce45616757/Free-Game-9_1920x1080-418a8fa10dd305bb2a219a7ec869c5ef" + }, + { + "type": "VaultClosed", + "url": "https://cdn1.epicgames.com/offer/d5241c76f178492ea1540fce45616757/Free-Game-9-teaser_1920x1080-e71ae0041736db5ac259a355cb301116" + } + ], + "seller": { + "id": "o-ufmrk5furrrxgsp5tdngefzt5rxdcn", + "name": "Epic Dev Test Account" + }, + "productSlug": "cursed-to-golf-a6bc22", + "urlSlug": "mysterygame-9", + "url": null, + "items": [ + { + "id": "8341d7c7e4534db7848cc428aa4cbe5a", + "namespace": "d5241c76f178492ea1540fce45616757" + } + ], + "customAttributes": [ + { + "key": "com.epicgames.app.freegames.vault.close", + "value": "[]" + }, + { + "key": "com.epicgames.app.blacklist", + "value": "[]" + }, + { + 
"key": "com.epicgames.app.freegames.vault.slug", + "value": "sales-and-specials/holiday-sale" + }, + { + "key": "com.epicgames.app.freegames.vault.open", + "value": "[]" + }, + { + "key": "com.epicgames.app.productSlug", + "value": "cursed-to-golf-a6bc22" + } + ], + "categories": [ + { + "path": "freegames/vaulted" + }, + { + "path": "freegames" + }, + { + "path": "games" + }, + { + "path": "applications" + } + ], + "tags": [], + "catalogNs": { + "mappings": [] + }, + "offerMappings": [], + "price": { + "totalPrice": { + "discountPrice": 0, + "originalPrice": 0, + "voucherDiscount": 0, + "discount": 0, + "currencyCode": "EUR", + "currencyInfo": { + "decimals": 2 + }, + "fmtPrice": { + "originalPrice": "0", + "discountPrice": "0", + "intermediatePrice": "0" + } + }, + "lineOffers": [ + { + "appliedRules": [] + } + ] + }, + "promotions": { + "promotionalOffers": [ + { + "promotionalOffers": [ + { + "startDate": "2023-12-27T16:00:00.000Z", + "endDate": "2023-12-28T16:00:00.000Z", + "discountSetting": { + "discountType": "PERCENTAGE", + "discountPercentage": 0 + } + }, + { + "startDate": "2023-12-27T16:00:00.000Z", + "endDate": "2023-12-28T16:00:00.000Z", + "discountSetting": { + "discountType": "PERCENTAGE", + "discountPercentage": 0 + } + } + ] + } + ], + "upcomingPromotionalOffers": [] + } + }, + { + "title": "Mystery Game Day 10", + "id": "a8c3537a579943a688e3bd355ae36209", + "namespace": "d5241c76f178492ea1540fce45616757", + "description": "Mystery Game Day 10", + "effectiveDate": "2099-01-01T16:00:00.000Z", + "offerType": "OTHERS", + "expiryDate": null, + "viewableDate": "2023-12-27T15:25:00.000Z", + "status": "ACTIVE", + "isCodeRedemptionOnly": true, + "keyImages": [ + { + "type": "VaultClosed", + "url": "https://cdn1.epicgames.com/offer/d5241c76f178492ea1540fce45616757/Free-Game-10-teaser_1920x1080-3ea48042a44263bf1a0a59c725b6d95b" + }, + { + "type": "DieselStoreFrontWide", + "url": 
"https://cdn1.epicgames.com/offer/d5241c76f178492ea1540fce45616757/Free-Game-10-teaser_1920x1080-3ea48042a44263bf1a0a59c725b6d95b" + } + ], + "seller": { + "id": "o-ufmrk5furrrxgsp5tdngefzt5rxdcn", + "name": "Epic Dev Test Account" + }, + "productSlug": "[]", + "urlSlug": "mysterygame-10", + "url": null, + "items": [ + { + "id": "8341d7c7e4534db7848cc428aa4cbe5a", + "namespace": "d5241c76f178492ea1540fce45616757" + } + ], + "customAttributes": [ + { + "key": "com.epicgames.app.freegames.vault.close", + "value": "[]" + }, + { + "key": "com.epicgames.app.blacklist", + "value": "[]" + }, + { + "key": "com.epicgames.app.freegames.vault.slug", + "value": "sales-and-specials/holiday-sale" + }, + { + "key": "com.epicgames.app.freegames.vault.open", + "value": "[]" + }, + { + "key": "com.epicgames.app.productSlug", + "value": "[]" + } + ], + "categories": [ + { + "path": "freegames/vaulted" + }, + { + "path": "freegames" + }, + { + "path": "games" + }, + { + "path": "applications" + } + ], + "tags": [], + "catalogNs": { + "mappings": [] + }, + "offerMappings": [], + "price": { + "totalPrice": { + "discountPrice": 0, + "originalPrice": 0, + "voucherDiscount": 0, + "discount": 0, + "currencyCode": "EUR", + "currencyInfo": { + "decimals": 2 + }, + "fmtPrice": { + "originalPrice": "0", + "discountPrice": "0", + "intermediatePrice": "0" + } + }, + "lineOffers": [ + { + "appliedRules": [] + } + ] + }, + "promotions": { + "promotionalOffers": [], + "upcomingPromotionalOffers": [ + { + "promotionalOffers": [ + { + "startDate": "2023-12-28T16:00:00.000Z", + "endDate": "2023-12-29T16:00:00.000Z", + "discountSetting": { + "discountType": "PERCENTAGE", + "discountPercentage": 0 + } + } + ] + } + ] + } + } + ], + "paging": { + "count": 1000, + "total": 2 + } + } + } + }, + "extensions": {} +} diff --git a/tests/components/epic_games_store/fixtures/free_games_one.json b/tests/components/epic_games_store/fixtures/free_games_one.json new file mode 100644 index 00000000000..48cd64f68d4 --- 
/dev/null +++ b/tests/components/epic_games_store/fixtures/free_games_one.json @@ -0,0 +1,658 @@ +{ + "data": { + "Catalog": { + "searchStore": { + "elements": [ + { + "title": "Borderlands 3 Season Pass", + "id": "c3913a91e07b43cfbbbcfd8244c86dcc", + "namespace": "catnip", + "description": "Prolongez votre aventure dans Borderlands\u00a03 avec le Season Pass, regroupant des \u00e9l\u00e9ments cosm\u00e9tiques exclusifs et quatre histoires additionnelles, pour encore plus de missions et de d\u00e9fis\u00a0!", + "effectiveDate": "2019-09-11T12:00:00.000Z", + "offerType": "DLC", + "expiryDate": null, + "status": "ACTIVE", + "isCodeRedemptionOnly": false, + "keyImages": [ + { + "type": "OfferImageWide", + "url": "https://cdn1.epicgames.com/offer/catnip/Diesel_productv2_borderlands-3_season-pass_BL3_SEASONPASS_Hero-3840x2160-4411e63a005a43811a2bc516ae7ec584598fd4aa-3840x2160-b8988ebb0f3d9159671e8968af991f30_3840x2160-b8988ebb0f3d9159671e8968af991f30" + }, + { + "type": "OfferImageTall", + "url": "https://cdn1.epicgames.com/offer/catnip/2KGMKT_BL3_Season_Pass_EGS_1200x1600_1200x1600-a7438a079c5576d328a74b9121278075" + }, + { + "type": "CodeRedemption_340x440", + "url": "https://cdn1.epicgames.com/offer/catnip/2KGMKT_BL3_Season_Pass_EGS_1200x1600_1200x1600-a7438a079c5576d328a74b9121278075" + }, + { + "type": "Thumbnail", + "url": "https://cdn1.epicgames.com/offer/catnip/2KGMKT_BL3_Season_Pass_EGS_1200x1600_1200x1600-a7438a079c5576d328a74b9121278075" + } + ], + "seller": { + "id": "o-37m6jbj5wcvrcvm4wusv7nazdfvbjk", + "name": "2K Games, Inc." 
+ }, + "productSlug": "borderlands-3/season-pass", + "urlSlug": "borderlands-3--season-pass", + "url": null, + "items": [ + { + "id": "e9fdc1a9f47b4a5e8e63841c15de2b12", + "namespace": "catnip" + }, + { + "id": "fbc46bb6056940d2847ee1e80037a9af", + "namespace": "catnip" + }, + { + "id": "ff8e1152ddf742b68f9ac0cecd378917", + "namespace": "catnip" + }, + { + "id": "939e660825764e208938ab4f26b4da56", + "namespace": "catnip" + }, + { + "id": "4c43a9a691114ccd91c1884ab18f4e27", + "namespace": "catnip" + }, + { + "id": "3a6a3f9b351b4b599808df3267669b83", + "namespace": "catnip" + }, + { + "id": "ab030a9f53f3428fb2baf2ddbb0bb5ac", + "namespace": "catnip" + }, + { + "id": "ff96eef22b0e4c498e8ed80ac0030325", + "namespace": "catnip" + }, + { + "id": "5021e93a73374d6db1c1ce6c92234f8f", + "namespace": "catnip" + }, + { + "id": "9c0b1eb3265340678dff0fcb106402b1", + "namespace": "catnip" + }, + { + "id": "8c826db6e14f44aeac8816e1bd593632", + "namespace": "catnip" + } + ], + "customAttributes": [ + { + "key": "com.epicgames.app.blacklist", + "value": "SA" + }, + { + "key": "publisherName", + "value": "2K" + }, + { + "key": "developerName", + "value": "Gearbox Software" + }, + { + "key": "com.epicgames.app.productSlug", + "value": "borderlands-3/season-pass" + } + ], + "categories": [ + { + "path": "addons" + }, + { + "path": "freegames" + }, + { + "path": "addons/durable" + }, + { + "path": "applications" + } + ], + "tags": [ + { + "id": "1264" + }, + { + "id": "16004" + }, + { + "id": "14869" + }, + { + "id": "26789" + }, + { + "id": "1367" + }, + { + "id": "1370" + }, + { + "id": "9547" + }, + { + "id": "9549" + }, + { + "id": "1294" + } + ], + "catalogNs": { + "mappings": [ + { + "pageSlug": "borderlands-3", + "pageType": "productHome" + } + ] + }, + "offerMappings": [ + { + "pageSlug": "borderlands-3--season-pass", + "pageType": "addon--cms-hybrid" + } + ], + "price": { + "totalPrice": { + "discountPrice": 4999, + "originalPrice": 4999, + "voucherDiscount": 0, + "discount": 
0, + "currencyCode": "EUR", + "currencyInfo": { + "decimals": 2 + }, + "fmtPrice": { + "originalPrice": "49,99\u00a0\u20ac", + "discountPrice": "49,99\u00a0\u20ac", + "intermediatePrice": "49,99\u00a0\u20ac" + } + }, + "lineOffers": [ + { + "appliedRules": [] + } + ] + }, + "promotions": { + "promotionalOffers": [], + "upcomingPromotionalOffers": [ + { + "promotionalOffers": [ + { + "startDate": "2023-03-09T16:00:00.000Z", + "endDate": "2023-03-16T16:00:00.000Z", + "discountSetting": { + "discountType": "PERCENTAGE", + "discountPercentage": 30 + } + }, + { + "startDate": "2023-03-09T16:00:00.000Z", + "endDate": "2023-03-16T16:00:00.000Z", + "discountSetting": { + "discountType": "PERCENTAGE", + "discountPercentage": 25 + } + }, + { + "startDate": "2023-03-09T16:00:00.000Z", + "endDate": "2023-03-16T16:00:00.000Z", + "discountSetting": { + "discountType": "PERCENTAGE", + "discountPercentage": 25 + } + }, + { + "startDate": "2023-03-09T16:00:00.000Z", + "endDate": "2023-03-16T16:00:00.000Z", + "discountSetting": { + "discountType": "PERCENTAGE", + "discountPercentage": 30 + } + } + ] + } + ] + } + }, + { + "title": "Call of the Sea", + "id": "92da5d8d918543b6b408e36d9af81765", + "namespace": "5e427319eea1401ab20c6cd78a4163c4", + "description": "Call of the Sea is an otherworldly tale of mystery and love set in the 1930s South Pacific. 
Explore a lush island paradise, solve puzzles and unlock secrets in the hunt for your husband\u2019s missing expedition.", + "effectiveDate": "2022-02-17T15:00:00.000Z", + "offerType": "BASE_GAME", + "expiryDate": null, + "status": "ACTIVE", + "isCodeRedemptionOnly": false, + "keyImages": [ + { + "type": "DieselStoreFrontWide", + "url": "https://cdn1.epicgames.com/salesEvent/salesEvent/EGS_CalloftheSea_OutoftheBlue_S1_2560x1440-204699c6410deef9c18be0ee392f8335" + }, + { + "type": "DieselStoreFrontTall", + "url": "https://cdn1.epicgames.com/salesEvent/salesEvent/EGS_CalloftheSea_OutoftheBlue_S2_1200x1600-db63acf0c479c185e0ef8f8e73c8f0d8" + }, + { + "type": "OfferImageWide", + "url": "https://cdn1.epicgames.com/salesEvent/salesEvent/EGS_CalloftheSea_OutoftheBlue_S5_1920x1080-7b22dfebdd9fcdde6e526c5dc4c16eb1" + }, + { + "type": "OfferImageTall", + "url": "https://cdn1.epicgames.com/salesEvent/salesEvent/EGS_CalloftheSea_OutoftheBlue_S2_1200x1600-db63acf0c479c185e0ef8f8e73c8f0d8" + }, + { + "type": "CodeRedemption_340x440", + "url": "https://cdn1.epicgames.com/salesEvent/salesEvent/EGS_CalloftheSea_OutoftheBlue_S2_1200x1600-db63acf0c479c185e0ef8f8e73c8f0d8" + }, + { + "type": "Thumbnail", + "url": "https://cdn1.epicgames.com/salesEvent/salesEvent/EGS_CalloftheSea_OutoftheBlue_S2_1200x1600-db63acf0c479c185e0ef8f8e73c8f0d8" + } + ], + "seller": { + "id": "o-fay4ghw9hhamujs53rfhy83ffexb7k", + "name": "Raw Fury" + }, + "productSlug": "call-of-the-sea", + "urlSlug": "call-of-the-sea", + "url": null, + "items": [ + { + "id": "cbc9c76c4bfc4bc6b28abb3afbcbf07a", + "namespace": "5e427319eea1401ab20c6cd78a4163c4" + } + ], + "customAttributes": [ + { + "key": "com.epicgames.app.productSlug", + "value": "call-of-the-sea" + } + ], + "categories": [ + { + "path": "freegames" + }, + { + "path": "games" + }, + { + "path": "games/edition" + }, + { + "path": "games/edition/base" + }, + { + "path": "applications" + } + ], + "tags": [ + { + "id": "1296" + }, + { + "id": "1298" + }, + { + 
"id": "21894" + }, + { + "id": "1370" + }, + { + "id": "9547" + }, + { + "id": "1117" + } + ], + "catalogNs": { + "mappings": [ + { + "pageSlug": "call-of-the-sea", + "pageType": "productHome" + } + ] + }, + "offerMappings": [], + "price": { + "totalPrice": { + "discountPrice": 1999, + "originalPrice": 1999, + "voucherDiscount": 0, + "discount": 0, + "currencyCode": "EUR", + "currencyInfo": { + "decimals": 2 + }, + "fmtPrice": { + "originalPrice": "19,99\u00a0\u20ac", + "discountPrice": "19,99\u00a0\u20ac", + "intermediatePrice": "19,99\u00a0\u20ac" + } + }, + "lineOffers": [ + { + "appliedRules": [] + } + ] + }, + "promotions": { + "promotionalOffers": [], + "upcomingPromotionalOffers": [ + { + "promotionalOffers": [ + { + "startDate": "2023-03-09T16:00:00.000Z", + "endDate": "2023-03-16T16:00:00.000Z", + "discountSetting": { + "discountType": "PERCENTAGE", + "discountPercentage": 60 + } + }, + { + "startDate": "2023-03-09T16:00:00.000Z", + "endDate": "2023-03-16T16:00:00.000Z", + "discountSetting": { + "discountType": "PERCENTAGE", + "discountPercentage": 0 + } + }, + { + "startDate": "2023-03-09T16:00:00.000Z", + "endDate": "2023-03-16T16:00:00.000Z", + "discountSetting": { + "discountType": "PERCENTAGE", + "discountPercentage": 60 + } + } + ] + } + ] + } + }, + { + "title": "Rise of Industry", + "id": "c04a2ab8ff4442cba0a41fb83453e701", + "namespace": "9f101e25b1a9427a9e6971d2b21c5f82", + "description": "Mettez vos comp\u00e9tences entrepreneuriales \u00e0 l'\u00e9preuve en cr\u00e9ant et en optimisant des cha\u00eenes de production complexes tout en gardant un \u0153il sur les r\u00e9sultats financiers. 
\u00c0 l'aube du 20e si\u00e8cle, appr\u00eatez-vous \u00e0 entrer dans un \u00e2ge d'or industriel, ou une d\u00e9pression historique.", + "effectiveDate": "2022-08-11T11:00:00.000Z", + "offerType": "BASE_GAME", + "expiryDate": null, + "status": "ACTIVE", + "isCodeRedemptionOnly": false, + "keyImages": [ + { + "type": "OfferImageWide", + "url": "https://cdn1.epicgames.com/spt-assets/a6aeec29591b4b56b4383b4d2d7d0e1e/rise-of-industry-offer-1p22f.jpg" + }, + { + "type": "OfferImageTall", + "url": "https://cdn1.epicgames.com/spt-assets/a6aeec29591b4b56b4383b4d2d7d0e1e/download-rise-of-industry-offer-1uujr.jpg" + }, + { + "type": "Thumbnail", + "url": "https://cdn1.epicgames.com/spt-assets/a6aeec29591b4b56b4383b4d2d7d0e1e/download-rise-of-industry-offer-1uujr.jpg" + } + ], + "seller": { + "id": "o-fnqgc5v2xczx9fgawvcejwj88z2mnx", + "name": "Kasedo Games Ltd" + }, + "productSlug": null, + "urlSlug": "f88fedc022fe488caaedaa5c782ff90d", + "url": null, + "items": [ + { + "id": "9f5b48a778824e6aa330d2c1a47f41b2", + "namespace": "9f101e25b1a9427a9e6971d2b21c5f82" + } + ], + "customAttributes": [ + { + "key": "autoGeneratedPrice", + "value": "false" + }, + { + "key": "isManuallySetPCReleaseDate", + "value": "true" + } + ], + "categories": [ + { + "path": "freegames" + }, + { + "path": "games/edition/base" + }, + { + "path": "games/edition" + }, + { + "path": "games" + } + ], + "tags": [ + { + "id": "26789" + }, + { + "id": "19847" + }, + { + "id": "1370" + }, + { + "id": "1115" + }, + { + "id": "9547" + }, + { + "id": "10719" + } + ], + "catalogNs": { + "mappings": [ + { + "pageSlug": "rise-of-industry-0af838", + "pageType": "productHome" + } + ] + }, + "offerMappings": [ + { + "pageSlug": "rise-of-industry-0af838", + "pageType": "productHome" + } + ], + "price": { + "totalPrice": { + "discountPrice": 0, + "originalPrice": 2999, + "voucherDiscount": 0, + "discount": 2999, + "currencyCode": "EUR", + "currencyInfo": { + "decimals": 2 + }, + "fmtPrice": { + "originalPrice": 
"29,99\u00a0\u20ac", + "discountPrice": "0", + "intermediatePrice": "0" + } + }, + "lineOffers": [ + { + "appliedRules": [ + { + "id": "a19d30dc34f44923993e68b82b75a084", + "endDate": "2023-03-09T16:00:00.000Z", + "discountSetting": { + "discountType": "PERCENTAGE" + } + } + ] + } + ] + }, + "promotions": { + "promotionalOffers": [ + { + "promotionalOffers": [ + { + "startDate": "2023-03-02T16:00:00.000Z", + "endDate": "2023-03-09T16:00:00.000Z", + "discountSetting": { + "discountType": "PERCENTAGE", + "discountPercentage": 0 + } + } + ] + } + ], + "upcomingPromotionalOffers": [ + { + "promotionalOffers": [ + { + "startDate": "2023-03-09T16:00:00.000Z", + "endDate": "2023-03-16T16:00:00.000Z", + "discountSetting": { + "discountType": "PERCENTAGE", + "discountPercentage": 25 + } + }, + { + "startDate": "2023-03-09T16:00:00.000Z", + "endDate": "2023-03-16T16:00:00.000Z", + "discountSetting": { + "discountType": "PERCENTAGE", + "discountPercentage": 25 + } + } + ] + } + ] + } + }, + { + "title": "Dishonored - Definitive Edition", + "id": "4d25d74b88d1474a8ab21ffb88ca6d37", + "namespace": "d5241c76f178492ea1540fce45616757", + "description": "Experience the definitive Dishonored collection. 
This complete compilation includes Dishonored as well as all of its additional content - Dunwall City Trials, The Knife of Dunwall, The Brigmore Witches and Void Walker\u2019s Arsenal.", + "effectiveDate": "2099-01-01T00:00:00.000Z", + "offerType": "OTHERS", + "expiryDate": null, + "status": "ACTIVE", + "isCodeRedemptionOnly": true, + "keyImages": [ + { + "type": "VaultClosed", + "url": "https://cdn1.epicgames.com/offer/d5241c76f178492ea1540fce45616757/15days-day15-wrapped-desktop-carousel-image_1920x1080-ebecfa7c79f02a9de5bca79560bee953" + }, + { + "type": "DieselStoreFrontWide", + "url": "https://cdn1.epicgames.com/offer/d5241c76f178492ea1540fce45616757/15days-day15-Unwrapped-desktop-carousel-image1_1920x1080-1992edb42bb8554ddeb14d430ba3f858" + }, + { + "type": "DieselStoreFrontTall", + "url": "https://cdn1.epicgames.com/offer/d5241c76f178492ea1540fce45616757/DAY15-carousel-mobile-unwrapped-image1_1200x1600-9716d77667d2a82931c55a4e4130989e" + } + ], + "seller": { + "id": "o-ufmrk5furrrxgsp5tdngefzt5rxdcn", + "name": "Epic Dev Test Account" + }, + "productSlug": "dishonored-definitive-edition", + "urlSlug": "mystery-game15", + "url": null, + "items": [ + { + "id": "8341d7c7e4534db7848cc428aa4cbe5a", + "namespace": "d5241c76f178492ea1540fce45616757" + } + ], + "customAttributes": [ + { + "key": "com.epicgames.app.freegames.vault.close", + "value": "[]" + }, + { + "key": "com.epicgames.app.freegames.vault.slug", + "value": "sales-and-specials/holiday-sale" + }, + { + "key": "com.epicgames.app.blacklist", + "value": "[]" + }, + { + "key": "com.epicgames.app.freegames.vault.open", + "value": "[]" + }, + { + "key": "com.epicgames.app.productSlug", + "value": "dishonored-definitive-edition" + } + ], + "categories": [ + { + "path": "freegames/vaulted" + }, + { + "path": "freegames" + }, + { + "path": "games" + }, + { + "path": "applications" + } + ], + "tags": [], + "catalogNs": { + "mappings": [] + }, + "offerMappings": [], + "price": { + "totalPrice": { + 
"discountPrice": 0, + "originalPrice": 0, + "voucherDiscount": 0, + "discount": 0, + "currencyCode": "EUR", + "currencyInfo": { + "decimals": 2 + }, + "fmtPrice": { + "originalPrice": "0", + "discountPrice": "0", + "intermediatePrice": "0" + } + }, + "lineOffers": [ + { + "appliedRules": [] + } + ] + }, + "promotions": null + } + ], + "paging": { + "count": 1000, + "total": 4 + } + } + } + }, + "extensions": {} +} diff --git a/tests/components/epic_games_store/test_calendar.py b/tests/components/epic_games_store/test_calendar.py new file mode 100644 index 00000000000..46ca974f85c --- /dev/null +++ b/tests/components/epic_games_store/test_calendar.py @@ -0,0 +1,162 @@ +"""Tests for the Epic Games Store calendars.""" + +from unittest.mock import Mock + +from freezegun.api import FrozenDateTimeFactory + +from homeassistant.components.calendar import ( + DOMAIN as CALENDAR_DOMAIN, + EVENT_END_DATETIME, + EVENT_START_DATETIME, + SERVICE_GET_EVENTS, +) +from homeassistant.const import ATTR_ENTITY_ID, STATE_OFF, STATE_ON +from homeassistant.core import HomeAssistant +from homeassistant.util import dt as dt_util + +from .common import setup_platform + +from tests.common import async_fire_time_changed + + +async def test_setup_component(hass: HomeAssistant, service_multiple: Mock) -> None: + """Test setup component.""" + await setup_platform(hass, CALENDAR_DOMAIN) + + state = hass.states.get("calendar.epic_games_store_discount_games") + assert state.name == "Epic Games Store Discount games" + state = hass.states.get("calendar.epic_games_store_free_games") + assert state.name == "Epic Games Store Free games" + + +async def test_discount_games( + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, + service_multiple: Mock, +) -> None: + """Test discount games calendar.""" + freezer.move_to("2022-10-15T00:00:00.000Z") + + await setup_platform(hass, CALENDAR_DOMAIN) + + state = hass.states.get("calendar.epic_games_store_discount_games") + assert state.state == STATE_OFF + + 
freezer.move_to("2022-10-30T00:00:00.000Z") + async_fire_time_changed(hass) + + state = hass.states.get("calendar.epic_games_store_discount_games") + assert state.state == STATE_ON + + cal_attrs = dict(state.attributes) + assert cal_attrs == { + "friendly_name": "Epic Games Store Discount games", + "message": "Shadow of the Tomb Raider: Definitive Edition", + "all_day": False, + "start_time": "2022-10-18 08:00:00", + "end_time": "2022-11-01 08:00:00", + "location": "", + "description": "In Shadow of the Tomb Raider Definitive Edition experience the final chapter of Lara\u2019s origin as she is forged into the Tomb Raider she is destined to be.\n\nhttps://store.epicgames.com/fr/p/shadow-of-the-tomb-raider", + } + + +async def test_free_games( + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, + service_multiple: Mock, +) -> None: + """Test free games calendar.""" + freezer.move_to("2022-10-30T00:00:00.000Z") + + await setup_platform(hass, CALENDAR_DOMAIN) + + state = hass.states.get("calendar.epic_games_store_free_games") + assert state.state == STATE_ON + + cal_attrs = dict(state.attributes) + assert cal_attrs == { + "friendly_name": "Epic Games Store Free games", + "message": "Warhammer 40,000: Mechanicus - Standard Edition", + "all_day": False, + "start_time": "2022-10-27 08:00:00", + "end_time": "2022-11-03 08:00:00", + "location": "", + "description": "Take control of the most technologically advanced army in the Imperium - The Adeptus Mechanicus. Your every decision will weigh heavily on the outcome of the mission, in this turn-based tactical game. 
Will you be blessed by the Omnissiah?\n\nhttps://store.epicgames.com/fr/p/warhammer-mechanicus-0e4b71", + } + + +async def test_attribute_not_found( + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, + service_attribute_not_found: Mock, +) -> None: + """Test setup calendars with attribute not found error.""" + freezer.move_to("2023-10-12T00:00:00.000Z") + + await setup_platform(hass, CALENDAR_DOMAIN) + + state = hass.states.get("calendar.epic_games_store_discount_games") + assert state.name == "Epic Games Store Discount games" + state = hass.states.get("calendar.epic_games_store_free_games") + assert state.name == "Epic Games Store Free games" + assert state.state == STATE_ON + + +async def test_christmas_special( + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, + service_christmas_special: Mock, +) -> None: + """Test setup calendars with Christmas special case.""" + freezer.move_to("2023-12-28T00:00:00.000Z") + + await setup_platform(hass, CALENDAR_DOMAIN) + + state = hass.states.get("calendar.epic_games_store_discount_games") + assert state.name == "Epic Games Store Discount games" + assert state.state == STATE_OFF + + state = hass.states.get("calendar.epic_games_store_free_games") + assert state.name == "Epic Games Store Free games" + assert state.state == STATE_ON + + +async def test_get_events( + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, + service_multiple: Mock, +) -> None: + """Test setup component with calendars.""" + freezer.move_to("2022-10-30T00:00:00.000Z") + + await setup_platform(hass, CALENDAR_DOMAIN) + + # 1 week in range of data + result = await hass.services.async_call( + CALENDAR_DOMAIN, + SERVICE_GET_EVENTS, + { + ATTR_ENTITY_ID: ["calendar.epic_games_store_discount_games"], + EVENT_START_DATETIME: dt_util.parse_datetime("2022-10-20T00:00:00.000Z"), + EVENT_END_DATETIME: dt_util.parse_datetime("2022-10-27T00:00:00.000Z"), + }, + blocking=True, + return_response=True, + ) + + assert 
len(result["calendar.epic_games_store_discount_games"]["events"]) == 3 + + # 1 week out of range of data + result = await hass.services.async_call( + CALENDAR_DOMAIN, + SERVICE_GET_EVENTS, + { + ATTR_ENTITY_ID: ["calendar.epic_games_store_discount_games"], + EVENT_START_DATETIME: dt_util.parse_datetime("1970-01-01T00:00:00.000Z"), + EVENT_END_DATETIME: dt_util.parse_datetime("1970-01-08T00:00:00.000Z"), + }, + blocking=True, + return_response=True, + ) + + assert len(result["calendar.epic_games_store_discount_games"]["events"]) == 0 diff --git a/tests/components/epic_games_store/test_config_flow.py b/tests/components/epic_games_store/test_config_flow.py new file mode 100644 index 00000000000..83e9cf9e99e --- /dev/null +++ b/tests/components/epic_games_store/test_config_flow.py @@ -0,0 +1,142 @@ +"""Test the Epic Games Store config flow.""" + +from http.client import HTTPException +from unittest.mock import patch + +from homeassistant import config_entries +from homeassistant.components.epic_games_store.config_flow import get_default_language +from homeassistant.components.epic_games_store.const import DOMAIN +from homeassistant.const import CONF_COUNTRY, CONF_LANGUAGE +from homeassistant.core import HomeAssistant +from homeassistant.data_entry_flow import FlowResultType + +from .const import ( + DATA_ERROR_ATTRIBUTE_NOT_FOUND, + DATA_ERROR_WRONG_COUNTRY, + DATA_FREE_GAMES, + MOCK_COUNTRY, + MOCK_LANGUAGE, +) + + +async def test_default_language(hass: HomeAssistant) -> None: + """Test we get the form.""" + hass.config.language = "fr" + hass.config.country = "FR" + assert get_default_language(hass) == "fr" + + hass.config.language = "es" + hass.config.country = "ES" + assert get_default_language(hass) == "es-ES" + + hass.config.language = "en" + hass.config.country = "AZ" + assert get_default_language(hass) is None + + +async def test_form(hass: HomeAssistant) -> None: + """Test we get the form.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, 
context={"source": config_entries.SOURCE_USER} + ) + assert result["type"] == FlowResultType.FORM + assert result["errors"] is None + + with patch( + "homeassistant.components.epic_games_store.config_flow.EpicGamesStoreAPI.get_free_games", + return_value=DATA_FREE_GAMES, + ): + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_LANGUAGE: MOCK_LANGUAGE, + CONF_COUNTRY: MOCK_COUNTRY, + }, + ) + await hass.async_block_till_done() + + assert result2["type"] == FlowResultType.CREATE_ENTRY + assert result2["result"].unique_id == f"freegames-{MOCK_LANGUAGE}-{MOCK_COUNTRY}" + assert ( + result2["title"] + == f"Epic Games Store - Free Games ({MOCK_LANGUAGE}-{MOCK_COUNTRY})" + ) + assert result2["data"] == { + CONF_LANGUAGE: MOCK_LANGUAGE, + CONF_COUNTRY: MOCK_COUNTRY, + } + + +async def test_form_cannot_connect(hass: HomeAssistant) -> None: + """Test we handle cannot connect error.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + + with patch( + "homeassistant.components.epic_games_store.config_flow.EpicGamesStoreAPI.get_free_games", + side_effect=HTTPException, + ): + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_LANGUAGE: MOCK_LANGUAGE, + CONF_COUNTRY: MOCK_COUNTRY, + }, + ) + + assert result2["type"] == FlowResultType.FORM + assert result2["errors"] == {"base": "unknown"} + + +async def test_form_cannot_connect_wrong_param(hass: HomeAssistant) -> None: + """Test we handle cannot connect error.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + + with patch( + "homeassistant.components.epic_games_store.config_flow.EpicGamesStoreAPI.get_free_games", + return_value=DATA_ERROR_WRONG_COUNTRY, + ): + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_LANGUAGE: MOCK_LANGUAGE, + CONF_COUNTRY: MOCK_COUNTRY, + }, + ) + + 
assert result2["type"] == FlowResultType.FORM + assert result2["errors"] == {"base": "unknown"} + + +async def test_form_service_error(hass: HomeAssistant) -> None: + """Test we handle service error gracefully.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + + with patch( + "homeassistant.components.epic_games_store.config_flow.EpicGamesStoreAPI.get_free_games", + return_value=DATA_ERROR_ATTRIBUTE_NOT_FOUND, + ): + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_LANGUAGE: MOCK_LANGUAGE, + CONF_COUNTRY: MOCK_COUNTRY, + }, + ) + await hass.async_block_till_done() + + assert result2["type"] == FlowResultType.CREATE_ENTRY + assert result2["result"].unique_id == f"freegames-{MOCK_LANGUAGE}-{MOCK_COUNTRY}" + assert ( + result2["title"] + == f"Epic Games Store - Free Games ({MOCK_LANGUAGE}-{MOCK_COUNTRY})" + ) + assert result2["data"] == { + CONF_LANGUAGE: MOCK_LANGUAGE, + CONF_COUNTRY: MOCK_COUNTRY, + } diff --git a/tests/components/epic_games_store/test_helper.py b/tests/components/epic_games_store/test_helper.py new file mode 100644 index 00000000000..155ccb7d211 --- /dev/null +++ b/tests/components/epic_games_store/test_helper.py @@ -0,0 +1,74 @@ +"""Tests for the Epic Games Store helpers.""" + +from typing import Any + +import pytest + +from homeassistant.components.epic_games_store.helper import ( + format_game_data, + get_game_url, + is_free_game, +) + +from .const import DATA_ERROR_ATTRIBUTE_NOT_FOUND, DATA_FREE_GAMES_ONE + +FREE_GAMES_API = DATA_FREE_GAMES_ONE["data"]["Catalog"]["searchStore"]["elements"] +FREE_GAME = FREE_GAMES_API[2] +NOT_FREE_GAME = FREE_GAMES_API[0] + + +def test_format_game_data() -> None: + """Test game data format.""" + game_data = format_game_data(FREE_GAME, "fr") + assert game_data + assert game_data["title"] + assert game_data["description"] + assert game_data["released_at"] + assert game_data["original_price"] + assert 
game_data["publisher"] + assert game_data["url"] + assert game_data["img_portrait"] + assert game_data["img_landscape"] + assert game_data["discount_type"] == "free" + assert game_data["discount_start_at"] + assert game_data["discount_end_at"] + + +@pytest.mark.parametrize( + ("raw_game_data", "expected_result"), + [ + ( + DATA_ERROR_ATTRIBUTE_NOT_FOUND["data"]["Catalog"]["searchStore"][ + "elements" + ][1], + "/p/destiny-2--bungie-30th-anniversary-pack", + ), + ( + DATA_ERROR_ATTRIBUTE_NOT_FOUND["data"]["Catalog"]["searchStore"][ + "elements" + ][4], + "/bundles/qube-ultimate-bundle", + ), + ( + DATA_ERROR_ATTRIBUTE_NOT_FOUND["data"]["Catalog"]["searchStore"][ + "elements" + ][5], + "/p/mystery-game-7", + ), + ], +) +def test_get_game_url(raw_game_data: dict[str, Any], expected_result: bool) -> None: + """Test to get the game URL.""" + assert get_game_url(raw_game_data, "fr").endswith(expected_result) + + +@pytest.mark.parametrize( + ("raw_game_data", "expected_result"), + [ + (FREE_GAME, True), + (NOT_FREE_GAME, False), + ], +) +def test_is_free_game(raw_game_data: dict[str, Any], expected_result: bool) -> None: + """Test if this game is free.""" + assert is_free_game(raw_game_data) == expected_result From 46941adb51975c73930ca09da80340f016b41dab Mon Sep 17 00:00:00 2001 From: Jesse Hills <3060199+jesserockz@users.noreply.github.com> Date: Mon, 22 Apr 2024 20:30:14 +1200 Subject: [PATCH 236/426] Bump aioesphomeapi to 24.2.0 (#115943) --- homeassistant/components/esphome/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/esphome/manifest.json b/homeassistant/components/esphome/manifest.json index e700dddbb96..0e9a2bdc87f 100644 --- a/homeassistant/components/esphome/manifest.json +++ b/homeassistant/components/esphome/manifest.json @@ -15,7 +15,7 @@ "iot_class": "local_push", "loggers": ["aioesphomeapi", "noiseprotocol", "bleak_esphome"], 
"requirements": [ - "aioesphomeapi==24.1.0", + "aioesphomeapi==24.2.0", "esphome-dashboard-api==1.2.3", "bleak-esphome==1.0.0" ], diff --git a/requirements_all.txt b/requirements_all.txt index 055db11d63a..fa3e5893eef 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -243,7 +243,7 @@ aioelectricitymaps==0.4.0 aioemonitor==1.0.5 # homeassistant.components.esphome -aioesphomeapi==24.1.0 +aioesphomeapi==24.2.0 # homeassistant.components.flo aioflo==2021.11.0 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index ff19a6a5c89..8a10ee1c176 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -222,7 +222,7 @@ aioelectricitymaps==0.4.0 aioemonitor==1.0.5 # homeassistant.components.esphome -aioesphomeapi==24.1.0 +aioesphomeapi==24.2.0 # homeassistant.components.flo aioflo==2021.11.0 From 09ae8b9f52cadb1fca68056ca368a61d1bc331a1 Mon Sep 17 00:00:00 2001 From: Joost Lekkerkerker Date: Mon, 22 Apr 2024 10:41:26 +0200 Subject: [PATCH 237/426] Introduce base location entity for totalconnect (#115938) * Introduce base location entity for totalconnect * Update homeassistant/components/totalconnect/entity.py Co-authored-by: TheJulianJES --------- Co-authored-by: TheJulianJES --- .../totalconnect/alarm_control_panel.py | 58 +++++++------------ .../components/totalconnect/binary_sensor.py | 9 ++- .../components/totalconnect/entity.py | 20 +++++++ 3 files changed, 45 insertions(+), 42 deletions(-) diff --git a/homeassistant/components/totalconnect/alarm_control_panel.py b/homeassistant/components/totalconnect/alarm_control_panel.py index fcafd47037d..9b2abedbf52 100644 --- a/homeassistant/components/totalconnect/alarm_control_panel.py +++ b/homeassistant/components/totalconnect/alarm_control_panel.py @@ -4,9 +4,12 @@ from __future__ import annotations from total_connect_client import ArmingHelper from total_connect_client.exceptions import BadResultCodeError, UsercodeInvalid +from total_connect_client.location import 
TotalConnectLocation -import homeassistant.components.alarm_control_panel as alarm -from homeassistant.components.alarm_control_panel import AlarmControlPanelEntityFeature +from homeassistant.components.alarm_control_panel import ( + AlarmControlPanelEntity, + AlarmControlPanelEntityFeature, +) from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( STATE_ALARM_ARMED_AWAY, @@ -21,12 +24,11 @@ from homeassistant.const import ( from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import entity_platform -from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback from . import TotalConnectDataUpdateCoordinator from .const import DOMAIN -from .entity import TotalConnectEntity +from .entity import TotalConnectLocationEntity SERVICE_ALARM_ARM_AWAY_INSTANT = "arm_away_instant" SERVICE_ALARM_ARM_HOME_INSTANT = "arm_home_instant" @@ -40,14 +42,12 @@ async def async_setup_entry( coordinator: TotalConnectDataUpdateCoordinator = hass.data[DOMAIN][entry.entry_id] - for location_id, location in coordinator.client.locations.items(): - location_name = location.location_name + for location in coordinator.client.locations.values(): alarms.extend( TotalConnectAlarm( - coordinator=coordinator, - name=location_name, - location_id=location_id, - partition_id=partition_id, + coordinator, + location, + partition_id, ) for partition_id in location.partitions ) @@ -70,8 +70,8 @@ async def async_setup_entry( ) -class TotalConnectAlarm(TotalConnectEntity, alarm.AlarmControlPanelEntity): - """Represent an TotalConnect status.""" +class TotalConnectAlarm(TotalConnectLocationEntity, AlarmControlPanelEntity): + """Represent a TotalConnect alarm panel.""" _attr_supported_features = ( AlarmControlPanelEntityFeature.ARM_HOME @@ -82,19 +82,13 @@ class TotalConnectAlarm(TotalConnectEntity, alarm.AlarmControlPanelEntity): def 
__init__( self, coordinator: TotalConnectDataUpdateCoordinator, - name, - location_id, - partition_id, + location: TotalConnectLocation, + partition_id: int, ) -> None: """Initialize the TotalConnect status.""" - super().__init__(coordinator) - self._location_id = location_id - self._location = coordinator.client.locations[location_id] + super().__init__(coordinator, location) self._partition_id = partition_id self._partition = self._location.partitions[partition_id] - self._device = self._location.devices[self._location.security_device_id] - self._state: str | None = None - self._attr_extra_state_attributes = {} """ Set unique_id to location_id for partition 1 to avoid breaking change @@ -102,27 +96,18 @@ class TotalConnectAlarm(TotalConnectEntity, alarm.AlarmControlPanelEntity): Add _# for partition 2 and beyond. """ if partition_id == 1: - self._attr_name = name - self._attr_unique_id = f"{location_id}" + self._attr_name = self.device.name + self._attr_unique_id = str(location.location_id) else: - self._attr_name = f"{name} partition {partition_id}" - self._attr_unique_id = f"{location_id}_{partition_id}" - - @property - def device_info(self) -> DeviceInfo: - """Return device info.""" - return DeviceInfo( - identifiers={(DOMAIN, self._device.serial_number)}, - name=self._device.name, - serial_number=self._device.serial_number, - ) + self._attr_name = f"{self.device.name} partition {partition_id}" + self._attr_unique_id = f"{location.location_id}_{partition_id}" @property def state(self) -> str | None: """Return the state of the device.""" attr = { "location_name": self.name, - "location_id": self._location_id, + "location_id": self._location.location_id, "partition": self._partition_id, "ac_loss": self._location.ac_loss, "low_battery": self._location.low_battery, @@ -156,10 +141,9 @@ class TotalConnectAlarm(TotalConnectEntity, alarm.AlarmControlPanelEntity): state = STATE_ALARM_TRIGGERED attr["triggered_source"] = "Carbon Monoxide" - self._state = state 
self._attr_extra_state_attributes = attr - return self._state + return state async def async_alarm_disarm(self, code: str | None = None) -> None: """Send disarm command.""" diff --git a/homeassistant/components/totalconnect/binary_sensor.py b/homeassistant/components/totalconnect/binary_sensor.py index 18340d5d6d3..9ff25e07d03 100644 --- a/homeassistant/components/totalconnect/binary_sensor.py +++ b/homeassistant/components/totalconnect/binary_sensor.py @@ -19,7 +19,7 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from . import TotalConnectDataUpdateCoordinator from .const import DOMAIN -from .entity import TotalConnectEntity, TotalConnectZoneEntity +from .entity import TotalConnectLocationEntity, TotalConnectZoneEntity LOW_BATTERY = "low_battery" TAMPER = "tamper" @@ -181,7 +181,7 @@ class TotalConnectZoneBinarySensor(TotalConnectZoneEntity, BinarySensorEntity): return super().device_class -class TotalConnectAlarmBinarySensor(TotalConnectEntity, BinarySensorEntity): +class TotalConnectAlarmBinarySensor(TotalConnectLocationEntity, BinarySensorEntity): """Represent a TotalConnect alarm device binary sensors.""" entity_description: TotalConnectAlarmBinarySensorEntityDescription @@ -193,10 +193,9 @@ class TotalConnectAlarmBinarySensor(TotalConnectEntity, BinarySensorEntity): location: TotalConnectLocation, ) -> None: """Initialize the TotalConnect alarm device binary sensor.""" - super().__init__(coordinator) + super().__init__(coordinator, location) self.entity_description = entity_description - self._location = location - self._attr_name = f"{location.location_name}{entity_description.name}" + self._attr_name = f"{self.device.name}{entity_description.name}" self._attr_unique_id = f"{location.location_id}_{entity_description.key}" self._attr_extra_state_attributes = { "location_id": location.location_id, diff --git a/homeassistant/components/totalconnect/entity.py b/homeassistant/components/totalconnect/entity.py index 
e7ab4b3575c..deef0c5aa2a 100644 --- a/homeassistant/components/totalconnect/entity.py +++ b/homeassistant/components/totalconnect/entity.py @@ -1,5 +1,6 @@ """Base class for TotalConnect entities.""" +from total_connect_client.location import TotalConnectLocation from total_connect_client.zone import TotalConnectZone from homeassistant.helpers.device_registry import DeviceInfo @@ -12,6 +13,25 @@ class TotalConnectEntity(CoordinatorEntity[TotalConnectDataUpdateCoordinator]): """Represent a TotalConnect entity.""" +class TotalConnectLocationEntity(TotalConnectEntity): + """Represent a TotalConnect location.""" + + def __init__( + self, + coordinator: TotalConnectDataUpdateCoordinator, + location: TotalConnectLocation, + ) -> None: + """Initialize the TotalConnect location.""" + super().__init__(coordinator) + self._location = location + self.device = location.devices[location.security_device_id] + self._attr_device_info = DeviceInfo( + identifiers={(DOMAIN, self.device.serial_number)}, + name=self.device.name, + serial_number=self.device.serial_number, + ) + + class TotalConnectZoneEntity(TotalConnectEntity): """Represent a TotalConnect zone.""" From 354e8e92f39841826fc7f40ad39a8b6157d75d4b Mon Sep 17 00:00:00 2001 From: Maciej Bieniek Date: Mon, 22 Apr 2024 11:19:35 +0200 Subject: [PATCH 238/426] Move NextDNS data update coordinators to the coordinator module (#115919) Co-authored-by: Maciej Bieniek <478555+bieniu@users.noreply.github.com> --- homeassistant/components/nextdns/__init__.py | 126 ++---------------- .../components/nextdns/binary_sensor.py | 2 +- homeassistant/components/nextdns/button.py | 2 +- .../components/nextdns/coordinator.py | 124 +++++++++++++++++ homeassistant/components/nextdns/sensor.py | 2 +- homeassistant/components/nextdns/switch.py | 2 +- 6 files changed, 139 insertions(+), 119 deletions(-) create mode 100644 homeassistant/components/nextdns/coordinator.py diff --git a/homeassistant/components/nextdns/__init__.py 
b/homeassistant/components/nextdns/__init__.py index 389173a2694..c7e4a0842fb 100644 --- a/homeassistant/components/nextdns/__init__.py +++ b/homeassistant/components/nextdns/__init__.py @@ -4,31 +4,15 @@ from __future__ import annotations import asyncio from datetime import timedelta -import logging -from typing import TypeVar from aiohttp.client_exceptions import ClientConnectorError -from nextdns import ( - AnalyticsDnssec, - AnalyticsEncryption, - AnalyticsIpVersions, - AnalyticsProtocols, - AnalyticsStatus, - ApiError, - ConnectionStatus, - InvalidApiKeyError, - NextDns, - Settings, -) -from nextdns.model import NextDnsData +from nextdns import ApiError, NextDns from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_API_KEY, Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady from homeassistant.helpers.aiohttp_client import async_get_clientsession -from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo -from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed from .const import ( ATTR_CONNECTION, @@ -44,104 +28,16 @@ from .const import ( UPDATE_INTERVAL_CONNECTION, UPDATE_INTERVAL_SETTINGS, ) - -CoordinatorDataT = TypeVar("CoordinatorDataT", bound=NextDnsData) - - -class NextDnsUpdateCoordinator(DataUpdateCoordinator[CoordinatorDataT]): # pylint: disable=hass-enforce-coordinator-module - """Class to manage fetching NextDNS data API.""" - - def __init__( - self, - hass: HomeAssistant, - nextdns: NextDns, - profile_id: str, - update_interval: timedelta, - ) -> None: - """Initialize.""" - self.nextdns = nextdns - self.profile_id = profile_id - self.profile_name = nextdns.get_profile_name(profile_id) - self.device_info = DeviceInfo( - configuration_url=f"https://my.nextdns.io/{profile_id}/setup", - entry_type=DeviceEntryType.SERVICE, - identifiers={(DOMAIN, str(profile_id))}, - manufacturer="NextDNS Inc.", - 
name=self.profile_name, - ) - - super().__init__(hass, _LOGGER, name=DOMAIN, update_interval=update_interval) - - async def _async_update_data(self) -> CoordinatorDataT: - """Update data via internal method.""" - try: - async with asyncio.timeout(10): - return await self._async_update_data_internal() - except (ApiError, ClientConnectorError, InvalidApiKeyError) as err: - raise UpdateFailed(err) from err - - async def _async_update_data_internal(self) -> CoordinatorDataT: - """Update data via library.""" - raise NotImplementedError("Update method not implemented") - - -class NextDnsStatusUpdateCoordinator(NextDnsUpdateCoordinator[AnalyticsStatus]): # pylint: disable=hass-enforce-coordinator-module - """Class to manage fetching NextDNS analytics status data from API.""" - - async def _async_update_data_internal(self) -> AnalyticsStatus: - """Update data via library.""" - return await self.nextdns.get_analytics_status(self.profile_id) - - -class NextDnsDnssecUpdateCoordinator(NextDnsUpdateCoordinator[AnalyticsDnssec]): # pylint: disable=hass-enforce-coordinator-module - """Class to manage fetching NextDNS analytics Dnssec data from API.""" - - async def _async_update_data_internal(self) -> AnalyticsDnssec: - """Update data via library.""" - return await self.nextdns.get_analytics_dnssec(self.profile_id) - - -class NextDnsEncryptionUpdateCoordinator(NextDnsUpdateCoordinator[AnalyticsEncryption]): # pylint: disable=hass-enforce-coordinator-module - """Class to manage fetching NextDNS analytics encryption data from API.""" - - async def _async_update_data_internal(self) -> AnalyticsEncryption: - """Update data via library.""" - return await self.nextdns.get_analytics_encryption(self.profile_id) - - -class NextDnsIpVersionsUpdateCoordinator(NextDnsUpdateCoordinator[AnalyticsIpVersions]): # pylint: disable=hass-enforce-coordinator-module - """Class to manage fetching NextDNS analytics IP versions data from API.""" - - async def _async_update_data_internal(self) -> 
AnalyticsIpVersions: - """Update data via library.""" - return await self.nextdns.get_analytics_ip_versions(self.profile_id) - - -class NextDnsProtocolsUpdateCoordinator(NextDnsUpdateCoordinator[AnalyticsProtocols]): # pylint: disable=hass-enforce-coordinator-module - """Class to manage fetching NextDNS analytics protocols data from API.""" - - async def _async_update_data_internal(self) -> AnalyticsProtocols: - """Update data via library.""" - return await self.nextdns.get_analytics_protocols(self.profile_id) - - -class NextDnsSettingsUpdateCoordinator(NextDnsUpdateCoordinator[Settings]): # pylint: disable=hass-enforce-coordinator-module - """Class to manage fetching NextDNS connection data from API.""" - - async def _async_update_data_internal(self) -> Settings: - """Update data via library.""" - return await self.nextdns.get_settings(self.profile_id) - - -class NextDnsConnectionUpdateCoordinator(NextDnsUpdateCoordinator[ConnectionStatus]): # pylint: disable=hass-enforce-coordinator-module - """Class to manage fetching NextDNS connection data from API.""" - - async def _async_update_data_internal(self) -> ConnectionStatus: - """Update data via library.""" - return await self.nextdns.connection_status(self.profile_id) - - -_LOGGER = logging.getLogger(__name__) +from .coordinator import ( + NextDnsConnectionUpdateCoordinator, + NextDnsDnssecUpdateCoordinator, + NextDnsEncryptionUpdateCoordinator, + NextDnsIpVersionsUpdateCoordinator, + NextDnsProtocolsUpdateCoordinator, + NextDnsSettingsUpdateCoordinator, + NextDnsStatusUpdateCoordinator, + NextDnsUpdateCoordinator, +) PLATFORMS = [Platform.BINARY_SENSOR, Platform.BUTTON, Platform.SENSOR, Platform.SWITCH] COORDINATORS: list[tuple[str, type[NextDnsUpdateCoordinator], timedelta]] = [ diff --git a/homeassistant/components/nextdns/binary_sensor.py b/homeassistant/components/nextdns/binary_sensor.py index f6860586808..1bb79cf4fce 100644 --- a/homeassistant/components/nextdns/binary_sensor.py +++ 
b/homeassistant/components/nextdns/binary_sensor.py @@ -19,8 +19,8 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.update_coordinator import CoordinatorEntity -from . import CoordinatorDataT, NextDnsConnectionUpdateCoordinator from .const import ATTR_CONNECTION, DOMAIN +from .coordinator import CoordinatorDataT, NextDnsConnectionUpdateCoordinator PARALLEL_UPDATES = 1 diff --git a/homeassistant/components/nextdns/button.py b/homeassistant/components/nextdns/button.py index d74152248a5..d61c953f260 100644 --- a/homeassistant/components/nextdns/button.py +++ b/homeassistant/components/nextdns/button.py @@ -9,8 +9,8 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.update_coordinator import CoordinatorEntity -from . import NextDnsStatusUpdateCoordinator from .const import ATTR_STATUS, DOMAIN +from .coordinator import NextDnsStatusUpdateCoordinator PARALLEL_UPDATES = 1 diff --git a/homeassistant/components/nextdns/coordinator.py b/homeassistant/components/nextdns/coordinator.py new file mode 100644 index 00000000000..cad1aeac070 --- /dev/null +++ b/homeassistant/components/nextdns/coordinator.py @@ -0,0 +1,124 @@ +"""NextDns coordinator.""" + +import asyncio +from datetime import timedelta +import logging +from typing import TypeVar + +from aiohttp.client_exceptions import ClientConnectorError +from nextdns import ( + AnalyticsDnssec, + AnalyticsEncryption, + AnalyticsIpVersions, + AnalyticsProtocols, + AnalyticsStatus, + ApiError, + ConnectionStatus, + InvalidApiKeyError, + NextDns, + Settings, +) +from nextdns.model import NextDnsData + +from homeassistant.core import HomeAssistant +from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo +from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed + +from .const import 
DOMAIN + +_LOGGER = logging.getLogger(__name__) + +CoordinatorDataT = TypeVar("CoordinatorDataT", bound=NextDnsData) + + +class NextDnsUpdateCoordinator(DataUpdateCoordinator[CoordinatorDataT]): + """Class to manage fetching NextDNS data API.""" + + def __init__( + self, + hass: HomeAssistant, + nextdns: NextDns, + profile_id: str, + update_interval: timedelta, + ) -> None: + """Initialize.""" + self.nextdns = nextdns + self.profile_id = profile_id + self.profile_name = nextdns.get_profile_name(profile_id) + self.device_info = DeviceInfo( + configuration_url=f"https://my.nextdns.io/{profile_id}/setup", + entry_type=DeviceEntryType.SERVICE, + identifiers={(DOMAIN, str(profile_id))}, + manufacturer="NextDNS Inc.", + name=self.profile_name, + ) + + super().__init__(hass, _LOGGER, name=DOMAIN, update_interval=update_interval) + + async def _async_update_data(self) -> CoordinatorDataT: + """Update data via internal method.""" + try: + async with asyncio.timeout(10): + return await self._async_update_data_internal() + except (ApiError, ClientConnectorError, InvalidApiKeyError) as err: + raise UpdateFailed(err) from err + + async def _async_update_data_internal(self) -> CoordinatorDataT: + """Update data via library.""" + raise NotImplementedError("Update method not implemented") + + +class NextDnsStatusUpdateCoordinator(NextDnsUpdateCoordinator[AnalyticsStatus]): + """Class to manage fetching NextDNS analytics status data from API.""" + + async def _async_update_data_internal(self) -> AnalyticsStatus: + """Update data via library.""" + return await self.nextdns.get_analytics_status(self.profile_id) + + +class NextDnsDnssecUpdateCoordinator(NextDnsUpdateCoordinator[AnalyticsDnssec]): + """Class to manage fetching NextDNS analytics Dnssec data from API.""" + + async def _async_update_data_internal(self) -> AnalyticsDnssec: + """Update data via library.""" + return await self.nextdns.get_analytics_dnssec(self.profile_id) + + +class 
NextDnsEncryptionUpdateCoordinator(NextDnsUpdateCoordinator[AnalyticsEncryption]): + """Class to manage fetching NextDNS analytics encryption data from API.""" + + async def _async_update_data_internal(self) -> AnalyticsEncryption: + """Update data via library.""" + return await self.nextdns.get_analytics_encryption(self.profile_id) + + +class NextDnsIpVersionsUpdateCoordinator(NextDnsUpdateCoordinator[AnalyticsIpVersions]): + """Class to manage fetching NextDNS analytics IP versions data from API.""" + + async def _async_update_data_internal(self) -> AnalyticsIpVersions: + """Update data via library.""" + return await self.nextdns.get_analytics_ip_versions(self.profile_id) + + +class NextDnsProtocolsUpdateCoordinator(NextDnsUpdateCoordinator[AnalyticsProtocols]): + """Class to manage fetching NextDNS analytics protocols data from API.""" + + async def _async_update_data_internal(self) -> AnalyticsProtocols: + """Update data via library.""" + return await self.nextdns.get_analytics_protocols(self.profile_id) + + +class NextDnsSettingsUpdateCoordinator(NextDnsUpdateCoordinator[Settings]): + """Class to manage fetching NextDNS connection data from API.""" + + async def _async_update_data_internal(self) -> Settings: + """Update data via library.""" + return await self.nextdns.get_settings(self.profile_id) + + +class NextDnsConnectionUpdateCoordinator(NextDnsUpdateCoordinator[ConnectionStatus]): + """Class to manage fetching NextDNS connection data from API.""" + + async def _async_update_data_internal(self) -> ConnectionStatus: + """Update data via library.""" + return await self.nextdns.connection_status(self.profile_id) diff --git a/homeassistant/components/nextdns/sensor.py b/homeassistant/components/nextdns/sensor.py index 4357179cbdb..3ac2179ed31 100644 --- a/homeassistant/components/nextdns/sensor.py +++ b/homeassistant/components/nextdns/sensor.py @@ -26,7 +26,6 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from 
homeassistant.helpers.typing import StateType from homeassistant.helpers.update_coordinator import CoordinatorEntity -from . import CoordinatorDataT, NextDnsUpdateCoordinator from .const import ( ATTR_DNSSEC, ATTR_ENCRYPTION, @@ -35,6 +34,7 @@ from .const import ( ATTR_STATUS, DOMAIN, ) +from .coordinator import CoordinatorDataT, NextDnsUpdateCoordinator PARALLEL_UPDATES = 1 diff --git a/homeassistant/components/nextdns/switch.py b/homeassistant/components/nextdns/switch.py index 81bf8b4e8c6..dfb796efd8c 100644 --- a/homeassistant/components/nextdns/switch.py +++ b/homeassistant/components/nextdns/switch.py @@ -18,8 +18,8 @@ from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.update_coordinator import CoordinatorEntity -from . import CoordinatorDataT, NextDnsSettingsUpdateCoordinator from .const import ATTR_SETTINGS, DOMAIN +from .coordinator import CoordinatorDataT, NextDnsSettingsUpdateCoordinator PARALLEL_UPDATES = 1 From 6985d36f18d5dd3a8bddac190a89f42ef1fb187f Mon Sep 17 00:00:00 2001 From: Aidan Timson Date: Mon, 22 Apr 2024 11:39:53 +0100 Subject: [PATCH 239/426] Update ovoenergy to 2.0.0 (#115921) Co-authored-by: J. 
Nick Koston --- .../components/ovo_energy/__init__.py | 25 ++++++++++++------- .../components/ovo_energy/config_flow.py | 24 ++++++++++++++---- .../components/ovo_energy/manifest.json | 2 +- homeassistant/components/ovo_energy/sensor.py | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- .../components/ovo_energy/test_config_flow.py | 22 ++++++++++++---- 7 files changed, 56 insertions(+), 23 deletions(-) diff --git a/homeassistant/components/ovo_energy/__init__.py b/homeassistant/components/ovo_energy/__init__.py index e0c2b77664a..d207f3161f4 100644 --- a/homeassistant/components/ovo_energy/__init__.py +++ b/homeassistant/components/ovo_energy/__init__.py @@ -7,13 +7,14 @@ from datetime import timedelta import logging import aiohttp +from ovoenergy import OVOEnergy from ovoenergy.models import OVODailyUsage -from ovoenergy.ovoenergy import OVOEnergy from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_PASSWORD, CONF_USERNAME, Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady +from homeassistant.helpers.aiohttp_client import async_get_clientsession from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo from homeassistant.helpers.update_coordinator import ( CoordinatorEntity, @@ -32,29 +33,35 @@ PLATFORMS = [Platform.SENSOR] async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Set up OVO Energy from a config entry.""" - client = OVOEnergy() + client = OVOEnergy( + client_session=async_get_clientsession(hass), + ) + + if custom_account := entry.data.get(CONF_ACCOUNT) is not None: + client.custom_account_id = custom_account try: - authenticated = await client.authenticate( + if not await client.authenticate( entry.data[CONF_USERNAME], entry.data[CONF_PASSWORD], - entry.data[CONF_ACCOUNT], - ) + ): + raise ConfigEntryAuthFailed + + await client.bootstrap_accounts() except 
aiohttp.ClientError as exception: _LOGGER.warning(exception) raise ConfigEntryNotReady from exception - if not authenticated: - raise ConfigEntryAuthFailed - async def async_update_data() -> OVODailyUsage: """Fetch data from OVO Energy.""" + if custom_account := entry.data.get(CONF_ACCOUNT) is not None: + client.custom_account_id = custom_account + async with asyncio.timeout(10): try: authenticated = await client.authenticate( entry.data[CONF_USERNAME], entry.data[CONF_PASSWORD], - entry.data[CONF_ACCOUNT], ) except aiohttp.ClientError as exception: raise UpdateFailed(exception) from exception diff --git a/homeassistant/components/ovo_energy/config_flow.py b/homeassistant/components/ovo_energy/config_flow.py index 41c64913764..87d53e5fbf9 100644 --- a/homeassistant/components/ovo_energy/config_flow.py +++ b/homeassistant/components/ovo_energy/config_flow.py @@ -6,11 +6,12 @@ from collections.abc import Mapping from typing import Any import aiohttp -from ovoenergy.ovoenergy import OVOEnergy +from ovoenergy import OVOEnergy import voluptuous as vol from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_PASSWORD, CONF_USERNAME +from homeassistant.helpers.aiohttp_client import async_get_clientsession from .const import CONF_ACCOUNT, DOMAIN @@ -41,13 +42,19 @@ class OVOEnergyFlowHandler(ConfigFlow, domain=DOMAIN): """Handle a flow initiated by the user.""" errors = {} if user_input is not None: - client = OVOEnergy() + client = OVOEnergy( + client_session=async_get_clientsession(self.hass), + ) + + if custom_account := user_input.get(CONF_ACCOUNT) is not None: + client.custom_account_id = custom_account + try: authenticated = await client.authenticate( user_input[CONF_USERNAME], user_input[CONF_PASSWORD], - user_input.get(CONF_ACCOUNT, None), ) + await client.bootstrap_accounts() except aiohttp.ClientError: errors["base"] = "cannot_connect" else: @@ -86,10 +93,17 @@ class OVOEnergyFlowHandler(ConfigFlow, 
domain=DOMAIN): self.context["title_placeholders"] = {CONF_USERNAME: self.username} if user_input is not None and user_input.get(CONF_PASSWORD) is not None: - client = OVOEnergy() + client = OVOEnergy( + client_session=async_get_clientsession(self.hass), + ) + + if self.account is not None: + client.custom_account_id = self.account + try: authenticated = await client.authenticate( - self.username, user_input[CONF_PASSWORD], self.account + self.username, + user_input[CONF_PASSWORD], ) except aiohttp.ClientError: errors["base"] = "connection_error" diff --git a/homeassistant/components/ovo_energy/manifest.json b/homeassistant/components/ovo_energy/manifest.json index 9435958f1fe..af4a313206e 100644 --- a/homeassistant/components/ovo_energy/manifest.json +++ b/homeassistant/components/ovo_energy/manifest.json @@ -7,5 +7,5 @@ "integration_type": "service", "iot_class": "cloud_polling", "loggers": ["ovoenergy"], - "requirements": ["ovoenergy==1.3.1"] + "requirements": ["ovoenergy==2.0.0"] } diff --git a/homeassistant/components/ovo_energy/sensor.py b/homeassistant/components/ovo_energy/sensor.py index d5384837e9c..5b16e8cdef5 100644 --- a/homeassistant/components/ovo_energy/sensor.py +++ b/homeassistant/components/ovo_energy/sensor.py @@ -7,8 +7,8 @@ import dataclasses from datetime import datetime, timedelta from typing import Final +from ovoenergy import OVOEnergy from ovoenergy.models import OVODailyUsage -from ovoenergy.ovoenergy import OVOEnergy from homeassistant.components.sensor import ( SensorDeviceClass, diff --git a/requirements_all.txt b/requirements_all.txt index fa3e5893eef..573f2f4a8d3 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1495,7 +1495,7 @@ orvibo==1.1.2 ourgroceries==1.5.4 # homeassistant.components.ovo_energy -ovoenergy==1.3.1 +ovoenergy==2.0.0 # homeassistant.components.p1_monitor p1monitor==3.0.0 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 8a10ee1c176..b3e0b9feaf6 100644 --- 
a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1189,7 +1189,7 @@ oralb-ble==0.17.6 ourgroceries==1.5.4 # homeassistant.components.ovo_energy -ovoenergy==1.3.1 +ovoenergy==2.0.0 # homeassistant.components.p1_monitor p1monitor==3.0.0 diff --git a/tests/components/ovo_energy/test_config_flow.py b/tests/components/ovo_energy/test_config_flow.py index 7575f1edb29..00899e745b9 100644 --- a/tests/components/ovo_energy/test_config_flow.py +++ b/tests/components/ovo_energy/test_config_flow.py @@ -5,7 +5,7 @@ from unittest.mock import patch import aiohttp from homeassistant import config_entries -from homeassistant.components.ovo_energy.const import DOMAIN +from homeassistant.components.ovo_energy.const import CONF_ACCOUNT, DOMAIN from homeassistant.const import CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -13,7 +13,11 @@ from homeassistant.data_entry_flow import FlowResultType from tests.common import MockConfigEntry FIXTURE_REAUTH_INPUT = {CONF_PASSWORD: "something1"} -FIXTURE_USER_INPUT = {CONF_USERNAME: "example@example.com", CONF_PASSWORD: "something"} +FIXTURE_USER_INPUT = { + CONF_USERNAME: "example@example.com", + CONF_PASSWORD: "something", + CONF_ACCOUNT: "123456", +} UNIQUE_ID = "example@example.com" @@ -37,9 +41,14 @@ async def test_authorization_error(hass: HomeAssistant) -> None: assert result["type"] is FlowResultType.FORM assert result["step_id"] == "user" - with patch( - "homeassistant.components.ovo_energy.config_flow.OVOEnergy.authenticate", - return_value=False, + with ( + patch( + "homeassistant.components.ovo_energy.config_flow.OVOEnergy.authenticate", + return_value=False, + ), + patch( + "homeassistant.components.ovo_energy.config_flow.OVOEnergy.bootstrap_accounts", + ), ): result2 = await hass.config_entries.flow.async_configure( result["flow_id"], @@ -88,6 +97,9 @@ async def test_full_flow_implementation(hass: HomeAssistant) -> None: 
"homeassistant.components.ovo_energy.config_flow.OVOEnergy.authenticate", return_value=True, ), + patch( + "homeassistant.components.ovo_energy.config_flow.OVOEnergy.bootstrap_accounts", + ), patch( "homeassistant.components.ovo_energy.config_flow.OVOEnergy.username", "some_name", From 693bd08a0ba3bcb56a27bfae6adaef3a613dd788 Mon Sep 17 00:00:00 2001 From: Joost Lekkerkerker Date: Mon, 22 Apr 2024 13:01:31 +0200 Subject: [PATCH 240/426] Add snapshot tests to Totalconnect (#115952) * Add snapshot tests to Totalconnect * Add snapshot tests to Totalconnect --- .../snapshots/test_alarm_control_panel.ambr | 117 ++ .../snapshots/test_binary_sensor.ambr | 1095 +++++++++++++++++ .../totalconnect/test_alarm_control_panel.py | 23 +- .../totalconnect/test_binary_sensor.py | 34 +- 4 files changed, 1229 insertions(+), 40 deletions(-) create mode 100644 tests/components/totalconnect/snapshots/test_alarm_control_panel.ambr create mode 100644 tests/components/totalconnect/snapshots/test_binary_sensor.ambr diff --git a/tests/components/totalconnect/snapshots/test_alarm_control_panel.ambr b/tests/components/totalconnect/snapshots/test_alarm_control_panel.ambr new file mode 100644 index 00000000000..4dc6b576ba3 --- /dev/null +++ b/tests/components/totalconnect/snapshots/test_alarm_control_panel.ambr @@ -0,0 +1,117 @@ +# serializer version: 1 +# name: test_attributes[alarm_control_panel.test-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'alarm_control_panel', + 'entity_category': None, + 'entity_id': 'alarm_control_panel.test', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'test', + 'platform': 'totalconnect', + 'previous_unique_id': None, + 
'supported_features': , + 'translation_key': None, + 'unique_id': '123456', + 'unit_of_measurement': None, + }) +# --- +# name: test_attributes[alarm_control_panel.test-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'ac_loss': False, + 'changed_by': None, + 'code_arm_required': True, + 'code_format': None, + 'cover_tampered': False, + 'friendly_name': 'test', + 'location_id': '123456', + 'location_name': 'test', + 'low_battery': False, + 'partition': 1, + 'supported_features': , + 'triggered_source': None, + 'triggered_zone': None, + }), + 'context': , + 'entity_id': 'alarm_control_panel.test', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'disarmed', + }) +# --- +# name: test_attributes[alarm_control_panel.test_partition_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'alarm_control_panel', + 'entity_category': None, + 'entity_id': 'alarm_control_panel.test_partition_2', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'test partition 2', + 'platform': 'totalconnect', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '123456_2', + 'unit_of_measurement': None, + }) +# --- +# name: test_attributes[alarm_control_panel.test_partition_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'ac_loss': False, + 'changed_by': None, + 'code_arm_required': True, + 'code_format': None, + 'cover_tampered': False, + 'friendly_name': 'test partition 2', + 'location_id': '123456', + 'location_name': 'test partition 2', + 'low_battery': False, + 'partition': 2, + 'supported_features': , + 'triggered_source': None, + 'triggered_zone': None, + }), + 'context': , + 
'entity_id': 'alarm_control_panel.test_partition_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'disarmed', + }) +# --- diff --git a/tests/components/totalconnect/snapshots/test_binary_sensor.ambr b/tests/components/totalconnect/snapshots/test_binary_sensor.ambr new file mode 100644 index 00000000000..a79f609488d --- /dev/null +++ b/tests/components/totalconnect/snapshots/test_binary_sensor.ambr @@ -0,0 +1,1095 @@ +# serializer version: 1 +# name: test_entity_registry[binary_sensor.fire-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.fire', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Fire', + 'platform': 'totalconnect', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '123456_2_zone', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_registry[binary_sensor.fire-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'smoke', + 'friendly_name': 'Fire', + 'location_id': '123456', + 'partition': '1', + 'zone_id': '2', + }), + 'context': , + 'entity_id': 'binary_sensor.fire', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_entity_registry[binary_sensor.fire_low_battery-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.fire_low_battery', + 'has_entity_name': False, + 'hidden_by': None, + 
'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Fire low battery', + 'platform': 'totalconnect', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '123456_2_low_battery', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_registry[binary_sensor.fire_low_battery-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'battery', + 'friendly_name': 'Fire low battery', + 'location_id': '123456', + 'partition': '1', + 'zone_id': '2', + }), + 'context': , + 'entity_id': 'binary_sensor.fire_low_battery', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_entity_registry[binary_sensor.fire_tamper-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.fire_tamper', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Fire tamper', + 'platform': 'totalconnect', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '123456_2_tamper', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_registry[binary_sensor.fire_tamper-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'tamper', + 'friendly_name': 'Fire tamper', + 'location_id': '123456', + 'partition': '1', + 'zone_id': '2', + }), + 'context': , + 'entity_id': 'binary_sensor.fire_tamper', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: 
test_entity_registry[binary_sensor.gas-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.gas', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Gas', + 'platform': 'totalconnect', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '123456_3_zone', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_registry[binary_sensor.gas-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'gas', + 'friendly_name': 'Gas', + 'location_id': '123456', + 'partition': '1', + 'zone_id': '3', + }), + 'context': , + 'entity_id': 'binary_sensor.gas', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_entity_registry[binary_sensor.gas_low_battery-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.gas_low_battery', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Gas low battery', + 'platform': 'totalconnect', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '123456_3_low_battery', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_registry[binary_sensor.gas_low_battery-state] + StateSnapshot({ + 'attributes': 
ReadOnlyDict({ + 'device_class': 'battery', + 'friendly_name': 'Gas low battery', + 'location_id': '123456', + 'partition': '1', + 'zone_id': '3', + }), + 'context': , + 'entity_id': 'binary_sensor.gas_low_battery', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_entity_registry[binary_sensor.gas_tamper-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.gas_tamper', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Gas tamper', + 'platform': 'totalconnect', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '123456_3_tamper', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_registry[binary_sensor.gas_tamper-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'tamper', + 'friendly_name': 'Gas tamper', + 'location_id': '123456', + 'partition': '1', + 'zone_id': '3', + }), + 'context': , + 'entity_id': 'binary_sensor.gas_tamper', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_entity_registry[binary_sensor.medical-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.medical', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 
'original_name': 'Medical', + 'platform': 'totalconnect', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '123456_5_zone', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_registry[binary_sensor.medical-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'safety', + 'friendly_name': 'Medical', + 'location_id': '123456', + 'partition': '1', + 'zone_id': '5', + }), + 'context': , + 'entity_id': 'binary_sensor.medical', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_entity_registry[binary_sensor.motion-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.motion', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Motion', + 'platform': 'totalconnect', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '123456_4_zone', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_registry[binary_sensor.motion-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'motion', + 'friendly_name': 'Motion', + 'location_id': '123456', + 'partition': '1', + 'zone_id': '4', + }), + 'context': , + 'entity_id': 'binary_sensor.motion', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_entity_registry[binary_sensor.motion_low_battery-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 
'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.motion_low_battery', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Motion low battery', + 'platform': 'totalconnect', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '123456_4_low_battery', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_registry[binary_sensor.motion_low_battery-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'battery', + 'friendly_name': 'Motion low battery', + 'location_id': '123456', + 'partition': '1', + 'zone_id': '4', + }), + 'context': , + 'entity_id': 'binary_sensor.motion_low_battery', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_entity_registry[binary_sensor.motion_tamper-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.motion_tamper', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Motion tamper', + 'platform': 'totalconnect', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '123456_4_tamper', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_registry[binary_sensor.motion_tamper-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'tamper', + 'friendly_name': 'Motion tamper', + 'location_id': '123456', + 'partition': '1', + 'zone_id': '4', + }), + 'context': , + 
'entity_id': 'binary_sensor.motion_tamper', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_entity_registry[binary_sensor.security-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.security', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Security', + 'platform': 'totalconnect', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '123456_1_zone', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_registry[binary_sensor.security-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'door', + 'friendly_name': 'Security', + 'location_id': '123456', + 'partition': '1', + 'zone_id': '1', + }), + 'context': , + 'entity_id': 'binary_sensor.security', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_entity_registry[binary_sensor.security_low_battery-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.security_low_battery', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Security low battery', + 'platform': 'totalconnect', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 
'unique_id': '123456_1_low_battery', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_registry[binary_sensor.security_low_battery-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'battery', + 'friendly_name': 'Security low battery', + 'location_id': '123456', + 'partition': '1', + 'zone_id': '1', + }), + 'context': , + 'entity_id': 'binary_sensor.security_low_battery', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_entity_registry[binary_sensor.security_tamper-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.security_tamper', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Security tamper', + 'platform': 'totalconnect', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '123456_1_tamper', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_registry[binary_sensor.security_tamper-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'tamper', + 'friendly_name': 'Security tamper', + 'location_id': '123456', + 'partition': '1', + 'zone_id': '1', + }), + 'context': , + 'entity_id': 'binary_sensor.security_tamper', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_entity_registry[binary_sensor.temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 
'entity_id': 'binary_sensor.temperature', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Temperature', + 'platform': 'totalconnect', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '123456_7_zone', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_registry[binary_sensor.temperature-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'problem', + 'friendly_name': 'Temperature', + 'location_id': '123456', + 'partition': '1', + 'zone_id': 7, + }), + 'context': , + 'entity_id': 'binary_sensor.temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_entity_registry[binary_sensor.temperature_low_battery-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.temperature_low_battery', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Temperature low battery', + 'platform': 'totalconnect', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '123456_7_low_battery', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_registry[binary_sensor.temperature_low_battery-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'battery', + 'friendly_name': 'Temperature low battery', + 'location_id': '123456', + 'partition': '1', + 'zone_id': 7, + }), + 'context': , + 'entity_id': 'binary_sensor.temperature_low_battery', 
+ 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_entity_registry[binary_sensor.temperature_tamper-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.temperature_tamper', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Temperature tamper', + 'platform': 'totalconnect', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '123456_7_tamper', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_registry[binary_sensor.temperature_tamper-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'tamper', + 'friendly_name': 'Temperature tamper', + 'location_id': '123456', + 'partition': '1', + 'zone_id': 7, + }), + 'context': , + 'entity_id': 'binary_sensor.temperature_tamper', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_entity_registry[binary_sensor.test_low_battery-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.test_low_battery', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'test low battery', + 'platform': 'totalconnect', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 
'unique_id': '123456_low_battery', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_registry[binary_sensor.test_low_battery-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'battery', + 'friendly_name': 'test low battery', + 'location_id': '123456', + }), + 'context': , + 'entity_id': 'binary_sensor.test_low_battery', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_entity_registry[binary_sensor.test_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.test_power', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'test power', + 'platform': 'totalconnect', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '123456_power', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_registry[binary_sensor.test_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'test power', + 'location_id': '123456', + }), + 'context': , + 'entity_id': 'binary_sensor.test_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_entity_registry[binary_sensor.test_tamper-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.test_tamper', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': 
set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'test tamper', + 'platform': 'totalconnect', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '123456_tamper', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_registry[binary_sensor.test_tamper-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'tamper', + 'friendly_name': 'test tamper', + 'location_id': '123456', + }), + 'context': , + 'entity_id': 'binary_sensor.test_tamper', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_entity_registry[binary_sensor.unknown-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.unknown', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Unknown', + 'platform': 'totalconnect', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '123456_6_zone', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_registry[binary_sensor.unknown-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'door', + 'friendly_name': 'Unknown', + 'location_id': '123456', + 'partition': '1', + 'zone_id': '6', + }), + 'context': , + 'entity_id': 'binary_sensor.unknown', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_entity_registry[binary_sensor.unknown_low_battery-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 
'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.unknown_low_battery', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Unknown low battery', + 'platform': 'totalconnect', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '123456_6_low_battery', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_registry[binary_sensor.unknown_low_battery-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'battery', + 'friendly_name': 'Unknown low battery', + 'location_id': '123456', + 'partition': '1', + 'zone_id': '6', + }), + 'context': , + 'entity_id': 'binary_sensor.unknown_low_battery', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_entity_registry[binary_sensor.unknown_tamper-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.unknown_tamper', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Unknown tamper', + 'platform': 'totalconnect', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '123456_6_tamper', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_registry[binary_sensor.unknown_tamper-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'tamper', + 'friendly_name': 'Unknown tamper', + 
'location_id': '123456', + 'partition': '1', + 'zone_id': '6', + }), + 'context': , + 'entity_id': 'binary_sensor.unknown_tamper', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- diff --git a/tests/components/totalconnect/test_alarm_control_panel.py b/tests/components/totalconnect/test_alarm_control_panel.py index fa2e997756d..176fe54c34a 100644 --- a/tests/components/totalconnect/test_alarm_control_panel.py +++ b/tests/components/totalconnect/test_alarm_control_panel.py @@ -4,6 +4,7 @@ from datetime import timedelta from unittest.mock import patch import pytest +from syrupy import SnapshotAssertion from total_connect_client.exceptions import ServiceUnavailable, TotalConnectError from homeassistant.components.alarm_control_panel import DOMAIN as ALARM_DOMAIN @@ -14,7 +15,6 @@ from homeassistant.components.totalconnect.alarm_control_panel import ( ) from homeassistant.const import ( ATTR_ENTITY_ID, - ATTR_FRIENDLY_NAME, SERVICE_ALARM_ARM_AWAY, SERVICE_ALARM_ARM_HOME, SERVICE_ALARM_ARM_NIGHT, @@ -36,7 +36,6 @@ from homeassistant.helpers.entity_component import async_update_entity from homeassistant.util import dt as dt_util from .common import ( - LOCATION_ID, RESPONSE_ARM_FAILURE, RESPONSE_ARM_SUCCESS, RESPONSE_ARMED_AWAY, @@ -58,7 +57,7 @@ from .common import ( setup_platform, ) -from tests.common import async_fire_time_changed +from tests.common import async_fire_time_changed, snapshot_platform ENTITY_ID = "alarm_control_panel.test" ENTITY_ID_2 = "alarm_control_panel.test_partition_2" @@ -67,28 +66,20 @@ DATA = {ATTR_ENTITY_ID: ENTITY_ID} DELAY = timedelta(seconds=10) -async def test_attributes(hass: HomeAssistant) -> None: +async def test_attributes( + hass: HomeAssistant, entity_registry: er.EntityRegistry, snapshot: SnapshotAssertion +) -> None: """Test the alarm control panel attributes are correct.""" - await setup_platform(hass, ALARM_DOMAIN) + entry = await setup_platform(hass, ALARM_DOMAIN) with patch( 
"homeassistant.components.totalconnect.TotalConnectClient.request", return_value=RESPONSE_DISARMED, ) as mock_request: await async_update_entity(hass, ENTITY_ID) await hass.async_block_till_done() - state = hass.states.get(ENTITY_ID) - assert state.state == STATE_ALARM_DISARMED mock_request.assert_called_once() - assert state.attributes.get(ATTR_FRIENDLY_NAME) == "test" - entity_registry = er.async_get(hass) - entry = entity_registry.async_get(ENTITY_ID) - # TotalConnect partition #1 alarm device unique_id is the location_id - assert entry.unique_id == LOCATION_ID - - entry2 = entity_registry.async_get(ENTITY_ID_2) - # TotalConnect partition #2 unique_id is the location_id + "_{partition_number}" - assert entry2.unique_id == LOCATION_ID + "_2" + await snapshot_platform(hass, entity_registry, snapshot, entry.entry_id) assert mock_request.call_count == 1 diff --git a/tests/components/totalconnect/test_binary_sensor.py b/tests/components/totalconnect/test_binary_sensor.py index 8ff548850d9..1a8a65391f5 100644 --- a/tests/components/totalconnect/test_binary_sensor.py +++ b/tests/components/totalconnect/test_binary_sensor.py @@ -2,6 +2,8 @@ from unittest.mock import patch +from syrupy import SnapshotAssertion + from homeassistant.components.binary_sensor import ( DOMAIN as BINARY_SENSOR, BinarySensorDeviceClass, @@ -10,7 +12,9 @@ from homeassistant.const import ATTR_FRIENDLY_NAME, STATE_OFF, STATE_ON from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er -from .common import LOCATION_ID, RESPONSE_DISARMED, ZONE_NORMAL, setup_platform +from .common import RESPONSE_DISARMED, ZONE_NORMAL, setup_platform + +from tests.common import snapshot_platform ZONE_ENTITY_ID = "binary_sensor.security" ZONE_LOW_BATTERY_ID = "binary_sensor.security_low_battery" @@ -20,31 +24,13 @@ PANEL_TAMPER_ID = "binary_sensor.test_tamper" PANEL_POWER_ID = "binary_sensor.test_power" -async def test_entity_registry(hass: HomeAssistant) -> None: +async def 
test_entity_registry( + hass: HomeAssistant, entity_registry: er.EntityRegistry, snapshot: SnapshotAssertion +) -> None: """Test the binary sensor is registered in entity registry.""" - await setup_platform(hass, BINARY_SENSOR) - entity_registry = er.async_get(hass) + entry = await setup_platform(hass, BINARY_SENSOR) - # ensure zone 1 plus two diagnostic zones are created - entry = entity_registry.async_get(ZONE_ENTITY_ID) - entry_low_battery = entity_registry.async_get(ZONE_LOW_BATTERY_ID) - entry_tamper = entity_registry.async_get(ZONE_TAMPER_ID) - - assert entry.unique_id == f"{LOCATION_ID}_{ZONE_NORMAL['ZoneID']}_zone" - assert ( - entry_low_battery.unique_id - == f"{LOCATION_ID}_{ZONE_NORMAL['ZoneID']}_low_battery" - ) - assert entry_tamper.unique_id == f"{LOCATION_ID}_{ZONE_NORMAL['ZoneID']}_tamper" - - # ensure panel diagnostic zones are created - panel_battery = entity_registry.async_get(PANEL_BATTERY_ID) - panel_tamper = entity_registry.async_get(PANEL_TAMPER_ID) - panel_power = entity_registry.async_get(PANEL_POWER_ID) - - assert panel_battery.unique_id == f"{LOCATION_ID}_low_battery" - assert panel_tamper.unique_id == f"{LOCATION_ID}_tamper" - assert panel_power.unique_id == f"{LOCATION_ID}_power" + await snapshot_platform(hass, entity_registry, snapshot, entry.entry_id) async def test_state_and_attributes(hass: HomeAssistant) -> None: From 9b6863f18279a9e2169e56dd7808f6d19eca30a1 Mon Sep 17 00:00:00 2001 From: Jesse Hills <3060199+jesserockz@users.noreply.github.com> Date: Mon, 22 Apr 2024 23:12:22 +1200 Subject: [PATCH 241/426] ESPHome: Add datetime entities (#115942) --- homeassistant/components/esphome/datetime.py | 48 +++++++++++ .../components/esphome/entry_data.py | 2 + tests/components/esphome/test_datetime.py | 79 +++++++++++++++++++ 3 files changed, 129 insertions(+) create mode 100644 homeassistant/components/esphome/datetime.py create mode 100644 tests/components/esphome/test_datetime.py diff --git 
a/homeassistant/components/esphome/datetime.py b/homeassistant/components/esphome/datetime.py new file mode 100644 index 00000000000..15509a46158 --- /dev/null +++ b/homeassistant/components/esphome/datetime.py @@ -0,0 +1,48 @@ +"""Support for esphome datetimes.""" + +from __future__ import annotations + +from datetime import datetime + +from aioesphomeapi import DateTimeInfo, DateTimeState + +from homeassistant.components.datetime import DateTimeEntity +from homeassistant.config_entries import ConfigEntry +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback +import homeassistant.util.dt as dt_util + +from .entity import EsphomeEntity, esphome_state_property, platform_async_setup_entry + + +async def async_setup_entry( + hass: HomeAssistant, + entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up esphome datetimes based on a config entry.""" + await platform_async_setup_entry( + hass, + entry, + async_add_entities, + info_type=DateTimeInfo, + entity_type=EsphomeDateTime, + state_type=DateTimeState, + ) + + +class EsphomeDateTime(EsphomeEntity[DateTimeInfo, DateTimeState], DateTimeEntity): + """A datetime implementation for esphome.""" + + @property + @esphome_state_property + def native_value(self) -> datetime | None: + """Return the state of the entity.""" + state = self._state + if state.missing_state: + return None + return dt_util.utc_from_timestamp(state.epoch_seconds) + + async def async_set_value(self, value: datetime) -> None: + """Update the current datetime.""" + self._client.datetime_command(self._key, int(value.timestamp())) diff --git a/homeassistant/components/esphome/entry_data.py b/homeassistant/components/esphome/entry_data.py index 52dc1f17ad6..a840fc3a17e 100644 --- a/homeassistant/components/esphome/entry_data.py +++ b/homeassistant/components/esphome/entry_data.py @@ -20,6 +20,7 @@ from aioesphomeapi import ( ClimateInfo, CoverInfo, DateInfo, + 
DateTimeInfo, DeviceInfo, EntityInfo, EntityState, @@ -68,6 +69,7 @@ INFO_TYPE_TO_PLATFORM: dict[type[EntityInfo], Platform] = { ClimateInfo: Platform.CLIMATE, CoverInfo: Platform.COVER, DateInfo: Platform.DATE, + DateTimeInfo: Platform.DATETIME, FanInfo: Platform.FAN, LightInfo: Platform.LIGHT, LockInfo: Platform.LOCK, diff --git a/tests/components/esphome/test_datetime.py b/tests/components/esphome/test_datetime.py new file mode 100644 index 00000000000..3bdc196de95 --- /dev/null +++ b/tests/components/esphome/test_datetime.py @@ -0,0 +1,79 @@ +"""Test ESPHome datetimes.""" + +from unittest.mock import call + +from aioesphomeapi import APIClient, DateTimeInfo, DateTimeState + +from homeassistant.components.datetime import ( + ATTR_DATETIME, + DOMAIN as DATETIME_DOMAIN, + SERVICE_SET_VALUE, +) +from homeassistant.const import ATTR_ENTITY_ID, STATE_UNKNOWN +from homeassistant.core import HomeAssistant + + +async def test_generic_datetime_entity( + hass: HomeAssistant, + mock_client: APIClient, + mock_generic_device_entry, +) -> None: + """Test a generic datetime entity.""" + entity_info = [ + DateTimeInfo( + object_id="mydatetime", + key=1, + name="my datetime", + unique_id="my_datetime", + ) + ] + states = [DateTimeState(key=1, epoch_seconds=1713270896)] + user_service = [] + await mock_generic_device_entry( + mock_client=mock_client, + entity_info=entity_info, + user_service=user_service, + states=states, + ) + state = hass.states.get("datetime.test_mydatetime") + assert state is not None + assert state.state == "2024-04-16T12:34:56+00:00" + + await hass.services.async_call( + DATETIME_DOMAIN, + SERVICE_SET_VALUE, + { + ATTR_ENTITY_ID: "datetime.test_mydatetime", + ATTR_DATETIME: "2000-01-01T01:23:45+00:00", + }, + blocking=True, + ) + mock_client.datetime_command.assert_has_calls([call(1, 946689825)]) + mock_client.datetime_command.reset_mock() + + +async def test_generic_datetime_missing_state( + hass: HomeAssistant, + mock_client: APIClient, + 
mock_generic_device_entry, +) -> None: + """Test a generic datetime entity with missing state.""" + entity_info = [ + DateTimeInfo( + object_id="mydatetime", + key=1, + name="my datetime", + unique_id="my_datetime", + ) + ] + states = [DateTimeState(key=1, missing_state=True)] + user_service = [] + await mock_generic_device_entry( + mock_client=mock_client, + entity_info=entity_info, + user_service=user_service, + states=states, + ) + state = hass.states.get("datetime.test_mydatetime") + assert state is not None + assert state.state == STATE_UNKNOWN From 5a7e921ae3fb455f076b3fdf8786bd207e743331 Mon Sep 17 00:00:00 2001 From: Michael <35783820+mib1185@users.noreply.github.com> Date: Mon, 22 Apr 2024 13:24:23 +0200 Subject: [PATCH 242/426] Address late review for AVM Fritz!Smarthome (#115960) fix typo --- homeassistant/components/fritzbox/coordinator.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/fritzbox/coordinator.py b/homeassistant/components/fritzbox/coordinator.py index a9cfc25b223..06454fa912a 100644 --- a/homeassistant/components/fritzbox/coordinator.py +++ b/homeassistant/components/fritzbox/coordinator.py @@ -58,18 +58,18 @@ class FritzboxDataUpdateCoordinator(DataUpdateCoordinator[FritzboxCoordinatorDat list(self.data.devices) + list(self.data.templates) ) - def cleanup_removed_devices(self, avaiable_ains: list[str]) -> None: + def cleanup_removed_devices(self, available_ains: list[str]) -> None: """Cleanup entity and device registry from removed devices.""" entity_reg = er.async_get(self.hass) for entity in er.async_entries_for_config_entry( entity_reg, self.config_entry.entry_id ): - if entity.unique_id.split("_")[0] not in avaiable_ains: + if entity.unique_id.split("_")[0] not in available_ains: LOGGER.debug("Removing obsolete entity entry %s", entity.entity_id) entity_reg.async_remove(entity.entity_id) device_reg = dr.async_get(self.hass) - identifiers = {(DOMAIN, ain) for ain in avaiable_ains} + 
identifiers = {(DOMAIN, ain) for ain in available_ains} for device in dr.async_entries_for_config_entry( device_reg, self.config_entry.entry_id ): From 65b2c1519c45be8decaf53135f666773a513ce7f Mon Sep 17 00:00:00 2001 From: Marc-Olivier Arsenault Date: Mon, 22 Apr 2024 10:43:01 -0400 Subject: [PATCH 243/426] Reduce ecobee throttle (#115968) reduce ecobee throttle --- homeassistant/components/ecobee/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/homeassistant/components/ecobee/__init__.py b/homeassistant/components/ecobee/__init__.py index 8083d0efcb4..c9d45b512bd 100644 --- a/homeassistant/components/ecobee/__init__.py +++ b/homeassistant/components/ecobee/__init__.py @@ -22,7 +22,7 @@ from .const import ( PLATFORMS, ) -MIN_TIME_BETWEEN_UPDATES = timedelta(seconds=180) +MIN_TIME_BETWEEN_UPDATES = timedelta(seconds=5) CONFIG_SCHEMA = vol.Schema( {DOMAIN: vol.Schema({vol.Optional(CONF_API_KEY): cv.string})}, extra=vol.ALLOW_EXTRA From 37d329c2867629db59a12cc3ed63675315aa288f Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Mon, 22 Apr 2024 16:51:19 +0200 Subject: [PATCH 244/426] Improve reliability of homeassistant_alerts updates (#115974) --- .../components/homeassistant_alerts/__init__.py | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/homeassistant_alerts/__init__.py b/homeassistant/components/homeassistant_alerts/__init__.py index 7dcd9f8db97..ef5e330699a 100644 --- a/homeassistant/components/homeassistant_alerts/__init__.py +++ b/homeassistant/components/homeassistant_alerts/__init__.py @@ -20,7 +20,7 @@ from homeassistant.helpers.issue_registry import ( async_create_issue, async_delete_issue, ) -from homeassistant.helpers.start import async_at_start +from homeassistant.helpers.start import async_at_started from homeassistant.helpers.typing import ConfigType from homeassistant.helpers.update_coordinator import DataUpdateCoordinator from homeassistant.setup import EventComponentLoaded @@ -30,6 +30,8 @@ DOMAIN = "homeassistant_alerts" UPDATE_INTERVAL = timedelta(hours=3) _LOGGER = logging.getLogger(__name__) +REQUEST_TIMEOUT = aiohttp.ClientTimeout(total=30) + CONFIG_SCHEMA = cv.empty_config_schema(DOMAIN) @@ -52,7 +54,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: try: response = await async_get_clientsession(hass).get( f"https://alerts.home-assistant.io/alerts/{alert.alert_id}.json", - timeout=aiohttp.ClientTimeout(total=30), + timeout=REQUEST_TIMEOUT, ) except TimeoutError: _LOGGER.warning("Error fetching %s: timeout", alert.filename) @@ -106,7 +108,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: await coordinator.async_refresh() hass.bus.async_listen(EVENT_COMPONENT_LOADED, _component_loaded) - async_at_start(hass, initial_refresh) + async_at_started(hass, initial_refresh) return True @@ -146,7 +148,7 @@ class AlertUpdateCoordinator(DataUpdateCoordinator[dict[str, IntegrationAlert]]) async def _async_update_data(self) -> dict[str, 
IntegrationAlert]: response = await async_get_clientsession(self.hass).get( "https://alerts.home-assistant.io/alerts.json", - timeout=aiohttp.ClientTimeout(total=10), + timeout=REQUEST_TIMEOUT, ) alerts = await response.json() From 20adc5be70a57a454ca0e250716d27259ec4aad8 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Mon, 22 Apr 2024 16:52:04 +0200 Subject: [PATCH 245/426] Small fixes for processing integration requirements (#115973) --- homeassistant/requirements.py | 20 +++++++++++--------- 1 file changed, 11 insertions(+), 9 deletions(-) diff --git a/homeassistant/requirements.py b/homeassistant/requirements.py index e78398ebf03..e282ced90ac 100644 --- a/homeassistant/requirements.py +++ b/homeassistant/requirements.py @@ -122,6 +122,11 @@ def _install_requirements_if_missing( return installed, failures +def _set_result_unless_done(future: asyncio.Future[None]) -> None: + if not future.done(): + future.set_result(None) + + class RequirementsManager: """Manage requirements.""" @@ -144,16 +149,13 @@ class RequirementsManager: is invalid, RequirementNotFound if there was some type of failure to install requirements. 
""" - if done is None: done = {domain} else: done.add(domain) - integration = await async_get_integration(self.hass, domain) - if self.hass.config.skip_pip: - return integration + return await async_get_integration(self.hass, domain) cache = self.integrations_with_reqs int_or_fut = cache.get(domain, UNDEFINED) @@ -170,19 +172,19 @@ class RequirementsManager: if int_or_fut is not UNDEFINED: return cast(Integration, int_or_fut) - event = cache[domain] = self.hass.loop.create_future() + future = cache[domain] = self.hass.loop.create_future() try: + integration = await async_get_integration(self.hass, domain) await self._async_process_integration(integration, done) except Exception: del cache[domain] - if not event.done(): - event.set_result(None) raise + finally: + _set_result_unless_done(future) cache[domain] = integration - if not event.done(): - event.set_result(None) + _set_result_unless_done(future) return integration async def _async_process_integration( From 2afaa3d3337df0332d024928686636119e435392 Mon Sep 17 00:00:00 2001 From: David Knowles Date: Mon, 22 Apr 2024 10:54:04 -0400 Subject: [PATCH 246/426] Remove YAML support from Hydrawise (#115966) --- .../components/hydrawise/__init__.py | 39 +----- .../components/hydrawise/config_flow.py | 52 -------- .../components/hydrawise/strings.json | 6 - .../components/hydrawise/test_config_flow.py | 118 +----------------- tests/components/hydrawise/test_init.py | 20 +-- 5 files changed, 4 insertions(+), 231 deletions(-) diff --git a/homeassistant/components/hydrawise/__init__.py b/homeassistant/components/hydrawise/__init__.py index 541d4211e49..62a4cacc5c4 100644 --- a/homeassistant/components/hydrawise/__init__.py +++ b/homeassistant/components/hydrawise/__init__.py @@ -1,52 +1,17 @@ """Support for Hydrawise cloud.""" from pydrawise import legacy -import voluptuous as vol -from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry -from homeassistant.const import ( - CONF_ACCESS_TOKEN, - CONF_API_KEY, - 
CONF_SCAN_INTERVAL, - Platform, -) +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import CONF_API_KEY, Platform from homeassistant.core import HomeAssistant -import homeassistant.helpers.config_validation as cv -from homeassistant.helpers.typing import ConfigType from .const import DOMAIN, SCAN_INTERVAL from .coordinator import HydrawiseDataUpdateCoordinator -CONFIG_SCHEMA = vol.Schema( - { - DOMAIN: vol.Schema( - { - vol.Required(CONF_ACCESS_TOKEN): cv.string, - vol.Optional(CONF_SCAN_INTERVAL, default=SCAN_INTERVAL): cv.time_period, - } - ) - }, - extra=vol.ALLOW_EXTRA, -) - PLATFORMS: list[Platform] = [Platform.BINARY_SENSOR, Platform.SENSOR, Platform.SWITCH] -async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: - """Set up the Hunter Hydrawise component.""" - if DOMAIN not in config: - return True - - hass.async_create_task( - hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_IMPORT}, - data={CONF_API_KEY: config[DOMAIN][CONF_ACCESS_TOKEN]}, - ) - ) - return True - - async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool: """Set up Hydrawise from a config entry.""" access_token = config_entry.data[CONF_API_KEY] diff --git a/homeassistant/components/hydrawise/config_flow.py b/homeassistant/components/hydrawise/config_flow.py index cfaaefcd03a..8233074c3cd 100644 --- a/homeassistant/components/hydrawise/config_flow.py +++ b/homeassistant/components/hydrawise/config_flow.py @@ -11,9 +11,6 @@ import voluptuous as vol from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_API_KEY -from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN -from homeassistant.data_entry_flow import AbortFlow, FlowResultType -from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue from .const import DOMAIN, LOGGER @@ -42,40 +39,6 @@ class HydrawiseConfigFlow(ConfigFlow, domain=DOMAIN): return 
self.async_create_entry(title="Hydrawise", data={CONF_API_KEY: api_key}) - def _import_issue(self, error_type: str) -> ConfigFlowResult: - """Create an issue about a YAML import failure.""" - async_create_issue( - self.hass, - DOMAIN, - f"deprecated_yaml_import_issue_{error_type}", - breaks_in_ha_version="2024.4.0", - is_fixable=False, - severity=IssueSeverity.ERROR, - translation_key="deprecated_yaml_import_issue", - translation_placeholders={ - "error_type": error_type, - "url": "/config/integrations/dashboard/add?domain=hydrawise", - }, - ) - return self.async_abort(reason=error_type) - - def _deprecated_yaml_issue(self) -> None: - """Create an issue about YAML deprecation.""" - async_create_issue( - self.hass, - HOMEASSISTANT_DOMAIN, - f"deprecated_yaml_{DOMAIN}", - breaks_in_ha_version="2024.4.0", - is_fixable=False, - issue_domain=DOMAIN, - severity=IssueSeverity.WARNING, - translation_key="deprecated_yaml", - translation_placeholders={ - "domain": DOMAIN, - "integration_title": "Hydrawise", - }, - ) - async def async_step_user( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: @@ -94,18 +57,3 @@ class HydrawiseConfigFlow(ConfigFlow, domain=DOMAIN): data_schema=vol.Schema({vol.Required(CONF_API_KEY): str}), errors=errors, ) - - async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: - """Import data from YAML.""" - try: - result = await self._create_entry( - import_data.get(CONF_API_KEY, ""), - on_failure=self._import_issue, - ) - except AbortFlow: - self._deprecated_yaml_issue() - raise - - if result["type"] == FlowResultType.CREATE_ENTRY: - self._deprecated_yaml_issue() - return result diff --git a/homeassistant/components/hydrawise/strings.json b/homeassistant/components/hydrawise/strings.json index 8f079abcc7d..1c96098db35 100644 --- a/homeassistant/components/hydrawise/strings.json +++ b/homeassistant/components/hydrawise/strings.json @@ -16,12 +16,6 @@ "already_configured": 
"[%key:common::config_flow::abort::already_configured_service%]" } }, - "issues": { - "deprecated_yaml_import_issue": { - "title": "The Hydrawise YAML configuration import failed", - "description": "Configuring Hydrawise using YAML is being removed but there was an {error_type} error importing your YAML configuration.\n\nEnsure connection to Hydrawise works and restart Home Assistant to try again or remove the Hydrawise YAML configuration from your configuration.yaml file and continue to [set up the integration]({url}) manually." - } - }, "entity": { "binary_sensor": { "watering": { diff --git a/tests/components/hydrawise/test_config_flow.py b/tests/components/hydrawise/test_config_flow.py index b0d5b098309..be0ef90becd 100644 --- a/tests/components/hydrawise/test_config_flow.py +++ b/tests/components/hydrawise/test_config_flow.py @@ -8,12 +8,8 @@ import pytest from homeassistant import config_entries from homeassistant.components.hydrawise.const import DOMAIN -from homeassistant.const import CONF_API_KEY, CONF_SCAN_INTERVAL -from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant +from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType -import homeassistant.helpers.issue_registry as ir - -from tests.common import MockConfigEntry pytestmark = pytest.mark.usefixtures("mock_setup_entry") @@ -87,115 +83,3 @@ async def test_form_connect_timeout( mock_pydrawise.get_user.return_value = user result2 = await hass.config_entries.flow.async_configure(result["flow_id"], data) assert result2["type"] is FlowResultType.CREATE_ENTRY - - -async def test_flow_import_success( - hass: HomeAssistant, mock_pydrawise: AsyncMock, user: User -) -> None: - """Test that we can import a YAML config.""" - mock_pydrawise.get_user.return_value = User - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_IMPORT}, - data={ - CONF_API_KEY: "__api_key__", - CONF_SCAN_INTERVAL: 
120, - }, - ) - await hass.async_block_till_done() - - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == "Hydrawise" - assert result["data"] == { - CONF_API_KEY: "__api_key__", - } - - issue_registry = ir.async_get(hass) - issue = issue_registry.async_get_issue( - HOMEASSISTANT_DOMAIN, "deprecated_yaml_hydrawise" - ) - assert issue.translation_key == "deprecated_yaml" - - -async def test_flow_import_api_error( - hass: HomeAssistant, mock_pydrawise: AsyncMock -) -> None: - """Test that we handle API errors on YAML import.""" - mock_pydrawise.get_user.side_effect = ClientError - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_IMPORT}, - data={ - CONF_API_KEY: "__api_key__", - CONF_SCAN_INTERVAL: 120, - }, - ) - await hass.async_block_till_done() - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "cannot_connect" - - issue_registry = ir.async_get(hass) - issue = issue_registry.async_get_issue( - DOMAIN, "deprecated_yaml_import_issue_cannot_connect" - ) - assert issue.translation_key == "deprecated_yaml_import_issue" - - -async def test_flow_import_connect_timeout( - hass: HomeAssistant, mock_pydrawise: AsyncMock -) -> None: - """Test that we handle connection timeouts on YAML import.""" - mock_pydrawise.get_user.side_effect = TimeoutError - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_IMPORT}, - data={ - CONF_API_KEY: "__api_key__", - CONF_SCAN_INTERVAL: 120, - }, - ) - await hass.async_block_till_done() - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "timeout_connect" - - issue_registry = ir.async_get(hass) - issue = issue_registry.async_get_issue( - DOMAIN, "deprecated_yaml_import_issue_timeout_connect" - ) - assert issue.translation_key == "deprecated_yaml_import_issue" - - -async def test_flow_import_already_imported( - hass: HomeAssistant, mock_pydrawise: 
AsyncMock, user: User -) -> None: - """Test that we can handle a YAML config already imported.""" - mock_config_entry = MockConfigEntry( - title="Hydrawise", - domain=DOMAIN, - data={ - CONF_API_KEY: "__api_key__", - }, - unique_id="hydrawise-12345", - ) - mock_config_entry.add_to_hass(hass) - - mock_pydrawise.get_user.return_value = user - - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_IMPORT}, - data={ - CONF_API_KEY: "__api_key__", - CONF_SCAN_INTERVAL: 120, - }, - ) - await hass.async_block_till_done() - - assert result["type"] is FlowResultType.ABORT - assert result.get("reason") == "already_configured" - - issue_registry = ir.async_get(hass) - issue = issue_registry.async_get_issue( - HOMEASSISTANT_DOMAIN, "deprecated_yaml_hydrawise" - ) - assert issue.translation_key == "deprecated_yaml" diff --git a/tests/components/hydrawise/test_init.py b/tests/components/hydrawise/test_init.py index 6b41867b044..91c99833531 100644 --- a/tests/components/hydrawise/test_init.py +++ b/tests/components/hydrawise/test_init.py @@ -5,29 +5,11 @@ from unittest.mock import AsyncMock from aiohttp import ClientError from homeassistant.config_entries import ConfigEntryState -from homeassistant.const import CONF_ACCESS_TOKEN -from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant -import homeassistant.helpers.issue_registry as ir -from homeassistant.setup import async_setup_component +from homeassistant.core import HomeAssistant from tests.common import MockConfigEntry -async def test_setup_import_success( - hass: HomeAssistant, mock_pydrawise: AsyncMock -) -> None: - """Test that setup with a YAML config triggers an import and warning.""" - config = {"hydrawise": {CONF_ACCESS_TOKEN: "_access-token_"}} - assert await async_setup_component(hass, "hydrawise", config) - await hass.async_block_till_done() - - issue_registry = ir.async_get(hass) - issue = issue_registry.async_get_issue( - 
HOMEASSISTANT_DOMAIN, "deprecated_yaml_hydrawise" - ) - assert issue.translation_key == "deprecated_yaml" - - async def test_connect_retry( hass: HomeAssistant, mock_config_entry: MockConfigEntry, mock_pydrawise: AsyncMock ) -> None: From 124eca4d534f705ac1092910223643e38ff18af9 Mon Sep 17 00:00:00 2001 From: Raj Laud <50647620+rajlaud@users.noreply.github.com> Date: Mon, 22 Apr 2024 11:46:12 -0400 Subject: [PATCH 247/426] Use start helper in squeezebox for server discovery (#115978) --- homeassistant/components/squeezebox/media_player.py | 9 ++------- 1 file changed, 2 insertions(+), 7 deletions(-) diff --git a/homeassistant/components/squeezebox/media_player.py b/homeassistant/components/squeezebox/media_player.py index 7d072fa2570..a3a404fe1ae 100644 --- a/homeassistant/components/squeezebox/media_player.py +++ b/homeassistant/components/squeezebox/media_player.py @@ -28,7 +28,6 @@ from homeassistant.const import ( CONF_PASSWORD, CONF_PORT, CONF_USERNAME, - EVENT_HOMEASSISTANT_START, ) from homeassistant.core import HomeAssistant, callback from homeassistant.helpers import ( @@ -44,6 +43,7 @@ from homeassistant.helpers.dispatcher import ( ) from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.event import async_call_later +from homeassistant.helpers.start import async_at_start from homeassistant.util.dt import utcnow from .browse_media import ( @@ -207,12 +207,7 @@ async def async_setup_entry( platform.async_register_entity_service(SERVICE_UNSYNC, None, "async_unsync") # Start server discovery task if not already running - if hass.is_running: - hass.async_create_task(start_server_discovery(hass)) - else: - hass.bus.async_listen_once( - EVENT_HOMEASSISTANT_START, start_server_discovery(hass) - ) + config_entry.async_on_unload(async_at_start(hass, start_server_discovery)) class SqueezeBoxEntity(MediaPlayerEntity): From 2caca7fbe3e39acc9295ca42383e8392d7a50020 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Mon, 22 
Apr 2024 19:23:08 +0200 Subject: [PATCH 248/426] Generate requirements per supported architecture (#115708) * Generate requirements per supported architecture * Don't store wheels requirements in the repo * Dry run * Set Python version * Install base packages * Fix * Fix * Fix * Fix typo Co-authored-by: Martin Hjelmare * Genarate requirements_all_pytest.txt * Fix hassfest * Reenable building wheels * Remove unneeded code * Address review comment * Fix lying comment * Add tests, address review comments * Deduplicate * Fix file name * Add comment --------- Co-authored-by: Martin Hjelmare --- .github/workflows/ci.yaml | 8 +- .github/workflows/wheels.yml | 60 +++++++------ script/gen_requirements_all.py | 104 +++++++++++++++++++--- script/hassfest/requirements.py | 10 +-- tests/script/__init__.py | 1 + tests/script/test_gen_requirements_all.py | 25 ++++++ 6 files changed, 165 insertions(+), 43 deletions(-) create mode 100644 tests/script/__init__.py create mode 100644 tests/script/test_gen_requirements_all.py diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 581a36be953..0dc8f34570c 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -97,7 +97,8 @@ jobs: hashFiles('requirements_test.txt', 'requirements_test_pre_commit.txt') }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_all.txt') }}-${{ - hashFiles('homeassistant/package_constraints.txt') }}" >> $GITHUB_OUTPUT + hashFiles('homeassistant/package_constraints.txt') }}-${{ + hashFiles('script/gen_requirements_all.py') }}" >> $GITHUB_OUTPUT - name: Generate partial pre-commit restore key id: generate_pre-commit_cache_key run: >- @@ -497,8 +498,9 @@ jobs: python --version pip install "$(grep '^uv' < requirements_test.txt)" uv pip install -U "pip>=21.3.1" setuptools wheel - uv pip install -r requirements_all.txt - uv pip install "$(grep 'python-gammu' < requirements_all.txt | sed -e 's|# python-gammu|python-gammu|g')" + uv pip install -r requirements.txt + 
python -m script.gen_requirements_all ci + uv pip install -r requirements_all_pytest.txt uv pip install -r requirements_test.txt uv pip install -e . --config-settings editable_mode=compat diff --git a/.github/workflows/wheels.yml b/.github/workflows/wheels.yml index 24033a92fd5..6618eb9963b 100644 --- a/.github/workflows/wheels.yml +++ b/.github/workflows/wheels.yml @@ -14,6 +14,10 @@ on: - "homeassistant/package_constraints.txt" - "requirements_all.txt" - "requirements.txt" + - "script/gen_requirements_all.py" + +env: + DEFAULT_PYTHON: "3.12" concurrency: group: ${{ github.workflow }}-${{ github.ref_name}} @@ -30,6 +34,21 @@ jobs: - name: Checkout the repository uses: actions/checkout@v4.1.3 + - name: Set up Python ${{ env.DEFAULT_PYTHON }} + id: python + uses: actions/setup-python@v5.1.0 + with: + python-version: ${{ env.DEFAULT_PYTHON }} + check-latest: true + + - name: Create Python virtual environment + run: | + python -m venv venv + . venv/bin/activate + python --version + pip install "$(grep '^uv' < requirements_test.txt)" + uv pip install -r requirements.txt + - name: Get information id: info uses: home-assistant/actions/helpers/info@master @@ -76,6 +95,17 @@ jobs: path: ./requirements_diff.txt overwrite: true + - name: Generate requirements + run: | + . 
venv/bin/activate + python -m script.gen_requirements_all ci + + - name: Upload requirements_all_wheels + uses: actions/upload-artifact@v4.3.1 + with: + name: requirements_all_wheels + path: ./requirements_all_wheels_*.txt + core: name: Build Core wheels ${{ matrix.abi }} for ${{ matrix.arch }} (musllinux_1_2) if: github.repository_owner == 'home-assistant' @@ -138,30 +168,10 @@ jobs: with: name: requirements_diff - - name: (Un)comment packages - run: | - requirement_files="requirements_all.txt requirements_diff.txt" - for requirement_file in ${requirement_files}; do - sed -i "s|# pyuserinput|pyuserinput|g" ${requirement_file} - sed -i "s|# evdev|evdev|g" ${requirement_file} - sed -i "s|# pycups|pycups|g" ${requirement_file} - sed -i "s|# decora-wifi|decora-wifi|g" ${requirement_file} - sed -i "s|# python-gammu|python-gammu|g" ${requirement_file} - - # Some packages are not buildable on armhf anymore - if [ "${{ matrix.arch }}" = "armhf" ]; then - - # Pandas has issues building on armhf, it is expected they - # will drop the platform in the near future (they consider it - # "flimsy" on 386). The following packages depend on pandas, - # so we comment them out. - sed -i "s|env-canada|# env-canada|g" ${requirement_file} - sed -i "s|noaa-coops|# noaa-coops|g" ${requirement_file} - sed -i "s|pyezviz|# pyezviz|g" ${requirement_file} - sed -i "s|pykrakenapi|# pykrakenapi|g" ${requirement_file} - fi - - done + - name: Download requirements_all_wheels + uses: actions/download-artifact@v4.1.4 + with: + name: requirements_all_wheels - name: Split requirements all run: | @@ -169,7 +179,7 @@ jobs: # This is to prevent the build from running out of memory when # resolving packages on 32-bits systems (like armhf, armv7). 
- split -l $(expr $(expr $(cat requirements_all.txt | wc -l) + 1) / 3) requirements_all.txt requirements_all.txt + split -l $(expr $(expr $(cat requirements_all.txt | wc -l) + 1) / 3) requirements_all_wheels_${{ matrix.arch }}.txt requirements_all.txt - name: Create requirements for cython<3 run: | diff --git a/script/gen_requirements_all.py b/script/gen_requirements_all.py index 7fc0907e756..a5db9997d9d 100755 --- a/script/gen_requirements_all.py +++ b/script/gen_requirements_all.py @@ -17,7 +17,10 @@ from typing import Any from homeassistant.util.yaml.loader import load_yaml from script.hassfest.model import Integration -COMMENT_REQUIREMENTS = ( +# Requirements which can't be installed on all systems because they rely on additional +# system packages. Requirements listed in EXCLUDED_REQUIREMENTS_ALL will be commented-out +# in requirements_all.txt and requirements_test_all.txt. +EXCLUDED_REQUIREMENTS_ALL = { "atenpdu", # depends on pysnmp which is not maintained at this time "avea", # depends on bluepy "avion", @@ -36,10 +39,39 @@ COMMENT_REQUIREMENTS = ( "pyuserinput", "tensorflow", "tf-models-official", -) +} -COMMENT_REQUIREMENTS_NORMALIZED = { - commented.lower().replace("_", "-") for commented in COMMENT_REQUIREMENTS +# Requirements excluded by EXCLUDED_REQUIREMENTS_ALL which should be included when +# building integration wheels for all architectures. +INCLUDED_REQUIREMENTS_WHEELS = { + "decora-wifi", + "evdev", + "pycups", + "python-gammu", + "pyuserinput", +} + + +# Requirements to exclude or include when running github actions. 
+# Requirements listed in "exclude" will be commented-out in +# requirements_all_{action}.txt +# Requirements listed in "include" must be listed in EXCLUDED_REQUIREMENTS_CI, and +# will be included in requirements_all_{action}.txt + +OVERRIDDEN_REQUIREMENTS_ACTIONS = { + "pytest": {"exclude": set(), "include": {"python-gammu"}}, + "wheels_aarch64": {"exclude": set(), "include": INCLUDED_REQUIREMENTS_WHEELS}, + # Pandas has issues building on armhf, it is expected they + # will drop the platform in the near future (they consider it + # "flimsy" on 386). The following packages depend on pandas, + # so we comment them out. + "wheels_armhf": { + "exclude": {"env-canada", "noaa-coops", "pyezviz", "pykrakenapi"}, + "include": INCLUDED_REQUIREMENTS_WHEELS, + }, + "wheels_armv7": {"exclude": set(), "include": INCLUDED_REQUIREMENTS_WHEELS}, + "wheels_amd64": {"exclude": set(), "include": INCLUDED_REQUIREMENTS_WHEELS}, + "wheels_i386": {"exclude": set(), "include": INCLUDED_REQUIREMENTS_WHEELS}, } IGNORE_PIN = ("colorlog>2.1,<3", "urllib3") @@ -254,6 +286,12 @@ def gather_recursive_requirements( return reqs +def _normalize_package_name(package_name: str) -> str: + """Normalize a package name.""" + # pipdeptree needs lowercase and dash instead of underscore or period as separator + return package_name.lower().replace("_", "-").replace(".", "-") + + def normalize_package_name(requirement: str) -> str: """Return a normalized package name from a requirement string.""" # This function is also used in hassfest. @@ -262,12 +300,24 @@ def normalize_package_name(requirement: str) -> str: return "" # pipdeptree needs lowercase and dash instead of underscore or period as separator - return match.group(1).lower().replace("_", "-").replace(".", "-") + return _normalize_package_name(match.group(1)) def comment_requirement(req: str) -> bool: """Comment out requirement. 
Some don't install on all systems.""" - return normalize_package_name(req) in COMMENT_REQUIREMENTS_NORMALIZED + return normalize_package_name(req) in EXCLUDED_REQUIREMENTS_ALL + + +def process_action_requirement(req: str, action: str) -> str: + """Process requirement for a specific github action.""" + normalized_package_name = normalize_package_name(req) + if normalized_package_name in OVERRIDDEN_REQUIREMENTS_ACTIONS[action]["exclude"]: + return f"# {req}" + if normalized_package_name in OVERRIDDEN_REQUIREMENTS_ACTIONS[action]["include"]: + return req + if normalized_package_name in EXCLUDED_REQUIREMENTS_ALL: + return f"# {req}" + return req def gather_modules() -> dict[str, list[str]] | None: @@ -353,6 +403,16 @@ def generate_requirements_list(reqs: dict[str, list[str]]) -> str: return "".join(output) +def generate_action_requirements_list(reqs: dict[str, list[str]], action: str) -> str: + """Generate a pip file based on requirements.""" + output = [] + for pkg, requirements in sorted(reqs.items(), key=itemgetter(0)): + output.extend(f"\n# {req}" for req in sorted(requirements)) + processed_pkg = process_action_requirement(pkg, action) + output.append(f"\n{processed_pkg}\n") + return "".join(output) + + def requirements_output() -> str: """Generate output for requirements.""" output = [ @@ -379,6 +439,18 @@ def requirements_all_output(reqs: dict[str, list[str]]) -> str: return "".join(output) +def requirements_all_action_output(reqs: dict[str, list[str]], action: str) -> str: + """Generate output for requirements_all_{action}.""" + output = [ + f"# Home Assistant Core, full dependency set for {action}\n", + GENERATED_MESSAGE, + "-r requirements.txt\n", + ] + output.append(generate_action_requirements_list(reqs, action)) + + return "".join(output) + + def requirements_test_all_output(reqs: dict[str, list[str]]) -> str: """Generate output for test_requirements.""" output = [ @@ -459,7 +531,7 @@ def diff_file(filename: str, content: str) -> list[str]: ) -def 
main(validate: bool) -> int: +def main(validate: bool, ci: bool) -> int: """Run the script.""" if not os.path.isfile("requirements_all.txt"): print("Run this from HA root dir") @@ -472,17 +544,28 @@ def main(validate: bool) -> int: reqs_file = requirements_output() reqs_all_file = requirements_all_output(data) + reqs_all_action_files = { + action: requirements_all_action_output(data, action) + for action in OVERRIDDEN_REQUIREMENTS_ACTIONS + } reqs_test_all_file = requirements_test_all_output(data) + # Always calling requirements_pre_commit_output is intentional to ensure + # the code is called by the pre-commit hooks. reqs_pre_commit_file = requirements_pre_commit_output() constraints = gather_constraints() - files = ( + files = [ ("requirements.txt", reqs_file), ("requirements_all.txt", reqs_all_file), ("requirements_test_pre_commit.txt", reqs_pre_commit_file), ("requirements_test_all.txt", reqs_test_all_file), ("homeassistant/package_constraints.txt", constraints), - ) + ] + if ci: + files.extend( + (f"requirements_all_{action}.txt", reqs_all_file) + for action, reqs_all_file in reqs_all_action_files.items() + ) if validate: errors = [] @@ -511,4 +594,5 @@ def main(validate: bool) -> int: if __name__ == "__main__": _VAL = sys.argv[-1] == "validate" - sys.exit(main(_VAL)) + _CI = sys.argv[-1] == "ci" + sys.exit(main(_VAL, _CI)) diff --git a/script/hassfest/requirements.py b/script/hassfest/requirements.py index ee63bf07f90..2c4ed47b158 100644 --- a/script/hassfest/requirements.py +++ b/script/hassfest/requirements.py @@ -15,13 +15,13 @@ from awesomeversion import AwesomeVersion, AwesomeVersionStrategy from tqdm import tqdm import homeassistant.util.package as pkg_util -from script.gen_requirements_all import COMMENT_REQUIREMENTS, normalize_package_name +from script.gen_requirements_all import ( + EXCLUDED_REQUIREMENTS_ALL, + normalize_package_name, +) from .model import Config, Integration -IGNORE_PACKAGES = { - commented.lower().replace("_", "-") for commented in 
COMMENT_REQUIREMENTS -} PACKAGE_REGEX = re.compile( r"^(?:--.+\s)?([-_,\.\w\d\[\]]+)(==|>=|<=|~=|!=|<|>|===)*(.*)$" ) @@ -116,7 +116,7 @@ def validate_requirements(integration: Integration) -> None: f"Failed to normalize package name from requirement {req}", ) return - if package in IGNORE_PACKAGES: + if package in EXCLUDED_REQUIREMENTS_ALL: continue integration_requirements.add(req) integration_packages.add(package) diff --git a/tests/script/__init__.py b/tests/script/__init__.py new file mode 100644 index 00000000000..209299782c9 --- /dev/null +++ b/tests/script/__init__.py @@ -0,0 +1 @@ +"""Tests for scripts.""" diff --git a/tests/script/test_gen_requirements_all.py b/tests/script/test_gen_requirements_all.py new file mode 100644 index 00000000000..793b3de63c5 --- /dev/null +++ b/tests/script/test_gen_requirements_all.py @@ -0,0 +1,25 @@ +"""Tests for the gen_requirements_all script.""" + +from script import gen_requirements_all + + +def test_overrides_normalized() -> None: + """Test override lists are using normalized package names.""" + for req in gen_requirements_all.EXCLUDED_REQUIREMENTS_ALL: + assert req == gen_requirements_all._normalize_package_name(req) + for req in gen_requirements_all.INCLUDED_REQUIREMENTS_WHEELS: + assert req == gen_requirements_all._normalize_package_name(req) + for overrides in gen_requirements_all.OVERRIDDEN_REQUIREMENTS_ACTIONS.values(): + for req in overrides["exclude"]: + assert req == gen_requirements_all._normalize_package_name(req) + for req in overrides["include"]: + assert req == gen_requirements_all._normalize_package_name(req) + + +def test_include_overrides_subsets() -> None: + """Test packages in include override lists are present in the exclude list.""" + for req in gen_requirements_all.INCLUDED_REQUIREMENTS_WHEELS: + assert req in gen_requirements_all.EXCLUDED_REQUIREMENTS_ALL + for overrides in gen_requirements_all.OVERRIDDEN_REQUIREMENTS_ACTIONS.values(): + for req in overrides["include"]: + assert req in 
gen_requirements_all.EXCLUDED_REQUIREMENTS_ALL From f2adae45240a78ab78fe6108dc957991ff80d5e5 Mon Sep 17 00:00:00 2001 From: Marc-Olivier Arsenault Date: Mon, 22 Apr 2024 13:28:08 -0400 Subject: [PATCH 249/426] Revert "Reduce ecobee throttle (#115968)" (#115981) --- homeassistant/components/ecobee/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/homeassistant/components/ecobee/__init__.py b/homeassistant/components/ecobee/__init__.py index c9d45b512bd..8083d0efcb4 100644 --- a/homeassistant/components/ecobee/__init__.py +++ b/homeassistant/components/ecobee/__init__.py @@ -22,7 +22,7 @@ from .const import ( PLATFORMS, ) -MIN_TIME_BETWEEN_UPDATES = timedelta(seconds=5) +MIN_TIME_BETWEEN_UPDATES = timedelta(seconds=180) CONFIG_SCHEMA = vol.Schema( {DOMAIN: vol.Schema({vol.Optional(CONF_API_KEY): cv.string})}, extra=vol.ALLOW_EXTRA From 0ed56694b04f998e0f2815bdeb7e3f151d54ece0 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Mon, 22 Apr 2024 20:09:45 +0200 Subject: [PATCH 250/426] Migrate MQTT EnsureJobAfterCooldown to use eager start (#115977) --- homeassistant/components/mqtt/client.py | 3 ++- tests/components/mqtt/test_discovery.py | 2 ++ 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/mqtt/client.py b/homeassistant/components/mqtt/client.py index 021ecf1cc36..9a344e13023 100644 --- a/homeassistant/components/mqtt/client.py +++ b/homeassistant/components/mqtt/client.py @@ -41,6 +41,7 @@ from homeassistant.helpers.dispatcher import async_dispatcher_send from homeassistant.helpers.typing import ConfigType from homeassistant.loader import bind_hass from homeassistant.util import dt as dt_util +from homeassistant.util.async_ import create_eager_task from homeassistant.util.logging import catch_log_exception from .const import ( @@ -352,7 +353,7 @@ class EnsureJobAfterCooldown: return self._async_cancel_timer() - self._task = asyncio.create_task(self._async_job()) + self._task = 
create_eager_task(self._async_job()) self._task.add_done_callback(self._async_task_done) @callback diff --git a/tests/components/mqtt/test_discovery.py b/tests/components/mqtt/test_discovery.py index 24891895fad..a00af080bf1 100644 --- a/tests/components/mqtt/test_discovery.py +++ b/tests/components/mqtt/test_discovery.py @@ -1487,6 +1487,7 @@ async def test_mqtt_integration_discovery_subscribe_unsubscribe( await async_start(hass, "homeassistant", entry) await hass.async_block_till_done() await hass.async_block_till_done() + await hass.async_block_till_done() assert ("comp/discovery/#", 0) in help_all_subscribe_calls(mqtt_client_mock) assert not mqtt_client_mock.unsubscribe.called @@ -1537,6 +1538,7 @@ async def test_mqtt_discovery_unsubscribe_once( await async_start(hass, "homeassistant", entry) await hass.async_block_till_done() await hass.async_block_till_done() + await hass.async_block_till_done() assert ("comp/discovery/#", 0) in help_all_subscribe_calls(mqtt_client_mock) assert not mqtt_client_mock.unsubscribe.called From 2ac44f60839cb572627f1e7b47a5c8f87adefa2f Mon Sep 17 00:00:00 2001 From: karwosts <32912880+karwosts@users.noreply.github.com> Date: Mon, 22 Apr 2024 11:10:18 -0700 Subject: [PATCH 251/426] Make recorder.purge_entities require at least one entity filter value (#110066) Co-authored-by: J. 
Nick Koston --- homeassistant/components/recorder/services.py | 30 ++++++++++++++----- .../components/recorder/services.yaml | 9 +++--- .../components/recorder/strings.json | 4 +++ tests/components/recorder/test_purge.py | 11 +++---- 4 files changed, 37 insertions(+), 17 deletions(-) diff --git a/homeassistant/components/recorder/services.py b/homeassistant/components/recorder/services.py index b4d719a9481..2be02fe8091 100644 --- a/homeassistant/components/recorder/services.py +++ b/homeassistant/components/recorder/services.py @@ -7,6 +7,7 @@ from typing import cast import voluptuous as vol +from homeassistant.const import ATTR_ENTITY_ID from homeassistant.core import HomeAssistant, ServiceCall, callback import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entityfilter import generate_filter @@ -36,15 +37,28 @@ SERVICE_PURGE_SCHEMA = vol.Schema( ATTR_DOMAINS = "domains" ATTR_ENTITY_GLOBS = "entity_globs" -SERVICE_PURGE_ENTITIES_SCHEMA = vol.Schema( - { - vol.Optional(ATTR_DOMAINS, default=[]): vol.All(cv.ensure_list, [cv.string]), - vol.Optional(ATTR_ENTITY_GLOBS, default=[]): vol.All( - cv.ensure_list, [cv.string] +SERVICE_PURGE_ENTITIES_SCHEMA = vol.All( + vol.Schema( + { + vol.Optional(ATTR_ENTITY_ID, default=[]): cv.entity_ids, + vol.Optional(ATTR_DOMAINS, default=[]): vol.All( + cv.ensure_list, [cv.string] + ), + vol.Optional(ATTR_ENTITY_GLOBS, default=[]): vol.All( + cv.ensure_list, [cv.string] + ), + vol.Optional(ATTR_KEEP_DAYS, default=0): cv.positive_int, + } + ), + vol.Any( + vol.Schema({vol.Required(ATTR_ENTITY_ID): vol.IsTrue()}, extra=vol.ALLOW_EXTRA), + vol.Schema({vol.Required(ATTR_DOMAINS): vol.IsTrue()}, extra=vol.ALLOW_EXTRA), + vol.Schema( + {vol.Required(ATTR_ENTITY_GLOBS): vol.IsTrue()}, extra=vol.ALLOW_EXTRA ), - vol.Optional(ATTR_KEEP_DAYS, default=0): cv.positive_int, - } -).extend(cv.ENTITY_SERVICE_FIELDS) + msg="At least one of entity_id, domains, or entity_globs must have a value", + ), +) SERVICE_ENABLE_SCHEMA 
= vol.Schema({}) SERVICE_DISABLE_SCHEMA = vol.Schema({}) diff --git a/homeassistant/components/recorder/services.yaml b/homeassistant/components/recorder/services.yaml index b74dcc2a494..7d7b926548c 100644 --- a/homeassistant/components/recorder/services.yaml +++ b/homeassistant/components/recorder/services.yaml @@ -20,20 +20,21 @@ purge: boolean: purge_entities: - target: - entity: {} fields: + entity_id: + required: false + selector: + entity: + multiple: true domains: example: "sun" required: false - default: [] selector: object: entity_globs: example: "domain*.object_id*" required: false - default: [] selector: object: diff --git a/homeassistant/components/recorder/strings.json b/homeassistant/components/recorder/strings.json index 74b248354d7..bf5d95ae1fc 100644 --- a/homeassistant/components/recorder/strings.json +++ b/homeassistant/components/recorder/strings.json @@ -41,6 +41,10 @@ "name": "Purge entities", "description": "Starts a purge task to remove the data related to specific entities from your database.", "fields": { + "entity_id": { + "name": "Entities to remove", + "description": "List of entities for which the data is to be removed from the recorder database." + }, "domains": { "name": "Domains to remove", "description": "List of domains for which the data needs to be removed from the recorder database." 
diff --git a/tests/components/recorder/test_purge.py b/tests/components/recorder/test_purge.py index b2da3f1d62f..e80bc7ca7d1 100644 --- a/tests/components/recorder/test_purge.py +++ b/tests/components/recorder/test_purge.py @@ -9,6 +9,7 @@ from freezegun import freeze_time import pytest from sqlalchemy.exc import DatabaseError, OperationalError from sqlalchemy.orm.session import Session +from voluptuous.error import MultipleInvalid from homeassistant.components import recorder from homeassistant.components.recorder.const import SupportedDialect @@ -1446,20 +1447,20 @@ async def test_purge_entities( _add_purge_records(hass) - # Confirm calling service without arguments matches all records (default filter behavior) + # Confirm calling service without arguments is invalid with session_scope(hass=hass) as session: states = session.query(States) assert states.count() == 190 - await _purge_entities(hass, [], [], []) + with pytest.raises(MultipleInvalid): + await _purge_entities(hass, [], [], []) with session_scope(hass=hass, read_only=True) as session: states = session.query(States) - assert states.count() == 0 + assert states.count() == 190 - # The states_meta table should be empty states_meta_remain = session.query(StatesMeta) - assert states_meta_remain.count() == 0 + assert states_meta_remain.count() == 4 async def _add_test_states(hass: HomeAssistant, wait_recording_done: bool = True): From 5318a6f4650e440fbd7ade3b552e5cb46ef39161 Mon Sep 17 00:00:00 2001 From: G Johansson Date: Mon, 22 Apr 2024 21:33:56 +0200 Subject: [PATCH 252/426] Bump holidays to 0.47 (#115992) --- homeassistant/components/holiday/manifest.json | 2 +- homeassistant/components/workday/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/holiday/manifest.json b/homeassistant/components/holiday/manifest.json index 5a1edcd3c3f..3494798b50b 100644 --- 
a/homeassistant/components/holiday/manifest.json +++ b/homeassistant/components/holiday/manifest.json @@ -5,5 +5,5 @@ "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/holiday", "iot_class": "local_polling", - "requirements": ["holidays==0.46", "babel==2.13.1"] + "requirements": ["holidays==0.47", "babel==2.13.1"] } diff --git a/homeassistant/components/workday/manifest.json b/homeassistant/components/workday/manifest.json index 314f4c6bcf4..e0813cd90cd 100644 --- a/homeassistant/components/workday/manifest.json +++ b/homeassistant/components/workday/manifest.json @@ -7,5 +7,5 @@ "iot_class": "local_polling", "loggers": ["holidays"], "quality_scale": "internal", - "requirements": ["holidays==0.46"] + "requirements": ["holidays==0.47"] } diff --git a/requirements_all.txt b/requirements_all.txt index 573f2f4a8d3..3df28349edd 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1072,7 +1072,7 @@ hole==0.8.0 # homeassistant.components.holiday # homeassistant.components.workday -holidays==0.46 +holidays==0.47 # homeassistant.components.frontend home-assistant-frontend==20240404.2 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index b3e0b9feaf6..eeb7014b62d 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -874,7 +874,7 @@ hole==0.8.0 # homeassistant.components.holiday # homeassistant.components.workday -holidays==0.46 +holidays==0.47 # homeassistant.components.frontend home-assistant-frontend==20240404.2 From b69f589c30e3e1908f26edd3c9103a13ab1d8417 Mon Sep 17 00:00:00 2001 From: Kim de Vos Date: Mon, 22 Apr 2024 22:39:46 +0200 Subject: [PATCH 253/426] Add bandwidth sensor for unifi device ports (#115362) --- homeassistant/components/unifi/sensor.py | 37 ++++++ tests/components/unifi/test_sensor.py | 149 +++++++++++++++++++++++ 2 files changed, 186 insertions(+) diff --git a/homeassistant/components/unifi/sensor.py b/homeassistant/components/unifi/sensor.py index 
7d9720cde1a..3979f45ecd8 100644 --- a/homeassistant/components/unifi/sensor.py +++ b/homeassistant/components/unifi/sensor.py @@ -11,6 +11,7 @@ from dataclasses import dataclass from datetime import date, datetime, timedelta from decimal import Decimal from functools import partial +from typing import cast from aiounifi.interfaces.api_handlers import ItemEvent from aiounifi.interfaces.clients import Clients @@ -239,6 +240,42 @@ ENTITY_DESCRIPTIONS: tuple[UnifiSensorEntityDescription, ...] = ( unique_id_fn=lambda hub, obj_id: f"poe_power-{obj_id}", value_fn=lambda _, obj: obj.poe_power if obj.poe_mode != "off" else "0", ), + UnifiSensorEntityDescription[Ports, Port]( + key="Port Bandwidth sensor RX", + device_class=SensorDeviceClass.DATA_RATE, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfDataRate.BYTES_PER_SECOND, + suggested_unit_of_measurement=UnitOfDataRate.MEGABITS_PER_SECOND, + icon="mdi:download", + allowed_fn=lambda hub, _: hub.config.option_allow_bandwidth_sensors, + api_handler_fn=lambda api: api.ports, + available_fn=async_device_available_fn, + device_info_fn=async_device_device_info_fn, + name_fn=lambda port: f"{port.name} RX", + object_fn=lambda api, obj_id: api.ports[obj_id], + unique_id_fn=lambda hub, obj_id: f"port_rx-{obj_id}", + value_fn=lambda hub, port: cast(float, port.raw.get("rx_bytes-r", 0)), + ), + UnifiSensorEntityDescription[Ports, Port]( + key="Port Bandwidth sensor TX", + device_class=SensorDeviceClass.DATA_RATE, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfDataRate.BYTES_PER_SECOND, + suggested_unit_of_measurement=UnitOfDataRate.MEGABITS_PER_SECOND, + icon="mdi:upload", + allowed_fn=lambda hub, _: hub.config.option_allow_bandwidth_sensors, + api_handler_fn=lambda api: api.ports, + 
available_fn=async_device_available_fn, + device_info_fn=async_device_device_info_fn, + name_fn=lambda port: f"{port.name} TX", + object_fn=lambda api, obj_id: api.ports[obj_id], + unique_id_fn=lambda hub, obj_id: f"port_tx-{obj_id}", + value_fn=lambda hub, port: cast(float, port.raw.get("tx_bytes-r", 0)), + ), UnifiSensorEntityDescription[Clients, Client]( key="Client uptime", device_class=SensorDeviceClass.TIMESTAMP, diff --git a/tests/components/unifi/test_sensor.py b/tests/components/unifi/test_sensor.py index e3b4ddd3b63..26eadfa498e 100644 --- a/tests/components/unifi/test_sensor.py +++ b/tests/components/unifi/test_sensor.py @@ -1042,3 +1042,152 @@ async def test_device_system_stats( assert hass.states.get("sensor.device_cpu_utilization").state == "7.7" assert hass.states.get("sensor.device_memory_utilization").state == "33.3" + + +async def test_bandwidth_port_sensors( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + aioclient_mock: AiohttpClientMocker, + mock_unifi_websocket, +) -> None: + """Verify that port bandwidth sensors are working as expected.""" + device_reponse = { + "board_rev": 2, + "device_id": "mock-id", + "ip": "10.0.1.1", + "mac": "10:00:00:00:01:01", + "last_seen": 1562600145, + "model": "US16P150", + "name": "mock-name", + "port_overrides": [], + "port_table": [ + { + "media": "GE", + "name": "Port 1", + "port_idx": 1, + "poe_class": "Class 4", + "poe_enable": False, + "poe_mode": "auto", + "poe_power": "2.56", + "poe_voltage": "53.40", + "portconf_id": "1a1", + "port_poe": False, + "up": True, + "rx_bytes-r": 1151, + "tx_bytes-r": 5111, + }, + { + "media": "GE", + "name": "Port 2", + "port_idx": 2, + "poe_class": "Class 4", + "poe_enable": False, + "poe_mode": "auto", + "poe_power": "2.56", + "poe_voltage": "53.40", + "portconf_id": "1a2", + "port_poe": False, + "up": True, + "rx_bytes-r": 1536, + "tx_bytes-r": 3615, + }, + ], + "state": 1, + "type": "usw", + "version": "4.0.42.10433", + } + options = { + 
CONF_ALLOW_BANDWIDTH_SENSORS: True, + CONF_ALLOW_UPTIME_SENSORS: False, + CONF_TRACK_CLIENTS: False, + CONF_TRACK_DEVICES: False, + } + + config_entry = await setup_unifi_integration( + hass, + aioclient_mock, + options=options, + devices_response=[device_reponse], + ) + + assert len(hass.states.async_all()) == 5 + assert len(hass.states.async_entity_ids(SENSOR_DOMAIN)) == 2 + + p1rx_reg_entry = entity_registry.async_get("sensor.mock_name_port_1_rx") + assert p1rx_reg_entry.disabled_by == RegistryEntryDisabler.INTEGRATION + assert p1rx_reg_entry.entity_category is EntityCategory.DIAGNOSTIC + + p1tx_reg_entry = entity_registry.async_get("sensor.mock_name_port_1_tx") + assert p1tx_reg_entry.disabled_by == RegistryEntryDisabler.INTEGRATION + assert p1tx_reg_entry.entity_category is EntityCategory.DIAGNOSTIC + + # Enable entity + entity_registry.async_update_entity( + entity_id="sensor.mock_name_port_1_rx", disabled_by=None + ) + entity_registry.async_update_entity( + entity_id="sensor.mock_name_port_1_tx", disabled_by=None + ) + entity_registry.async_update_entity( + entity_id="sensor.mock_name_port_2_rx", disabled_by=None + ) + entity_registry.async_update_entity( + entity_id="sensor.mock_name_port_2_tx", disabled_by=None + ) + await hass.async_block_till_done() + + async_fire_time_changed( + hass, + dt_util.utcnow() + timedelta(seconds=RELOAD_AFTER_UPDATE_DELAY + 1), + ) + await hass.async_block_till_done() + + # Validate state object + assert len(hass.states.async_all()) == 9 + assert len(hass.states.async_entity_ids(SENSOR_DOMAIN)) == 6 + + # Verify sensor attributes and state + p1rx_sensor = hass.states.get("sensor.mock_name_port_1_rx") + assert p1rx_sensor.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.DATA_RATE + assert p1rx_sensor.attributes.get(ATTR_STATE_CLASS) == SensorStateClass.MEASUREMENT + assert p1rx_sensor.state == "0.00921" + + p1tx_sensor = hass.states.get("sensor.mock_name_port_1_tx") + assert p1tx_sensor.attributes.get(ATTR_DEVICE_CLASS) 
== SensorDeviceClass.DATA_RATE + assert p1tx_sensor.attributes.get(ATTR_STATE_CLASS) == SensorStateClass.MEASUREMENT + assert p1tx_sensor.state == "0.04089" + + p2rx_sensor = hass.states.get("sensor.mock_name_port_2_rx") + assert p2rx_sensor.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.DATA_RATE + assert p2rx_sensor.attributes.get(ATTR_STATE_CLASS) == SensorStateClass.MEASUREMENT + assert p2rx_sensor.state == "0.01229" + + p2tx_sensor = hass.states.get("sensor.mock_name_port_2_tx") + assert p2tx_sensor.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.DATA_RATE + assert p2tx_sensor.attributes.get(ATTR_STATE_CLASS) == SensorStateClass.MEASUREMENT + assert p2tx_sensor.state == "0.02892" + + # Verify state update + device_reponse["port_table"][0]["rx_bytes-r"] = 3456000000 + device_reponse["port_table"][0]["tx_bytes-r"] = 7891000000 + + mock_unifi_websocket(message=MessageKey.DEVICE, data=device_reponse) + await hass.async_block_till_done() + + assert hass.states.get("sensor.mock_name_port_1_rx").state == "27648.00000" + assert hass.states.get("sensor.mock_name_port_1_tx").state == "63128.00000" + + # Disable option + options[CONF_ALLOW_BANDWIDTH_SENSORS] = False + hass.config_entries.async_update_entry(config_entry, options=options.copy()) + await hass.async_block_till_done() + + assert len(hass.states.async_all()) == 5 + assert len(hass.states.async_entity_ids(SENSOR_DOMAIN)) == 2 + + assert hass.states.get("sensor.mock_name_uptime") + assert hass.states.get("sensor.mock_name_state") + assert hass.states.get("sensor.mock_name_port_1_rx") is None + assert hass.states.get("sensor.mock_name_port_1_tx") is None + assert hass.states.get("sensor.mock_name_port_2_rx") is None + assert hass.states.get("sensor.mock_name_port_2_tx") is None From c32961f1bc0cd2a17b4e851d2fbb0cf88b71feec Mon Sep 17 00:00:00 2001 From: Robert Svensson Date: Tue, 23 Apr 2024 07:48:25 +0200 Subject: [PATCH 254/426] Bump aiounifi to v76 (#116005) * Bump aiounifi to v76 --- 
homeassistant/components/unifi/manifest.json | 2 +- homeassistant/components/unifi/sensor.py | 5 ++--- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 4 files changed, 5 insertions(+), 6 deletions(-) diff --git a/homeassistant/components/unifi/manifest.json b/homeassistant/components/unifi/manifest.json index 305400a4b9d..982d654c8fe 100644 --- a/homeassistant/components/unifi/manifest.json +++ b/homeassistant/components/unifi/manifest.json @@ -8,7 +8,7 @@ "iot_class": "local_push", "loggers": ["aiounifi"], "quality_scale": "platinum", - "requirements": ["aiounifi==75"], + "requirements": ["aiounifi==76"], "ssdp": [ { "manufacturer": "Ubiquiti Networks", diff --git a/homeassistant/components/unifi/sensor.py b/homeassistant/components/unifi/sensor.py index 3979f45ecd8..cec87b36416 100644 --- a/homeassistant/components/unifi/sensor.py +++ b/homeassistant/components/unifi/sensor.py @@ -11,7 +11,6 @@ from dataclasses import dataclass from datetime import date, datetime, timedelta from decimal import Decimal from functools import partial -from typing import cast from aiounifi.interfaces.api_handlers import ItemEvent from aiounifi.interfaces.clients import Clients @@ -256,7 +255,7 @@ ENTITY_DESCRIPTIONS: tuple[UnifiSensorEntityDescription, ...] = ( name_fn=lambda port: f"{port.name} RX", object_fn=lambda api, obj_id: api.ports[obj_id], unique_id_fn=lambda hub, obj_id: f"port_rx-{obj_id}", - value_fn=lambda hub, port: cast(float, port.raw.get("rx_bytes-r", 0)), + value_fn=lambda hub, port: port.rx_bytes_r, ), UnifiSensorEntityDescription[Ports, Port]( key="Port Bandwidth sensor TX", @@ -274,7 +273,7 @@ ENTITY_DESCRIPTIONS: tuple[UnifiSensorEntityDescription, ...] 
= ( name_fn=lambda port: f"{port.name} TX", object_fn=lambda api, obj_id: api.ports[obj_id], unique_id_fn=lambda hub, obj_id: f"port_tx-{obj_id}", - value_fn=lambda hub, port: cast(float, port.raw.get("tx_bytes-r", 0)), + value_fn=lambda hub, port: port.tx_bytes_r, ), UnifiSensorEntityDescription[Clients, Client]( key="Client uptime", diff --git a/requirements_all.txt b/requirements_all.txt index 3df28349edd..058e8102e18 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -383,7 +383,7 @@ aiotankerkoenig==0.4.1 aiotractive==0.5.6 # homeassistant.components.unifi -aiounifi==75 +aiounifi==76 # homeassistant.components.vlc_telnet aiovlc==0.1.0 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index eeb7014b62d..879f2b9123e 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -356,7 +356,7 @@ aiotankerkoenig==0.4.1 aiotractive==0.5.6 # homeassistant.components.unifi -aiounifi==75 +aiounifi==76 # homeassistant.components.vlc_telnet aiovlc==0.1.0 From 2fafdc64d5934fcd700f7d336c7e798546258b73 Mon Sep 17 00:00:00 2001 From: Robert Resch Date: Tue, 23 Apr 2024 08:48:35 +0200 Subject: [PATCH 255/426] Bump uv to 0.1.35 (#115985) --- Dockerfile | 2 +- requirements_test.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/Dockerfile b/Dockerfile index 28b65d6383d..c916a3d2f3c 100644 --- a/Dockerfile +++ b/Dockerfile @@ -12,7 +12,7 @@ ENV \ ARG QEMU_CPU # Install uv -RUN pip3 install uv==0.1.27 +RUN pip3 install uv==0.1.35 WORKDIR /usr/src diff --git a/requirements_test.txt b/requirements_test.txt index f13e0e6a36b..e42a94091ad 100644 --- a/requirements_test.txt +++ b/requirements_test.txt @@ -50,4 +50,4 @@ types-pytz==2024.1.0.20240203 types-PyYAML==6.0.12.20240311 types-requests==2.31.0.3 types-xmltodict==0.13.0.3 -uv==0.1.27 +uv==0.1.35 From 917f4136a7d6836c6cd349a561feb02c96394d2a Mon Sep 17 00:00:00 2001 From: G Johansson Date: Tue, 23 Apr 2024 08:55:39 +0200 Subject: [PATCH 256/426] Add config flow to 
Folder Watcher (#105605) * Add config flow to Folder Watcher * Add tests config flow * docstrings * watcher is sync * Fix strings * Fix * setup_entry issue * ConfigFlowResult * Review comments * Review comment * ruff * new date --- .../components/folder_watcher/__init__.py | 70 +++++-- .../components/folder_watcher/config_flow.py | 116 +++++++++++ .../components/folder_watcher/const.py | 6 + .../components/folder_watcher/manifest.json | 1 + .../components/folder_watcher/strings.json | 46 +++++ homeassistant/generated/config_flows.py | 1 + homeassistant/generated/integrations.json | 2 +- tests/components/folder_watcher/conftest.py | 17 ++ .../folder_watcher/test_config_flow.py | 186 ++++++++++++++++++ 9 files changed, 430 insertions(+), 15 deletions(-) create mode 100644 homeassistant/components/folder_watcher/config_flow.py create mode 100644 homeassistant/components/folder_watcher/const.py create mode 100644 homeassistant/components/folder_watcher/strings.json create mode 100644 tests/components/folder_watcher/conftest.py create mode 100644 tests/components/folder_watcher/test_config_flow.py diff --git a/homeassistant/components/folder_watcher/__init__.py b/homeassistant/components/folder_watcher/__init__.py index d111fe03c5c..3f0b9e8f6da 100644 --- a/homeassistant/components/folder_watcher/__init__.py +++ b/homeassistant/components/folder_watcher/__init__.py @@ -4,7 +4,7 @@ from __future__ import annotations import logging import os -from typing import cast +from typing import Any, cast import voluptuous as vol from watchdog.events import ( @@ -19,17 +19,17 @@ from watchdog.events import ( ) from watchdog.observers import Observer +from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry from homeassistant.const import EVENT_HOMEASSISTANT_START, EVENT_HOMEASSISTANT_STOP from homeassistant.core import Event, HomeAssistant import homeassistant.helpers.config_validation as cv +from homeassistant.helpers.issue_registry import IssueSeverity, 
async_create_issue from homeassistant.helpers.typing import ConfigType +from .const import CONF_FOLDER, CONF_PATTERNS, DEFAULT_PATTERN, DOMAIN + _LOGGER = logging.getLogger(__name__) -CONF_FOLDER = "folder" -CONF_PATTERNS = "patterns" -DEFAULT_PATTERN = "*" -DOMAIN = "folder_watcher" CONFIG_SCHEMA = vol.Schema( { @@ -51,20 +51,62 @@ CONFIG_SCHEMA = vol.Schema( ) -def setup(hass: HomeAssistant, config: ConfigType) -> bool: +async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Set up the folder watcher.""" - conf = config[DOMAIN] - for watcher in conf: - path: str = watcher[CONF_FOLDER] - patterns: list[str] = watcher[CONF_PATTERNS] - if not hass.config.is_allowed_path(path): - _LOGGER.error("Folder %s is not valid or allowed", path) - return False - Watcher(path, patterns, hass) + if DOMAIN in config: + conf: list[dict[str, Any]] = config[DOMAIN] + for watcher in conf: + path: str = watcher[CONF_FOLDER] + if not hass.config.is_allowed_path(path): + async_create_issue( + hass, + DOMAIN, + f"import_failed_not_allowed_path_{path}", + is_fixable=False, + is_persistent=False, + severity=IssueSeverity.ERROR, + translation_key="import_failed_not_allowed_path", + translation_placeholders={ + "path": path, + "config_variable": "allowlist_external_dirs", + }, + ) + continue + hass.async_create_task( + hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_IMPORT}, data=watcher + ) + ) return True +async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: + """Set up Folder watcher from a config entry.""" + + path: str = entry.options[CONF_FOLDER] + patterns: list[str] = entry.options[CONF_PATTERNS] + if not hass.config.is_allowed_path(path): + _LOGGER.error("Folder %s is not valid or allowed", path) + async_create_issue( + hass, + DOMAIN, + f"setup_not_allowed_path_{path}", + is_fixable=False, + is_persistent=False, + severity=IssueSeverity.ERROR, + translation_key="setup_not_allowed_path", + 
translation_placeholders={ + "path": path, + "config_variable": "allowlist_external_dirs", + }, + learn_more_url="https://www.home-assistant.io/docs/configuration/basic/#allowlist_external_dirs", + ) + return False + await hass.async_add_executor_job(Watcher, path, patterns, hass) + return True + + def create_event_handler(patterns: list[str], hass: HomeAssistant) -> EventHandler: """Return the Watchdog EventHandler object.""" diff --git a/homeassistant/components/folder_watcher/config_flow.py b/homeassistant/components/folder_watcher/config_flow.py new file mode 100644 index 00000000000..50d198df3c3 --- /dev/null +++ b/homeassistant/components/folder_watcher/config_flow.py @@ -0,0 +1,116 @@ +"""Adds config flow for Folder watcher.""" + +from __future__ import annotations + +from collections.abc import Mapping +import os +from typing import Any + +import voluptuous as vol + +from homeassistant.components.homeassistant import DOMAIN as HOMEASSISTANT_DOMAIN +from homeassistant.config_entries import ConfigFlowResult +from homeassistant.core import callback +from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue +from homeassistant.helpers.schema_config_entry_flow import ( + SchemaCommonFlowHandler, + SchemaConfigFlowHandler, + SchemaFlowError, + SchemaFlowFormStep, +) +from homeassistant.helpers.selector import ( + SelectSelector, + SelectSelectorConfig, + SelectSelectorMode, + TextSelector, +) + +from .const import CONF_FOLDER, CONF_PATTERNS, DEFAULT_PATTERN, DOMAIN + + +async def validate_setup( + handler: SchemaCommonFlowHandler, user_input: dict[str, Any] +) -> dict[str, Any]: + """Check path is a folder.""" + value: str = user_input[CONF_FOLDER] + dir_in = os.path.expanduser(str(value)) + handler.parent_handler._async_abort_entries_match({CONF_FOLDER: value}) # pylint: disable=protected-access + + if not os.path.isdir(dir_in): + raise SchemaFlowError("not_dir") + if not os.access(dir_in, os.R_OK): + raise 
SchemaFlowError("not_readable_dir") + if not handler.parent_handler.hass.config.is_allowed_path(value): + raise SchemaFlowError("not_allowed_dir") + + return user_input + + +async def validate_import_setup( + handler: SchemaCommonFlowHandler, user_input: dict[str, Any] +) -> dict[str, Any]: + """Create issue on successful import.""" + async_create_issue( + handler.parent_handler.hass, + HOMEASSISTANT_DOMAIN, + f"deprecated_yaml_{DOMAIN}", + breaks_in_ha_version="2024.11.0", + is_fixable=False, + is_persistent=False, + issue_domain=DOMAIN, + severity=IssueSeverity.WARNING, + translation_key="deprecated_yaml", + translation_placeholders={ + "domain": DOMAIN, + "integration_title": "Folder Watcher", + }, + ) + return user_input + + +OPTIONS_SCHEMA = vol.Schema( + { + vol.Optional(CONF_PATTERNS, default=[DEFAULT_PATTERN]): SelectSelector( + SelectSelectorConfig( + options=[DEFAULT_PATTERN], + multiple=True, + custom_value=True, + mode=SelectSelectorMode.DROPDOWN, + ) + ), + } +) +DATA_SCHEMA = vol.Schema( + { + vol.Required(CONF_FOLDER): TextSelector(), + } +).extend(OPTIONS_SCHEMA.schema) + +CONFIG_FLOW = { + "user": SchemaFlowFormStep(schema=DATA_SCHEMA, validate_user_input=validate_setup), + "import": SchemaFlowFormStep( + schema=DATA_SCHEMA, validate_user_input=validate_import_setup + ), +} +OPTIONS_FLOW = { + "init": SchemaFlowFormStep(schema=OPTIONS_SCHEMA), +} + + +class FolderWatcherConfigFlowHandler(SchemaConfigFlowHandler, domain=DOMAIN): + """Handle a config flow for Folder Watcher.""" + + config_flow = CONFIG_FLOW + options_flow = OPTIONS_FLOW + + def async_config_entry_title(self, options: Mapping[str, Any]) -> str: + """Return config entry title.""" + return f"Folder Watcher {options[CONF_FOLDER]}" + + @callback + def async_create_entry( + self, data: Mapping[str, Any], **kwargs: Any + ) -> ConfigFlowResult: + """Finish config flow and create a config entry.""" + self._async_abort_entries_match({CONF_FOLDER: data[CONF_FOLDER]}) + return 
super().async_create_entry(data, **kwargs) diff --git a/homeassistant/components/folder_watcher/const.py b/homeassistant/components/folder_watcher/const.py new file mode 100644 index 00000000000..22dae3b9164 --- /dev/null +++ b/homeassistant/components/folder_watcher/const.py @@ -0,0 +1,6 @@ +"""Constants for Folder watcher.""" + +CONF_FOLDER = "folder" +CONF_PATTERNS = "patterns" +DEFAULT_PATTERN = "*" +DOMAIN = "folder_watcher" diff --git a/homeassistant/components/folder_watcher/manifest.json b/homeassistant/components/folder_watcher/manifest.json index 96decd0b8cf..7b471e08fcc 100644 --- a/homeassistant/components/folder_watcher/manifest.json +++ b/homeassistant/components/folder_watcher/manifest.json @@ -2,6 +2,7 @@ "domain": "folder_watcher", "name": "Folder Watcher", "codeowners": [], + "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/folder_watcher", "iot_class": "local_polling", "loggers": ["watchdog"], diff --git a/homeassistant/components/folder_watcher/strings.json b/homeassistant/components/folder_watcher/strings.json new file mode 100644 index 00000000000..bd1742b8ce3 --- /dev/null +++ b/homeassistant/components/folder_watcher/strings.json @@ -0,0 +1,46 @@ +{ + "config": { + "abort": { + "already_configured": "[%key:common::config_flow::abort::already_configured_service%]" + }, + "error": { + "not_dir": "Configured path is not a directory", + "not_readable_dir": "Configured path is not readable", + "not_allowed_dir": "Configured path is not in allowlist" + }, + "step": { + "user": { + "data": { + "folder": "Path to the watched folder", + "patterns": "Pattern(s) to monitor" + }, + "data_description": { + "folder": "Path needs to be from root, as example `/config`", + "patterns": "Example: `*.yaml` to only see yaml files" + } + } + } + }, + "options": { + "step": { + "init": { + "data": { + "patterns": "[%key:component::folder_watcher::config::step::user::data::patterns%]" + }, + "data_description": { + "patterns": 
"[%key:component::folder_watcher::config::step::user::data_description::patterns%]" + } + } + } + }, + "issues": { + "import_failed_not_allowed_path": { + "title": "The Folder Watcher YAML configuration could not be imported", + "description": "Configuring Folder Watcher using YAML is being removed but your configuration could not be imported as the folder {path} is not in the configured allowlist.\n\nPlease add it to `{config_variable}` in config.yaml and restart Home Assistant to import it and fix this issue." + }, + "setup_not_allowed_path": { + "title": "The Folder Watcher configuration for {path} could not start", + "description": "The path {path} is not accessible or not allowed to be accessed.\n\nPlease check the path is accessible and add it to `{config_variable}` in config.yaml and restart Home Assistant to fix this issue." + } + } +} diff --git a/homeassistant/generated/config_flows.py b/homeassistant/generated/config_flows.py index e5d5f37ad5a..6f6ce237904 100644 --- a/homeassistant/generated/config_flows.py +++ b/homeassistant/generated/config_flows.py @@ -175,6 +175,7 @@ FLOWS = { "flo", "flume", "flux_led", + "folder_watcher", "forecast_solar", "forked_daapd", "foscam", diff --git a/homeassistant/generated/integrations.json b/homeassistant/generated/integrations.json index 0ee796d5376..e6a103989d1 100644 --- a/homeassistant/generated/integrations.json +++ b/homeassistant/generated/integrations.json @@ -1956,7 +1956,7 @@ "folder_watcher": { "name": "Folder Watcher", "integration_type": "hub", - "config_flow": false, + "config_flow": true, "iot_class": "local_polling" }, "foobot": { diff --git a/tests/components/folder_watcher/conftest.py b/tests/components/folder_watcher/conftest.py new file mode 100644 index 00000000000..06c0a41d49c --- /dev/null +++ b/tests/components/folder_watcher/conftest.py @@ -0,0 +1,17 @@ +"""Fixtures for Folder Watcher integration tests.""" + +from __future__ import annotations + +from collections.abc import Generator +from 
unittest.mock import patch + +import pytest + + +@pytest.fixture +def mock_setup_entry() -> Generator[None, None, None]: + """Mock setting up a config entry.""" + with patch( + "homeassistant.components.folder_watcher.async_setup_entry", return_value=True + ): + yield diff --git a/tests/components/folder_watcher/test_config_flow.py b/tests/components/folder_watcher/test_config_flow.py new file mode 100644 index 00000000000..745059717fb --- /dev/null +++ b/tests/components/folder_watcher/test_config_flow.py @@ -0,0 +1,186 @@ +"""Test the Folder Watcher config flow.""" + +from pathlib import Path +from unittest.mock import patch + +import pytest + +from homeassistant import config_entries +from homeassistant.components.folder_watcher.const import ( + CONF_FOLDER, + CONF_PATTERNS, + DOMAIN, +) +from homeassistant.core import HomeAssistant +from homeassistant.data_entry_flow import FlowResultType + +from tests.common import MockConfigEntry + +pytestmark = pytest.mark.usefixtures("mock_setup_entry") + + +async def test_form(hass: HomeAssistant, tmp_path: Path) -> None: + """Test we get the form.""" + path = tmp_path.as_posix() + hass.config.allowlist_external_dirs = {path} + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + assert result["type"] == FlowResultType.FORM + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_FOLDER: path}, + ) + await hass.async_block_till_done() + + assert result["type"] == FlowResultType.CREATE_ENTRY + assert result["title"] == f"Folder Watcher {path}" + assert result["options"] == {CONF_FOLDER: path, CONF_PATTERNS: ["*"]} + + +async def test_form_not_allowed_path(hass: HomeAssistant, tmp_path: Path) -> None: + """Test we handle not allowed path.""" + path = tmp_path.as_posix() + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + + result = await 
hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_FOLDER: path}, + ) + + assert result["type"] == FlowResultType.FORM + assert result["errors"] == {"base": "not_allowed_dir"} + + hass.config.allowlist_external_dirs = {tmp_path} + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_FOLDER: path}, + ) + await hass.async_block_till_done() + + assert result["type"] == FlowResultType.CREATE_ENTRY + assert result["title"] == f"Folder Watcher {path}" + assert result["options"] == {CONF_FOLDER: path, CONF_PATTERNS: ["*"]} + + +async def test_form_not_directory(hass: HomeAssistant, tmp_path: Path) -> None: + """Test we handle not a directory.""" + path = tmp_path.as_posix() + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_FOLDER: "not_a_directory"}, + ) + + assert result["type"] == FlowResultType.FORM + assert result["errors"] == {"base": "not_dir"} + + hass.config.allowlist_external_dirs = {path} + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_FOLDER: path}, + ) + await hass.async_block_till_done() + + assert result["type"] == FlowResultType.CREATE_ENTRY + assert result["title"] == f"Folder Watcher {path}" + assert result["options"] == {CONF_FOLDER: path, CONF_PATTERNS: ["*"]} + + +async def test_form_not_readable_dir(hass: HomeAssistant, tmp_path: Path) -> None: + """Test we handle not able to read directory.""" + path = tmp_path.as_posix() + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + + with patch("os.access", return_value=False): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_FOLDER: path}, + ) + await hass.async_block_till_done() + + assert result["type"] == FlowResultType.FORM + assert result["errors"] == 
{"base": "not_readable_dir"} + + hass.config.allowlist_external_dirs = {path} + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_FOLDER: path}, + ) + await hass.async_block_till_done() + + assert result["type"] == FlowResultType.CREATE_ENTRY + assert result["title"] == f"Folder Watcher {path}" + assert result["options"] == {CONF_FOLDER: path, CONF_PATTERNS: ["*"]} + + +async def test_form_already_configured(hass: HomeAssistant, tmp_path: Path) -> None: + """Test we abort when entry is already configured.""" + path = tmp_path.as_posix() + hass.config.allowlist_external_dirs = {path} + + entry = MockConfigEntry( + domain=DOMAIN, + title=f"Folder Watcher {path}", + data={CONF_FOLDER: path}, + ) + entry.add_to_hass(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_FOLDER: path}, + ) + + assert result["type"] == FlowResultType.ABORT + assert result["reason"] == "already_configured" + + +async def test_import(hass: HomeAssistant, tmp_path: Path) -> None: + """Test import flow.""" + path = tmp_path.as_posix() + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_IMPORT}, + data={CONF_FOLDER: path, CONF_PATTERNS: ["*"]}, + ) + await hass.async_block_till_done() + + assert result["type"] == FlowResultType.CREATE_ENTRY + assert result["title"] == f"Folder Watcher {path}" + assert result["options"] == {CONF_FOLDER: path, CONF_PATTERNS: ["*"]} + + +async def test_import_already_configured(hass: HomeAssistant, tmp_path: Path) -> None: + """Test we abort import when entry is already configured.""" + path = tmp_path.as_posix() + + entry = MockConfigEntry( + domain=DOMAIN, + title=f"Folder Watcher {path}", + data={CONF_FOLDER: path}, + ) + entry.add_to_hass(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, + 
context={"source": config_entries.SOURCE_IMPORT}, + data={CONF_FOLDER: path}, + ) + + assert result["type"] == FlowResultType.ABORT + assert result["reason"] == "already_configured" From b8f44fb7229bdae693f23dbbfaeb4df75a231f88 Mon Sep 17 00:00:00 2001 From: Thomas Kistler Date: Tue, 23 Apr 2024 00:01:25 -0700 Subject: [PATCH 257/426] Update Hydrawise from the legacy API to the new GraphQL API (#106904) * Update Hydrawise from the legacy API to the new GraphQL API. * Cleanup --- .../components/hydrawise/__init__.py | 16 +++- .../components/hydrawise/config_flow.py | 66 ++++++++++++---- .../components/hydrawise/strings.json | 8 +- tests/components/hydrawise/conftest.py | 47 +++++++++++- .../components/hydrawise/test_config_flow.py | 75 +++++++++++++++++-- tests/components/hydrawise/test_init.py | 13 ++++ 6 files changed, 197 insertions(+), 28 deletions(-) diff --git a/homeassistant/components/hydrawise/__init__.py b/homeassistant/components/hydrawise/__init__.py index 62a4cacc5c4..b4e14c42709 100644 --- a/homeassistant/components/hydrawise/__init__.py +++ b/homeassistant/components/hydrawise/__init__.py @@ -1,10 +1,11 @@ """Support for Hydrawise cloud.""" -from pydrawise import legacy +from pydrawise import auth, client from homeassistant.config_entries import ConfigEntry -from homeassistant.const import CONF_API_KEY, Platform +from homeassistant.const import CONF_PASSWORD, CONF_USERNAME, Platform from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ConfigEntryAuthFailed from .const import DOMAIN, SCAN_INTERVAL from .coordinator import HydrawiseDataUpdateCoordinator @@ -14,8 +15,15 @@ PLATFORMS: list[Platform] = [Platform.BINARY_SENSOR, Platform.SENSOR, Platform.S async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool: """Set up Hydrawise from a config entry.""" - access_token = config_entry.data[CONF_API_KEY] - hydrawise = legacy.LegacyHydrawiseAsync(access_token) + if CONF_USERNAME not in config_entry.data 
or CONF_PASSWORD not in config_entry.data: + # The GraphQL API requires username and password to authenticate. If either is + # missing, reauth is required. + raise ConfigEntryAuthFailed + + hydrawise = client.Hydrawise( + auth.Auth(config_entry.data[CONF_USERNAME], config_entry.data[CONF_PASSWORD]) + ) + coordinator = HydrawiseDataUpdateCoordinator(hass, hydrawise, SCAN_INTERVAL) await coordinator.async_config_entry_first_refresh() hass.data.setdefault(DOMAIN, {})[config_entry.entry_id] = coordinator diff --git a/homeassistant/components/hydrawise/config_flow.py b/homeassistant/components/hydrawise/config_flow.py index 8233074c3cd..1c2c1c5cf29 100644 --- a/homeassistant/components/hydrawise/config_flow.py +++ b/homeassistant/components/hydrawise/config_flow.py @@ -2,15 +2,16 @@ from __future__ import annotations -from collections.abc import Callable +from collections.abc import Callable, Mapping from typing import Any from aiohttp import ClientError -from pydrawise import legacy +from pydrawise import auth, client +from pydrawise.exceptions import NotAuthorizedError import voluptuous as vol -from homeassistant.config_entries import ConfigFlow, ConfigFlowResult -from homeassistant.const import CONF_API_KEY +from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult +from homeassistant.const import CONF_PASSWORD, CONF_USERNAME from .const import DOMAIN, LOGGER @@ -20,14 +21,26 @@ class HydrawiseConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 - async def _create_entry( - self, api_key: str, *, on_failure: Callable[[str], ConfigFlowResult] + def __init__(self) -> None: + """Construct a ConfigFlow.""" + self.reauth_entry: ConfigEntry | None = None + + async def _create_or_update_entry( + self, + username: str, + password: str, + *, + on_failure: Callable[[str], ConfigFlowResult], ) -> ConfigFlowResult: """Create the config entry.""" - api = legacy.LegacyHydrawiseAsync(api_key) + + # Verify that the provided credentials work.""" + api = 
client.Hydrawise(auth.Auth(username, password)) try: # Skip fetching zones to save on metered API calls. - user = await api.get_user(fetch_zones=False) + user = await api.get_user() + except NotAuthorizedError: + return on_failure("invalid_auth") except TimeoutError: return on_failure("timeout_connect") except ClientError as ex: @@ -35,17 +48,33 @@ class HydrawiseConfigFlow(ConfigFlow, domain=DOMAIN): return on_failure("cannot_connect") await self.async_set_unique_id(f"hydrawise-{user.customer_id}") - self._abort_if_unique_id_configured() - return self.async_create_entry(title="Hydrawise", data={CONF_API_KEY: api_key}) + if not self.reauth_entry: + self._abort_if_unique_id_configured() + return self.async_create_entry( + title="Hydrawise", + data={CONF_USERNAME: username, CONF_PASSWORD: password}, + ) + + self.hass.config_entries.async_update_entry( + self.reauth_entry, + data=self.reauth_entry.data + | {CONF_USERNAME: username, CONF_PASSWORD: password}, + ) + await self.hass.config_entries.async_reload(self.reauth_entry.entry_id) + return self.async_abort(reason="reauth_successful") async def async_step_user( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle the initial setup.""" if user_input is not None: - api_key = user_input[CONF_API_KEY] - return await self._create_entry(api_key, on_failure=self._show_form) + username = user_input[CONF_USERNAME] + password = user_input[CONF_PASSWORD] + + return await self._create_or_update_entry( + username=username, password=password, on_failure=self._show_form + ) return self._show_form() def _show_form(self, error_type: str | None = None) -> ConfigFlowResult: @@ -54,6 +83,17 @@ class HydrawiseConfigFlow(ConfigFlow, domain=DOMAIN): errors["base"] = error_type return self.async_show_form( step_id="user", - data_schema=vol.Schema({vol.Required(CONF_API_KEY): str}), + data_schema=vol.Schema( + {vol.Required(CONF_USERNAME): str, vol.Required(CONF_PASSWORD): str} + ), errors=errors, ) + + async def 
async_step_reauth( + self, user_input: Mapping[str, Any] + ) -> ConfigFlowResult: + """Perform reauth after updating config to username/password.""" + self.reauth_entry = self.hass.config_entries.async_get_entry( + self.context["entry_id"] + ) + return await self.async_step_user() diff --git a/homeassistant/components/hydrawise/strings.json b/homeassistant/components/hydrawise/strings.json index 1c96098db35..ee5cc0a541c 100644 --- a/homeassistant/components/hydrawise/strings.json +++ b/homeassistant/components/hydrawise/strings.json @@ -2,8 +2,11 @@ "config": { "step": { "user": { + "title": "Hydrawise Login", + "description": "Please provide the username and password for your Hydrawise cloud account:", "data": { - "api_key": "[%key:common::config_flow::data::api_key%]" + "username": "[%key:common::config_flow::data::username%]", + "password": "[%key:common::config_flow::data::password%]" } } }, @@ -13,7 +16,8 @@ "unknown": "[%key:common::config_flow::error::unknown%]" }, "abort": { - "already_configured": "[%key:common::config_flow::abort::already_configured_service%]" + "already_configured": "[%key:common::config_flow::abort::already_configured_service%]", + "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]" } }, "entity": { diff --git a/tests/components/hydrawise/conftest.py b/tests/components/hydrawise/conftest.py index 8e22fbe84f7..11670cb3565 100644 --- a/tests/components/hydrawise/conftest.py +++ b/tests/components/hydrawise/conftest.py @@ -15,7 +15,7 @@ from pydrawise.schema import ( import pytest from homeassistant.components.hydrawise.const import DOMAIN -from homeassistant.const import CONF_API_KEY +from homeassistant.const import CONF_API_KEY, CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.util import dt as dt_util @@ -32,7 +32,7 @@ def mock_setup_entry() -> Generator[AsyncMock, None, None]: @pytest.fixture -def mock_pydrawise( +def mock_legacy_pydrawise( user: User, controller: 
Controller, zones: list[Zone], @@ -47,10 +47,32 @@ def mock_pydrawise( yield mock_pydrawise.return_value +@pytest.fixture +def mock_pydrawise( + mock_auth: AsyncMock, + user: User, + controller: Controller, + zones: list[Zone], +) -> Generator[AsyncMock, None, None]: + """Mock Hydrawise.""" + with patch("pydrawise.client.Hydrawise", autospec=True) as mock_pydrawise: + user.controllers = [controller] + controller.zones = zones + mock_pydrawise.return_value.get_user.return_value = user + yield mock_pydrawise.return_value + + +@pytest.fixture +def mock_auth() -> Generator[AsyncMock, None, None]: + """Mock pydrawise Auth.""" + with patch("pydrawise.auth.Auth", autospec=True) as mock_auth: + yield mock_auth.return_value + + @pytest.fixture def user() -> User: """Hydrawise User fixture.""" - return User(customer_id=12345) + return User(customer_id=12345, email="asdf@asdf.com") @pytest.fixture @@ -102,7 +124,7 @@ def zones() -> list[Zone]: @pytest.fixture -def mock_config_entry() -> MockConfigEntry: +def mock_config_entry_legacy() -> MockConfigEntry: """Mock ConfigEntry.""" return MockConfigEntry( title="Hydrawise", @@ -111,6 +133,23 @@ def mock_config_entry() -> MockConfigEntry: CONF_API_KEY: "abc123", }, unique_id="hydrawise-customerid", + version=1, + ) + + +@pytest.fixture +def mock_config_entry() -> MockConfigEntry: + """Mock ConfigEntry.""" + return MockConfigEntry( + title="Hydrawise", + domain=DOMAIN, + data={ + CONF_USERNAME: "asfd@asdf.com", + CONF_PASSWORD: "__password__", + }, + unique_id="hydrawise-customerid", + version=1, + minor_version=2, ) diff --git a/tests/components/hydrawise/test_config_flow.py b/tests/components/hydrawise/test_config_flow.py index be0ef90becd..a7fbc008aab 100644 --- a/tests/components/hydrawise/test_config_flow.py +++ b/tests/components/hydrawise/test_config_flow.py @@ -3,14 +3,18 @@ from unittest.mock import AsyncMock from aiohttp import ClientError +from pydrawise.exceptions import NotAuthorizedError from pydrawise.schema import 
User import pytest from homeassistant import config_entries from homeassistant.components.hydrawise.const import DOMAIN +from homeassistant.const import CONF_API_KEY, CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType +from tests.common import MockConfigEntry + pytestmark = pytest.mark.usefixtures("mock_setup_entry") @@ -29,16 +33,20 @@ async def test_form( assert result["errors"] == {} result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], {"api_key": "abc123"} + result["flow_id"], + {CONF_USERNAME: "asdf@asdf.com", CONF_PASSWORD: "__password__"}, ) mock_pydrawise.get_user.return_value = user await hass.async_block_till_done() assert result2["type"] is FlowResultType.CREATE_ENTRY assert result2["title"] == "Hydrawise" - assert result2["data"] == {"api_key": "abc123"} + assert result2["data"] == { + CONF_USERNAME: "asdf@asdf.com", + CONF_PASSWORD: "__password__", + } assert len(mock_setup_entry.mock_calls) == 1 - mock_pydrawise.get_user.assert_called_once_with(fetch_zones=False) + mock_pydrawise.get_user.assert_called_once_with() async def test_form_api_error( @@ -50,7 +58,7 @@ async def test_form_api_error( init_result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) - data = {"api_key": "abc123"} + data = {CONF_USERNAME: "asdf@asdf.com", CONF_PASSWORD: "__password__"} result = await hass.config_entries.flow.async_configure( init_result["flow_id"], data ) @@ -71,7 +79,7 @@ async def test_form_connect_timeout( init_result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) - data = {"api_key": "abc123"} + data = {CONF_USERNAME: "asdf@asdf.com", CONF_PASSWORD: "__password__"} result = await hass.config_entries.flow.async_configure( init_result["flow_id"], data ) @@ -83,3 +91,60 @@ async def test_form_connect_timeout( mock_pydrawise.get_user.return_value = user 
result2 = await hass.config_entries.flow.async_configure(result["flow_id"], data) assert result2["type"] is FlowResultType.CREATE_ENTRY + + +async def test_form_not_authorized_error( + hass: HomeAssistant, mock_pydrawise: AsyncMock, user: User +) -> None: + """Test we handle API errors.""" + mock_pydrawise.get_user.side_effect = NotAuthorizedError + + init_result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + data = {CONF_USERNAME: "asdf@asdf.com", CONF_PASSWORD: "__password__"} + result = await hass.config_entries.flow.async_configure( + init_result["flow_id"], data + ) + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"base": "invalid_auth"} + + mock_pydrawise.get_user.reset_mock(side_effect=True) + mock_pydrawise.get_user.return_value = user + result2 = await hass.config_entries.flow.async_configure(result["flow_id"], data) + assert result2["type"] is FlowResultType.CREATE_ENTRY + + +async def test_reauth( + hass: HomeAssistant, + user: User, + mock_pydrawise: AsyncMock, +) -> None: + """Test that re-authorization works.""" + mock_config_entry = MockConfigEntry( + title="Hydrawise", + domain=DOMAIN, + data={ + CONF_API_KEY: "__api_key__", + }, + unique_id="hydrawise-12345", + ) + mock_config_entry.add_to_hass(hass) + + mock_config_entry.async_start_reauth(hass) + await hass.async_block_till_done() + + flows = hass.config_entries.flow.async_progress() + assert len(flows) == 1 + [result] = flows + assert result["step_id"] == "user" + + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_USERNAME: "asdf@asdf.com", CONF_PASSWORD: "__password__"}, + ) + mock_pydrawise.get_user.return_value = user + await hass.async_block_till_done() + + assert result2["type"] is FlowResultType.ABORT + assert result2["reason"] == "reauth_successful" diff --git a/tests/components/hydrawise/test_init.py b/tests/components/hydrawise/test_init.py index 
91c99833531..8ec3c3da648 100644 --- a/tests/components/hydrawise/test_init.py +++ b/tests/components/hydrawise/test_init.py @@ -19,3 +19,16 @@ async def test_connect_retry( await hass.config_entries.async_setup(mock_config_entry.entry_id) await hass.async_block_till_done() assert mock_config_entry.state is ConfigEntryState.SETUP_RETRY + + +async def test_update_version( + hass: HomeAssistant, mock_config_entry_legacy: MockConfigEntry +) -> None: + """Test updating to the GraphQL API works.""" + mock_config_entry_legacy.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry_legacy.entry_id) + await hass.async_block_till_done() + assert mock_config_entry_legacy.state is ConfigEntryState.SETUP_ERROR + + # Make sure reauth flow has been initiated + assert any(mock_config_entry_legacy.async_get_active_flows(hass, {"reauth"})) From e0c785b2b4b79965f31b43ccc074e2f31e114fa4 Mon Sep 17 00:00:00 2001 From: Shai Ungar Date: Tue, 23 Apr 2024 10:01:45 +0300 Subject: [PATCH 258/426] Add coordinator to 17Track (#115057) * Add coordinator to 17Track * Add coordinator to 17Track remove SensorEntityDescription (different PR) * Update homeassistant/components/seventeentrack/coordinator.py Co-authored-by: Joost Lekkerkerker * Update homeassistant/components/seventeentrack/sensor.py Co-authored-by: Joost Lekkerkerker * Add coordinator to 17Track fix CR * Add coordinator to 17Track fix second CR * Add coordinator to 17Track remove commented out code + fix display name * Add coordinator to 17Track created a set outside _async_create_remove_entities function * Add coordinator to 17Track fix CR * Add coordinator to 17Track fix CR 2 * Update homeassistant/components/seventeentrack/coordinator.py Co-authored-by: Joost Lekkerkerker * Add coordinator to 17Track raise UpdateFailed if API throws an exception * Add coordinator to 17Track merge calls --------- Co-authored-by: Joost Lekkerkerker --- .../components/seventeentrack/__init__.py | 9 +-
.../components/seventeentrack/const.py | 3 + .../components/seventeentrack/coordinator.py | 84 +++++ .../components/seventeentrack/sensor.py | 340 ++++++++---------- tests/components/seventeentrack/__init__.py | 2 +- tests/components/seventeentrack/conftest.py | 8 +- .../components/seventeentrack/test_sensor.py | 51 +-- 7 files changed, 257 insertions(+), 240 deletions(-) create mode 100644 homeassistant/components/seventeentrack/coordinator.py diff --git a/homeassistant/components/seventeentrack/__init__.py b/homeassistant/components/seventeentrack/__init__.py index 183d1bd4068..1f9879cdcbc 100644 --- a/homeassistant/components/seventeentrack/__init__.py +++ b/homeassistant/components/seventeentrack/__init__.py @@ -10,8 +10,9 @@ from homeassistant.exceptions import ConfigEntryNotReady from homeassistant.helpers.aiohttp_client import async_get_clientsession from .const import DOMAIN +from .coordinator import SeventeenTrackCoordinator -PLATFORMS = [Platform.SENSOR] +PLATFORMS: list[Platform] = [Platform.SENSOR] async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: @@ -25,8 +26,10 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: except SeventeenTrackError as err: raise ConfigEntryNotReady from err - hass.data.setdefault(DOMAIN, {})[entry.entry_id] = client + coordinator = SeventeenTrackCoordinator(hass, client) + await coordinator.async_config_entry_first_refresh() + + hass.data.setdefault(DOMAIN, {})[entry.entry_id] = coordinator await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) - return True diff --git a/homeassistant/components/seventeentrack/const.py b/homeassistant/components/seventeentrack/const.py index 6f8ae1b221c..fc7ca7b2e7f 100644 --- a/homeassistant/components/seventeentrack/const.py +++ b/homeassistant/components/seventeentrack/const.py @@ -1,6 +1,9 @@ """Constants for the 17track.net component.""" from datetime import timedelta +import logging + +LOGGER = 
logging.getLogger(__package__) ATTR_DESTINATION_COUNTRY = "destination_country" ATTR_INFO_TEXT = "info_text" diff --git a/homeassistant/components/seventeentrack/coordinator.py b/homeassistant/components/seventeentrack/coordinator.py new file mode 100644 index 00000000000..84bdf1e1359 --- /dev/null +++ b/homeassistant/components/seventeentrack/coordinator.py @@ -0,0 +1,84 @@ +"""Coordinator for 17Track.""" + +from dataclasses import dataclass +from typing import Any + +from py17track import Client as SeventeenTrackClient +from py17track.errors import SeventeenTrackError +from py17track.package import Package + +from homeassistant.config_entries import ConfigEntry +from homeassistant.core import HomeAssistant +from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed +from homeassistant.util import slugify + +from .const import ( + CONF_SHOW_ARCHIVED, + CONF_SHOW_DELIVERED, + DEFAULT_SCAN_INTERVAL, + DOMAIN, + LOGGER, +) + + +@dataclass +class SeventeenTrackData: + """Class for handling the data retrieval.""" + + summary: dict[str, dict[str, Any]] + live_packages: dict[str, Package] + + +class SeventeenTrackCoordinator(DataUpdateCoordinator[SeventeenTrackData]): + """Class to manage fetching 17Track data.""" + + config_entry: ConfigEntry + + def __init__(self, hass: HomeAssistant, client: SeventeenTrackClient) -> None: + """Initialize.""" + super().__init__( + hass, + LOGGER, + name=DOMAIN, + update_interval=DEFAULT_SCAN_INTERVAL, + ) + self.show_delivered = self.config_entry.options[CONF_SHOW_DELIVERED] + self.account_id = client.profile.account_id + + self._show_archived = self.config_entry.options[CONF_SHOW_ARCHIVED] + self._client = client + + async def _async_update_data(self) -> SeventeenTrackData: + """Fetch data from 17Track API.""" + + try: + summary = await self._client.profile.summary( + show_archived=self._show_archived + ) + + live_packages = set( + await self._client.profile.packages(show_archived=self._show_archived) + ) 
+ + except SeventeenTrackError as err: + raise UpdateFailed(err) from err + + summary_dict = {} + live_packages_dict = {} + + for status, quantity in summary.items(): + summary_dict[slugify(status)] = { + "quantity": quantity, + "packages": [], + "status_name": status, + } + + for package in live_packages: + live_packages_dict[package.tracking_number] = package + summary_value = summary_dict.get(slugify(package.status)) + if summary_value: + summary_value["packages"].append(package) + + return SeventeenTrackData( + summary=summary_dict, live_packages=live_packages_dict + ) diff --git a/homeassistant/components/seventeentrack/sensor.py b/homeassistant/components/seventeentrack/sensor.py index 1de627fab39..cbad01d0b0a 100644 --- a/homeassistant/components/seventeentrack/sensor.py +++ b/homeassistant/components/seventeentrack/sensor.py @@ -2,10 +2,8 @@ from __future__ import annotations -import logging +from typing import Any -from py17track.errors import SeventeenTrackError -from py17track.package import Package import voluptuous as vol from homeassistant.components import persistent_notification @@ -17,15 +15,15 @@ from homeassistant.const import ( CONF_PASSWORD, CONF_USERNAME, ) -from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant +from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant, callback from homeassistant.data_entry_flow import FlowResultType -from homeassistant.helpers import config_validation as cv, entity, entity_registry as er +from homeassistant.helpers import config_validation as cv, entity_registry as er from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.event import async_call_later from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType, StateType -from homeassistant.util import Throttle, slugify +from homeassistant.helpers.update_coordinator import 
CoordinatorEntity +from . import SeventeenTrackCoordinator from .const import ( ATTR_DESTINATION_COUNTRY, ATTR_INFO_TEXT, @@ -39,17 +37,15 @@ from .const import ( ATTRIBUTION, CONF_SHOW_ARCHIVED, CONF_SHOW_DELIVERED, - DEFAULT_SCAN_INTERVAL, DOMAIN, ENTITY_ID_TEMPLATE, + LOGGER, NOTIFICATION_DELIVERED_MESSAGE, NOTIFICATION_DELIVERED_TITLE, UNIQUE_ID_TEMPLATE, VALUE_DELIVERED, ) -_LOGGER = logging.getLogger(__name__) - PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( { vol.Required(CONF_USERNAME): cv.string, @@ -111,81 +107,155 @@ async def async_setup_entry( ) -> None: """Set up a 17Track sensor entry.""" - client = hass.data[DOMAIN][config_entry.entry_id] + coordinator: SeventeenTrackCoordinator = hass.data[DOMAIN][config_entry.entry_id] + previous_tracking_numbers: set[str] = set() - data = SeventeenTrackData( - client, - async_add_entities, - DEFAULT_SCAN_INTERVAL, - config_entry.options[CONF_SHOW_ARCHIVED], - config_entry.options[CONF_SHOW_DELIVERED], - str(hass.config.time_zone), + @callback + def _async_create_remove_entities(): + live_tracking_numbers = set(coordinator.data.live_packages.keys()) + + new_tracking_numbers = live_tracking_numbers - previous_tracking_numbers + old_tracking_numbers = previous_tracking_numbers - live_tracking_numbers + + previous_tracking_numbers.update(live_tracking_numbers) + + packages_to_add = [ + coordinator.data.live_packages[tracking_number] + for tracking_number in new_tracking_numbers + ] + + for package_data in coordinator.data.live_packages.values(): + if ( + package_data.status == VALUE_DELIVERED + and not coordinator.show_delivered + ): + old_tracking_numbers.add(package_data.tracking_number) + notify_delivered( + hass, + package_data.friendly_name, + package_data.tracking_number, + ) + + remove_packages(hass, coordinator.account_id, old_tracking_numbers) + + async_add_entities( + SeventeenTrackPackageSensor( + coordinator, + package_data.tracking_number, + ) + for package_data in packages_to_add + if not ( + not 
coordinator.show_delivered and package_data.status == "Delivered" + ) + ) + + async_add_entities( + SeventeenTrackSummarySensor(status, summary_data["status_name"], coordinator) + for status, summary_data in coordinator.data.summary.items() + ) + + _async_create_remove_entities() + + config_entry.async_on_unload( + coordinator.async_add_listener(_async_create_remove_entities) ) - await data.async_update() -class SeventeenTrackSummarySensor(SensorEntity): +class SeventeenTrackSummarySensor( + CoordinatorEntity[SeventeenTrackCoordinator], SensorEntity +): """Define a summary sensor.""" _attr_attribution = ATTRIBUTION _attr_icon = "mdi:package" _attr_native_unit_of_measurement = "packages" - def __init__(self, data, status, initial_state) -> None: - """Initialize.""" - self._attr_extra_state_attributes = {} - self._data = data - self._state = initial_state + def __init__( + self, + status: str, + status_name: str, + coordinator: SeventeenTrackCoordinator, + ) -> None: + """Initialize the sensor.""" + super().__init__(coordinator) self._status = status - self._attr_name = f"Seventeentrack Packages {status}" - self._attr_unique_id = f"summary_{data.account_id}_{slugify(status)}" + self._attr_name = f"Seventeentrack Packages {status_name}" + self._attr_unique_id = f"summary_{coordinator.account_id}_{self._status}" @property def available(self) -> bool: """Return whether the entity is available.""" - return self._state is not None + return self._status in self.coordinator.data.summary @property def native_value(self) -> StateType: - """Return the state.""" - return self._state + """Return the state of the sensor.""" + return self.coordinator.data.summary[self._status]["quantity"] - async def async_update(self) -> None: - """Update the sensor.""" - await self._data.async_update() - - package_data = [] - for package in self._data.packages.values(): - if package.status != self._status: - continue - - package_data.append( + @property + def extra_state_attributes(self) -> 
dict[str, Any] | None: + """Return the state attributes.""" + packages = self.coordinator.data.summary[self._status]["packages"] + return { + ATTR_PACKAGES: [ { - ATTR_FRIENDLY_NAME: package.friendly_name, - ATTR_INFO_TEXT: package.info_text, - ATTR_TIMESTAMP: package.timestamp, - ATTR_STATUS: package.status, - ATTR_LOCATION: package.location, ATTR_TRACKING_NUMBER: package.tracking_number, + ATTR_LOCATION: package.location, + ATTR_STATUS: package.status, + ATTR_TIMESTAMP: package.timestamp, + ATTR_INFO_TEXT: package.info_text, + ATTR_FRIENDLY_NAME: package.friendly_name, } - ) - - self._attr_extra_state_attributes[ATTR_PACKAGES] = ( - package_data if package_data else None - ) - - self._state = self._data.summary.get(self._status) + for package in packages + ] + } -class SeventeenTrackPackageSensor(SensorEntity): +class SeventeenTrackPackageSensor( + CoordinatorEntity[SeventeenTrackCoordinator], SensorEntity +): """Define an individual package sensor.""" _attr_attribution = ATTRIBUTION _attr_icon = "mdi:package" - def __init__(self, data, package) -> None: - """Initialize.""" - self._attr_extra_state_attributes = { + def __init__( + self, + coordinator: SeventeenTrackCoordinator, + tracking_number: str, + ) -> None: + """Initialize the sensor.""" + super().__init__(coordinator) + self._tracking_number = tracking_number + self._previous_status = coordinator.data.live_packages[tracking_number].status + self.entity_id = ENTITY_ID_TEMPLATE.format(tracking_number) + self._attr_unique_id = UNIQUE_ID_TEMPLATE.format( + coordinator.account_id, tracking_number + ) + + @property + def available(self) -> bool: + """Return whether the entity is available.""" + return self._tracking_number in self.coordinator.data.live_packages + + @property + def name(self) -> str: + """Return the name.""" + package = self.coordinator.data.live_packages.get(self._tracking_number) + if package is None or not (name := package.friendly_name): + name = self._tracking_number + return 
f"Seventeentrack Package: {name}" + + @property + def native_value(self) -> StateType: + """Return the state.""" + return self.coordinator.data.live_packages[self._tracking_number].status + + @property + def extra_state_attributes(self) -> dict[str, Any] | None: + """Return the state attributes.""" + package = self.coordinator.data.live_packages[self._tracking_number] + return { ATTR_DESTINATION_COUNTRY: package.destination_country, ATTR_INFO_TEXT: package.info_text, ATTR_TIMESTAMP: package.timestamp, @@ -195,158 +265,30 @@ class SeventeenTrackPackageSensor(SensorEntity): ATTR_TRACKING_INFO_LANGUAGE: package.tracking_info_language, ATTR_TRACKING_NUMBER: package.tracking_number, } - self._data = data - self._friendly_name = package.friendly_name - self._state = package.status - self._tracking_number = package.tracking_number - self.entity_id = ENTITY_ID_TEMPLATE.format(self._tracking_number) - self._attr_unique_id = UNIQUE_ID_TEMPLATE.format( - data.account_id, self._tracking_number - ) - @property - def available(self) -> bool: - """Return whether the entity is available.""" - return self._data.packages.get(self._tracking_number) is not None - @property - def name(self) -> str: - """Return the name.""" - if not (name := self._friendly_name): - name = self._tracking_number - return f"Seventeentrack Package: {name}" - - @property - def native_value(self) -> StateType: - """Return the state.""" - return self._state - - async def async_update(self) -> None: - """Update the sensor.""" - await self._data.async_update() - - if not self.available: - # Entity cannot be removed while its being added - async_call_later(self.hass, 1, self._remove) - return - - package = self._data.packages.get(self._tracking_number, None) - - # If the user has elected to not see delivered packages and one gets - # delivered, post a notification: - if package.status == VALUE_DELIVERED and not self._data.show_delivered: - self._notify_delivered() - # Entity cannot be removed while its being 
added - async_call_later(self.hass, 1, self._remove) - return - - self._attr_extra_state_attributes.update( - { - ATTR_INFO_TEXT: package.info_text, - ATTR_TIMESTAMP: package.timestamp, - ATTR_LOCATION: package.location, - } - ) - self._state = package.status - self._friendly_name = package.friendly_name - - async def _remove(self, *_): - """Remove entity itself.""" - await self.async_remove(force_remove=True) - - reg = er.async_get(self.hass) +def remove_packages(hass: HomeAssistant, account_id: str, packages: set[str]) -> None: + """Remove entity itself.""" + reg = er.async_get(hass) + for package in packages: entity_id = reg.async_get_entity_id( "sensor", "seventeentrack", - UNIQUE_ID_TEMPLATE.format(self._data.account_id, self._tracking_number), + UNIQUE_ID_TEMPLATE.format(account_id, package), ) if entity_id: reg.async_remove(entity_id) - def _notify_delivered(self): - """Notify when package is delivered.""" - _LOGGER.info("Package delivered: %s", self._tracking_number) - identification = ( - self._friendly_name if self._friendly_name else self._tracking_number - ) - message = NOTIFICATION_DELIVERED_MESSAGE.format( - identification, self._tracking_number - ) - title = NOTIFICATION_DELIVERED_TITLE.format(identification) - notification_id = NOTIFICATION_DELIVERED_TITLE.format(self._tracking_number) +def notify_delivered(hass: HomeAssistant, friendly_name: str, tracking_number: str): + """Notify when package is delivered.""" + LOGGER.debug("Package delivered: %s", tracking_number) - persistent_notification.create( - self.hass, message, title=title, notification_id=notification_id - ) + identification = friendly_name if friendly_name else tracking_number + message = NOTIFICATION_DELIVERED_MESSAGE.format(identification, tracking_number) + title = NOTIFICATION_DELIVERED_TITLE.format(identification) + notification_id = NOTIFICATION_DELIVERED_TITLE.format(tracking_number) - -class SeventeenTrackData: - """Define a data handler for 17track.net.""" - - def __init__( - 
self, - client, - async_add_entities, - scan_interval, - show_archived, - show_delivered, - timezone, - ) -> None: - """Initialize.""" - self._async_add_entities = async_add_entities - self._client = client - self._scan_interval = scan_interval - self._show_archived = show_archived - self.account_id = client.profile.account_id - self.packages: dict[str, Package] = {} - self.show_delivered = show_delivered - self.timezone = timezone - self.summary: dict[str, int] = {} - self.async_update = Throttle(self._scan_interval)(self._async_update) - self.first_update = True - - async def _async_update(self): - """Get updated data from 17track.net.""" - entities: list[entity.Entity] = [] - - try: - packages = await self._client.profile.packages( - show_archived=self._show_archived, tz=self.timezone - ) - _LOGGER.debug("New package data received: %s", packages) - - new_packages = {p.tracking_number: p for p in packages} - - to_add = set(new_packages) - set(self.packages) - - _LOGGER.debug("Will add new tracking numbers: %s", to_add) - if to_add: - entities.extend( - SeventeenTrackPackageSensor(self, new_packages[tracking_number]) - for tracking_number in to_add - ) - - self.packages = new_packages - except SeventeenTrackError as err: - _LOGGER.error("There was an error retrieving packages: %s", err) - - try: - self.summary = await self._client.profile.summary( - show_archived=self._show_archived - ) - _LOGGER.debug("New summary data received: %s", self.summary) - - # creating summary sensors on first update - if self.first_update: - self.first_update = False - entities.extend( - SeventeenTrackSummarySensor(self, status, quantity) - for status, quantity in self.summary.items() - ) - - except SeventeenTrackError as err: - _LOGGER.error("There was an error retrieving the summary: %s", err) - self.summary = {} - - self._async_add_entities(entities, True) + persistent_notification.create( + hass, message, title=title, notification_id=notification_id + ) diff --git 
a/tests/components/seventeentrack/__init__.py b/tests/components/seventeentrack/__init__.py index 4101f34496e..b3452b38f96 100644 --- a/tests/components/seventeentrack/__init__.py +++ b/tests/components/seventeentrack/__init__.py @@ -4,7 +4,7 @@ from datetime import timedelta from freezegun.api import FrozenDateTimeFactory -from homeassistant.components.seventeentrack.sensor import DEFAULT_SCAN_INTERVAL +from homeassistant.components.seventeentrack.const import DEFAULT_SCAN_INTERVAL from homeassistant.core import HomeAssistant from tests.common import MockConfigEntry, async_fire_time_changed diff --git a/tests/components/seventeentrack/conftest.py b/tests/components/seventeentrack/conftest.py index 2865b3f2599..2e266a9b13c 100644 --- a/tests/components/seventeentrack/conftest.py +++ b/tests/components/seventeentrack/conftest.py @@ -7,12 +7,10 @@ from py17track.package import Package import pytest from homeassistant.components.seventeentrack.const import ( - DEFAULT_SHOW_ARCHIVED, - DEFAULT_SHOW_DELIVERED, -) -from homeassistant.components.seventeentrack.sensor import ( CONF_SHOW_ARCHIVED, CONF_SHOW_DELIVERED, + DEFAULT_SHOW_ARCHIVED, + DEFAULT_SHOW_DELIVERED, ) from homeassistant.const import CONF_PASSWORD, CONF_USERNAME @@ -28,6 +26,8 @@ DEFAULT_SUMMARY = { "Returned": 0, } +DEFAULT_SUMMARY_LENGTH = len(DEFAULT_SUMMARY) + ACCOUNT_ID = "1234" NEW_SUMMARY_DATA = { diff --git a/tests/components/seventeentrack/test_sensor.py b/tests/components/seventeentrack/test_sensor.py index aa7f61ad318..27de64ca89f 100644 --- a/tests/components/seventeentrack/test_sensor.py +++ b/tests/components/seventeentrack/test_sensor.py @@ -14,6 +14,7 @@ from homeassistant.setup import async_setup_component from . 
import goto_future, init_integration from .conftest import ( DEFAULT_SUMMARY, + DEFAULT_SUMMARY_LENGTH, NEW_SUMMARY_DATA, VALID_PLATFORM_CONFIG_FULL, get_package, @@ -72,11 +73,10 @@ async def test_add_package( """Ensure package is added correctly when user add a new package.""" package = get_package() mock_seventeentrack.return_value.profile.packages.return_value = [package] - mock_seventeentrack.return_value.profile.summary.return_value = {} await init_integration(hass, mock_config_entry) - assert hass.states.get("sensor.seventeentrack_package_456") is not None - assert len(hass.states.async_entity_ids()) == 1 + assert hass.states.get("sensor.seventeentrack_package_456") + assert len(hass.states.async_entity_ids()) == DEFAULT_SUMMARY_LENGTH + 1 package2 = get_package( tracking_number="789", @@ -90,7 +90,7 @@ async def test_add_package( await goto_future(hass, freezer) assert hass.states.get("sensor.seventeentrack_package_789") is not None - assert len(hass.states.async_entity_ids()) == 2 + assert len(hass.states.async_entity_ids()) == DEFAULT_SUMMARY_LENGTH + 2 async def test_add_package_default_friendly_name( @@ -101,13 +101,12 @@ async def test_add_package_default_friendly_name( """Ensure package is added correctly with default friendly name when user add a new package without his own friendly name.""" package = get_package(friendly_name=None) mock_seventeentrack.return_value.profile.packages.return_value = [package] - mock_seventeentrack.return_value.profile.summary.return_value = {} await init_integration(hass, mock_config_entry) state_456 = hass.states.get("sensor.seventeentrack_package_456") assert state_456 is not None assert state_456.attributes["friendly_name"] == "Seventeentrack Package: 456" - assert len(hass.states.async_entity_ids()) == 1 + assert len(hass.states.async_entity_ids()) == DEFAULT_SUMMARY_LENGTH + 1 async def test_remove_package( @@ -130,26 +129,20 @@ async def test_remove_package( package1, package2, ] - 
mock_seventeentrack.return_value.profile.summary.return_value = {} await init_integration(hass, mock_config_entry) assert hass.states.get("sensor.seventeentrack_package_456") is not None assert hass.states.get("sensor.seventeentrack_package_789") is not None - assert len(hass.states.async_entity_ids()) == 2 + assert len(hass.states.async_entity_ids()) == DEFAULT_SUMMARY_LENGTH + 2 mock_seventeentrack.return_value.profile.packages.return_value = [package2] await goto_future(hass, freezer) - assert hass.states.get("sensor.seventeentrack_package_456").state == "unavailable" - assert len(hass.states.async_entity_ids()) == 2 - - await goto_future(hass, freezer) - assert hass.states.get("sensor.seventeentrack_package_456") is None assert hass.states.get("sensor.seventeentrack_package_789") is not None - assert len(hass.states.async_entity_ids()) == 1 + assert len(hass.states.async_entity_ids()) == DEFAULT_SUMMARY_LENGTH + 1 async def test_package_error( @@ -176,12 +169,11 @@ async def test_friendly_name_changed( """Test friendly name change.""" package = get_package() mock_seventeentrack.return_value.profile.packages.return_value = [package] - mock_seventeentrack.return_value.profile.summary.return_value = {} await init_integration(hass, mock_config_entry) assert hass.states.get("sensor.seventeentrack_package_456") is not None - assert len(hass.states.async_entity_ids()) == 1 + assert len(hass.states.async_entity_ids()) == DEFAULT_SUMMARY_LENGTH + 1 package = get_package(friendly_name="friendly name 2") mock_seventeentrack.return_value.profile.packages.return_value = [package] @@ -193,7 +185,7 @@ async def test_friendly_name_changed( "sensor.seventeentrack_package_456" ) assert entity.name == "Seventeentrack Package: friendly name 2" - assert len(hass.states.async_entity_ids()) == 1 + assert len(hass.states.async_entity_ids()) == DEFAULT_SUMMARY_LENGTH + 1 async def test_delivered_not_shown( @@ -205,7 +197,6 @@ async def test_delivered_not_shown( """Ensure delivered 
packages are not shown.""" package = get_package(status=40) mock_seventeentrack.return_value.profile.packages.return_value = [package] - mock_seventeentrack.return_value.profile.summary.return_value = {} with patch( "homeassistant.components.seventeentrack.sensor.persistent_notification" @@ -213,7 +204,7 @@ async def test_delivered_not_shown( await init_integration(hass, mock_config_entry_with_default_options) await goto_future(hass, freezer) - assert not hass.states.async_entity_ids() + assert hass.states.get("sensor.seventeentrack_package_456") is None persistent_notification_mock.create.assert_called() @@ -225,7 +216,6 @@ async def test_delivered_shown( """Ensure delivered packages are show when user choose to show them.""" package = get_package(status=40) mock_seventeentrack.return_value.profile.packages.return_value = [package] - mock_seventeentrack.return_value.profile.summary.return_value = {} with patch( "homeassistant.components.seventeentrack.sensor.persistent_notification" @@ -233,7 +223,7 @@ async def test_delivered_shown( await init_integration(hass, mock_config_entry) assert hass.states.get("sensor.seventeentrack_package_456") is not None - assert len(hass.states.async_entity_ids()) == 1 + assert len(hass.states.async_entity_ids()) == DEFAULT_SUMMARY_LENGTH + 1 persistent_notification_mock.create.assert_not_called() @@ -246,12 +236,11 @@ async def test_becomes_delivered_not_shown_notification( """Ensure notification is triggered when package becomes delivered.""" package = get_package() mock_seventeentrack.return_value.profile.packages.return_value = [package] - mock_seventeentrack.return_value.profile.summary.return_value = {} await init_integration(hass, mock_config_entry_with_default_options) assert hass.states.get("sensor.seventeentrack_package_456") is not None - assert len(hass.states.async_entity_ids()) == 1 + assert len(hass.states.async_entity_ids()) == DEFAULT_SUMMARY_LENGTH + 1 package_delivered = get_package(status=40) 
mock_seventeentrack.return_value.profile.packages.return_value = [package_delivered] @@ -260,10 +249,9 @@ async def test_becomes_delivered_not_shown_notification( "homeassistant.components.seventeentrack.sensor.persistent_notification" ) as persistent_notification_mock: await goto_future(hass, freezer) - await goto_future(hass, freezer) persistent_notification_mock.create.assert_called() - assert not hass.states.async_entity_ids() + assert len(hass.states.async_entity_ids()) == DEFAULT_SUMMARY_LENGTH async def test_summary_correctly_updated( @@ -275,11 +263,10 @@ async def test_summary_correctly_updated( """Ensure summary entities are not duplicated.""" package = get_package(status=30) mock_seventeentrack.return_value.profile.packages.return_value = [package] - mock_seventeentrack.return_value.profile.summary.return_value = DEFAULT_SUMMARY await init_integration(hass, mock_config_entry) - assert len(hass.states.async_entity_ids()) == 8 + assert len(hass.states.async_entity_ids()) == DEFAULT_SUMMARY_LENGTH + 1 state_ready_picked = hass.states.get( "sensor.seventeentrack_packages_ready_to_be_picked_up" @@ -290,10 +277,9 @@ async def test_summary_correctly_updated( mock_seventeentrack.return_value.profile.packages.return_value = [] mock_seventeentrack.return_value.profile.summary.return_value = NEW_SUMMARY_DATA - await goto_future(hass, freezer) await goto_future(hass, freezer) - assert len(hass.states.async_entity_ids()) == 7 + assert len(hass.states.async_entity_ids()) == len(NEW_SUMMARY_DATA) for state in hass.states.async_all(): assert state.state == "1" @@ -301,7 +287,7 @@ async def test_summary_correctly_updated( "sensor.seventeentrack_packages_ready_to_be_picked_up" ) assert state_ready_picked is not None - assert state_ready_picked.attributes["packages"] is None + assert len(state_ready_picked.attributes["packages"]) == 0 async def test_summary_error( @@ -318,7 +304,7 @@ async def test_summary_error( await init_integration(hass, mock_config_entry) - assert 
len(hass.states.async_entity_ids()) == 1 + assert len(hass.states.async_entity_ids()) == 0 assert ( hass.states.get("sensor.seventeentrack_packages_ready_to_be_picked_up") is None @@ -334,12 +320,11 @@ async def test_utc_timestamp( package = get_package(tz="Asia/Jakarta") mock_seventeentrack.return_value.profile.packages.return_value = [package] - mock_seventeentrack.return_value.profile.summary.return_value = {} await init_integration(hass, mock_config_entry) assert hass.states.get("sensor.seventeentrack_package_456") is not None - assert len(hass.states.async_entity_ids()) == 1 + assert len(hass.states.async_entity_ids()) == DEFAULT_SUMMARY_LENGTH + 1 state_456 = hass.states.get("sensor.seventeentrack_package_456") assert state_456 is not None assert str(state_456.attributes.get("timestamp")) == "2020-08-10 03:32:00+00:00" From 616c7ce68b1ead9854ac8541fe230b8c5fa79eea Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 23 Apr 2024 09:23:45 +0200 Subject: [PATCH 259/426] Bump actions/download-artifact from 4.1.4 to 4.1.6 (#116017) --- .github/workflows/builder.yml | 4 ++-- .github/workflows/ci.yaml | 6 +++--- .github/workflows/wheels.yml | 10 +++++----- 3 files changed, 10 insertions(+), 10 deletions(-) diff --git a/.github/workflows/builder.yml b/.github/workflows/builder.yml index a440de225be..90c1c3692e9 100644 --- a/.github/workflows/builder.yml +++ b/.github/workflows/builder.yml @@ -175,7 +175,7 @@ jobs: sed -i "s|pykrakenapi|# pykrakenapi|g" requirements_all.txt - name: Download translations - uses: actions/download-artifact@v4.1.5 + uses: actions/download-artifact@v4.1.6 with: name: translations @@ -458,7 +458,7 @@ jobs: python-version: ${{ env.DEFAULT_PYTHON }} - name: Download translations - uses: actions/download-artifact@v4.1.5 + uses: actions/download-artifact@v4.1.6 with: name: translations diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 0dc8f34570c..11d0e04cec1 
100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -778,7 +778,7 @@ jobs: run: | echo "::add-matcher::.github/workflows/matchers/pytest-slow.json" - name: Download pytest_buckets - uses: actions/download-artifact@v4.1.5 + uses: actions/download-artifact@v4.1.6 with: name: pytest_buckets - name: Compile English translations @@ -1090,7 +1090,7 @@ jobs: - name: Check out code from GitHub uses: actions/checkout@v4.1.3 - name: Download all coverage artifacts - uses: actions/download-artifact@v4.1.5 + uses: actions/download-artifact@v4.1.6 with: pattern: coverage-* - name: Upload coverage to Codecov @@ -1223,7 +1223,7 @@ jobs: - name: Check out code from GitHub uses: actions/checkout@v4.1.3 - name: Download all coverage artifacts - uses: actions/download-artifact@v4.1.5 + uses: actions/download-artifact@v4.1.6 with: pattern: coverage-* - name: Upload coverage to Codecov diff --git a/.github/workflows/wheels.yml b/.github/workflows/wheels.yml index 6618eb9963b..0dacd45a22e 100644 --- a/.github/workflows/wheels.yml +++ b/.github/workflows/wheels.yml @@ -121,12 +121,12 @@ jobs: uses: actions/checkout@v4.1.3 - name: Download env_file - uses: actions/download-artifact@v4.1.5 + uses: actions/download-artifact@v4.1.6 with: name: env_file - name: Download requirements_diff - uses: actions/download-artifact@v4.1.5 + uses: actions/download-artifact@v4.1.6 with: name: requirements_diff @@ -159,17 +159,17 @@ jobs: uses: actions/checkout@v4.1.3 - name: Download env_file - uses: actions/download-artifact@v4.1.5 + uses: actions/download-artifact@v4.1.6 with: name: env_file - name: Download requirements_diff - uses: actions/download-artifact@v4.1.5 + uses: actions/download-artifact@v4.1.6 with: name: requirements_diff - name: Download requirements_all_wheels - uses: actions/download-artifact@v4.1.4 + uses: actions/download-artifact@v4.1.6 with: name: requirements_all_wheels From e2b401397d2940bea87189842785d458d29fcd70 Mon Sep 17 00:00:00 2001 From: 
"dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 23 Apr 2024 09:24:32 +0200 Subject: [PATCH 260/426] Bump actions/upload-artifact from 4.3.1 to 4.3.3 (#116015) --- .github/workflows/builder.yml | 2 +- .github/workflows/ci.yaml | 18 +++++++++--------- .github/workflows/wheels.yml | 6 +++--- 3 files changed, 13 insertions(+), 13 deletions(-) diff --git a/.github/workflows/builder.yml b/.github/workflows/builder.yml index 90c1c3692e9..bc70eafd3f4 100644 --- a/.github/workflows/builder.yml +++ b/.github/workflows/builder.yml @@ -69,7 +69,7 @@ jobs: run: find ./homeassistant/components/*/translations -name "*.json" | tar zcvf translations.tar.gz -T - - name: Upload translations - uses: actions/upload-artifact@v4.3.2 + uses: actions/upload-artifact@v4.3.3 with: name: translations path: translations.tar.gz diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 11d0e04cec1..62daa6863d9 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -717,7 +717,7 @@ jobs: . 
venv/bin/activate python -m script.split_tests ${{ needs.info.outputs.test_group_count }} tests - name: Upload pytest_buckets - uses: actions/upload-artifact@v4.3.2 + uses: actions/upload-artifact@v4.3.3 with: name: pytest_buckets path: pytest_buckets.txt @@ -813,14 +813,14 @@ jobs: 2>&1 | tee pytest-${{ matrix.python-version }}-${{ matrix.group }}.txt - name: Upload pytest output if: success() || failure() && steps.pytest-full.conclusion == 'failure' - uses: actions/upload-artifact@v4.3.2 + uses: actions/upload-artifact@v4.3.3 with: name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{ matrix.group }} path: pytest-*.txt overwrite: true - name: Upload coverage artifact if: needs.info.outputs.skip_coverage != 'true' - uses: actions/upload-artifact@v4.3.2 + uses: actions/upload-artifact@v4.3.3 with: name: coverage-${{ matrix.python-version }}-${{ matrix.group }} path: coverage.xml @@ -935,7 +935,7 @@ jobs: 2>&1 | tee pytest-${{ matrix.python-version }}-${mariadb}.txt - name: Upload pytest output if: success() || failure() && steps.pytest-partial.conclusion == 'failure' - uses: actions/upload-artifact@v4.3.2 + uses: actions/upload-artifact@v4.3.3 with: name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{ steps.pytest-partial.outputs.mariadb }} @@ -943,7 +943,7 @@ jobs: overwrite: true - name: Upload coverage artifact if: needs.info.outputs.skip_coverage != 'true' - uses: actions/upload-artifact@v4.3.2 + uses: actions/upload-artifact@v4.3.3 with: name: coverage-${{ matrix.python-version }}-${{ steps.pytest-partial.outputs.mariadb }} @@ -1058,7 +1058,7 @@ jobs: 2>&1 | tee pytest-${{ matrix.python-version }}-${postgresql}.txt - name: Upload pytest output if: success() || failure() && steps.pytest-partial.conclusion == 'failure' - uses: actions/upload-artifact@v4.3.2 + uses: actions/upload-artifact@v4.3.3 with: name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{ steps.pytest-partial.outputs.postgresql }} @@ -1066,7 
+1066,7 @@ jobs: overwrite: true - name: Upload coverage artifact if: needs.info.outputs.skip_coverage != 'true' - uses: actions/upload-artifact@v4.3.2 + uses: actions/upload-artifact@v4.3.3 with: name: coverage-${{ matrix.python-version }}-${{ steps.pytest-partial.outputs.postgresql }} @@ -1195,14 +1195,14 @@ jobs: 2>&1 | tee pytest-${{ matrix.python-version }}-${{ matrix.group }}.txt - name: Upload pytest output if: success() || failure() && steps.pytest-partial.conclusion == 'failure' - uses: actions/upload-artifact@v4.3.2 + uses: actions/upload-artifact@v4.3.3 with: name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{ matrix.group }} path: pytest-*.txt overwrite: true - name: Upload coverage artifact if: needs.info.outputs.skip_coverage != 'true' - uses: actions/upload-artifact@v4.3.2 + uses: actions/upload-artifact@v4.3.3 with: name: coverage-${{ matrix.python-version }}-${{ matrix.group }} path: coverage.xml diff --git a/.github/workflows/wheels.yml b/.github/workflows/wheels.yml index 0dacd45a22e..2627ac70795 100644 --- a/.github/workflows/wheels.yml +++ b/.github/workflows/wheels.yml @@ -82,14 +82,14 @@ jobs: ) > .env_file - name: Upload env_file - uses: actions/upload-artifact@v4.3.2 + uses: actions/upload-artifact@v4.3.3 with: name: env_file path: ./.env_file overwrite: true - name: Upload requirements_diff - uses: actions/upload-artifact@v4.3.2 + uses: actions/upload-artifact@v4.3.3 with: name: requirements_diff path: ./requirements_diff.txt @@ -101,7 +101,7 @@ jobs: python -m script.gen_requirements_all ci - name: Upload requirements_all_wheels - uses: actions/upload-artifact@v4.3.1 + uses: actions/upload-artifact@v4.3.3 with: name: requirements_all_wheels path: ./requirements_all_wheels_*.txt From 8f56d170b9f1b4f7c840dadbb3468d5718651089 Mon Sep 17 00:00:00 2001 From: Joost Lekkerkerker Date: Tue, 23 Apr 2024 09:48:17 +0200 Subject: [PATCH 261/426] Use generator expression in totalconnect (#116020) --- 
.../totalconnect/alarm_control_panel.py | 20 ++++++++----------- 1 file changed, 8 insertions(+), 12 deletions(-) diff --git a/homeassistant/components/totalconnect/alarm_control_panel.py b/homeassistant/components/totalconnect/alarm_control_panel.py index 9b2abedbf52..b0ad2f19069 100644 --- a/homeassistant/components/totalconnect/alarm_control_panel.py +++ b/homeassistant/components/totalconnect/alarm_control_panel.py @@ -38,21 +38,17 @@ async def async_setup_entry( hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback ) -> None: """Set up TotalConnect alarm panels based on a config entry.""" - alarms: list[TotalConnectAlarm] = [] - coordinator: TotalConnectDataUpdateCoordinator = hass.data[DOMAIN][entry.entry_id] - for location in coordinator.client.locations.values(): - alarms.extend( - TotalConnectAlarm( - coordinator, - location, - partition_id, - ) - for partition_id in location.partitions + async_add_entities( + TotalConnectAlarm( + coordinator, + location, + partition_id, ) - - async_add_entities(alarms) + for location in coordinator.client.locations.values() + for partition_id in location.partitions + ) # Set up services platform = entity_platform.async_get_current_platform() From 3d59303433a02a2a2ac47e7e4543a25e7d995601 Mon Sep 17 00:00:00 2001 From: myhomeiot <70070601+myhomeiot@users.noreply.github.com> Date: Tue, 23 Apr 2024 10:50:41 +0300 Subject: [PATCH 262/426] Improve Vodafone Station empty/unavailable phone number detection (#115696) Vodafone Sercomm H300S model incorrectly reports phone_unavailable1/phone_unavailable2 flags. 
--- homeassistant/components/vodafone_station/sensor.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/vodafone_station/sensor.py b/homeassistant/components/vodafone_station/sensor.py index 937c0220cbf..2a08a9b2ebe 100644 --- a/homeassistant/components/vodafone_station/sensor.py +++ b/homeassistant/components/vodafone_station/sensor.py @@ -107,12 +107,12 @@ SENSOR_TYPES: Final = ( VodafoneStationEntityDescription( key="phone_num1", translation_key="phone_num1", - is_suitable=lambda info: info["phone_unavailable1"] == "0", + is_suitable=lambda info: info["phone_num1"] != "", ), VodafoneStationEntityDescription( key="phone_num2", translation_key="phone_num2", - is_suitable=lambda info: info["phone_unavailable2"] == "0", + is_suitable=lambda info: info["phone_num2"] != "", ), VodafoneStationEntityDescription( key="sys_uptime", From e90d76b18de75d80c132cc3338b1ad44ed6f101c Mon Sep 17 00:00:00 2001 From: Martijn van der Pol Date: Tue, 23 Apr 2024 09:55:58 +0200 Subject: [PATCH 263/426] Don't raise errors when using datetime objects in `as_datetime` Jinja function/filter (#109062) * add support for datetime objects to as_datetime * change import of datetime.date --------- Co-authored-by: Erik Montnemery --- homeassistant/helpers/template.py | 14 ++++++++++---- tests/helpers/test_template.py | 29 +++++++++++++++++++++++++++++ 2 files changed, 39 insertions(+), 4 deletions(-) diff --git a/homeassistant/helpers/template.py b/homeassistant/helpers/template.py index 16379c1d05c..a1ba1279292 100644 --- a/homeassistant/helpers/template.py +++ b/homeassistant/helpers/template.py @@ -9,7 +9,7 @@ import collections.abc from collections.abc import Callable, Generator, Iterable from contextlib import AbstractContextManager, suppress from contextvars import ContextVar -from datetime import datetime, timedelta +from datetime import date, datetime, time, timedelta from functools import cache, lru_cache, partial, wraps import json import 
logging @@ -2001,12 +2001,12 @@ def square_root(value, default=_SENTINEL): def timestamp_custom(value, date_format=DATE_STR_FORMAT, local=True, default=_SENTINEL): """Filter to convert given timestamp to format.""" try: - date = dt_util.utc_from_timestamp(value) + result = dt_util.utc_from_timestamp(value) if local: - date = dt_util.as_local(date) + result = dt_util.as_local(result) - return date.strftime(date_format) + return result.strftime(date_format) except (ValueError, TypeError): # If timestamp can't be converted if default is _SENTINEL: @@ -2048,6 +2048,12 @@ def forgiving_as_timestamp(value, default=_SENTINEL): def as_datetime(value: Any, default: Any = _SENTINEL) -> Any: """Filter and to convert a time string or UNIX timestamp to datetime object.""" + # Return datetime.datetime object without changes + if type(value) is datetime: + return value + # Add midnight to datetime.date object + if type(value) is date: + return datetime.combine(value, time(0, 0, 0)) try: # Check for a valid UNIX timestamp string, int or float timestamp = float(value) diff --git a/tests/helpers/test_template.py b/tests/helpers/test_template.py index 524b8f47dfe..ec5b76964f7 100644 --- a/tests/helpers/test_template.py +++ b/tests/helpers/test_template.py @@ -1198,6 +1198,35 @@ def test_as_datetime_from_timestamp( ) +@pytest.mark.parametrize( + ("input", "output"), + [ + ( + "{% set dt = as_datetime('2024-01-01 16:00:00-08:00') %}", + "2024-01-01 16:00:00-08:00", + ), + ( + "{% set dt = as_datetime('2024-01-29').date() %}", + "2024-01-29 00:00:00", + ), + ], +) +def test_as_datetime_from_datetime( + hass: HomeAssistant, input: str, output: str +) -> None: + """Test using datetime.datetime or datetime.date objects as input.""" + + assert ( + template.Template(f"{input}{{{{ dt | as_datetime }}}}", hass).async_render() + == output + ) + + assert ( + template.Template(f"{input}{{{{ as_datetime(dt) }}}}", hass).async_render() + == output + ) + + @pytest.mark.parametrize( ("input", 
"default", "output"), [ From 640dc56c51b31ea5a34c8ba7d2eacf2ded19401b Mon Sep 17 00:00:00 2001 From: jan iversen Date: Tue, 23 Apr 2024 10:28:46 +0200 Subject: [PATCH 264/426] Deprecate modbus:restart service (#115754) --- homeassistant/components/modbus/modbus.py | 13 +++++++++++++ homeassistant/components/modbus/strings.json | 4 ++++ 2 files changed, 17 insertions(+) diff --git a/homeassistant/components/modbus/modbus.py b/homeassistant/components/modbus/modbus.py index 0d1848e0d8e..bd7eed8235c 100644 --- a/homeassistant/components/modbus/modbus.py +++ b/homeassistant/components/modbus/modbus.py @@ -34,6 +34,7 @@ import homeassistant.helpers.config_validation as cv from homeassistant.helpers.discovery import async_load_platform from homeassistant.helpers.dispatcher import async_dispatcher_send from homeassistant.helpers.event import async_call_later +from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue from homeassistant.helpers.reload import async_setup_reload_service from homeassistant.helpers.typing import ConfigType @@ -234,6 +235,18 @@ async def async_modbus_setup( async def async_restart_hub(service: ServiceCall) -> None: """Restart Modbus hub.""" + async_create_issue( + hass, + DOMAIN, + "deprecated_restart", + breaks_in_ha_version="2024.11.0", + is_fixable=False, + severity=IssueSeverity.WARNING, + translation_key="deprecated_restart", + ) + _LOGGER.warning( + "`modbus.restart`: is deprecated and will be removed in version 2024.11" + ) async_dispatcher_send(hass, SIGNAL_START_ENTITY) hub = hub_collect[service.data[ATTR_HUB]] await hub.async_restart() diff --git a/homeassistant/components/modbus/strings.json b/homeassistant/components/modbus/strings.json index 72d7a3ec5f1..f89f9a97d52 100644 --- a/homeassistant/components/modbus/strings.json +++ b/homeassistant/components/modbus/strings.json @@ -97,6 +97,10 @@ "no_entities": { "title": "Modbus {sub_1} contain no entities, entry not loaded.", "description": "Please add at least 
one entity to Modbus {sub_1} in your configuration.yaml file and restart Home Assistant to fix this issue." + }, + "deprecated_restart": { + "title": "`modbus.restart` is being removed", + "description": "Please use reload yaml via the developer tools in the UI instead of via the `modbus.restart` service." } } } From 9cdf7b435a4ef3f1fa2aa9ffa1c4db6260470f5f Mon Sep 17 00:00:00 2001 From: Marc Mueller <30130371+cdce8p@users.noreply.github.com> Date: Tue, 23 Apr 2024 11:06:29 +0200 Subject: [PATCH 265/426] Add uv version to wheels cache key [ci] (#116021) --- .github/workflows/ci.yaml | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 62daa6863d9..5d38b0480b7 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -33,7 +33,7 @@ on: type: boolean env: - CACHE_VERSION: 7 + CACHE_VERSION: 8 UV_CACHE_VERSION: 1 MYPY_CACHE_VERSION: 8 HA_SHORT_VERSION: "2024.5" @@ -452,8 +452,10 @@ jobs: check-latest: true - name: Generate partial uv restore key id: generate-uv-key - run: >- - echo "key=uv-${{ env.UV_CACHE_VERSION }}-${{ + run: | + uv_version=$(cat requirements_test.txt | grep uv | cut -d '=' -f 3) + echo "version=${uv_version}" >> $GITHUB_OUTPUT + echo "key=uv-${{ env.UV_CACHE_VERSION }}-${uv_version}-${{ env.HA_SHORT_VERSION }}-$(date -u '+%Y-%m-%dT%H:%M:%s')" >> $GITHUB_OUTPUT - name: Restore base Python virtual environment id: cache-venv @@ -473,7 +475,9 @@ jobs: ${{ runner.os }}-${{ steps.python.outputs.python-version }}-${{ steps.generate-uv-key.outputs.key }} restore-keys: | - ${{ runner.os }}-${{ steps.python.outputs.python-version }}-uv-${{ env.UV_CACHE_VERSION }}-${{ env.HA_SHORT_VERSION }}- + ${{ runner.os }}-${{ steps.python.outputs.python-version }}-uv-${{ + env.UV_CACHE_VERSION }}-${{ steps.generate-uv-key.outputs.version }}-${{ + env.HA_SHORT_VERSION }}- - name: Install additional OS dependencies if: steps.cache-venv.outputs.cache-hit != 'true' run: | 
From 85203aeb28fa8a31cfb905eec55e32dc505358be Mon Sep 17 00:00:00 2001 From: Jesse Hills <3060199+jesserockz@users.noreply.github.com> Date: Tue, 23 Apr 2024 21:23:28 +1200 Subject: [PATCH 266/426] Bump aioesphomeapi to 24.3.0 (#116004) --- homeassistant/components/esphome/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/esphome/manifest.json b/homeassistant/components/esphome/manifest.json index 0e9a2bdc87f..cde44fa3231 100644 --- a/homeassistant/components/esphome/manifest.json +++ b/homeassistant/components/esphome/manifest.json @@ -15,7 +15,7 @@ "iot_class": "local_push", "loggers": ["aioesphomeapi", "noiseprotocol", "bleak_esphome"], "requirements": [ - "aioesphomeapi==24.2.0", + "aioesphomeapi==24.3.0", "esphome-dashboard-api==1.2.3", "bleak-esphome==1.0.0" ], diff --git a/requirements_all.txt b/requirements_all.txt index 058e8102e18..b2ffc771db8 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -243,7 +243,7 @@ aioelectricitymaps==0.4.0 aioemonitor==1.0.5 # homeassistant.components.esphome -aioesphomeapi==24.2.0 +aioesphomeapi==24.3.0 # homeassistant.components.flo aioflo==2021.11.0 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 879f2b9123e..7b45f2d56d4 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -222,7 +222,7 @@ aioelectricitymaps==0.4.0 aioemonitor==1.0.5 # homeassistant.components.esphome -aioesphomeapi==24.2.0 +aioesphomeapi==24.3.0 # homeassistant.components.flo aioflo==2021.11.0 From 2977ec48720d28ba75120ad462687c368e724bce Mon Sep 17 00:00:00 2001 From: Joost Lekkerkerker Date: Tue, 23 Apr 2024 11:54:19 +0200 Subject: [PATCH 267/426] Add event platform to Lutron (#109121) * Add event platform to Lutron * Add event platform to Lutron * Fix * Fix * Fix * Add deprecation note * Fix * Fix * Update homeassistant/components/lutron/event.py * Update 
homeassistant/components/lutron/event.py * Fix --- .coveragerc | 1 + homeassistant/components/lutron/__init__.py | 74 +------------ homeassistant/components/lutron/event.py | 109 +++++++++++++++++++ homeassistant/components/lutron/strings.json | 15 +++ 4 files changed, 131 insertions(+), 68 deletions(-) create mode 100644 homeassistant/components/lutron/event.py diff --git a/.coveragerc b/.coveragerc index f6368de7d89..e4fe305a3bf 100644 --- a/.coveragerc +++ b/.coveragerc @@ -741,6 +741,7 @@ omit = homeassistant/components/lutron/binary_sensor.py homeassistant/components/lutron/cover.py homeassistant/components/lutron/entity.py + homeassistant/components/lutron/event.py homeassistant/components/lutron/fan.py homeassistant/components/lutron/light.py homeassistant/components/lutron/switch.py diff --git a/homeassistant/components/lutron/__init__.py b/homeassistant/components/lutron/__init__.py index 517eb4c8350..828182547c2 100644 --- a/homeassistant/components/lutron/__init__.py +++ b/homeassistant/components/lutron/__init__.py @@ -3,31 +3,25 @@ from dataclasses import dataclass import logging -from pylutron import Button, Keypad, Led, Lutron, LutronEvent, OccupancyGroup, Output +from pylutron import Button, Keypad, Led, Lutron, OccupancyGroup, Output import voluptuous as vol from homeassistant import config_entries from homeassistant.config_entries import ConfigEntry -from homeassistant.const import ( - ATTR_ID, - CONF_HOST, - CONF_PASSWORD, - CONF_USERNAME, - Platform, -) +from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME, Platform from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant from homeassistant.data_entry_flow import FlowResultType from homeassistant.helpers import device_registry as dr, entity_registry as er import homeassistant.helpers.config_validation as cv from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue from homeassistant.helpers.typing import ConfigType -from 
homeassistant.util import slugify from .const import DOMAIN PLATFORMS = [ Platform.BINARY_SENSOR, Platform.COVER, + Platform.EVENT, Platform.FAN, Platform.LIGHT, Platform.SCENE, @@ -105,69 +99,13 @@ async def async_setup(hass: HomeAssistant, base_config: ConfigType) -> bool: return True -class LutronButton: - """Representation of a button on a Lutron keypad. - - This is responsible for firing events as keypad buttons are pressed - (and possibly released, depending on the button type). It is not - represented as an entity; it simply fires events. - """ - - def __init__( - self, hass: HomeAssistant, area_name: str, keypad: Keypad, button: Button - ) -> None: - """Register callback for activity on the button.""" - name = f"{keypad.name}: {button.name}" - if button.name == "Unknown Button": - name += f" {button.number}" - self._hass = hass - self._has_release_event = ( - button.button_type is not None and "RaiseLower" in button.button_type - ) - self._id = slugify(name) - self._keypad = keypad - self._area_name = area_name - self._button_name = button.name - self._button = button - self._event = "lutron_event" - self._full_id = slugify(f"{area_name} {name}") - self._uuid = button.uuid - - button.subscribe(self.button_callback, None) - - def button_callback( - self, _button: Button, _context: None, event: LutronEvent, _params: dict - ) -> None: - """Fire an event about a button being pressed or released.""" - # Events per button type: - # RaiseLower -> pressed/released - # SingleAction -> single - action = None - if self._has_release_event: - if event == Button.Event.PRESSED: - action = "pressed" - else: - action = "released" - elif event == Button.Event.PRESSED: - action = "single" - - if action: - data = { - ATTR_ID: self._id, - ATTR_ACTION: action, - ATTR_FULL_ID: self._full_id, - ATTR_UUID: self._uuid, - } - self._hass.bus.fire(self._event, data) - - @dataclass(slots=True, kw_only=True) class LutronData: """Storage class for platform global data.""" client: Lutron 
binary_sensors: list[tuple[str, OccupancyGroup]] - buttons: list[LutronButton] + buttons: list[tuple[str, Keypad, Button]] covers: list[tuple[str, Output]] fans: list[tuple[str, Output]] lights: list[tuple[str, Output]] @@ -273,8 +211,8 @@ async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> b led.legacy_uuid, entry_data.client.guid, ) - - entry_data.buttons.append(LutronButton(hass, area.name, keypad, button)) + if button.button_type: + entry_data.buttons.append((area.name, keypad, button)) if area.occupancy_group is not None: entry_data.binary_sensors.append((area.name, area.occupancy_group)) platform = Platform.BINARY_SENSOR diff --git a/homeassistant/components/lutron/event.py b/homeassistant/components/lutron/event.py new file mode 100644 index 00000000000..710f942a006 --- /dev/null +++ b/homeassistant/components/lutron/event.py @@ -0,0 +1,109 @@ +"""Support for Lutron events.""" + +from enum import StrEnum + +from pylutron import Button, Keypad, Lutron, LutronEvent + +from homeassistant.components.event import EventEntity +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import ATTR_ID +from homeassistant.core import HomeAssistant, callback +from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.util import slugify + +from . 
import ATTR_ACTION, ATTR_FULL_ID, ATTR_UUID, DOMAIN, LutronData +from .entity import LutronKeypad + + +class LutronEventType(StrEnum): + """Lutron event types.""" + + SINGLE_PRESS = "single_press" + PRESS = "press" + RELEASE = "release" + + +LEGACY_EVENT_TYPES: dict[LutronEventType, str] = { + LutronEventType.SINGLE_PRESS: "single", + LutronEventType.PRESS: "pressed", + LutronEventType.RELEASE: "released", +} + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up the Lutron event platform.""" + entry_data: LutronData = hass.data[DOMAIN][config_entry.entry_id] + + async_add_entities( + LutronEventEntity(area_name, keypad, button, entry_data.client) + for area_name, keypad, button in entry_data.buttons + ) + + +class LutronEventEntity(LutronKeypad, EventEntity): + """Representation of a Lutron keypad button.""" + + _attr_translation_key = "button" + + def __init__( + self, + area_name: str, + keypad: Keypad, + button: Button, + controller: Lutron, + ) -> None: + """Initialize the button.""" + super().__init__(area_name, button, controller, keypad) + if (name := button.name) == "Unknown Button": + name += f" {button.number}" + self._attr_name = name + self._has_release_event = ( + button.button_type is not None and "RaiseLower" in button.button_type + ) + if self._has_release_event: + self._attr_event_types = [LutronEventType.PRESS, LutronEventType.RELEASE] + else: + self._attr_event_types = [LutronEventType.SINGLE_PRESS] + + self._full_id = slugify(f"{area_name} {name}") + self._id = slugify(name) + + async def async_added_to_hass(self) -> None: + """Register callbacks.""" + await super().async_added_to_hass() + self._lutron_device.subscribe(self.handle_event, None) + + async def async_will_remove_from_hass(self) -> None: + """Unregister callbacks.""" + await super().async_will_remove_from_hass() + # Temporary solution until https://github.com/thecynic/pylutron/pull/93 
gets merged + self._lutron_device._subscribers.remove((self.handle_event, None)) # pylint: disable=protected-access + + @callback + def handle_event( + self, button: Button, _context: None, event: LutronEvent, _params: dict + ) -> None: + """Handle received event.""" + action: LutronEventType | None = None + if self._has_release_event: + if event == Button.Event.PRESSED: + action = LutronEventType.PRESS + else: + action = LutronEventType.RELEASE + elif event == Button.Event.PRESSED: + action = LutronEventType.SINGLE_PRESS + + if action: + data = { + ATTR_ID: self._id, + ATTR_ACTION: LEGACY_EVENT_TYPES[action], + ATTR_FULL_ID: self._full_id, + ATTR_UUID: button.uuid, + } + self.hass.bus.fire("lutron_event", data) + self._trigger_event(action) + self.async_write_ha_state() diff --git a/homeassistant/components/lutron/strings.json b/homeassistant/components/lutron/strings.json index efa0a35d81a..0212c8845d5 100644 --- a/homeassistant/components/lutron/strings.json +++ b/homeassistant/components/lutron/strings.json @@ -22,6 +22,21 @@ "single_instance_allowed": "[%key:common::config_flow::abort::single_instance_allowed%]" } }, + "entity": { + "event": { + "button": { + "state_attributes": { + "event_type": { + "state": { + "single_press": "Single press", + "press": "Press", + "release": "Release" + } + } + } + } + } + }, "issues": { "deprecated_yaml_import_issue_cannot_connect": { "title": "The Lutron YAML configuration import cannot connect to server", From fd14695d26de06d7d82f7e39c3777e3d3c3b085a Mon Sep 17 00:00:00 2001 From: Robert Resch Date: Tue, 23 Apr 2024 13:16:55 +0200 Subject: [PATCH 268/426] Bump deebot-client to 7.0.0 (#116025) --- homeassistant/components/ecovacs/event.py | 6 ++---- homeassistant/components/ecovacs/manifest.json | 2 +- homeassistant/components/ecovacs/select.py | 10 +++++----- homeassistant/components/ecovacs/util.py | 9 +++++++++ homeassistant/components/ecovacs/vacuum.py | 5 +++-- requirements_all.txt | 2 +- requirements_test_all.txt | 2 
+- tests/components/ecovacs/test_event.py | 2 +- 8 files changed, 23 insertions(+), 15 deletions(-) diff --git a/homeassistant/components/ecovacs/event.py b/homeassistant/components/ecovacs/event.py index daac4a626ae..fb4c25c7559 100644 --- a/homeassistant/components/ecovacs/event.py +++ b/homeassistant/components/ecovacs/event.py @@ -13,6 +13,7 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from .const import DOMAIN from .controller import EcovacsController from .entity import EcovacsEntity +from .util import get_name_key async def async_setup_entry( @@ -54,10 +55,7 @@ class EcovacsLastJobEventEntity( # we trigger only on job done return - event_type = event.status.name.lower() - if event.status == CleanJobStatus.MANUAL_STOPPED: - event_type = "manually_stopped" - + event_type = get_name_key(event.status) self._trigger_event(event_type) self.async_write_ha_state() diff --git a/homeassistant/components/ecovacs/manifest.json b/homeassistant/components/ecovacs/manifest.json index 52753e6eb39..2e088024215 100644 --- a/homeassistant/components/ecovacs/manifest.json +++ b/homeassistant/components/ecovacs/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/ecovacs", "iot_class": "cloud_push", "loggers": ["sleekxmppfs", "sucks", "deebot_client"], - "requirements": ["py-sucks==0.9.9", "deebot-client==6.0.2"] + "requirements": ["py-sucks==0.9.9", "deebot-client==7.0.0"] } diff --git a/homeassistant/components/ecovacs/select.py b/homeassistant/components/ecovacs/select.py index 8a3def54e28..01d4c5aae6b 100644 --- a/homeassistant/components/ecovacs/select.py +++ b/homeassistant/components/ecovacs/select.py @@ -22,7 +22,7 @@ from .entity import ( EcovacsDescriptionEntity, EventT, ) -from .util import get_supported_entitites +from .util import get_name_key, get_supported_entitites @dataclass(kw_only=True, frozen=True) @@ -41,8 +41,8 @@ ENTITY_DESCRIPTIONS: tuple[EcovacsSelectEntityDescription, ...] 
= ( EcovacsSelectEntityDescription[WaterInfoEvent]( device_capabilities=VacuumCapabilities, capability_fn=lambda caps: caps.water, - current_option_fn=lambda e: e.amount.display_name, - options_fn=lambda water: [amount.display_name for amount in water.types], + current_option_fn=lambda e: get_name_key(e.amount), + options_fn=lambda water: [get_name_key(amount) for amount in water.types], key="water_amount", translation_key="water_amount", entity_category=EntityCategory.CONFIG, @@ -50,8 +50,8 @@ ENTITY_DESCRIPTIONS: tuple[EcovacsSelectEntityDescription, ...] = ( EcovacsSelectEntityDescription[WorkModeEvent]( device_capabilities=VacuumCapabilities, capability_fn=lambda caps: caps.clean.work_mode, - current_option_fn=lambda e: e.mode.display_name, - options_fn=lambda cap: [mode.display_name for mode in cap.types], + current_option_fn=lambda e: get_name_key(e.mode), + options_fn=lambda cap: [get_name_key(mode) for mode in cap.types], key="work_mode", translation_key="work_mode", entity_registry_enabled_default=False, diff --git a/homeassistant/components/ecovacs/util.py b/homeassistant/components/ecovacs/util.py index 14e69cd4b61..ab5db58c579 100644 --- a/homeassistant/components/ecovacs/util.py +++ b/homeassistant/components/ecovacs/util.py @@ -2,12 +2,15 @@ from __future__ import annotations +from enum import Enum import random import string from typing import TYPE_CHECKING from deebot_client.capabilities import Capabilities +from homeassistant.core import callback + from .entity import ( EcovacsCapabilityEntityDescription, EcovacsDescriptionEntity, @@ -38,3 +41,9 @@ def get_supported_entitites( if isinstance(device.capabilities, description.device_capabilities) if (capability := description.capability_fn(device.capabilities)) ] + + +@callback +def get_name_key(enum: Enum) -> str: + """Return the lower case name of the enum.""" + return enum.name.lower() diff --git a/homeassistant/components/ecovacs/vacuum.py b/homeassistant/components/ecovacs/vacuum.py index 
d5016ab683d..0e990645d7c 100644 --- a/homeassistant/components/ecovacs/vacuum.py +++ b/homeassistant/components/ecovacs/vacuum.py @@ -33,6 +33,7 @@ from homeassistant.util import slugify from .const import DOMAIN from .controller import EcovacsController from .entity import EcovacsEntity +from .util import get_name_key _LOGGER = logging.getLogger(__name__) @@ -242,7 +243,7 @@ class EcovacsVacuum( self._rooms: list[Room] = [] self._attr_fan_speed_list = [ - level.display_name for level in capabilities.fan_speed.types + get_name_key(level) for level in capabilities.fan_speed.types ] async def async_added_to_hass(self) -> None: @@ -254,7 +255,7 @@ class EcovacsVacuum( self.async_write_ha_state() async def on_fan_speed(event: FanSpeedEvent) -> None: - self._attr_fan_speed = event.speed.display_name + self._attr_fan_speed = get_name_key(event.speed) self.async_write_ha_state() async def on_rooms(event: RoomsEvent) -> None: diff --git a/requirements_all.txt b/requirements_all.txt index b2ffc771db8..a5d370fce8b 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -694,7 +694,7 @@ debugpy==1.8.1 # decora==0.6 # homeassistant.components.ecovacs -deebot-client==6.0.2 +deebot-client==7.0.0 # homeassistant.components.ihc # homeassistant.components.namecheapdns diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 7b45f2d56d4..35c005fe4d3 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -572,7 +572,7 @@ dbus-fast==2.21.1 debugpy==1.8.1 # homeassistant.components.ecovacs -deebot-client==6.0.2 +deebot-client==7.0.0 # homeassistant.components.ihc # homeassistant.components.namecheapdns diff --git a/tests/components/ecovacs/test_event.py b/tests/components/ecovacs/test_event.py index 0e7adaad954..104a3bfc69e 100644 --- a/tests/components/ecovacs/test_event.py +++ b/tests/components/ecovacs/test_event.py @@ -76,7 +76,7 @@ async def test_last_job( await notify_and_wait( hass, event_bus, - ReportStatsEvent(0, 1, "spotArea", "3", 
CleanJobStatus.MANUAL_STOPPED, [1]), + ReportStatsEvent(0, 1, "spotArea", "3", CleanJobStatus.MANUALLY_STOPPED, [1]), ) assert (state := hass.states.get(state.entity_id)) From b8918d7d17440178edd8ede02c0bf9e0acb84f8c Mon Sep 17 00:00:00 2001 From: Thomas55555 <59625598+Thomas55555@users.noreply.github.com> Date: Tue, 23 Apr 2024 14:18:49 +0200 Subject: [PATCH 269/426] Add number platform to Husqvarna Automower (#115125) * Add number platform to Husqvarna Automower * use fixture to enable by default * replace state test with snapshot test * make property in entity description * send value as integer * give the exists functions something to do --- .../husqvarna_automower/__init__.py | 1 + .../components/husqvarna_automower/icons.json | 5 + .../components/husqvarna_automower/number.py | 95 +++++++++++++++++++ .../husqvarna_automower/strings.json | 5 + .../snapshots/test_number.ambr | 56 +++++++++++ .../husqvarna_automower/test_number.py | 77 +++++++++++++++ 6 files changed, 239 insertions(+) create mode 100644 homeassistant/components/husqvarna_automower/number.py create mode 100644 tests/components/husqvarna_automower/snapshots/test_number.ambr create mode 100644 tests/components/husqvarna_automower/test_number.py diff --git a/homeassistant/components/husqvarna_automower/__init__.py b/homeassistant/components/husqvarna_automower/__init__.py index 03ab02429bb..fe6f6978014 100644 --- a/homeassistant/components/husqvarna_automower/__init__.py +++ b/homeassistant/components/husqvarna_automower/__init__.py @@ -21,6 +21,7 @@ PLATFORMS: list[Platform] = [ Platform.BINARY_SENSOR, Platform.DEVICE_TRACKER, Platform.LAWN_MOWER, + Platform.NUMBER, Platform.SELECT, Platform.SENSOR, Platform.SWITCH, diff --git a/homeassistant/components/husqvarna_automower/icons.json b/homeassistant/components/husqvarna_automower/icons.json index ec11ef92d08..2ecbf9c198a 100644 --- a/homeassistant/components/husqvarna_automower/icons.json +++ 
b/homeassistant/components/husqvarna_automower/icons.json @@ -8,6 +8,11 @@ "default": "mdi:debug-step-into" } }, + "number": { + "cutting_height": { + "default": "mdi:grass" + } + }, "select": { "headlight_mode": { "default": "mdi:car-light-high" diff --git a/homeassistant/components/husqvarna_automower/number.py b/homeassistant/components/husqvarna_automower/number.py new file mode 100644 index 00000000000..8745b93479d --- /dev/null +++ b/homeassistant/components/husqvarna_automower/number.py @@ -0,0 +1,95 @@ +"""Creates the number entities for the mower.""" + +from collections.abc import Awaitable, Callable +from dataclasses import dataclass +import logging +from typing import Any + +from aioautomower.exceptions import ApiException +from aioautomower.model import MowerAttributes +from aioautomower.session import AutomowerSession + +from homeassistant.components.number import NumberEntity, NumberEntityDescription +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import EntityCategory +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from .const import DOMAIN +from .coordinator import AutomowerDataUpdateCoordinator +from .entity import AutomowerBaseEntity + +_LOGGER = logging.getLogger(__name__) + + +@dataclass(frozen=True, kw_only=True) +class AutomowerNumberEntityDescription(NumberEntityDescription): + """Describes Automower number entity.""" + + exists_fn: Callable[[MowerAttributes], bool] = lambda _: True + value_fn: Callable[[MowerAttributes], int] + set_value_fn: Callable[[AutomowerSession, str, float], Awaitable[Any]] + + +NUMBER_TYPES: tuple[AutomowerNumberEntityDescription, ...] 
= ( + AutomowerNumberEntityDescription( + key="cutting_height", + translation_key="cutting_height", + entity_registry_enabled_default=False, + entity_category=EntityCategory.CONFIG, + native_min_value=1, + native_max_value=9, + exists_fn=lambda data: data.cutting_height is not None, + value_fn=lambda data: data.cutting_height, + set_value_fn=lambda session, mower_id, cheight: session.set_cutting_height( + mower_id, int(cheight) + ), + ), +) + + +async def async_setup_entry( + hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback +) -> None: + """Set up number platform.""" + coordinator: AutomowerDataUpdateCoordinator = hass.data[DOMAIN][entry.entry_id] + async_add_entities( + AutomowerNumberEntity(mower_id, coordinator, description) + for mower_id in coordinator.data + for description in NUMBER_TYPES + if description.exists_fn(coordinator.data[mower_id]) + ) + + +class AutomowerNumberEntity(AutomowerBaseEntity, NumberEntity): + """Defining the AutomowerNumberEntity with AutomowerNumberEntityDescription.""" + + entity_description: AutomowerNumberEntityDescription + + def __init__( + self, + mower_id: str, + coordinator: AutomowerDataUpdateCoordinator, + description: AutomowerNumberEntityDescription, + ) -> None: + """Set up AutomowerNumberEntity.""" + super().__init__(mower_id, coordinator) + self.entity_description = description + self._attr_unique_id = f"{mower_id}_{description.key}" + + @property + def native_value(self) -> float: + """Return the state of the number.""" + return self.entity_description.value_fn(self.mower_attributes) + + async def async_set_native_value(self, value: float) -> None: + """Change to new number value.""" + try: + await self.entity_description.set_value_fn( + self.coordinator.api, self.mower_id, value + ) + except ApiException as exception: + raise HomeAssistantError( + f"Command couldn't be sent to the command queue: {exception}" + ) from exception diff --git 
a/homeassistant/components/husqvarna_automower/strings.json b/homeassistant/components/husqvarna_automower/strings.json index 0a2d3685c6e..b4c1c97cd68 100644 --- a/homeassistant/components/husqvarna_automower/strings.json +++ b/homeassistant/components/husqvarna_automower/strings.json @@ -37,6 +37,11 @@ "name": "Returning to dock" } }, + "number": { + "cutting_height": { + "name": "Cutting height" + } + }, "select": { "headlight_mode": { "name": "Headlight mode", diff --git a/tests/components/husqvarna_automower/snapshots/test_number.ambr b/tests/components/husqvarna_automower/snapshots/test_number.ambr new file mode 100644 index 00000000000..a5479345bd1 --- /dev/null +++ b/tests/components/husqvarna_automower/snapshots/test_number.ambr @@ -0,0 +1,56 @@ +# serializer version: 1 +# name: test_snapshot_number[number.test_mower_1_cutting_height-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 9, + 'min': 1, + 'mode': , + 'step': 1.0, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.test_mower_1_cutting_height', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Cutting height', + 'platform': 'husqvarna_automower', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'cutting_height', + 'unique_id': 'c7233734-b219-4287-a173-08e3643f89f0_cutting_height', + 'unit_of_measurement': None, + }) +# --- +# name: test_snapshot_number[number.test_mower_1_cutting_height-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Test Mower 1 Cutting height', + 'max': 9, + 'min': 1, + 'mode': , + 'step': 1.0, + }), + 'context': , + 'entity_id': 'number.test_mower_1_cutting_height', + 'last_changed': 
, + 'last_reported': , + 'last_updated': , + 'state': '4', + }) +# --- diff --git a/tests/components/husqvarna_automower/test_number.py b/tests/components/husqvarna_automower/test_number.py new file mode 100644 index 00000000000..abf56df1c0b --- /dev/null +++ b/tests/components/husqvarna_automower/test_number.py @@ -0,0 +1,77 @@ +"""Tests for number platform.""" + +from unittest.mock import AsyncMock, patch + +from aioautomower.exceptions import ApiException +import pytest +from syrupy import SnapshotAssertion + +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers import entity_registry as er + +from . import setup_integration + +from tests.common import MockConfigEntry + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_number_commands( + hass: HomeAssistant, + mock_automower_client: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test number commands.""" + entity_id = "number.test_mower_1_cutting_height" + await setup_integration(hass, mock_config_entry) + await hass.services.async_call( + domain="number", + service="set_value", + target={"entity_id": entity_id}, + service_data={"value": "3"}, + blocking=True, + ) + mocked_method = mock_automower_client.set_cutting_height + assert len(mocked_method.mock_calls) == 1 + + mocked_method.side_effect = ApiException("Test error") + with pytest.raises(HomeAssistantError) as exc_info: + await hass.services.async_call( + domain="number", + service="set_value", + target={"entity_id": entity_id}, + service_data={"value": "3"}, + blocking=True, + ) + assert ( + str(exc_info.value) + == "Command couldn't be sent to the command queue: Test error" + ) + assert len(mocked_method.mock_calls) == 2 + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_snapshot_number( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + 
mock_automower_client: AsyncMock, + mock_config_entry: MockConfigEntry, + snapshot: SnapshotAssertion, +) -> None: + """Test states of the number entity.""" + with patch( + "homeassistant.components.husqvarna_automower.PLATFORMS", + [Platform.NUMBER], + ): + await setup_integration(hass, mock_config_entry) + entity_entries = er.async_entries_for_config_entry( + entity_registry, mock_config_entry.entry_id + ) + + assert entity_entries + for entity_entry in entity_entries: + assert hass.states.get(entity_entry.entity_id) == snapshot( + name=f"{entity_entry.entity_id}-state" + ) + assert entity_entry == snapshot(name=f"{entity_entry.entity_id}-entry") From 2c651e190f0387d91e4cd1d8052940c4c4749714 Mon Sep 17 00:00:00 2001 From: Arie Catsman <120491684+catsmanac@users.noreply.github.com> Date: Tue, 23 Apr 2024 14:26:53 +0200 Subject: [PATCH 270/426] Add additional zeroconf discovery coverage and logging to enphase_envoy (#114405) * add debug info to zeroconf for enphase_envoy * Implement review feedback, lost space Co-authored-by: Charles Garwood * review feedback textual changes. * implement review feedbackw.py Co-authored-by: J. Nick Koston * Add some more zeroconf tests and valid jwt * review feedback assert abort reason and keyerror for serialnumber * Review feedback config flow test ends with abort or create_entry * Review feedback optimize resource usage * Cover new code in test. * Use caplog for debug COV --------- Co-authored-by: Charles Garwood Co-authored-by: J. 
Nick Koston --- .../components/enphase_envoy/config_flow.py | 18 ++ tests/components/enphase_envoy/conftest.py | 6 +- .../enphase_envoy/test_config_flow.py | 239 +++++++++++++++++- 3 files changed, 261 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/enphase_envoy/config_flow.py b/homeassistant/components/enphase_envoy/config_flow.py index 13894d423d6..5f859d16142 100644 --- a/homeassistant/components/enphase_envoy/config_flow.py +++ b/homeassistant/components/enphase_envoy/config_flow.py @@ -89,6 +89,14 @@ class EnphaseConfigFlow(ConfigFlow, domain=DOMAIN): self, discovery_info: zeroconf.ZeroconfServiceInfo ) -> ConfigFlowResult: """Handle a flow initialized by zeroconf discovery.""" + if _LOGGER.isEnabledFor(logging.DEBUG): + current_hosts = self._async_current_hosts() + _LOGGER.debug( + "Zeroconf ip %s processing %s, current hosts: %s", + discovery_info.ip_address.version, + discovery_info.host, + current_hosts, + ) if discovery_info.ip_address.version != 4: return self.async_abort(reason="not_ipv4_address") serial = discovery_info.properties["serialnum"] @@ -96,17 +104,27 @@ class EnphaseConfigFlow(ConfigFlow, domain=DOMAIN): await self.async_set_unique_id(serial) self.ip_address = discovery_info.host self._abort_if_unique_id_configured({CONF_HOST: self.ip_address}) + _LOGGER.debug( + "Zeroconf ip %s, fw %s, no existing entry with serial %s", + self.ip_address, + self.protovers, + serial, + ) for entry in self._async_current_entries(include_ignore=False): if ( entry.unique_id is None and CONF_HOST in entry.data and entry.data[CONF_HOST] == self.ip_address ): + _LOGGER.debug( + "Zeroconf update envoy with this ip and blank serial in unique_id", + ) title = f"{ENVOY} {serial}" if entry.title == ENVOY else ENVOY return self.async_update_reload_and_abort( entry, title=title, unique_id=serial, reason="already_configured" ) + _LOGGER.debug("Zeroconf ip %s to step user", self.ip_address) return await self.async_step_user() async def 
async_step_reauth( diff --git a/tests/components/enphase_envoy/conftest.py b/tests/components/enphase_envoy/conftest.py index 40d409aea8e..4d50f026c55 100644 --- a/tests/components/enphase_envoy/conftest.py +++ b/tests/components/enphase_envoy/conftest.py @@ -2,6 +2,7 @@ from unittest.mock import AsyncMock, Mock, patch +import jwt from pyenphase import ( Envoy, EnvoyData, @@ -368,7 +369,10 @@ def mock_authenticate(): @pytest.fixture(name="mock_auth") def mock_auth(serial_number): """Define a mocked EnvoyAuth fixture.""" - return EnvoyTokenAuth("127.0.0.1", token="abc", envoy_serial=serial_number) + token = jwt.encode( + payload={"name": "envoy", "exp": 1907837780}, key="secret", algorithm="HS256" + ) + return EnvoyTokenAuth("127.0.0.1", token=token, envoy_serial=serial_number) @pytest.fixture(name="mock_setup") diff --git a/tests/components/enphase_envoy/test_config_flow.py b/tests/components/enphase_envoy/test_config_flow.py index 7af0cd584a4..2709087a543 100644 --- a/tests/components/enphase_envoy/test_config_flow.py +++ b/tests/components/enphase_envoy/test_config_flow.py @@ -1,6 +1,7 @@ """Test the Enphase Envoy config flow.""" from ipaddress import ip_address +import logging from unittest.mock import AsyncMock from pyenphase import EnvoyAuthenticationError, EnvoyError @@ -13,6 +14,10 @@ from homeassistant.components.enphase_envoy.const import DOMAIN, PLATFORMS from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType +from tests.common import MockConfigEntry + +_LOGGER = logging.getLogger(__name__) + async def test_form(hass: HomeAssistant, config, setup_enphase_envoy) -> None: """Test we get the form.""" @@ -324,9 +329,13 @@ async def test_form_host_already_exists( async def test_zeroconf_serial_already_exists( - hass: HomeAssistant, config_entry, setup_enphase_envoy + hass: HomeAssistant, + config_entry, + setup_enphase_envoy, + caplog: pytest.LogCaptureFixture, ) -> None: """Test serial number already exists from 
zeroconf.""" + _LOGGER.setLevel(logging.DEBUG) result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_ZEROCONF}, @@ -345,6 +354,7 @@ async def test_zeroconf_serial_already_exists( assert result["reason"] == "already_configured" assert config_entry.data["host"] == "4.4.4.4" + assert "Zeroconf ip 4 processing 4.4.4.4, current hosts: {'1.1.1.1'}" in caplog.text async def test_zeroconf_serial_already_exists_ignores_ipv6( @@ -397,6 +407,233 @@ async def test_zeroconf_host_already_exists( assert config_entry.title == "Envoy 1234" +async def test_zero_conf_while_form( + hass: HomeAssistant, config_entry, setup_enphase_envoy +) -> None: + """Test zeroconf while form is active.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_ZEROCONF}, + data=zeroconf.ZeroconfServiceInfo( + ip_address=ip_address("1.1.1.1"), + ip_addresses=[ip_address("1.1.1.1")], + hostname="mock_hostname", + name="mock_name", + port=None, + properties={"serialnum": "1234", "protovers": "7.0.1"}, + type="mock_type", + ), + ) + await hass.async_block_till_done() + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" + assert config_entry.data["host"] == "1.1.1.1" + assert config_entry.unique_id == "1234" + assert config_entry.title == "Envoy 1234" + + +async def test_zero_conf_second_envoy_while_form( + hass: HomeAssistant, config_entry, setup_enphase_envoy +) -> None: + """Test zeroconf while form is active.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + + result2 = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_ZEROCONF}, + 
data=zeroconf.ZeroconfServiceInfo( + ip_address=ip_address("4.4.4.4"), + ip_addresses=[ip_address("4.4.4.4")], + hostname="mock_hostname", + name="mock_name", + port=None, + properties={"serialnum": "4321", "protovers": "7.0.1"}, + type="mock_type", + ), + ) + await hass.async_block_till_done() + assert result2["type"] is FlowResultType.FORM + assert config_entry.data["host"] == "1.1.1.1" + assert config_entry.unique_id == "1234" + assert config_entry.title == "Envoy 1234" + + result3 = await hass.config_entries.flow.async_configure( + result2["flow_id"], + { + "host": "4.4.4.4", + "username": "test-username", + "password": "test-password", + }, + ) + await hass.async_block_till_done() + assert result3["type"] is FlowResultType.CREATE_ENTRY + assert result3["title"] == "Envoy 4321" + assert result3["result"].unique_id == "4321" + + result4 = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + "host": "1.1.1.1", + "username": "test-username", + "password": "test-password", + }, + ) + await hass.async_block_till_done() + assert result4["type"] is FlowResultType.ABORT + + +async def test_zero_conf_malformed_serial_property( + hass: HomeAssistant, config_entry, setup_enphase_envoy +) -> None: + """Test malformed zeroconf properties.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + + with pytest.raises(KeyError) as ex: + await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_ZEROCONF}, + data=zeroconf.ZeroconfServiceInfo( + ip_address=ip_address("1.1.1.1"), + ip_addresses=[ip_address("1.1.1.1")], + hostname="mock_hostname", + name="mock_name", + port=None, + properties={"serilnum": "1234", "protovers": "7.1.2"}, + type="mock_type", + ), + ) + await hass.async_block_till_done() + assert "serialnum" in str(ex.value) + + result3 = await hass.config_entries.flow.async_configure( + 
result["flow_id"], + { + "host": "1.1.1.1", + "username": "test-username", + "password": "test-password", + }, + ) + await hass.async_block_till_done() + assert result3["type"] is FlowResultType.ABORT + + +async def test_zero_conf_malformed_serial( + hass: HomeAssistant, config_entry, setup_enphase_envoy +) -> None: + """Test malformed zeroconf properties.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + + result2 = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_ZEROCONF}, + data=zeroconf.ZeroconfServiceInfo( + ip_address=ip_address("1.1.1.1"), + ip_addresses=[ip_address("1.1.1.1")], + hostname="mock_hostname", + name="mock_name", + port=None, + properties={"serialnum": "12%4", "protovers": "7.1.2"}, + type="mock_type", + ), + ) + await hass.async_block_till_done() + assert result2["type"] is FlowResultType.FORM + + result3 = await hass.config_entries.flow.async_configure( + result2["flow_id"], + { + "host": "1.1.1.1", + "username": "test-username", + "password": "test-password", + }, + ) + await hass.async_block_till_done() + assert result3["type"] is FlowResultType.CREATE_ENTRY + assert result3["title"] == "Envoy 12%4" + + +async def test_zero_conf_malformed_fw_property( + hass: HomeAssistant, config_entry, setup_enphase_envoy +) -> None: + """Test malformed zeroconf property.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_ZEROCONF}, + data=zeroconf.ZeroconfServiceInfo( + ip_address=ip_address("1.1.1.1"), + ip_addresses=[ip_address("1.1.1.1")], + hostname="mock_hostname", + name="mock_name", + port=None, + properties={"serialnum": "1234", "protvers": "7.1.2"}, + 
type="mock_type", + ), + ) + await hass.async_block_till_done() + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" + assert config_entry.data["host"] == "1.1.1.1" + assert config_entry.unique_id == "1234" + assert config_entry.title == "Envoy 1234" + + +async def test_zero_conf_old_blank_entry( + hass: HomeAssistant, setup_enphase_envoy +) -> None: + """Test re-using old blank entry.""" + entry = MockConfigEntry( + domain=DOMAIN, + data={ + "host": "1.1.1.1", + "username": "", + "password": "", + "name": "unknown", + }, + unique_id=None, + title="Envoy", + ) + entry.add_to_hass(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_ZEROCONF}, + data=zeroconf.ZeroconfServiceInfo( + ip_address=ip_address("1.1.1.1"), + ip_addresses=[ip_address("1.1.1.1"), ip_address("1.1.1.2")], + hostname="mock_hostname", + name="mock_name", + port=None, + properties={"serialnum": "1234", "protovers": "7.1.2"}, + type="mock_type", + ), + ) + await hass.async_block_till_done() + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" + + assert entry.data["host"] == "1.1.1.1" + assert entry.unique_id == "1234" + assert entry.title == "Envoy 1234" + + async def test_reauth(hass: HomeAssistant, config_entry, setup_enphase_envoy) -> None: """Test we reauth auth.""" result = await hass.config_entries.flow.async_init( From fced9eb4b5b713ab9425ca83d9d20d45252b42ce Mon Sep 17 00:00:00 2001 From: Robert Resch Date: Tue, 23 Apr 2024 14:33:05 +0200 Subject: [PATCH 271/426] Use location name on self hosted Ecovacs config entries (#115294) --- homeassistant/components/ecovacs/config_flow.py | 2 +- homeassistant/components/ecovacs/controller.py | 5 +++-- homeassistant/components/ecovacs/util.py | 8 ++++++-- 3 files changed, 10 insertions(+), 5 deletions(-) diff --git a/homeassistant/components/ecovacs/config_flow.py 
b/homeassistant/components/ecovacs/config_flow.py index a1ea19144b0..4a421113f5f 100644 --- a/homeassistant/components/ecovacs/config_flow.py +++ b/homeassistant/components/ecovacs/config_flow.py @@ -71,7 +71,7 @@ async def _validate_input( if errors: return errors - device_id = get_client_device_id() + device_id = get_client_device_id(hass, rest_url is not None) country = user_input[CONF_COUNTRY] rest_config = create_rest_config( aiohttp_client.async_get_clientsession(hass), diff --git a/homeassistant/components/ecovacs/controller.py b/homeassistant/components/ecovacs/controller.py index 5defcdf861f..6b6fe3128dd 100644 --- a/homeassistant/components/ecovacs/controller.py +++ b/homeassistant/components/ecovacs/controller.py @@ -43,7 +43,8 @@ class EcovacsController: self._hass = hass self._devices: list[Device] = [] self.legacy_devices: list[VacBot] = [] - self._device_id = get_client_device_id() + rest_url = config.get(CONF_OVERRIDE_REST_URL) + self._device_id = get_client_device_id(hass, rest_url is not None) country = config[CONF_COUNTRY] self._continent = get_continent(country) @@ -52,7 +53,7 @@ class EcovacsController: aiohttp_client.async_get_clientsession(self._hass), device_id=self._device_id, alpha_2_country=country, - override_rest_url=config.get(CONF_OVERRIDE_REST_URL), + override_rest_url=rest_url, ), config[CONF_USERNAME], md5(config[CONF_PASSWORD]), diff --git a/homeassistant/components/ecovacs/util.py b/homeassistant/components/ecovacs/util.py index ab5db58c579..9d692bbbb8f 100644 --- a/homeassistant/components/ecovacs/util.py +++ b/homeassistant/components/ecovacs/util.py @@ -9,7 +9,8 @@ from typing import TYPE_CHECKING from deebot_client.capabilities import Capabilities -from homeassistant.core import callback +from homeassistant.core import HomeAssistant, callback +from homeassistant.util import slugify from .entity import ( EcovacsCapabilityEntityDescription, @@ -21,8 +22,11 @@ if TYPE_CHECKING: from .controller import EcovacsController -def 
get_client_device_id() -> str: +def get_client_device_id(hass: HomeAssistant, self_hosted: bool) -> str: """Get client device id.""" + if self_hosted: + return f"HA-{slugify(hass.config.location_name)}" + return "".join( random.choice(string.ascii_uppercase + string.digits) for _ in range(8) ) From d367bc63f087d06c1994bbd57ae1bbd9862feb57 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?St=C3=A5le=20Stor=C3=B8=20Hauknes?= Date: Tue, 23 Apr 2024 15:53:31 +0200 Subject: [PATCH 272/426] Fix KeyError error when fetching sensors (Airthings) (#115844) --- homeassistant/components/airthings/sensor.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/homeassistant/components/airthings/sensor.py b/homeassistant/components/airthings/sensor.py index fc91d816aca..f0a3dc5be8f 100644 --- a/homeassistant/components/airthings/sensor.py +++ b/homeassistant/components/airthings/sensor.py @@ -157,3 +157,11 @@ class AirthingsHeaterEnergySensor( def native_value(self) -> StateType: """Return the value reported by the sensor.""" return self.coordinator.data[self._id].sensors[self.entity_description.key] # type: ignore[no-any-return] + + @property + def available(self) -> bool: + """Check if device and sensor is available in data.""" + return ( + super().available + and self.entity_description.key in self.coordinator.data[self._id].sensors + ) From a0314cddd4794793e4e7c81fe30fc8bd982526e8 Mon Sep 17 00:00:00 2001 From: Marc Mueller <30130371+cdce8p@users.noreply.github.com> Date: Tue, 23 Apr 2024 16:02:16 +0200 Subject: [PATCH 273/426] Fix invalid tuple annotations (#116035) --- homeassistant/components/imap/coordinator.py | 8 ++++---- tests/components/group/test_init.py | 6 +++--- tests/components/mqtt/test_common.py | 2 +- tests/components/mqtt/test_light_json.py | 4 ++-- tests/test_exceptions.py | 2 +- 5 files changed, 11 insertions(+), 11 deletions(-) diff --git a/homeassistant/components/imap/coordinator.py b/homeassistant/components/imap/coordinator.py index 
53d24044b53..c0123b89ee4 100644 --- a/homeassistant/components/imap/coordinator.py +++ b/homeassistant/components/imap/coordinator.py @@ -125,13 +125,13 @@ class ImapMessage: return str(part.get_payload()) @property - def headers(self) -> dict[str, tuple[str,]]: + def headers(self) -> dict[str, tuple[str, ...]]: """Get the email headers.""" - header_base: dict[str, tuple[str,]] = {} + header_base: dict[str, tuple[str, ...]] = {} for key, value in self.email_message.items(): - header_instances: tuple[str,] = (str(value),) + header_instances: tuple[str, ...] = (str(value),) if header_base.setdefault(key, header_instances) != header_instances: - header_base[key] += header_instances # type: ignore[assignment] + header_base[key] += header_instances return header_base @property diff --git a/tests/components/group/test_init.py b/tests/components/group/test_init.py index 9c2f14f5d74..d3f2747933e 100644 --- a/tests/components/group/test_init.py +++ b/tests/components/group/test_init.py @@ -663,9 +663,9 @@ async def test_is_on(hass: HomeAssistant) -> None: ) async def test_is_on_and_state_mixed_domains( hass: HomeAssistant, - domains: tuple[str,], - states_old: tuple[str,], - states_new: tuple[str,], + domains: tuple[str, ...], + states_old: tuple[str, ...], + states_new: tuple[str, ...], state_ison_group_old: tuple[str, bool], state_ison_group_new: tuple[str, bool], ) -> None: diff --git a/tests/components/mqtt/test_common.py b/tests/components/mqtt/test_common.py index 9dc52871529..e9c3b57777f 100644 --- a/tests/components/mqtt/test_common.py +++ b/tests/components/mqtt/test_common.py @@ -83,7 +83,7 @@ def help_all_subscribe_calls(mqtt_client_mock: MqttMockPahoClient) -> list[Any]: def help_custom_config( mqtt_entity_domain: str, mqtt_base_config: ConfigType, - mqtt_entity_configs: Iterable[ConfigType,], + mqtt_entity_configs: Iterable[ConfigType], ) -> ConfigType: """Tweak a default config for parametrization. 
diff --git a/tests/components/mqtt/test_light_json.py b/tests/components/mqtt/test_light_json.py index ff1b308ef70..739240a352c 100644 --- a/tests/components/mqtt/test_light_json.py +++ b/tests/components/mqtt/test_light_json.py @@ -236,7 +236,7 @@ async def test_warning_if_color_mode_flags_are_used( hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator, caplog: pytest.LogCaptureFixture, - color_modes: tuple[str,], + color_modes: tuple[str, ...], ) -> None: """Test warnings deprecated config keys without supported color modes defined.""" with patch( @@ -278,7 +278,7 @@ async def test_warning_on_discovery_if_color_mode_flags_are_used( mqtt_mock_entry: MqttMockHAClientGenerator, caplog: pytest.LogCaptureFixture, config: dict[str, Any], - color_modes: tuple[str,], + color_modes: tuple[str, ...], ) -> None: """Test warnings deprecated config keys with discovery.""" with patch( diff --git a/tests/test_exceptions.py b/tests/test_exceptions.py index 5e113d3ba10..9d556b55b15 100644 --- a/tests/test_exceptions.py +++ b/tests/test_exceptions.py @@ -102,7 +102,7 @@ def test_template_message(arg: str | Exception, expected: str) -> None: ) async def test_home_assistant_error( hass: HomeAssistant, - exception_args: tuple[Any,], + exception_args: tuple[Any, ...], exception_kwargs: dict[str, Any], args_base_class: tuple[Any], message: str, From 1649957e5cd0b46b285cec49682645f32c516912 Mon Sep 17 00:00:00 2001 From: Spacetech Date: Tue, 23 Apr 2024 07:39:11 -0700 Subject: [PATCH 274/426] Expose dynamic range status in Onkyo media player (#109099) Expose HDR status in Onkyo media player Co-authored-by: Erik Montnemery --- homeassistant/components/onkyo/media_player.py | 1 + 1 file changed, 1 insertion(+) diff --git a/homeassistant/components/onkyo/media_player.py b/homeassistant/components/onkyo/media_player.py index c0503e6e850..7575443c793 100644 --- a/homeassistant/components/onkyo/media_player.py +++ b/homeassistant/components/onkyo/media_player.py @@ -442,6 +442,7 @@ 
class OnkyoDevice(MediaPlayerEntity): "output_color_schema": _tuple_get(values, 6), "output_color_depth": _tuple_get(values, 7), "picture_mode": _tuple_get(values, 8), + "dynamic_range": _tuple_get(values, 9), } self._attr_extra_state_attributes[ATTR_VIDEO_INFORMATION] = info else: From 90efe5ac9082516d7b3214c4a05db7b06cc15a96 Mon Sep 17 00:00:00 2001 From: Maikel Punie Date: Tue, 23 Apr 2024 16:44:37 +0200 Subject: [PATCH 275/426] Velbus Cover: Assume state for VMBxBL modules (#109213) --- homeassistant/components/velbus/cover.py | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/velbus/cover.py b/homeassistant/components/velbus/cover.py index f37de104659..823d682d339 100644 --- a/homeassistant/components/velbus/cover.py +++ b/homeassistant/components/velbus/cover.py @@ -34,6 +34,7 @@ class VelbusCover(VelbusEntity, CoverEntity): """Representation a Velbus cover.""" _channel: VelbusBlind + _assumed_closed: bool def __init__(self, channel: VelbusBlind) -> None: """Initialize the cover.""" @@ -51,11 +52,16 @@ class VelbusCover(VelbusEntity, CoverEntity): | CoverEntityFeature.CLOSE | CoverEntityFeature.STOP ) + self._attr_assumed_state = True + # guess the state to get the open/closed icons somewhat working + self._assumed_closed = False @property def is_closed(self) -> bool | None: """Return if the cover is closed.""" - return self._channel.is_closed() + if self._channel.support_position(): + return self._channel.is_closed() + return self._assumed_closed @property def is_opening(self) -> bool: @@ -83,11 +89,13 @@ class VelbusCover(VelbusEntity, CoverEntity): async def async_open_cover(self, **kwargs: Any) -> None: """Open the cover.""" await self._channel.open() + self._assumed_closed = False @api_call async def async_close_cover(self, **kwargs: Any) -> None: """Close the cover.""" await self._channel.close() + self._assumed_closed = True @api_call async def async_stop_cover(self, **kwargs: Any) -> None: From 
5e250d8a76a88f1fcc91fa4a12b926f3c1ce0f43 Mon Sep 17 00:00:00 2001 From: Volker Stolz Date: Tue, 23 Apr 2024 17:13:25 +0200 Subject: [PATCH 276/426] Augment SyntaxError raised during dependency collection with offending filename (#109204) * Capture parsing exception when collecting dependencies and augment with offending filename. Whereas before any syntax error in some component-file would result in an opaque SyntaxError without pointing out the file, now the result will show as: ``` File "/usr/local/Cellar/python@3.11/3.11.7_1/Frameworks/Python.framework/Versions/3.11/lib/python3.11/multiprocessing/pool.py", line 873, in next raise value SyntaxError: Can't parse file homeassistant/components/your/file.py ``` * tweak * D'oh, had pre-commit hook still off. --------- Co-authored-by: Erik Montnemery --- script/hassfest/dependencies.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/script/hassfest/dependencies.py b/script/hassfest/dependencies.py index 6fe7700cb3f..1547bc1e829 100644 --- a/script/hassfest/dependencies.py +++ b/script/hassfest/dependencies.py @@ -32,7 +32,11 @@ class ImportCollector(ast.NodeVisitor): self._cur_fil_dir = fil.relative_to(self.integration.path) self.referenced[self._cur_fil_dir] = set() - self.visit(ast.parse(fil.read_text())) + try: + self.visit(ast.parse(fil.read_text())) + except SyntaxError as e: + e.add_note(f"File: {fil}") + raise self._cur_fil_dir = None def _add_reference(self, reference_domain: str) -> None: From 14e19c6d9cd6388af8b60575ff67aa27fe6d3972 Mon Sep 17 00:00:00 2001 From: Marc Mueller <30130371+cdce8p@users.noreply.github.com> Date: Tue, 23 Apr 2024 17:32:21 +0200 Subject: [PATCH 277/426] Remove unnecessary type ignores (#116036) --- homeassistant/components/alexa/intent.py | 8 +++++--- homeassistant/components/automation/logbook.py | 12 ++++++++++-- homeassistant/components/evohome/__init__.py | 8 ++++---- homeassistant/components/feedreader/__init__.py | 2 +- 
homeassistant/components/geniushub/water_heater.py | 4 ++-- homeassistant/components/group/notify.py | 4 ++-- homeassistant/components/input_text/__init__.py | 2 +- 7 files changed, 25 insertions(+), 15 deletions(-) diff --git a/homeassistant/components/alexa/intent.py b/homeassistant/components/alexa/intent.py index fdf72ccce28..217d5dccc25 100644 --- a/homeassistant/components/alexa/intent.py +++ b/homeassistant/components/alexa/intent.py @@ -1,5 +1,6 @@ """Support for Alexa skill service end point.""" +from collections.abc import Callable, Coroutine import enum import logging from typing import Any @@ -16,7 +17,9 @@ from .const import DOMAIN, SYN_RESOLUTION_MATCH _LOGGER = logging.getLogger(__name__) -HANDLERS = Registry() # type: ignore[var-annotated] +HANDLERS: Registry[ + str, Callable[[HomeAssistant, dict[str, Any]], Coroutine[Any, Any, dict[str, Any]]] +] = Registry() INTENTS_API_ENDPOINT = "/api/alexa" @@ -129,8 +132,7 @@ async def async_handle_message( if not (handler := HANDLERS.get(req_type)): raise UnknownRequest(f"Received unknown request {req_type}") - response: dict[str, Any] = await handler(hass, message) - return response + return await handler(hass, message) @HANDLERS.register("SessionEndedRequest") diff --git a/homeassistant/components/automation/logbook.py b/homeassistant/components/automation/logbook.py index 7b9c8cf5809..33ed586f901 100644 --- a/homeassistant/components/automation/logbook.py +++ b/homeassistant/components/automation/logbook.py @@ -1,5 +1,8 @@ """Describe logbook events.""" +from collections.abc import Callable +from typing import Any + from homeassistant.components.logbook import ( LOGBOOK_ENTRY_CONTEXT_ID, LOGBOOK_ENTRY_ENTITY_ID, @@ -16,11 +19,16 @@ from .const import DOMAIN @callback -def async_describe_events(hass: HomeAssistant, async_describe_event): # type: ignore[no-untyped-def] +def async_describe_events( + hass: HomeAssistant, + async_describe_event: Callable[ + [str, str, Callable[[LazyEventPartialState], dict[str, 
Any]]], None + ], +) -> None: """Describe logbook events.""" @callback - def async_describe_logbook_event(event: LazyEventPartialState): # type: ignore[no-untyped-def] + def async_describe_logbook_event(event: LazyEventPartialState) -> dict[str, Any]: """Describe a logbook event.""" data = event.data message = "triggered" diff --git a/homeassistant/components/evohome/__init__.py b/homeassistant/components/evohome/__init__.py index 49920d79ff3..4564e863e42 100644 --- a/homeassistant/components/evohome/__init__.py +++ b/homeassistant/components/evohome/__init__.py @@ -33,7 +33,7 @@ from evohomeasync2.schema.const import ( SZ_TIMING_MODE, SZ_UNTIL, ) -import voluptuous as vol # type: ignore[import-untyped] +import voluptuous as vol from homeassistant.const import ( ATTR_ENTITY_ID, @@ -462,7 +462,7 @@ class EvoBroker: self.client.access_token_expires # type: ignore[arg-type] ) - app_storage = { + app_storage: dict[str, Any] = { CONF_USERNAME: self.client.username, REFRESH_TOKEN: self.client.refresh_token, ACCESS_TOKEN: self.client.access_token, @@ -470,11 +470,11 @@ class EvoBroker: } if self.client_v1: - app_storage[USER_DATA] = { # type: ignore[assignment] + app_storage[USER_DATA] = { SZ_SESSION_ID: self.client_v1.broker.session_id, } # this is the schema for STORAGE_VER == 1 else: - app_storage[USER_DATA] = {} # type: ignore[assignment] + app_storage[USER_DATA] = {} await self._store.async_save(app_storage) diff --git a/homeassistant/components/feedreader/__init__.py b/homeassistant/components/feedreader/__init__.py index 0a16e986d0b..2b0c6b77559 100644 --- a/homeassistant/components/feedreader/__init__.py +++ b/homeassistant/components/feedreader/__init__.py @@ -117,7 +117,7 @@ class FeedManager: def _update(self) -> struct_time | None: """Update the feed and publish new entries to the event bus.""" _LOGGER.debug("Fetching new data from feed %s", self._url) - self._feed: feedparser.FeedParserDict = feedparser.parse( # type: ignore[no-redef] + self._feed = 
feedparser.parse( self._url, etag=None if not self._feed else self._feed.get("etag"), modified=None if not self._feed else self._feed.get("modified"), diff --git a/homeassistant/components/geniushub/water_heater.py b/homeassistant/components/geniushub/water_heater.py index 6c3b5223ef9..f17560ebc62 100644 --- a/homeassistant/components/geniushub/water_heater.py +++ b/homeassistant/components/geniushub/water_heater.py @@ -75,9 +75,9 @@ class GeniusWaterHeater(GeniusHeatingZone, WaterHeaterEntity): return list(HA_OPMODE_TO_GH) @property - def current_operation(self) -> str: + def current_operation(self) -> str | None: """Return the current operation mode.""" - return GH_STATE_TO_HA[self._zone.data["mode"]] # type: ignore[return-value] + return GH_STATE_TO_HA[self._zone.data["mode"]] async def async_set_operation_mode(self, operation_mode: str) -> None: """Set a new operation mode for this boiler.""" diff --git a/homeassistant/components/group/notify.py b/homeassistant/components/group/notify.py index bad3d7944d3..425dcf5a914 100644 --- a/homeassistant/components/group/notify.py +++ b/homeassistant/components/group/notify.py @@ -34,12 +34,12 @@ PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( def add_defaults( - input_data: dict[str, Any], default_data: dict[str, Any] + input_data: dict[str, Any], default_data: Mapping[str, Any] ) -> dict[str, Any]: """Deep update a dictionary with default values.""" for key, val in default_data.items(): if isinstance(val, Mapping): - input_data[key] = add_defaults(input_data.get(key, {}), val) # type: ignore[arg-type] + input_data[key] = add_defaults(input_data.get(key, {}), val) elif key not in input_data: input_data[key] = val return input_data diff --git a/homeassistant/components/input_text/__init__.py b/homeassistant/components/input_text/__init__.py index 52788066ba2..55b43ee8a1e 100644 --- a/homeassistant/components/input_text/__init__.py +++ b/homeassistant/components/input_text/__init__.py @@ -264,7 +264,7 @@ class 
InputText(collection.CollectionEntity, RestoreEntity): return state = await self.async_get_last_state() - value: str | None = state and state.state # type: ignore[assignment] + value = state.state if state else None # Check against None because value can be 0 if value is not None and self._minimum <= len(value) <= self._maximum: From 8257af1b22de2910dc769ae7b6dac66771a0cc57 Mon Sep 17 00:00:00 2001 From: Marc Mueller <30130371+cdce8p@users.noreply.github.com> Date: Tue, 23 Apr 2024 17:33:36 +0200 Subject: [PATCH 278/426] Improve energy typing (#116034) --- homeassistant/components/energy/sensor.py | 14 ++++++-------- homeassistant/components/energy/websocket_api.py | 11 +++++------ 2 files changed, 11 insertions(+), 14 deletions(-) diff --git a/homeassistant/components/energy/sensor.py b/homeassistant/components/energy/sensor.py index 37930e31af0..147d8f3e26a 100644 --- a/homeassistant/components/energy/sensor.py +++ b/homeassistant/components/energy/sensor.py @@ -3,7 +3,7 @@ from __future__ import annotations import asyncio -from collections.abc import Callable +from collections.abc import Callable, Mapping import copy from dataclasses import dataclass import logging @@ -167,8 +167,7 @@ class SensorManager: if adapter.flow_type is None: self._process_sensor_data( adapter, - # Opting out of the type complexity because can't get it to work - energy_source, # type: ignore[arg-type] + energy_source, to_add, to_remove, ) @@ -177,8 +176,7 @@ class SensorManager: for flow in energy_source[adapter.flow_type]: # type: ignore[typeddict-item] self._process_sensor_data( adapter, - # Opting out of the type complexity because can't get it to work - flow, # type: ignore[arg-type] + flow, to_add, to_remove, ) @@ -189,7 +187,7 @@ class SensorManager: def _process_sensor_data( self, adapter: SourceAdapter, - config: dict, + config: Mapping[str, Any], to_add: list[EnergyCostSensor], to_remove: dict[tuple[str, str | None, str], EnergyCostSensor], ) -> None: @@ -241,7 +239,7 @@ class 
EnergyCostSensor(SensorEntity): def __init__( self, adapter: SourceAdapter, - config: dict, + config: Mapping[str, Any], ) -> None: """Initialize the sensor.""" super().__init__() @@ -456,7 +454,7 @@ class EnergyCostSensor(SensorEntity): await super().async_will_remove_from_hass() @callback - def update_config(self, config: dict) -> None: + def update_config(self, config: Mapping[str, Any]) -> None: """Update the config.""" self._config = config diff --git a/homeassistant/components/energy/websocket_api.py b/homeassistant/components/energy/websocket_api.py index 2dd45a8be4d..2b5b71d3e2f 100644 --- a/homeassistant/components/energy/websocket_api.py +++ b/homeassistant/components/energy/websocket_api.py @@ -31,7 +31,7 @@ from .data import ( EnergyPreferencesUpdate, async_get_manager, ) -from .types import EnergyPlatform, GetSolarForecastType +from .types import EnergyPlatform, GetSolarForecastType, SolarForecastType from .validate import async_validate EnergyWebSocketCommandHandler = Callable[ @@ -203,19 +203,18 @@ async def ws_solar_forecast( for source in manager.data["energy_sources"]: if ( source["type"] != "solar" - or source.get("config_entry_solar_forecast") is None + or (solar_forecast := source.get("config_entry_solar_forecast")) is None ): continue - # typing is not catching the above guard for config_entry_solar_forecast being none - for config_entry in source["config_entry_solar_forecast"]: # type: ignore[union-attr] - config_entries[config_entry] = None + for entry in solar_forecast: + config_entries[entry] = None if not config_entries: connection.send_result(msg["id"], {}) return - forecasts = {} + forecasts: dict[str, SolarForecastType] = {} forecast_platforms = await async_get_energy_platforms(hass) From d4b801af3251ad0315a9967339ca0ff21eb8f320 Mon Sep 17 00:00:00 2001 From: Thomas55555 <59625598+Thomas55555@users.noreply.github.com> Date: Tue, 23 Apr 2024 17:39:29 +0200 Subject: [PATCH 279/426] Use snapshot test helper in Husqvarna Automower 
(#116039) --- .../husqvarna_automower/test_binary_sensor.py | 12 +++--------- .../husqvarna_automower/test_device_tracker.py | 13 +++---------- tests/components/husqvarna_automower/test_number.py | 13 +++---------- tests/components/husqvarna_automower/test_sensor.py | 12 +++--------- tests/components/husqvarna_automower/test_switch.py | 12 +++--------- 5 files changed, 15 insertions(+), 47 deletions(-) diff --git a/tests/components/husqvarna_automower/test_binary_sensor.py b/tests/components/husqvarna_automower/test_binary_sensor.py index 144dc734025..5500b547853 100644 --- a/tests/components/husqvarna_automower/test_binary_sensor.py +++ b/tests/components/husqvarna_automower/test_binary_sensor.py @@ -20,6 +20,7 @@ from tests.common import ( MockConfigEntry, async_fire_time_changed, load_json_value_fixture, + snapshot_platform, ) @@ -71,13 +72,6 @@ async def test_snapshot_binary_sensor( [Platform.BINARY_SENSOR], ): await setup_integration(hass, mock_config_entry) - entity_entries = er.async_entries_for_config_entry( - entity_registry, mock_config_entry.entry_id + await snapshot_platform( + hass, entity_registry, snapshot, mock_config_entry.entry_id ) - - assert entity_entries - for entity_entry in entity_entries: - assert hass.states.get(entity_entry.entity_id) == snapshot( - name=f"{entity_entry.entity_id}-state" - ) - assert entity_entry == snapshot(name=f"{entity_entry.entity_id}-entry") diff --git a/tests/components/husqvarna_automower/test_device_tracker.py b/tests/components/husqvarna_automower/test_device_tracker.py index d9cab0d5074..015be201ccc 100644 --- a/tests/components/husqvarna_automower/test_device_tracker.py +++ b/tests/components/husqvarna_automower/test_device_tracker.py @@ -10,7 +10,7 @@ from homeassistant.helpers import entity_registry as er from . 
import setup_integration -from tests.common import MockConfigEntry +from tests.common import MockConfigEntry, snapshot_platform async def test_device_tracker_snapshot( @@ -26,13 +26,6 @@ async def test_device_tracker_snapshot( [Platform.DEVICE_TRACKER], ): await setup_integration(hass, mock_config_entry) - entity_entries = er.async_entries_for_config_entry( - entity_registry, mock_config_entry.entry_id + await snapshot_platform( + hass, entity_registry, snapshot, mock_config_entry.entry_id ) - - assert entity_entries - for entity_entry in entity_entries: - assert hass.states.get(entity_entry.entity_id) == snapshot( - name=f"{entity_entry.entity_id}-state" - ) - assert entity_entry == snapshot(name=f"{entity_entry.entity_id}-entry") diff --git a/tests/components/husqvarna_automower/test_number.py b/tests/components/husqvarna_automower/test_number.py index abf56df1c0b..b66f1965151 100644 --- a/tests/components/husqvarna_automower/test_number.py +++ b/tests/components/husqvarna_automower/test_number.py @@ -13,7 +13,7 @@ from homeassistant.helpers import entity_registry as er from . 
import setup_integration -from tests.common import MockConfigEntry +from tests.common import MockConfigEntry, snapshot_platform @pytest.mark.usefixtures("entity_registry_enabled_by_default") @@ -65,13 +65,6 @@ async def test_snapshot_number( [Platform.NUMBER], ): await setup_integration(hass, mock_config_entry) - entity_entries = er.async_entries_for_config_entry( - entity_registry, mock_config_entry.entry_id + await snapshot_platform( + hass, entity_registry, snapshot, mock_config_entry.entry_id ) - - assert entity_entries - for entity_entry in entity_entries: - assert hass.states.get(entity_entry.entity_id) == snapshot( - name=f"{entity_entry.entity_id}-state" - ) - assert entity_entry == snapshot(name=f"{entity_entry.entity_id}-entry") diff --git a/tests/components/husqvarna_automower/test_sensor.py b/tests/components/husqvarna_automower/test_sensor.py index 5d304330aca..f54ce9c6275 100644 --- a/tests/components/husqvarna_automower/test_sensor.py +++ b/tests/components/husqvarna_automower/test_sensor.py @@ -21,6 +21,7 @@ from tests.common import ( MockConfigEntry, async_fire_time_changed, load_json_value_fixture, + snapshot_platform, ) @@ -132,13 +133,6 @@ async def test_sensor( [Platform.SENSOR], ): await setup_integration(hass, mock_config_entry) - entity_entries = er.async_entries_for_config_entry( - entity_registry, mock_config_entry.entry_id + await snapshot_platform( + hass, entity_registry, snapshot, mock_config_entry.entry_id ) - - assert entity_entries - for entity_entry in entity_entries: - assert hass.states.get(entity_entry.entity_id) == snapshot( - name=f"{entity_entry.entity_id}-state" - ) - assert entity_entry == snapshot(name=f"{entity_entry.entity_id}-entry") diff --git a/tests/components/husqvarna_automower/test_switch.py b/tests/components/husqvarna_automower/test_switch.py index 8dbb5450db1..aab1128a746 100644 --- a/tests/components/husqvarna_automower/test_switch.py +++ b/tests/components/husqvarna_automower/test_switch.py @@ -23,6 +23,7 @@ 
from tests.common import ( MockConfigEntry, async_fire_time_changed, load_json_value_fixture, + snapshot_platform, ) @@ -106,13 +107,6 @@ async def test_switch( [Platform.SWITCH], ): await setup_integration(hass, mock_config_entry) - entity_entries = er.async_entries_for_config_entry( - entity_registry, mock_config_entry.entry_id + await snapshot_platform( + hass, entity_registry, snapshot, mock_config_entry.entry_id ) - - assert entity_entries - for entity_entry in entity_entries: - assert hass.states.get(entity_entry.entity_id) == snapshot( - name=f"{entity_entry.entity_id}-state" - ) - assert entity_entry == snapshot(name=f"{entity_entry.entity_id}-entry") From 10228ee1a2bd1bfb41eb4f74b880d9dcce45b3da Mon Sep 17 00:00:00 2001 From: Michael <35783820+mib1185@users.noreply.github.com> Date: Tue, 23 Apr 2024 17:39:44 +0200 Subject: [PATCH 280/426] Bump python-fritzhome to 0.6.11 (#115904) --- .../components/fritzbox/coordinator.py | 26 +++++++++++-------- .../components/fritzbox/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 4 files changed, 18 insertions(+), 14 deletions(-) diff --git a/homeassistant/components/fritzbox/coordinator.py b/homeassistant/components/fritzbox/coordinator.py index 06454fa912a..54af8fbdacd 100644 --- a/homeassistant/components/fritzbox/coordinator.py +++ b/homeassistant/components/fritzbox/coordinator.py @@ -82,9 +82,9 @@ class FritzboxDataUpdateCoordinator(DataUpdateCoordinator[FritzboxCoordinatorDat def _update_fritz_devices(self) -> FritzboxCoordinatorData: """Update all fritzbox device data.""" try: - self.fritz.update_devices() + self.fritz.update_devices(ignore_removed=False) if self.has_templates: - self.fritz.update_templates() + self.fritz.update_templates(ignore_removed=False) except RequestConnectionError as ex: raise UpdateFailed from ex except HTTPError: @@ -93,9 +93,9 @@ class FritzboxDataUpdateCoordinator(DataUpdateCoordinator[FritzboxCoordinatorDat self.fritz.login() except LoginError as 
ex: raise ConfigEntryAuthFailed from ex - self.fritz.update_devices() + self.fritz.update_devices(ignore_removed=False) if self.has_templates: - self.fritz.update_templates() + self.fritz.update_templates(ignore_removed=False) devices = self.fritz.get_devices() device_data = {} @@ -124,14 +124,18 @@ class FritzboxDataUpdateCoordinator(DataUpdateCoordinator[FritzboxCoordinatorDat self.new_devices = device_data.keys() - self.data.devices.keys() self.new_templates = template_data.keys() - self.data.templates.keys() - if ( - self.data.devices.keys() - device_data.keys() - or self.data.templates.keys() - template_data.keys() - ): - self.cleanup_removed_devices(list(device_data) + list(template_data)) - return FritzboxCoordinatorData(devices=device_data, templates=template_data) async def _async_update_data(self) -> FritzboxCoordinatorData: """Fetch all device data.""" - return await self.hass.async_add_executor_job(self._update_fritz_devices) + new_data = await self.hass.async_add_executor_job(self._update_fritz_devices) + + if ( + self.data.devices.keys() - new_data.devices.keys() + or self.data.templates.keys() - new_data.templates.keys() + ): + self.cleanup_removed_devices( + list(new_data.devices) + list(new_data.templates) + ) + + return new_data diff --git a/homeassistant/components/fritzbox/manifest.json b/homeassistant/components/fritzbox/manifest.json index 5d41f8c12dc..de2e9e0200a 100644 --- a/homeassistant/components/fritzbox/manifest.json +++ b/homeassistant/components/fritzbox/manifest.json @@ -8,7 +8,7 @@ "iot_class": "local_polling", "loggers": ["pyfritzhome"], "quality_scale": "gold", - "requirements": ["pyfritzhome==0.6.10"], + "requirements": ["pyfritzhome==0.6.11"], "ssdp": [ { "st": "urn:schemas-upnp-org:device:fritzbox:1" diff --git a/requirements_all.txt b/requirements_all.txt index a5d370fce8b..240606435ba 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1836,7 +1836,7 @@ pyforked-daapd==0.1.14 pyfreedompro==1.1.0 # 
homeassistant.components.fritzbox -pyfritzhome==0.6.10 +pyfritzhome==0.6.11 # homeassistant.components.ifttt pyfttt==0.3 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 35c005fe4d3..51161b1afd3 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1432,7 +1432,7 @@ pyforked-daapd==0.1.14 pyfreedompro==1.1.0 # homeassistant.components.fritzbox -pyfritzhome==0.6.10 +pyfritzhome==0.6.11 # homeassistant.components.ifttt pyfttt==0.3 From 3b678896d966da540d229050ae5ed7d24e6ad906 Mon Sep 17 00:00:00 2001 From: David Knowles Date: Tue, 23 Apr 2024 12:08:07 -0400 Subject: [PATCH 281/426] Remove platform schema from Hydrawise (#116032) --- .../components/hydrawise/binary_sensor.py | 26 -------------- homeassistant/components/hydrawise/sensor.py | 28 +-------------- homeassistant/components/hydrawise/switch.py | 36 +------------------ 3 files changed, 2 insertions(+), 88 deletions(-) diff --git a/homeassistant/components/hydrawise/binary_sensor.py b/homeassistant/components/hydrawise/binary_sensor.py index e75cf56ac75..a93976b12e0 100644 --- a/homeassistant/components/hydrawise/binary_sensor.py +++ b/homeassistant/components/hydrawise/binary_sensor.py @@ -3,20 +3,15 @@ from __future__ import annotations from pydrawise.schema import Zone -import voluptuous as vol from homeassistant.components.binary_sensor import ( - PLATFORM_SCHEMA, BinarySensorDeviceClass, BinarySensorEntity, BinarySensorEntityDescription, ) from homeassistant.config_entries import ConfigEntry -from homeassistant.const import CONF_MONITORED_CONDITIONS from homeassistant.core import HomeAssistant -import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType from .const import DOMAIN from .coordinator import HydrawiseDataUpdateCoordinator @@ -39,27 +34,6 @@ BINARY_SENSOR_KEYS: list[str] = [ desc.key for desc in (BINARY_SENSOR_STATUS, 
*BINARY_SENSOR_TYPES) ] -# Deprecated since Home Assistant 2023.10.0 -# Can be removed completely in 2024.4.0 -PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( - { - vol.Optional(CONF_MONITORED_CONDITIONS, default=BINARY_SENSOR_KEYS): vol.All( - cv.ensure_list, [vol.In(BINARY_SENSOR_KEYS)] - ) - } -) - - -def setup_platform( - hass: HomeAssistant, - config: ConfigType, - add_entities: AddEntitiesCallback, - discovery_info: DiscoveryInfoType | None = None, -) -> None: - """Set up a sensor for a Hydrawise device.""" - # We don't need to trigger import flow from here as it's triggered from `__init__.py` - return # pragma: no cover - async def async_setup_entry( hass: HomeAssistant, diff --git a/homeassistant/components/hydrawise/sensor.py b/homeassistant/components/hydrawise/sensor.py index eedeb4a07bc..84e9f979878 100644 --- a/homeassistant/components/hydrawise/sensor.py +++ b/homeassistant/components/hydrawise/sensor.py @@ -5,20 +5,16 @@ from __future__ import annotations from datetime import datetime from pydrawise.schema import Zone -import voluptuous as vol from homeassistant.components.sensor import ( - PLATFORM_SCHEMA, SensorDeviceClass, SensorEntity, SensorEntityDescription, ) from homeassistant.config_entries import ConfigEntry -from homeassistant.const import CONF_MONITORED_CONDITIONS, UnitOfTime +from homeassistant.const import UnitOfTime from homeassistant.core import HomeAssistant -import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType from homeassistant.util import dt as dt_util from .const import DOMAIN @@ -39,32 +35,10 @@ SENSOR_TYPES: tuple[SensorEntityDescription, ...] 
= ( ) SENSOR_KEYS: list[str] = [desc.key for desc in SENSOR_TYPES] - -# Deprecated since Home Assistant 2023.10.0 -# Can be removed completely in 2024.4.0 -PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( - { - vol.Optional(CONF_MONITORED_CONDITIONS, default=SENSOR_KEYS): vol.All( - cv.ensure_list, [vol.In(SENSOR_KEYS)] - ) - } -) - TWO_YEAR_SECONDS = 60 * 60 * 24 * 365 * 2 WATERING_TIME_ICON = "mdi:water-pump" -def setup_platform( - hass: HomeAssistant, - config: ConfigType, - add_entities: AddEntitiesCallback, - discovery_info: DiscoveryInfoType | None = None, -) -> None: - """Set up a sensor for a Hydrawise device.""" - # We don't need to trigger import flow from here as it's triggered from `__init__.py` - return # pragma: no cover - - async def async_setup_entry( hass: HomeAssistant, config_entry: ConfigEntry, diff --git a/homeassistant/components/hydrawise/switch.py b/homeassistant/components/hydrawise/switch.py index 49106a5938a..2dc459e7dd4 100644 --- a/homeassistant/components/hydrawise/switch.py +++ b/homeassistant/components/hydrawise/switch.py @@ -6,28 +6,18 @@ from datetime import timedelta from typing import Any from pydrawise.schema import Zone -import voluptuous as vol from homeassistant.components.switch import ( - PLATFORM_SCHEMA, SwitchDeviceClass, SwitchEntity, SwitchEntityDescription, ) from homeassistant.config_entries import ConfigEntry -from homeassistant.const import CONF_MONITORED_CONDITIONS from homeassistant.core import HomeAssistant -import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType from homeassistant.util import dt as dt_util -from .const import ( - ALLOWED_WATERING_TIME, - CONF_WATERING_TIME, - DEFAULT_WATERING_TIME, - DOMAIN, -) +from .const import DEFAULT_WATERING_TIME, DOMAIN from .coordinator import HydrawiseDataUpdateCoordinator from .entity import HydrawiseEntity @@ -46,30 +36,6 @@ 
SWITCH_TYPES: tuple[SwitchEntityDescription, ...] = ( SWITCH_KEYS: list[str] = [desc.key for desc in SWITCH_TYPES] -# Deprecated since Home Assistant 2023.10.0 -# Can be removed completely in 2024.4.0 -PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( - { - vol.Optional(CONF_MONITORED_CONDITIONS, default=SWITCH_KEYS): vol.All( - cv.ensure_list, [vol.In(SWITCH_KEYS)] - ), - vol.Optional( - CONF_WATERING_TIME, default=DEFAULT_WATERING_TIME.total_seconds() // 60 - ): vol.All(vol.In(ALLOWED_WATERING_TIME)), - } -) - - -def setup_platform( - hass: HomeAssistant, - config: ConfigType, - add_entities: AddEntitiesCallback, - discovery_info: DiscoveryInfoType | None = None, -) -> None: - """Set up a sensor for a Hydrawise device.""" - # We don't need to trigger import flow from here as it's triggered from `__init__.py` - return # pragma: no cover - async def async_setup_entry( hass: HomeAssistant, From d4ecf30b6a2696433b20e7e6c93fe23c2fda693f Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Tue, 23 Apr 2024 18:35:53 +0200 Subject: [PATCH 282/426] Include libgammu-dev in devcontainer (#115983) --- Dockerfile.dev | 1 + 1 file changed, 1 insertion(+) diff --git a/Dockerfile.dev b/Dockerfile.dev index e60456f7b1f..507cc9a7bb2 100644 --- a/Dockerfile.dev +++ b/Dockerfile.dev @@ -22,6 +22,7 @@ RUN \ libavcodec-dev \ libavdevice-dev \ libavutil-dev \ + libgammu-dev \ libswscale-dev \ libswresample-dev \ libavfilter-dev \ From d8aa1cd8b51f1d6a141fe1333c6b4c81f4db35c1 Mon Sep 17 00:00:00 2001 From: Tucker Kern Date: Tue, 23 Apr 2024 11:11:40 -0600 Subject: [PATCH 283/426] Add fan preset translations and icons to BAF (#109944) --- homeassistant/components/baf/const.py | 2 +- homeassistant/components/baf/fan.py | 1 + homeassistant/components/baf/icons.json | 15 +++++++++++++++ homeassistant/components/baf/strings.json | 11 +++++++++++ 4 files changed, 28 insertions(+), 1 deletion(-) create mode 100644 homeassistant/components/baf/icons.json diff --git 
a/homeassistant/components/baf/const.py b/homeassistant/components/baf/const.py index 9876d7ffec3..4d5020bdf02 100644 --- a/homeassistant/components/baf/const.py +++ b/homeassistant/components/baf/const.py @@ -9,7 +9,7 @@ QUERY_INTERVAL = 300 RUN_TIMEOUT = 20 -PRESET_MODE_AUTO = "Auto" +PRESET_MODE_AUTO = "auto" SPEED_COUNT = 7 SPEED_RANGE = (1, SPEED_COUNT) diff --git a/homeassistant/components/baf/fan.py b/homeassistant/components/baf/fan.py index 15c6519747d..6c90e2a53cb 100644 --- a/homeassistant/components/baf/fan.py +++ b/homeassistant/components/baf/fan.py @@ -48,6 +48,7 @@ class BAFFan(BAFEntity, FanEntity): _attr_preset_modes = [PRESET_MODE_AUTO] _attr_speed_count = SPEED_COUNT _attr_name = None + _attr_translation_key = "baf" @callback def _async_update_attrs(self) -> None: diff --git a/homeassistant/components/baf/icons.json b/homeassistant/components/baf/icons.json new file mode 100644 index 00000000000..c91c4cde86a --- /dev/null +++ b/homeassistant/components/baf/icons.json @@ -0,0 +1,15 @@ +{ + "entity": { + "fan": { + "baf": { + "state_attributes": { + "preset_mode": { + "state": { + "auto": "mdi:fan-auto" + } + } + } + } + } + } +} diff --git a/homeassistant/components/baf/strings.json b/homeassistant/components/baf/strings.json index 5143b519d27..e2f02a6095e 100644 --- a/homeassistant/components/baf/strings.json +++ b/homeassistant/components/baf/strings.json @@ -26,6 +26,17 @@ "name": "Auto comfort" } }, + "fan": { + "baf": { + "state_attributes": { + "preset_mode": { + "state": { + "auto": "[%key:component::climate::entity_component::_::state_attributes::fan_mode::state::auto%]" + } + } + } + } + }, "number": { "comfort_min_speed": { "name": "Auto Comfort Minimum Speed" From cc9eab4c78a77cd4004c96fb15b93edd03367481 Mon Sep 17 00:00:00 2001 From: Jim Date: Tue, 23 Apr 2024 18:32:09 +0100 Subject: [PATCH 284/426] Allow plain text messages in telegram_bot (#110051) * Add new plain_text parser Passing None in the parse_mode kwargs on the various bot 
methods actually means that no parser is used. * Add new plain text parser option to services.yaml --------- Co-authored-by: Erik Montnemery --- homeassistant/components/telegram_bot/__init__.py | 2 ++ homeassistant/components/telegram_bot/services.yaml | 6 ++++++ 2 files changed, 8 insertions(+) diff --git a/homeassistant/components/telegram_bot/__init__.py b/homeassistant/components/telegram_bot/__init__.py index 897fd6a9bac..f672ae1547f 100644 --- a/homeassistant/components/telegram_bot/__init__.py +++ b/homeassistant/components/telegram_bot/__init__.py @@ -122,6 +122,7 @@ EVENT_TELEGRAM_SENT = "telegram_sent" PARSER_HTML = "html" PARSER_MD = "markdown" PARSER_MD2 = "markdownv2" +PARSER_PLAIN_TEXT = "plain_text" DEFAULT_TRUSTED_NETWORKS = [ip_network("149.154.160.0/20"), ip_network("91.108.4.0/22")] @@ -524,6 +525,7 @@ class TelegramNotificationService: PARSER_HTML: ParseMode.HTML, PARSER_MD: ParseMode.MARKDOWN, PARSER_MD2: ParseMode.MARKDOWN_V2, + PARSER_PLAIN_TEXT: None, } self._parse_mode = self._parsers.get(parser) self.bot = bot diff --git a/homeassistant/components/telegram_bot/services.yaml b/homeassistant/components/telegram_bot/services.yaml index 1587f754508..d2195c1d6ce 100644 --- a/homeassistant/components/telegram_bot/services.yaml +++ b/homeassistant/components/telegram_bot/services.yaml @@ -22,6 +22,7 @@ send_message: - "html" - "markdown" - "markdownv2" + - "plain_text" disable_notification: selector: boolean: @@ -94,6 +95,7 @@ send_photo: - "html" - "markdown" - "markdownv2" + - "plain_text" disable_notification: selector: boolean: @@ -229,6 +231,7 @@ send_animation: - "html" - "markdown" - "markdownv2" + - "plain_text" disable_notification: selector: boolean: @@ -300,6 +303,7 @@ send_video: - "html" - "markdown" - "markdownv2" + - "plain_text" disable_notification: selector: boolean: @@ -435,6 +439,7 @@ send_document: - "html" - "markdown" - "markdownv2" + - "plain_text" disable_notification: selector: boolean: @@ -587,6 +592,7 @@ edit_message: 
- "html" - "markdown" - "markdownv2" + - "plain_text" disable_web_page_preview: selector: boolean: From 5826f9a4f605e2769a63a43f70a6dc49dcc6d87b Mon Sep 17 00:00:00 2001 From: Jonny Rimkus Date: Mon, 1 Apr 2024 10:50:21 +0200 Subject: [PATCH 285/426] Bump slixmpp version to 1.8.5 (#114448) * Update slixmpp to 1.8.5, hopefully fixes #113990 * Bump slixmpp version to 1.8.5 #114448 --- homeassistant/components/xmpp/manifest.json | 2 +- requirements_all.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/xmpp/manifest.json b/homeassistant/components/xmpp/manifest.json index 30dee6c842b..308c3d70978 100644 --- a/homeassistant/components/xmpp/manifest.json +++ b/homeassistant/components/xmpp/manifest.json @@ -5,5 +5,5 @@ "documentation": "https://www.home-assistant.io/integrations/xmpp", "iot_class": "cloud_push", "loggers": ["pyasn1", "slixmpp"], - "requirements": ["slixmpp==1.8.4", "emoji==2.8.0"] + "requirements": ["slixmpp==1.8.5", "emoji==2.8.0"] } diff --git a/requirements_all.txt b/requirements_all.txt index 194dda7caac..c93b665ea8c 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2553,7 +2553,7 @@ sisyphus-control==3.1.3 slackclient==2.5.0 # homeassistant.components.xmpp -slixmpp==1.8.4 +slixmpp==1.8.5 # homeassistant.components.smart_meter_texas smart-meter-texas==0.4.7 From 5194faa8fd959832086fb0064beebd627f7f5a9f Mon Sep 17 00:00:00 2001 From: Joost Lekkerkerker Date: Fri, 19 Apr 2024 18:18:32 +0200 Subject: [PATCH 286/426] Make Withings recoverable after internet outage (#115124) --- homeassistant/components/withings/__init__.py | 7 +- tests/components/withings/test_init.py | 105 ++++++++++++++++++ 2 files changed, 111 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/withings/__init__.py b/homeassistant/components/withings/__init__.py index c14fb465731..665a90ec5a7 100644 --- a/homeassistant/components/withings/__init__.py +++ b/homeassistant/components/withings/__init__.py @@ 
-12,6 +12,7 @@ from dataclasses import dataclass, field from datetime import timedelta from typing import TYPE_CHECKING, Any, cast +from aiohttp import ClientError from aiohttp.hdrs import METH_POST from aiohttp.web import Request, Response from aiowithings import NotificationCategory, WithingsClient @@ -340,7 +341,11 @@ class WithingsWebhookManager: async def async_unsubscribe_webhooks(client: WithingsClient) -> None: """Unsubscribe to all Withings webhooks.""" - current_webhooks = await client.list_notification_configurations() + try: + current_webhooks = await client.list_notification_configurations() + except ClientError: + LOGGER.exception("Error when unsubscribing webhooks") + return for webhook_configuration in current_webhooks: LOGGER.debug( diff --git a/tests/components/withings/test_init.py b/tests/components/withings/test_init.py index eb089f44216..42b2b8da965 100644 --- a/tests/components/withings/test_init.py +++ b/tests/components/withings/test_init.py @@ -5,6 +5,7 @@ from typing import Any from unittest.mock import AsyncMock, MagicMock, patch from urllib.parse import urlparse +from aiohttp import ClientConnectionError from aiohttp.hdrs import METH_HEAD from aiowithings import ( NotificationCategory, @@ -508,6 +509,110 @@ async def test_cloud_disconnect( assert withings.subscribe_notification.call_count == 12 +async def test_internet_disconnect( + hass: HomeAssistant, + withings: AsyncMock, + webhook_config_entry: MockConfigEntry, + hass_client_no_auth: ClientSessionGenerator, + freezer: FrozenDateTimeFactory, +) -> None: + """Test we can recover from internet disconnects.""" + await mock_cloud(hass) + await hass.async_block_till_done() + + with ( + patch("homeassistant.components.cloud.async_is_logged_in", return_value=True), + patch.object(cloud, "async_is_connected", return_value=True), + patch.object(cloud, "async_active_subscription", return_value=True), + patch( + "homeassistant.components.cloud.async_create_cloudhook", + 
return_value="https://hooks.nabu.casa/ABCD", + ), + patch( + "homeassistant.components.withings.async_get_config_entry_implementation", + ), + patch( + "homeassistant.components.cloud.async_delete_cloudhook", + ), + patch( + "homeassistant.components.withings.webhook_generate_url", + ), + ): + await setup_integration(hass, webhook_config_entry) + await prepare_webhook_setup(hass, freezer) + + assert cloud.async_active_subscription(hass) is True + assert cloud.async_is_connected(hass) is True + assert withings.revoke_notification_configurations.call_count == 3 + assert withings.subscribe_notification.call_count == 6 + + await hass.async_block_till_done() + + withings.list_notification_configurations.side_effect = ClientConnectionError + + async_mock_cloud_connection_status(hass, False) + await hass.async_block_till_done() + + assert withings.revoke_notification_configurations.call_count == 3 + + async_mock_cloud_connection_status(hass, True) + await hass.async_block_till_done() + + assert withings.subscribe_notification.call_count == 12 + + +async def test_cloud_disconnect_retry( + hass: HomeAssistant, + withings: AsyncMock, + webhook_config_entry: MockConfigEntry, + hass_client_no_auth: ClientSessionGenerator, + freezer: FrozenDateTimeFactory, +) -> None: + """Test we retry to create webhook connection again after cloud disconnects.""" + await mock_cloud(hass) + await hass.async_block_till_done() + + with ( + patch("homeassistant.components.cloud.async_is_logged_in", return_value=True), + patch.object(cloud, "async_is_connected", return_value=True), + patch.object( + cloud, "async_active_subscription", return_value=True + ) as mock_async_active_subscription, + patch( + "homeassistant.components.cloud.async_create_cloudhook", + return_value="https://hooks.nabu.casa/ABCD", + ), + patch( + "homeassistant.components.withings.async_get_config_entry_implementation", + ), + patch( + "homeassistant.components.cloud.async_delete_cloudhook", + ), + patch( + 
"homeassistant.components.withings.webhook_generate_url", + ), + ): + await setup_integration(hass, webhook_config_entry) + await prepare_webhook_setup(hass, freezer) + + assert cloud.async_active_subscription(hass) is True + assert cloud.async_is_connected(hass) is True + assert mock_async_active_subscription.call_count == 3 + + await hass.async_block_till_done() + + async_mock_cloud_connection_status(hass, False) + await hass.async_block_till_done() + + assert mock_async_active_subscription.call_count == 3 + + freezer.tick(timedelta(seconds=30)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + assert mock_async_active_subscription.call_count == 4 + + @pytest.mark.parametrize( ("body", "expected_code"), [ From 7eb6b2ca3362bd848be23563a3699d8206fb4a94 Mon Sep 17 00:00:00 2001 From: avee87 <6134677+avee87@users.noreply.github.com> Date: Fri, 19 Apr 2024 17:21:21 +0100 Subject: [PATCH 287/426] Fix Hyperion light not updating state (#115389) --- homeassistant/components/hyperion/sensor.py | 4 ++-- tests/components/hyperion/test_sensor.py | 1 - 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/hyperion/sensor.py b/homeassistant/components/hyperion/sensor.py index f537c282686..ad972806ae5 100644 --- a/homeassistant/components/hyperion/sensor.py +++ b/homeassistant/components/hyperion/sensor.py @@ -191,13 +191,13 @@ class HyperionVisiblePrioritySensor(HyperionSensor): if priority[KEY_COMPONENTID] == "COLOR": state_value = priority[KEY_VALUE][KEY_RGB] else: - state_value = priority[KEY_OWNER] + state_value = priority.get(KEY_OWNER) attrs = { "component_id": priority[KEY_COMPONENTID], "origin": priority[KEY_ORIGIN], "priority": priority[KEY_PRIORITY], - "owner": priority[KEY_OWNER], + "owner": priority.get(KEY_OWNER), } if priority[KEY_COMPONENTID] == "COLOR": diff --git a/tests/components/hyperion/test_sensor.py b/tests/components/hyperion/test_sensor.py index 65991b4b7e1..8900db177fc 100644 --- 
a/tests/components/hyperion/test_sensor.py +++ b/tests/components/hyperion/test_sensor.py @@ -159,7 +159,6 @@ async def test_visible_effect_state_changes(hass: HomeAssistant) -> None: KEY_ACTIVE: True, KEY_COMPONENTID: "COLOR", KEY_ORIGIN: "System", - KEY_OWNER: "System", KEY_PRIORITY: 250, KEY_VALUE: {KEY_RGB: [0, 0, 0]}, KEY_VISIBLE: True, From 42c13eb57feffa6db9825fc0ecb61feb9ed129bb Mon Sep 17 00:00:00 2001 From: slyoldfox Date: Fri, 19 Apr 2024 18:22:12 +0200 Subject: [PATCH 288/426] Add scheduled mode to renault charge mode (#115427) Co-authored-by: epenet <6771947+epenet@users.noreply.github.com> --- homeassistant/components/renault/select.py | 2 +- tests/components/renault/const.py | 21 ++++++++++++++++--- .../renault/snapshots/test_select.ambr | 12 +++++++++++ 3 files changed, 31 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/renault/select.py b/homeassistant/components/renault/select.py index f6c8f73d24b..eb79e197937 100644 --- a/homeassistant/components/renault/select.py +++ b/homeassistant/components/renault/select.py @@ -71,6 +71,6 @@ SENSOR_TYPES: tuple[RenaultSelectEntityDescription, ...] 
= ( coordinator="charge_mode", data_key="chargeMode", translation_key="charge_mode", - options=["always", "always_charging", "schedule_mode"], + options=["always", "always_charging", "schedule_mode", "scheduled"], ), ) diff --git a/tests/components/renault/const.py b/tests/components/renault/const.py index d849c658149..19c40f6ec20 100644 --- a/tests/components/renault/const.py +++ b/tests/components/renault/const.py @@ -127,7 +127,12 @@ MOCK_VEHICLES = { { ATTR_ENTITY_ID: "select.reg_number_charge_mode", ATTR_ICON: "mdi:calendar-remove", - ATTR_OPTIONS: ["always", "always_charging", "schedule_mode"], + ATTR_OPTIONS: [ + "always", + "always_charging", + "schedule_mode", + "scheduled", + ], ATTR_STATE: "always", ATTR_UNIQUE_ID: "vf1aaaaa555777999_charge_mode", }, @@ -363,7 +368,12 @@ MOCK_VEHICLES = { { ATTR_ENTITY_ID: "select.reg_number_charge_mode", ATTR_ICON: "mdi:calendar-clock", - ATTR_OPTIONS: ["always", "always_charging", "schedule_mode"], + ATTR_OPTIONS: [ + "always", + "always_charging", + "schedule_mode", + "scheduled", + ], ATTR_STATE: "schedule_mode", ATTR_UNIQUE_ID: "vf1aaaaa555777999_charge_mode", }, @@ -599,7 +609,12 @@ MOCK_VEHICLES = { { ATTR_ENTITY_ID: "select.reg_number_charge_mode", ATTR_ICON: "mdi:calendar-remove", - ATTR_OPTIONS: ["always", "always_charging", "schedule_mode"], + ATTR_OPTIONS: [ + "always", + "always_charging", + "schedule_mode", + "scheduled", + ], ATTR_STATE: "always", ATTR_UNIQUE_ID: "vf1aaaaa555777123_charge_mode", }, diff --git a/tests/components/renault/snapshots/test_select.ambr b/tests/components/renault/snapshots/test_select.ambr index 7e8356ee070..0722cb5cab3 100644 --- a/tests/components/renault/snapshots/test_select.ambr +++ b/tests/components/renault/snapshots/test_select.ambr @@ -82,6 +82,7 @@ 'always', 'always_charging', 'schedule_mode', + 'scheduled', ]), }), 'config_entry_id': , @@ -121,6 +122,7 @@ 'always', 'always_charging', 'schedule_mode', + 'scheduled', ]), }), 'context': , @@ -175,6 +177,7 @@ 'always', 
'always_charging', 'schedule_mode', + 'scheduled', ]), }), 'config_entry_id': , @@ -214,6 +217,7 @@ 'always', 'always_charging', 'schedule_mode', + 'scheduled', ]), }), 'context': , @@ -268,6 +272,7 @@ 'always', 'always_charging', 'schedule_mode', + 'scheduled', ]), }), 'config_entry_id': , @@ -307,6 +312,7 @@ 'always', 'always_charging', 'schedule_mode', + 'scheduled', ]), }), 'context': , @@ -401,6 +407,7 @@ 'always', 'always_charging', 'schedule_mode', + 'scheduled', ]), }), 'config_entry_id': , @@ -440,6 +447,7 @@ 'always', 'always_charging', 'schedule_mode', + 'scheduled', ]), }), 'context': , @@ -494,6 +502,7 @@ 'always', 'always_charging', 'schedule_mode', + 'scheduled', ]), }), 'config_entry_id': , @@ -533,6 +542,7 @@ 'always', 'always_charging', 'schedule_mode', + 'scheduled', ]), }), 'context': , @@ -587,6 +597,7 @@ 'always', 'always_charging', 'schedule_mode', + 'scheduled', ]), }), 'config_entry_id': , @@ -626,6 +637,7 @@ 'always', 'always_charging', 'schedule_mode', + 'scheduled', ]), }), 'context': , From 624762451410ff0205f99d7861bf4eb9560ab867 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Fri, 12 Apr 2024 14:13:06 -1000 Subject: [PATCH 289/426] Bump zeroconf to 0.132.1 (#115501) --- homeassistant/components/zeroconf/manifest.json | 2 +- homeassistant/package_constraints.txt | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/zeroconf/manifest.json b/homeassistant/components/zeroconf/manifest.json index 7c489517dd7..3bddbfea576 100644 --- a/homeassistant/components/zeroconf/manifest.json +++ b/homeassistant/components/zeroconf/manifest.json @@ -8,5 +8,5 @@ "iot_class": "local_push", "loggers": ["zeroconf"], "quality_scale": "internal", - "requirements": ["zeroconf==0.132.0"] + "requirements": ["zeroconf==0.132.1"] } diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index 366f72cd2bc..a63e2853ad2 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -60,7 +60,7 @@ voluptuous-serialize==2.6.0 voluptuous==0.13.1 webrtc-noise-gain==1.2.3 yarl==1.9.4 -zeroconf==0.132.0 +zeroconf==0.132.1 # Constrain pycryptodome to avoid vulnerability # see https://github.com/home-assistant/core/pull/16238 diff --git a/requirements_all.txt b/requirements_all.txt index c93b665ea8c..b0eae5b399a 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2928,7 +2928,7 @@ zamg==0.3.6 zengge==0.2 # homeassistant.components.zeroconf -zeroconf==0.132.0 +zeroconf==0.132.1 # homeassistant.components.zeversolar zeversolar==0.3.1 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index dfa71c7ac3e..169b72e5acf 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2263,7 +2263,7 @@ yt-dlp==2024.04.09 zamg==0.3.6 # homeassistant.components.zeroconf -zeroconf==0.132.0 +zeroconf==0.132.1 # homeassistant.components.zeversolar zeversolar==0.3.1 From e1a241607619212787f3e3a6ab49ee956b243a2a Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Fri, 12 Apr 2024 19:05:08 -1000 Subject: [PATCH 290/426] Bump zeroconf to 0.132.2 (#115505) --- homeassistant/components/zeroconf/manifest.json | 2 +- homeassistant/package_constraints.txt | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/zeroconf/manifest.json b/homeassistant/components/zeroconf/manifest.json index 3bddbfea576..0a76af3b9c2 100644 --- a/homeassistant/components/zeroconf/manifest.json +++ b/homeassistant/components/zeroconf/manifest.json @@ -8,5 +8,5 @@ "iot_class": "local_push", "loggers": ["zeroconf"], "quality_scale": "internal", - "requirements": ["zeroconf==0.132.1"] + "requirements": ["zeroconf==0.132.2"] } diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index a63e2853ad2..9b481092ed4 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -60,7 +60,7 @@ voluptuous-serialize==2.6.0 voluptuous==0.13.1 webrtc-noise-gain==1.2.3 yarl==1.9.4 -zeroconf==0.132.1 +zeroconf==0.132.2 # Constrain pycryptodome to avoid vulnerability # see https://github.com/home-assistant/core/pull/16238 diff --git a/requirements_all.txt b/requirements_all.txt index b0eae5b399a..6717c1db138 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2928,7 +2928,7 @@ zamg==0.3.6 zengge==0.2 # homeassistant.components.zeroconf -zeroconf==0.132.1 +zeroconf==0.132.2 # homeassistant.components.zeversolar zeversolar==0.3.1 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 169b72e5acf..2fba371b287 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2263,7 +2263,7 @@ yt-dlp==2024.04.09 zamg==0.3.6 # homeassistant.components.zeroconf -zeroconf==0.132.1 +zeroconf==0.132.2 # homeassistant.components.zeversolar zeversolar==0.3.1 From b770edc16ef020d62b404989178db7bca560f6d5 Mon Sep 17 00:00:00 2001 From: Marc Mueller 
<30130371+cdce8p@users.noreply.github.com> Date: Sat, 13 Apr 2024 18:26:33 +0200 Subject: [PATCH 291/426] Update pillow to 10.3.0 (#115524) --- homeassistant/components/doods/manifest.json | 2 +- homeassistant/components/generic/manifest.json | 2 +- homeassistant/components/image_upload/manifest.json | 2 +- homeassistant/components/matrix/manifest.json | 2 +- homeassistant/components/proxy/manifest.json | 2 +- homeassistant/components/qrcode/manifest.json | 2 +- homeassistant/components/seven_segments/manifest.json | 2 +- homeassistant/components/sighthound/manifest.json | 2 +- homeassistant/components/tensorflow/manifest.json | 2 +- homeassistant/package_constraints.txt | 2 +- pyproject.toml | 2 +- requirements.txt | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 14 files changed, 14 insertions(+), 14 deletions(-) diff --git a/homeassistant/components/doods/manifest.json b/homeassistant/components/doods/manifest.json index 73d7d3754ce..6a198ab34e7 100644 --- a/homeassistant/components/doods/manifest.json +++ b/homeassistant/components/doods/manifest.json @@ -5,5 +5,5 @@ "documentation": "https://www.home-assistant.io/integrations/doods", "iot_class": "local_polling", "loggers": ["pydoods"], - "requirements": ["pydoods==1.0.2", "Pillow==10.2.0"] + "requirements": ["pydoods==1.0.2", "Pillow==10.3.0"] } diff --git a/homeassistant/components/generic/manifest.json b/homeassistant/components/generic/manifest.json index 861e2cf26c2..65f6aa751ca 100644 --- a/homeassistant/components/generic/manifest.json +++ b/homeassistant/components/generic/manifest.json @@ -6,5 +6,5 @@ "dependencies": ["http"], "documentation": "https://www.home-assistant.io/integrations/generic", "iot_class": "local_push", - "requirements": ["ha-av==10.1.1", "Pillow==10.2.0"] + "requirements": ["ha-av==10.1.1", "Pillow==10.3.0"] } diff --git a/homeassistant/components/image_upload/manifest.json b/homeassistant/components/image_upload/manifest.json index ba9140b4ed8..7cbc484b830 
100644 --- a/homeassistant/components/image_upload/manifest.json +++ b/homeassistant/components/image_upload/manifest.json @@ -7,5 +7,5 @@ "documentation": "https://www.home-assistant.io/integrations/image_upload", "integration_type": "system", "quality_scale": "internal", - "requirements": ["Pillow==10.2.0"] + "requirements": ["Pillow==10.3.0"] } diff --git a/homeassistant/components/matrix/manifest.json b/homeassistant/components/matrix/manifest.json index 0838bcc3764..2ea310aa5a6 100644 --- a/homeassistant/components/matrix/manifest.json +++ b/homeassistant/components/matrix/manifest.json @@ -5,5 +5,5 @@ "documentation": "https://www.home-assistant.io/integrations/matrix", "iot_class": "cloud_push", "loggers": ["matrix_client"], - "requirements": ["matrix-nio==0.24.0", "Pillow==10.2.0"] + "requirements": ["matrix-nio==0.24.0", "Pillow==10.3.0"] } diff --git a/homeassistant/components/proxy/manifest.json b/homeassistant/components/proxy/manifest.json index 1b05a768b64..42770d71792 100644 --- a/homeassistant/components/proxy/manifest.json +++ b/homeassistant/components/proxy/manifest.json @@ -3,5 +3,5 @@ "name": "Camera Proxy", "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/proxy", - "requirements": ["Pillow==10.2.0"] + "requirements": ["Pillow==10.3.0"] } diff --git a/homeassistant/components/qrcode/manifest.json b/homeassistant/components/qrcode/manifest.json index e3b202a9950..476f4e8c3c9 100644 --- a/homeassistant/components/qrcode/manifest.json +++ b/homeassistant/components/qrcode/manifest.json @@ -5,5 +5,5 @@ "documentation": "https://www.home-assistant.io/integrations/qrcode", "iot_class": "calculated", "loggers": ["pyzbar"], - "requirements": ["Pillow==10.2.0", "pyzbar==0.1.7"] + "requirements": ["Pillow==10.3.0", "pyzbar==0.1.7"] } diff --git a/homeassistant/components/seven_segments/manifest.json b/homeassistant/components/seven_segments/manifest.json index 6c511e3f44e..5e05f496d1d 100644 --- 
a/homeassistant/components/seven_segments/manifest.json +++ b/homeassistant/components/seven_segments/manifest.json @@ -4,5 +4,5 @@ "codeowners": ["@fabaff"], "documentation": "https://www.home-assistant.io/integrations/seven_segments", "iot_class": "local_polling", - "requirements": ["Pillow==10.2.0"] + "requirements": ["Pillow==10.3.0"] } diff --git a/homeassistant/components/sighthound/manifest.json b/homeassistant/components/sighthound/manifest.json index e63864af707..b97ccc5f9cf 100644 --- a/homeassistant/components/sighthound/manifest.json +++ b/homeassistant/components/sighthound/manifest.json @@ -5,5 +5,5 @@ "documentation": "https://www.home-assistant.io/integrations/sighthound", "iot_class": "cloud_polling", "loggers": ["simplehound"], - "requirements": ["Pillow==10.2.0", "simplehound==0.3"] + "requirements": ["Pillow==10.3.0", "simplehound==0.3"] } diff --git a/homeassistant/components/tensorflow/manifest.json b/homeassistant/components/tensorflow/manifest.json index b98c4c6e428..40dbadca64d 100644 --- a/homeassistant/components/tensorflow/manifest.json +++ b/homeassistant/components/tensorflow/manifest.json @@ -10,6 +10,6 @@ "tf-models-official==2.5.0", "pycocotools==2.0.6", "numpy==1.26.0", - "Pillow==10.2.0" + "Pillow==10.3.0" ] } diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index 9b481092ed4..13a28e34d28 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -40,7 +40,7 @@ mutagen==1.47.0 orjson==3.9.15 packaging>=23.1 paho-mqtt==1.6.1 -Pillow==10.2.0 +Pillow==10.3.0 pip>=21.3.1 psutil-home-assistant==0.0.1 PyJWT==2.8.0 diff --git a/pyproject.toml b/pyproject.toml index 74b6f6fa54e..3a206a3f7fa 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -49,7 +49,7 @@ dependencies = [ "PyJWT==2.8.0", # PyJWT has loose dependency. We want the latest one. 
"cryptography==42.0.5", - "Pillow==10.2.0", + "Pillow==10.3.0", "pyOpenSSL==24.1.0", "orjson==3.9.15", "packaging>=23.1", diff --git a/requirements.txt b/requirements.txt index 519a8287d18..5635c4912e9 100644 --- a/requirements.txt +++ b/requirements.txt @@ -24,7 +24,7 @@ Jinja2==3.1.3 lru-dict==1.3.0 PyJWT==2.8.0 cryptography==42.0.5 -Pillow==10.2.0 +Pillow==10.3.0 pyOpenSSL==24.1.0 orjson==3.9.15 packaging>=23.1 diff --git a/requirements_all.txt b/requirements_all.txt index 6717c1db138..640ad0e2306 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -42,7 +42,7 @@ Mastodon.py==1.8.1 # homeassistant.components.seven_segments # homeassistant.components.sighthound # homeassistant.components.tensorflow -Pillow==10.2.0 +Pillow==10.3.0 # homeassistant.components.plex PlexAPI==4.15.11 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 2fba371b287..a65320152be 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -36,7 +36,7 @@ HATasmota==0.8.0 # homeassistant.components.seven_segments # homeassistant.components.sighthound # homeassistant.components.tensorflow -Pillow==10.2.0 +Pillow==10.3.0 # homeassistant.components.plex PlexAPI==4.15.11 From 038040db5e06c128b29703690fd4ca75ddace7ac Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Sat, 13 Apr 2024 10:35:07 -1000 Subject: [PATCH 292/426] Fix race in TimestampDataUpdateCoordinator (#115542) * Fix race in TimestampDataUpdateCoordinator The last_update_success_time value was being set after the listeners were fired which could lead to a loop because the listener may re-trigger an update because it thinks the data is stale * coverage * docstring --- homeassistant/helpers/update_coordinator.py | 28 ++++++++-------- tests/helpers/test_update_coordinator.py | 36 +++++++++++++++++++-- 2 files changed, 48 insertions(+), 16 deletions(-) diff --git a/homeassistant/helpers/update_coordinator.py b/homeassistant/helpers/update_coordinator.py index 287e69f7085..98e635e5ac7 100644 --- a/homeassistant/helpers/update_coordinator.py +++ b/homeassistant/helpers/update_coordinator.py @@ -403,6 +403,8 @@ class DataUpdateCoordinator(BaseDataUpdateCoordinatorProtocol, Generic[_DataT]): if not auth_failed and self._listeners and not self.hass.is_stopping: self._schedule_refresh() + self._async_refresh_finished() + if not self.last_update_success and not previous_update_success: return @@ -413,6 +415,15 @@ class DataUpdateCoordinator(BaseDataUpdateCoordinatorProtocol, Generic[_DataT]): ): self.async_update_listeners() + @callback + def _async_refresh_finished(self) -> None: + """Handle when a refresh has finished. + + Called when refresh is finished before listeners are updated. + + To be overridden by subclasses. 
+ """ + @callback def async_set_update_error(self, err: Exception) -> None: """Manually set an error, log the message and notify listeners.""" @@ -446,20 +457,9 @@ class TimestampDataUpdateCoordinator(DataUpdateCoordinator[_DataT]): last_update_success_time: datetime | None = None - async def _async_refresh( - self, - log_failures: bool = True, - raise_on_auth_failed: bool = False, - scheduled: bool = False, - raise_on_entry_error: bool = False, - ) -> None: - """Refresh data.""" - await super()._async_refresh( - log_failures, - raise_on_auth_failed, - scheduled, - raise_on_entry_error, - ) + @callback + def _async_refresh_finished(self) -> None: + """Handle when a refresh has finished.""" if self.last_update_success: self.last_update_success_time = utcnow() diff --git a/tests/helpers/test_update_coordinator.py b/tests/helpers/test_update_coordinator.py index 25f72d76e3c..775dc08f1d4 100644 --- a/tests/helpers/test_update_coordinator.py +++ b/tests/helpers/test_update_coordinator.py @@ -1,6 +1,6 @@ """Tests for the update coordinator.""" -from datetime import timedelta +from datetime import datetime, timedelta import logging from unittest.mock import AsyncMock, Mock, patch import urllib.error @@ -12,7 +12,7 @@ import requests from homeassistant import config_entries from homeassistant.const import EVENT_HOMEASSISTANT_STOP -from homeassistant.core import CoreState, HomeAssistant +from homeassistant.core import CoreState, HomeAssistant, callback from homeassistant.exceptions import ConfigEntryNotReady from homeassistant.helpers import update_coordinator from homeassistant.util.dt import utcnow @@ -716,3 +716,35 @@ async def test_always_callback_when_always_update_is_true( update_callback.reset_mock() remove_callbacks() + + +async def test_timestamp_date_update_coordinator(hass: HomeAssistant) -> None: + """Test last_update_success_time is set before calling listeners.""" + last_update_success_times: list[datetime | None] = [] + + async def refresh() -> int: + return 
1 + + crd = update_coordinator.TimestampDataUpdateCoordinator[int]( + hass, + _LOGGER, + name="test", + update_method=refresh, + update_interval=timedelta(seconds=10), + ) + + @callback + def listener(): + last_update_success_times.append(crd.last_update_success_time) + + unsub = crd.async_add_listener(listener) + + await crd.async_refresh() + + assert len(last_update_success_times) == 1 + # Ensure the time is set before the listener is called + assert last_update_success_times != [None] + + unsub() + await crd.async_refresh() + assert len(last_update_success_times) == 1 From 37a82c878514c4377c51cbeee7396d436271c91e Mon Sep 17 00:00:00 2001 From: Brett Adams Date: Mon, 15 Apr 2024 09:48:22 +1000 Subject: [PATCH 293/426] Fix Teslemetry sensor values (#115571) --- homeassistant/components/teslemetry/sensor.py | 5 + .../teslemetry/snapshots/test_sensor.ambr | 100 +++++++++--------- 2 files changed, 55 insertions(+), 50 deletions(-) diff --git a/homeassistant/components/teslemetry/sensor.py b/homeassistant/components/teslemetry/sensor.py index 6284a0e5368..cced1090e2a 100644 --- a/homeassistant/components/teslemetry/sensor.py +++ b/homeassistant/components/teslemetry/sensor.py @@ -449,6 +449,11 @@ class TeslemetryVehicleSensorEntity(TeslemetryVehicleEntity, SensorEntity): """Initialize the sensor.""" super().__init__(vehicle, description.key) + @property + def native_value(self) -> StateType: + """Return the state of the sensor.""" + return self._value + class TeslemetryVehicleTimeSensorEntity(TeslemetryVehicleEntity, SensorEntity): """Base class for Teslemetry vehicle metric sensors.""" diff --git a/tests/components/teslemetry/snapshots/test_sensor.ambr b/tests/components/teslemetry/snapshots/test_sensor.ambr index fad04d341c9..81142e40901 100644 --- a/tests/components/teslemetry/snapshots/test_sensor.ambr +++ b/tests/components/teslemetry/snapshots/test_sensor.ambr @@ -757,7 +757,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'unknown', + 
'state': '77', }) # --- # name: test_sensors[sensor.test_battery_level-statealt] @@ -770,7 +770,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'unknown', + 'state': '77', }) # --- # name: test_sensors[sensor.test_battery_range-entry] @@ -816,7 +816,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'unknown', + 'state': '266.87', }) # --- # name: test_sensors[sensor.test_battery_range-statealt] @@ -829,7 +829,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'unknown', + 'state': '266.87', }) # --- # name: test_sensors[sensor.test_charge_cable-entry] @@ -875,7 +875,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'unknown', + 'state': 'IEC', }) # --- # name: test_sensors[sensor.test_charge_cable-statealt] @@ -888,7 +888,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'unknown', + 'state': 'IEC', }) # --- # name: test_sensors[sensor.test_charge_energy_added-entry] @@ -934,7 +934,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'unknown', + 'state': '0', }) # --- # name: test_sensors[sensor.test_charge_energy_added-statealt] @@ -947,7 +947,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'unknown', + 'state': '0', }) # --- # name: test_sensors[sensor.test_charge_rate-entry] @@ -993,7 +993,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'unknown', + 'state': '0', }) # --- # name: test_sensors[sensor.test_charge_rate-statealt] @@ -1006,7 +1006,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'unknown', + 'state': '0', }) # --- # name: test_sensors[sensor.test_charger_current-entry] @@ -1052,7 +1052,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'unknown', + 'state': '0', }) # --- # name: test_sensors[sensor.test_charger_current-statealt] @@ -1065,7 +1065,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'unknown', + 'state': '0', 
}) # --- # name: test_sensors[sensor.test_charger_power-entry] @@ -1111,7 +1111,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'unknown', + 'state': '0', }) # --- # name: test_sensors[sensor.test_charger_power-statealt] @@ -1124,7 +1124,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'unknown', + 'state': '0', }) # --- # name: test_sensors[sensor.test_charger_voltage-entry] @@ -1170,7 +1170,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'unknown', + 'state': '2', }) # --- # name: test_sensors[sensor.test_charger_voltage-statealt] @@ -1183,7 +1183,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'unknown', + 'state': '2', }) # --- # name: test_sensors[sensor.test_charging-entry] @@ -1229,7 +1229,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'unknown', + 'state': 'Stopped', }) # --- # name: test_sensors[sensor.test_charging-statealt] @@ -1242,7 +1242,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'unknown', + 'state': 'Stopped', }) # --- # name: test_sensors[sensor.test_distance_to_arrival-entry] @@ -1288,7 +1288,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'unknown', + 'state': '0.039491', }) # --- # name: test_sensors[sensor.test_distance_to_arrival-statealt] @@ -1301,7 +1301,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'unknown', + 'state': '0', }) # --- # name: test_sensors[sensor.test_driver_temperature_setting-entry] @@ -1347,7 +1347,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'unknown', + 'state': '22', }) # --- # name: test_sensors[sensor.test_driver_temperature_setting-statealt] @@ -1360,7 +1360,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'unknown', + 'state': '22', }) # --- # name: test_sensors[sensor.test_estimate_battery_range-entry] @@ -1406,7 +1406,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 
'state': 'unknown', + 'state': '275.04', }) # --- # name: test_sensors[sensor.test_estimate_battery_range-statealt] @@ -1419,7 +1419,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'unknown', + 'state': '275.04', }) # --- # name: test_sensors[sensor.test_fast_charger_type-entry] @@ -1465,7 +1465,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'unknown', + 'state': 'ACSingleWireCAN', }) # --- # name: test_sensors[sensor.test_fast_charger_type-statealt] @@ -1478,7 +1478,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'unknown', + 'state': 'ACSingleWireCAN', }) # --- # name: test_sensors[sensor.test_ideal_battery_range-entry] @@ -1524,7 +1524,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'unknown', + 'state': '266.87', }) # --- # name: test_sensors[sensor.test_ideal_battery_range-statealt] @@ -1537,7 +1537,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'unknown', + 'state': '266.87', }) # --- # name: test_sensors[sensor.test_inside_temperature-entry] @@ -1583,7 +1583,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'unknown', + 'state': '29.8', }) # --- # name: test_sensors[sensor.test_inside_temperature-statealt] @@ -1596,7 +1596,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'unknown', + 'state': '29.8', }) # --- # name: test_sensors[sensor.test_odometer-entry] @@ -1642,7 +1642,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'unknown', + 'state': '6481.019282', }) # --- # name: test_sensors[sensor.test_odometer-statealt] @@ -1655,7 +1655,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'unknown', + 'state': '6481.019282', }) # --- # name: test_sensors[sensor.test_outside_temperature-entry] @@ -1701,7 +1701,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'unknown', + 'state': '30', }) # --- # name: 
test_sensors[sensor.test_outside_temperature-statealt] @@ -1714,7 +1714,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'unknown', + 'state': '30', }) # --- # name: test_sensors[sensor.test_passenger_temperature_setting-entry] @@ -1760,7 +1760,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'unknown', + 'state': '22', }) # --- # name: test_sensors[sensor.test_passenger_temperature_setting-statealt] @@ -1773,7 +1773,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'unknown', + 'state': '22', }) # --- # name: test_sensors[sensor.test_power-entry] @@ -1819,7 +1819,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'unknown', + 'state': '-7', }) # --- # name: test_sensors[sensor.test_power-statealt] @@ -1832,7 +1832,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'unknown', + 'state': '-7', }) # --- # name: test_sensors[sensor.test_shift_state-entry] @@ -2177,7 +2177,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'unknown', + 'state': '2.775', }) # --- # name: test_sensors[sensor.test_tire_pressure_front_left-statealt] @@ -2190,7 +2190,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'unknown', + 'state': '2.775', }) # --- # name: test_sensors[sensor.test_tire_pressure_front_right-entry] @@ -2236,7 +2236,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'unknown', + 'state': '2.8', }) # --- # name: test_sensors[sensor.test_tire_pressure_front_right-statealt] @@ -2249,7 +2249,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'unknown', + 'state': '2.8', }) # --- # name: test_sensors[sensor.test_tire_pressure_rear_left-entry] @@ -2295,7 +2295,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'unknown', + 'state': '2.775', }) # --- # name: test_sensors[sensor.test_tire_pressure_rear_left-statealt] @@ -2308,7 +2308,7 @@ 'last_changed': , 'last_reported': , 
'last_updated': , - 'state': 'unknown', + 'state': '2.775', }) # --- # name: test_sensors[sensor.test_tire_pressure_rear_right-entry] @@ -2354,7 +2354,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'unknown', + 'state': '2.775', }) # --- # name: test_sensors[sensor.test_tire_pressure_rear_right-statealt] @@ -2367,7 +2367,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'unknown', + 'state': '2.775', }) # --- # name: test_sensors[sensor.test_traffic_delay-entry] @@ -2413,7 +2413,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'unknown', + 'state': '0', }) # --- # name: test_sensors[sensor.test_traffic_delay-statealt] @@ -2426,7 +2426,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'unknown', + 'state': '0', }) # --- # name: test_sensors[sensor.test_usable_battery_level-entry] @@ -2472,7 +2472,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'unknown', + 'state': '77', }) # --- # name: test_sensors[sensor.test_usable_battery_level-statealt] @@ -2485,7 +2485,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'unknown', + 'state': '77', }) # --- # name: test_sensors[sensor.wall_connector_fault_state_code-entry] From 3d68ee99a4e653b6740934cc429e0256a5ceaf0a Mon Sep 17 00:00:00 2001 From: jan iversen Date: Sun, 14 Apr 2024 19:22:42 +0200 Subject: [PATCH 294/426] Modbus: Bump pymodbus v3.6.8 (#115574) --- homeassistant/components/modbus/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/modbus/manifest.json b/homeassistant/components/modbus/manifest.json index 0fe8c7bc42d..5635adc9392 100644 --- a/homeassistant/components/modbus/manifest.json +++ b/homeassistant/components/modbus/manifest.json @@ -6,5 +6,5 @@ "iot_class": "local_polling", "loggers": ["pymodbus"], "quality_scale": "platinum", - "requirements": 
["pymodbus==3.6.7"] + "requirements": ["pymodbus==3.6.8"] } diff --git a/requirements_all.txt b/requirements_all.txt index 640ad0e2306..0c1725ca089 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1973,7 +1973,7 @@ pymitv==1.4.3 pymochad==0.2.0 # homeassistant.components.modbus -pymodbus==3.6.7 +pymodbus==3.6.8 # homeassistant.components.monoprice pymonoprice==0.4 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index a65320152be..ced79b46ae8 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1533,7 +1533,7 @@ pymeteoclimatic==0.1.0 pymochad==0.2.0 # homeassistant.components.modbus -pymodbus==3.6.7 +pymodbus==3.6.8 # homeassistant.components.monoprice pymonoprice==0.4 From 66918d1686415d5abc51d2440c8b1211b28fbe7b Mon Sep 17 00:00:00 2001 From: Brett Adams Date: Sun, 21 Apr 2024 07:54:24 +1000 Subject: [PATCH 295/426] Fix sensor entity description in Teslemetry (#115614) Add description back to sensor entity --- homeassistant/components/teslemetry/sensor.py | 5 +- .../teslemetry/snapshots/test_sensor.ambr | 538 ++++++++++++++---- 2 files changed, 428 insertions(+), 115 deletions(-) diff --git a/homeassistant/components/teslemetry/sensor.py b/homeassistant/components/teslemetry/sensor.py index cced1090e2a..6380a4d0c71 100644 --- a/homeassistant/components/teslemetry/sensor.py +++ b/homeassistant/components/teslemetry/sensor.py @@ -58,7 +58,7 @@ SHIFT_STATES = {"P": "p", "D": "d", "R": "r", "N": "n"} class TeslemetrySensorEntityDescription(SensorEntityDescription): """Describes Teslemetry Sensor entity.""" - value_fn: Callable[[StateType], StateType | datetime] = lambda x: x + value_fn: Callable[[StateType], StateType] = lambda x: x VEHICLE_DESCRIPTIONS: tuple[TeslemetrySensorEntityDescription, ...] 
= ( @@ -447,12 +447,13 @@ class TeslemetryVehicleSensorEntity(TeslemetryVehicleEntity, SensorEntity): description: TeslemetrySensorEntityDescription, ) -> None: """Initialize the sensor.""" + self.entity_description = description super().__init__(vehicle, description.key) @property def native_value(self) -> StateType: """Return the state of the sensor.""" - return self._value + return self.entity_description.value_fn(self._value) class TeslemetryVehicleTimeSensorEntity(TeslemetryVehicleEntity, SensorEntity): diff --git a/tests/components/teslemetry/snapshots/test_sensor.ambr b/tests/components/teslemetry/snapshots/test_sensor.ambr index 81142e40901..0d817ad1f7e 100644 --- a/tests/components/teslemetry/snapshots/test_sensor.ambr +++ b/tests/components/teslemetry/snapshots/test_sensor.ambr @@ -719,7 +719,9 @@ 'aliases': set({ }), 'area_id': None, - 'capabilities': None, + 'capabilities': dict({ + 'state_class': , + }), 'config_entry_id': , 'device_class': None, 'device_id': , @@ -736,7 +738,7 @@ 'name': None, 'options': dict({ }), - 'original_device_class': None, + 'original_device_class': , 'original_icon': None, 'original_name': 'Battery level', 'platform': 'teslemetry', @@ -744,13 +746,16 @@ 'supported_features': 0, 'translation_key': 'charge_state_battery_level', 'unique_id': 'VINVINVIN-charge_state_battery_level', - 'unit_of_measurement': None, + 'unit_of_measurement': '%', }) # --- # name: test_sensors[sensor.test_battery_level-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'device_class': 'battery', 'friendly_name': 'Test Battery level', + 'state_class': , + 'unit_of_measurement': '%', }), 'context': , 'entity_id': 'sensor.test_battery_level', @@ -763,7 +768,10 @@ # name: test_sensors[sensor.test_battery_level-statealt] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'device_class': 'battery', 'friendly_name': 'Test Battery level', + 'state_class': , + 'unit_of_measurement': '%', }), 'context': , 'entity_id': 'sensor.test_battery_level', @@ -778,7 +786,9 
@@ 'aliases': set({ }), 'area_id': None, - 'capabilities': None, + 'capabilities': dict({ + 'state_class': , + }), 'config_entry_id': , 'device_class': None, 'device_id': , @@ -794,8 +804,14 @@ }), 'name': None, 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), }), - 'original_device_class': None, + 'original_device_class': , 'original_icon': None, 'original_name': 'Battery range', 'platform': 'teslemetry', @@ -803,33 +819,39 @@ 'supported_features': 0, 'translation_key': 'charge_state_battery_range', 'unique_id': 'VINVINVIN-charge_state_battery_range', - 'unit_of_measurement': None, + 'unit_of_measurement': , }) # --- # name: test_sensors[sensor.test_battery_range-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'device_class': 'distance', 'friendly_name': 'Test Battery range', + 'state_class': , + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.test_battery_range', 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '266.87', + 'state': '429.48563328', }) # --- # name: test_sensors[sensor.test_battery_range-statealt] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'device_class': 'distance', 'friendly_name': 'Test Battery range', + 'state_class': , + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.test_battery_range', 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '266.87', + 'state': '429.48563328', }) # --- # name: test_sensors[sensor.test_charge_cable-entry] @@ -843,7 +865,7 @@ 'device_id': , 'disabled_by': None, 'domain': 'sensor', - 'entity_category': None, + 'entity_category': , 'entity_id': 'sensor.test_charge_cable', 'has_entity_name': True, 'hidden_by': None, @@ -896,7 +918,9 @@ 'aliases': set({ }), 'area_id': None, - 'capabilities': None, + 'capabilities': dict({ + 'state_class': , + }), 'config_entry_id': , 'device_class': None, 'device_id': , @@ -912,8 +936,11 @@ }), 'name': None, 
'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), }), - 'original_device_class': None, + 'original_device_class': , 'original_icon': None, 'original_name': 'Charge energy added', 'platform': 'teslemetry', @@ -921,13 +948,16 @@ 'supported_features': 0, 'translation_key': 'charge_state_charge_energy_added', 'unique_id': 'VINVINVIN-charge_state_charge_energy_added', - 'unit_of_measurement': None, + 'unit_of_measurement': , }) # --- # name: test_sensors[sensor.test_charge_energy_added-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'device_class': 'energy', 'friendly_name': 'Test Charge energy added', + 'state_class': , + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.test_charge_energy_added', @@ -940,7 +970,10 @@ # name: test_sensors[sensor.test_charge_energy_added-statealt] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'device_class': 'energy', 'friendly_name': 'Test Charge energy added', + 'state_class': , + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.test_charge_energy_added', @@ -955,13 +988,15 @@ 'aliases': set({ }), 'area_id': None, - 'capabilities': None, + 'capabilities': dict({ + 'state_class': , + }), 'config_entry_id': , 'device_class': None, 'device_id': , 'disabled_by': None, 'domain': 'sensor', - 'entity_category': None, + 'entity_category': , 'entity_id': 'sensor.test_charge_rate', 'has_entity_name': True, 'hidden_by': None, @@ -971,8 +1006,11 @@ }), 'name': None, 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), }), - 'original_device_class': None, + 'original_device_class': , 'original_icon': None, 'original_name': 'Charge rate', 'platform': 'teslemetry', @@ -980,13 +1018,16 @@ 'supported_features': 0, 'translation_key': 'charge_state_charge_rate', 'unique_id': 'VINVINVIN-charge_state_charge_rate', - 'unit_of_measurement': None, + 'unit_of_measurement': , }) # --- # name: test_sensors[sensor.test_charge_rate-state] StateSnapshot({ 
'attributes': ReadOnlyDict({ + 'device_class': 'speed', 'friendly_name': 'Test Charge rate', + 'state_class': , + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.test_charge_rate', @@ -999,7 +1040,10 @@ # name: test_sensors[sensor.test_charge_rate-statealt] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'device_class': 'speed', 'friendly_name': 'Test Charge rate', + 'state_class': , + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.test_charge_rate', @@ -1014,13 +1058,15 @@ 'aliases': set({ }), 'area_id': None, - 'capabilities': None, + 'capabilities': dict({ + 'state_class': , + }), 'config_entry_id': , 'device_class': None, 'device_id': , 'disabled_by': None, 'domain': 'sensor', - 'entity_category': None, + 'entity_category': , 'entity_id': 'sensor.test_charger_current', 'has_entity_name': True, 'hidden_by': None, @@ -1031,7 +1077,7 @@ 'name': None, 'options': dict({ }), - 'original_device_class': None, + 'original_device_class': , 'original_icon': None, 'original_name': 'Charger current', 'platform': 'teslemetry', @@ -1039,13 +1085,16 @@ 'supported_features': 0, 'translation_key': 'charge_state_charger_actual_current', 'unique_id': 'VINVINVIN-charge_state_charger_actual_current', - 'unit_of_measurement': None, + 'unit_of_measurement': , }) # --- # name: test_sensors[sensor.test_charger_current-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'device_class': 'current', 'friendly_name': 'Test Charger current', + 'state_class': , + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.test_charger_current', @@ -1058,7 +1107,10 @@ # name: test_sensors[sensor.test_charger_current-statealt] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'device_class': 'current', 'friendly_name': 'Test Charger current', + 'state_class': , + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.test_charger_current', @@ -1073,7 +1125,9 @@ 'aliases': set({ }), 'area_id': None, - 'capabilities': None, + 'capabilities': dict({ + 
'state_class': , + }), 'config_entry_id': , 'device_class': None, 'device_id': , @@ -1090,7 +1144,7 @@ 'name': None, 'options': dict({ }), - 'original_device_class': None, + 'original_device_class': , 'original_icon': None, 'original_name': 'Charger power', 'platform': 'teslemetry', @@ -1098,13 +1152,16 @@ 'supported_features': 0, 'translation_key': 'charge_state_charger_power', 'unique_id': 'VINVINVIN-charge_state_charger_power', - 'unit_of_measurement': None, + 'unit_of_measurement': , }) # --- # name: test_sensors[sensor.test_charger_power-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'device_class': 'power', 'friendly_name': 'Test Charger power', + 'state_class': , + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.test_charger_power', @@ -1117,7 +1174,10 @@ # name: test_sensors[sensor.test_charger_power-statealt] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'device_class': 'power', 'friendly_name': 'Test Charger power', + 'state_class': , + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.test_charger_power', @@ -1132,13 +1192,15 @@ 'aliases': set({ }), 'area_id': None, - 'capabilities': None, + 'capabilities': dict({ + 'state_class': , + }), 'config_entry_id': , 'device_class': None, 'device_id': , 'disabled_by': None, 'domain': 'sensor', - 'entity_category': None, + 'entity_category': , 'entity_id': 'sensor.test_charger_voltage', 'has_entity_name': True, 'hidden_by': None, @@ -1149,7 +1211,7 @@ 'name': None, 'options': dict({ }), - 'original_device_class': None, + 'original_device_class': , 'original_icon': None, 'original_name': 'Charger voltage', 'platform': 'teslemetry', @@ -1157,13 +1219,16 @@ 'supported_features': 0, 'translation_key': 'charge_state_charger_voltage', 'unique_id': 'VINVINVIN-charge_state_charger_voltage', - 'unit_of_measurement': None, + 'unit_of_measurement': , }) # --- # name: test_sensors[sensor.test_charger_voltage-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', 
'friendly_name': 'Test Charger voltage', + 'state_class': , + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.test_charger_voltage', @@ -1176,7 +1241,10 @@ # name: test_sensors[sensor.test_charger_voltage-statealt] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', 'friendly_name': 'Test Charger voltage', + 'state_class': , + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.test_charger_voltage', @@ -1191,7 +1259,16 @@ 'aliases': set({ }), 'area_id': None, - 'capabilities': None, + 'capabilities': dict({ + 'options': list([ + 'starting', + 'charging', + 'stopped', + 'complete', + 'disconnected', + 'no_power', + ]), + }), 'config_entry_id': , 'device_class': None, 'device_id': , @@ -1208,7 +1285,7 @@ 'name': None, 'options': dict({ }), - 'original_device_class': None, + 'original_device_class': , 'original_icon': None, 'original_name': 'Charging', 'platform': 'teslemetry', @@ -1222,27 +1299,45 @@ # name: test_sensors[sensor.test_charging-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'device_class': 'enum', 'friendly_name': 'Test Charging', + 'options': list([ + 'starting', + 'charging', + 'stopped', + 'complete', + 'disconnected', + 'no_power', + ]), }), 'context': , 'entity_id': 'sensor.test_charging', 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'Stopped', + 'state': 'stopped', }) # --- # name: test_sensors[sensor.test_charging-statealt] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'device_class': 'enum', 'friendly_name': 'Test Charging', + 'options': list([ + 'starting', + 'charging', + 'stopped', + 'complete', + 'disconnected', + 'no_power', + ]), }), 'context': , 'entity_id': 'sensor.test_charging', 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'Stopped', + 'state': 'stopped', }) # --- # name: test_sensors[sensor.test_distance_to_arrival-entry] @@ -1250,7 +1345,9 @@ 'aliases': set({ }), 'area_id': None, - 'capabilities': None, + 'capabilities': dict({ + 
'state_class': , + }), 'config_entry_id': , 'device_class': None, 'device_id': , @@ -1266,8 +1363,11 @@ }), 'name': None, 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), }), - 'original_device_class': None, + 'original_device_class': , 'original_icon': None, 'original_name': 'Distance to arrival', 'platform': 'teslemetry', @@ -1275,26 +1375,32 @@ 'supported_features': 0, 'translation_key': 'drive_state_active_route_miles_to_arrival', 'unique_id': 'VINVINVIN-drive_state_active_route_miles_to_arrival', - 'unit_of_measurement': None, + 'unit_of_measurement': , }) # --- # name: test_sensors[sensor.test_distance_to_arrival-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'device_class': 'distance', 'friendly_name': 'Test Distance to arrival', + 'state_class': , + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.test_distance_to_arrival', 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '0.039491', + 'state': '0.063555', }) # --- # name: test_sensors[sensor.test_distance_to_arrival-statealt] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'device_class': 'distance', 'friendly_name': 'Test Distance to arrival', + 'state_class': , + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.test_distance_to_arrival', @@ -1309,13 +1415,15 @@ 'aliases': set({ }), 'area_id': None, - 'capabilities': None, + 'capabilities': dict({ + 'state_class': , + }), 'config_entry_id': , 'device_class': None, 'device_id': , 'disabled_by': None, 'domain': 'sensor', - 'entity_category': None, + 'entity_category': , 'entity_id': 'sensor.test_driver_temperature_setting', 'has_entity_name': True, 'hidden_by': None, @@ -1325,8 +1433,11 @@ }), 'name': None, 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), }), - 'original_device_class': None, + 'original_device_class': , 'original_icon': None, 'original_name': 'Driver temperature setting', 'platform': 'teslemetry', @@ -1334,13 +1445,16 @@ 
'supported_features': 0, 'translation_key': 'climate_state_driver_temp_setting', 'unique_id': 'VINVINVIN-climate_state_driver_temp_setting', - 'unit_of_measurement': None, + 'unit_of_measurement': , }) # --- # name: test_sensors[sensor.test_driver_temperature_setting-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', 'friendly_name': 'Test Driver temperature setting', + 'state_class': , + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.test_driver_temperature_setting', @@ -1353,7 +1467,10 @@ # name: test_sensors[sensor.test_driver_temperature_setting-statealt] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', 'friendly_name': 'Test Driver temperature setting', + 'state_class': , + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.test_driver_temperature_setting', @@ -1368,7 +1485,9 @@ 'aliases': set({ }), 'area_id': None, - 'capabilities': None, + 'capabilities': dict({ + 'state_class': , + }), 'config_entry_id': , 'device_class': None, 'device_id': , @@ -1384,8 +1503,14 @@ }), 'name': None, 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), }), - 'original_device_class': None, + 'original_device_class': , 'original_icon': None, 'original_name': 'Estimate battery range', 'platform': 'teslemetry', @@ -1393,33 +1518,39 @@ 'supported_features': 0, 'translation_key': 'charge_state_est_battery_range', 'unique_id': 'VINVINVIN-charge_state_est_battery_range', - 'unit_of_measurement': None, + 'unit_of_measurement': , }) # --- # name: test_sensors[sensor.test_estimate_battery_range-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'device_class': 'distance', 'friendly_name': 'Test Estimate battery range', + 'state_class': , + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.test_estimate_battery_range', 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 
'275.04', + 'state': '442.63397376', }) # --- # name: test_sensors[sensor.test_estimate_battery_range-statealt] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'device_class': 'distance', 'friendly_name': 'Test Estimate battery range', + 'state_class': , + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.test_estimate_battery_range', 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '275.04', + 'state': '442.63397376', }) # --- # name: test_sensors[sensor.test_fast_charger_type-entry] @@ -1433,7 +1564,7 @@ 'device_id': , 'disabled_by': None, 'domain': 'sensor', - 'entity_category': None, + 'entity_category': , 'entity_id': 'sensor.test_fast_charger_type', 'has_entity_name': True, 'hidden_by': None, @@ -1486,7 +1617,9 @@ 'aliases': set({ }), 'area_id': None, - 'capabilities': None, + 'capabilities': dict({ + 'state_class': , + }), 'config_entry_id': , 'device_class': None, 'device_id': , @@ -1502,8 +1635,14 @@ }), 'name': None, 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), }), - 'original_device_class': None, + 'original_device_class': , 'original_icon': None, 'original_name': 'Ideal battery range', 'platform': 'teslemetry', @@ -1511,33 +1650,39 @@ 'supported_features': 0, 'translation_key': 'charge_state_ideal_battery_range', 'unique_id': 'VINVINVIN-charge_state_ideal_battery_range', - 'unit_of_measurement': None, + 'unit_of_measurement': , }) # --- # name: test_sensors[sensor.test_ideal_battery_range-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'device_class': 'distance', 'friendly_name': 'Test Ideal battery range', + 'state_class': , + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.test_ideal_battery_range', 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '266.87', + 'state': '429.48563328', }) # --- # name: test_sensors[sensor.test_ideal_battery_range-statealt] StateSnapshot({ 'attributes': 
ReadOnlyDict({ + 'device_class': 'distance', 'friendly_name': 'Test Ideal battery range', + 'state_class': , + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.test_ideal_battery_range', 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '266.87', + 'state': '429.48563328', }) # --- # name: test_sensors[sensor.test_inside_temperature-entry] @@ -1545,7 +1690,9 @@ 'aliases': set({ }), 'area_id': None, - 'capabilities': None, + 'capabilities': dict({ + 'state_class': , + }), 'config_entry_id': , 'device_class': None, 'device_id': , @@ -1561,8 +1708,11 @@ }), 'name': None, 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), }), - 'original_device_class': None, + 'original_device_class': , 'original_icon': None, 'original_name': 'Inside temperature', 'platform': 'teslemetry', @@ -1570,13 +1720,16 @@ 'supported_features': 0, 'translation_key': 'climate_state_inside_temp', 'unique_id': 'VINVINVIN-climate_state_inside_temp', - 'unit_of_measurement': None, + 'unit_of_measurement': , }) # --- # name: test_sensors[sensor.test_inside_temperature-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', 'friendly_name': 'Test Inside temperature', + 'state_class': , + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.test_inside_temperature', @@ -1589,7 +1742,10 @@ # name: test_sensors[sensor.test_inside_temperature-statealt] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', 'friendly_name': 'Test Inside temperature', + 'state_class': , + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.test_inside_temperature', @@ -1604,13 +1760,15 @@ 'aliases': set({ }), 'area_id': None, - 'capabilities': None, + 'capabilities': dict({ + 'state_class': , + }), 'config_entry_id': , 'device_class': None, 'device_id': , 'disabled_by': None, 'domain': 'sensor', - 'entity_category': None, + 'entity_category': , 'entity_id': 'sensor.test_odometer', 
'has_entity_name': True, 'hidden_by': None, @@ -1620,8 +1778,14 @@ }), 'name': None, 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 0, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), }), - 'original_device_class': None, + 'original_device_class': , 'original_icon': None, 'original_name': 'Odometer', 'platform': 'teslemetry', @@ -1629,33 +1793,39 @@ 'supported_features': 0, 'translation_key': 'vehicle_state_odometer', 'unique_id': 'VINVINVIN-vehicle_state_odometer', - 'unit_of_measurement': None, + 'unit_of_measurement': , }) # --- # name: test_sensors[sensor.test_odometer-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'device_class': 'distance', 'friendly_name': 'Test Odometer', + 'state_class': , + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.test_odometer', 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '6481.019282', + 'state': '10430.189495371', }) # --- # name: test_sensors[sensor.test_odometer-statealt] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'device_class': 'distance', 'friendly_name': 'Test Odometer', + 'state_class': , + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.test_odometer', 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '6481.019282', + 'state': '10430.189495371', }) # --- # name: test_sensors[sensor.test_outside_temperature-entry] @@ -1663,7 +1833,9 @@ 'aliases': set({ }), 'area_id': None, - 'capabilities': None, + 'capabilities': dict({ + 'state_class': , + }), 'config_entry_id': , 'device_class': None, 'device_id': , @@ -1679,8 +1851,11 @@ }), 'name': None, 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), }), - 'original_device_class': None, + 'original_device_class': , 'original_icon': None, 'original_name': 'Outside temperature', 'platform': 'teslemetry', @@ -1688,13 +1863,16 @@ 'supported_features': 0, 'translation_key': 'climate_state_outside_temp', 'unique_id': 
'VINVINVIN-climate_state_outside_temp', - 'unit_of_measurement': None, + 'unit_of_measurement': , }) # --- # name: test_sensors[sensor.test_outside_temperature-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', 'friendly_name': 'Test Outside temperature', + 'state_class': , + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.test_outside_temperature', @@ -1707,7 +1885,10 @@ # name: test_sensors[sensor.test_outside_temperature-statealt] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', 'friendly_name': 'Test Outside temperature', + 'state_class': , + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.test_outside_temperature', @@ -1722,13 +1903,15 @@ 'aliases': set({ }), 'area_id': None, - 'capabilities': None, + 'capabilities': dict({ + 'state_class': , + }), 'config_entry_id': , 'device_class': None, 'device_id': , 'disabled_by': None, 'domain': 'sensor', - 'entity_category': None, + 'entity_category': , 'entity_id': 'sensor.test_passenger_temperature_setting', 'has_entity_name': True, 'hidden_by': None, @@ -1738,8 +1921,11 @@ }), 'name': None, 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), }), - 'original_device_class': None, + 'original_device_class': , 'original_icon': None, 'original_name': 'Passenger temperature setting', 'platform': 'teslemetry', @@ -1747,13 +1933,16 @@ 'supported_features': 0, 'translation_key': 'climate_state_passenger_temp_setting', 'unique_id': 'VINVINVIN-climate_state_passenger_temp_setting', - 'unit_of_measurement': None, + 'unit_of_measurement': , }) # --- # name: test_sensors[sensor.test_passenger_temperature_setting-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', 'friendly_name': 'Test Passenger temperature setting', + 'state_class': , + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.test_passenger_temperature_setting', @@ -1766,7 +1955,10 @@ # name: 
test_sensors[sensor.test_passenger_temperature_setting-statealt] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', 'friendly_name': 'Test Passenger temperature setting', + 'state_class': , + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.test_passenger_temperature_setting', @@ -1781,13 +1973,15 @@ 'aliases': set({ }), 'area_id': None, - 'capabilities': None, + 'capabilities': dict({ + 'state_class': , + }), 'config_entry_id': , 'device_class': None, 'device_id': , 'disabled_by': None, 'domain': 'sensor', - 'entity_category': None, + 'entity_category': , 'entity_id': 'sensor.test_power', 'has_entity_name': True, 'hidden_by': None, @@ -1798,7 +1992,7 @@ 'name': None, 'options': dict({ }), - 'original_device_class': None, + 'original_device_class': , 'original_icon': None, 'original_name': 'Power', 'platform': 'teslemetry', @@ -1806,13 +2000,16 @@ 'supported_features': 0, 'translation_key': 'drive_state_power', 'unique_id': 'VINVINVIN-drive_state_power', - 'unit_of_measurement': None, + 'unit_of_measurement': , }) # --- # name: test_sensors[sensor.test_power-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'device_class': 'power', 'friendly_name': 'Test Power', + 'state_class': , + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.test_power', @@ -1825,7 +2022,10 @@ # name: test_sensors[sensor.test_power-statealt] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'device_class': 'power', 'friendly_name': 'Test Power', + 'state_class': , + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.test_power', @@ -1840,7 +2040,14 @@ 'aliases': set({ }), 'area_id': None, - 'capabilities': None, + 'capabilities': dict({ + 'options': list([ + 'p', + 'd', + 'r', + 'n', + ]), + }), 'config_entry_id': , 'device_class': None, 'device_id': , @@ -1857,7 +2064,7 @@ 'name': None, 'options': dict({ }), - 'original_device_class': None, + 'original_device_class': , 'original_icon': None, 'original_name': 'Shift state', 
'platform': 'teslemetry', @@ -1871,27 +2078,41 @@ # name: test_sensors[sensor.test_shift_state-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'device_class': 'enum', 'friendly_name': 'Test Shift state', + 'options': list([ + 'p', + 'd', + 'r', + 'n', + ]), }), 'context': , 'entity_id': 'sensor.test_shift_state', 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'unknown', + 'state': 'p', }) # --- # name: test_sensors[sensor.test_shift_state-statealt] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'device_class': 'enum', 'friendly_name': 'Test Shift state', + 'options': list([ + 'p', + 'd', + 'r', + 'n', + ]), }), 'context': , 'entity_id': 'sensor.test_shift_state', 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'unknown', + 'state': 'p', }) # --- # name: test_sensors[sensor.test_speed-entry] @@ -1899,7 +2120,9 @@ 'aliases': set({ }), 'area_id': None, - 'capabilities': None, + 'capabilities': dict({ + 'state_class': , + }), 'config_entry_id': , 'device_class': None, 'device_id': , @@ -1915,8 +2138,11 @@ }), 'name': None, 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), }), - 'original_device_class': None, + 'original_device_class': , 'original_icon': None, 'original_name': 'Speed', 'platform': 'teslemetry', @@ -1924,33 +2150,39 @@ 'supported_features': 0, 'translation_key': 'drive_state_speed', 'unique_id': 'VINVINVIN-drive_state_speed', - 'unit_of_measurement': None, + 'unit_of_measurement': , }) # --- # name: test_sensors[sensor.test_speed-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'device_class': 'speed', 'friendly_name': 'Test Speed', + 'state_class': , + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.test_speed', 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'unknown', + 'state': '0', }) # --- # name: test_sensors[sensor.test_speed-statealt] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'device_class': 'speed', 'friendly_name': 'Test 
Speed', + 'state_class': , + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.test_speed', 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'unknown', + 'state': '0', }) # --- # name: test_sensors[sensor.test_state_of_charge_at_arrival-entry] @@ -1958,13 +2190,15 @@ 'aliases': set({ }), 'area_id': None, - 'capabilities': None, + 'capabilities': dict({ + 'state_class': , + }), 'config_entry_id': , 'device_class': None, 'device_id': , 'disabled_by': None, 'domain': 'sensor', - 'entity_category': None, + 'entity_category': , 'entity_id': 'sensor.test_state_of_charge_at_arrival', 'has_entity_name': True, 'hidden_by': None, @@ -1975,7 +2209,7 @@ 'name': None, 'options': dict({ }), - 'original_device_class': None, + 'original_device_class': , 'original_icon': None, 'original_name': 'State of charge at arrival', 'platform': 'teslemetry', @@ -1983,13 +2217,16 @@ 'supported_features': 0, 'translation_key': 'drive_state_active_route_energy_at_arrival', 'unique_id': 'VINVINVIN-drive_state_active_route_energy_at_arrival', - 'unit_of_measurement': None, + 'unit_of_measurement': '%', }) # --- # name: test_sensors[sensor.test_state_of_charge_at_arrival-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'device_class': 'battery', 'friendly_name': 'Test State of charge at arrival', + 'state_class': , + 'unit_of_measurement': '%', }), 'context': , 'entity_id': 'sensor.test_state_of_charge_at_arrival', @@ -2002,7 +2239,10 @@ # name: test_sensors[sensor.test_state_of_charge_at_arrival-statealt] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'device_class': 'battery', 'friendly_name': 'Test State of charge at arrival', + 'state_class': , + 'unit_of_measurement': '%', }), 'context': , 'entity_id': 'sensor.test_state_of_charge_at_arrival', @@ -2139,13 +2379,15 @@ 'aliases': set({ }), 'area_id': None, - 'capabilities': None, + 'capabilities': dict({ + 'state_class': , + }), 'config_entry_id': , 'device_class': None, 'device_id': , 'disabled_by': None, 
'domain': 'sensor', - 'entity_category': None, + 'entity_category': , 'entity_id': 'sensor.test_tire_pressure_front_left', 'has_entity_name': True, 'hidden_by': None, @@ -2155,8 +2397,14 @@ }), 'name': None, 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), }), - 'original_device_class': None, + 'original_device_class': , 'original_icon': None, 'original_name': 'Tire pressure front left', 'platform': 'teslemetry', @@ -2164,33 +2412,39 @@ 'supported_features': 0, 'translation_key': 'vehicle_state_tpms_pressure_fl', 'unique_id': 'VINVINVIN-vehicle_state_tpms_pressure_fl', - 'unit_of_measurement': None, + 'unit_of_measurement': , }) # --- # name: test_sensors[sensor.test_tire_pressure_front_left-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'device_class': 'pressure', 'friendly_name': 'Test Tire pressure front left', + 'state_class': , + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.test_tire_pressure_front_left', 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '2.775', + 'state': '40.2479739314961', }) # --- # name: test_sensors[sensor.test_tire_pressure_front_left-statealt] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'device_class': 'pressure', 'friendly_name': 'Test Tire pressure front left', + 'state_class': , + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.test_tire_pressure_front_left', 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '2.775', + 'state': '40.2479739314961', }) # --- # name: test_sensors[sensor.test_tire_pressure_front_right-entry] @@ -2198,13 +2452,15 @@ 'aliases': set({ }), 'area_id': None, - 'capabilities': None, + 'capabilities': dict({ + 'state_class': , + }), 'config_entry_id': , 'device_class': None, 'device_id': , 'disabled_by': None, 'domain': 'sensor', - 'entity_category': None, + 'entity_category': , 'entity_id': 'sensor.test_tire_pressure_front_right', 
'has_entity_name': True, 'hidden_by': None, @@ -2214,8 +2470,14 @@ }), 'name': None, 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), }), - 'original_device_class': None, + 'original_device_class': , 'original_icon': None, 'original_name': 'Tire pressure front right', 'platform': 'teslemetry', @@ -2223,33 +2485,39 @@ 'supported_features': 0, 'translation_key': 'vehicle_state_tpms_pressure_fr', 'unique_id': 'VINVINVIN-vehicle_state_tpms_pressure_fr', - 'unit_of_measurement': None, + 'unit_of_measurement': , }) # --- # name: test_sensors[sensor.test_tire_pressure_front_right-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'device_class': 'pressure', 'friendly_name': 'Test Tire pressure front right', + 'state_class': , + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.test_tire_pressure_front_right', 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '2.8', + 'state': '40.6105682912393', }) # --- # name: test_sensors[sensor.test_tire_pressure_front_right-statealt] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'device_class': 'pressure', 'friendly_name': 'Test Tire pressure front right', + 'state_class': , + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.test_tire_pressure_front_right', 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '2.8', + 'state': '40.6105682912393', }) # --- # name: test_sensors[sensor.test_tire_pressure_rear_left-entry] @@ -2257,13 +2525,15 @@ 'aliases': set({ }), 'area_id': None, - 'capabilities': None, + 'capabilities': dict({ + 'state_class': , + }), 'config_entry_id': , 'device_class': None, 'device_id': , 'disabled_by': None, 'domain': 'sensor', - 'entity_category': None, + 'entity_category': , 'entity_id': 'sensor.test_tire_pressure_rear_left', 'has_entity_name': True, 'hidden_by': None, @@ -2273,8 +2543,14 @@ }), 'name': None, 'options': dict({ + 'sensor': dict({ + 
'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), }), - 'original_device_class': None, + 'original_device_class': , 'original_icon': None, 'original_name': 'Tire pressure rear left', 'platform': 'teslemetry', @@ -2282,33 +2558,39 @@ 'supported_features': 0, 'translation_key': 'vehicle_state_tpms_pressure_rl', 'unique_id': 'VINVINVIN-vehicle_state_tpms_pressure_rl', - 'unit_of_measurement': None, + 'unit_of_measurement': , }) # --- # name: test_sensors[sensor.test_tire_pressure_rear_left-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'device_class': 'pressure', 'friendly_name': 'Test Tire pressure rear left', + 'state_class': , + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.test_tire_pressure_rear_left', 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '2.775', + 'state': '40.2479739314961', }) # --- # name: test_sensors[sensor.test_tire_pressure_rear_left-statealt] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'device_class': 'pressure', 'friendly_name': 'Test Tire pressure rear left', + 'state_class': , + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.test_tire_pressure_rear_left', 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '2.775', + 'state': '40.2479739314961', }) # --- # name: test_sensors[sensor.test_tire_pressure_rear_right-entry] @@ -2316,13 +2598,15 @@ 'aliases': set({ }), 'area_id': None, - 'capabilities': None, + 'capabilities': dict({ + 'state_class': , + }), 'config_entry_id': , 'device_class': None, 'device_id': , 'disabled_by': None, 'domain': 'sensor', - 'entity_category': None, + 'entity_category': , 'entity_id': 'sensor.test_tire_pressure_rear_right', 'has_entity_name': True, 'hidden_by': None, @@ -2332,8 +2616,14 @@ }), 'name': None, 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), }), - 
'original_device_class': None, + 'original_device_class': , 'original_icon': None, 'original_name': 'Tire pressure rear right', 'platform': 'teslemetry', @@ -2341,33 +2631,39 @@ 'supported_features': 0, 'translation_key': 'vehicle_state_tpms_pressure_rr', 'unique_id': 'VINVINVIN-vehicle_state_tpms_pressure_rr', - 'unit_of_measurement': None, + 'unit_of_measurement': , }) # --- # name: test_sensors[sensor.test_tire_pressure_rear_right-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'device_class': 'pressure', 'friendly_name': 'Test Tire pressure rear right', + 'state_class': , + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.test_tire_pressure_rear_right', 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '2.775', + 'state': '40.2479739314961', }) # --- # name: test_sensors[sensor.test_tire_pressure_rear_right-statealt] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'device_class': 'pressure', 'friendly_name': 'Test Tire pressure rear right', + 'state_class': , + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.test_tire_pressure_rear_right', 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '2.775', + 'state': '40.2479739314961', }) # --- # name: test_sensors[sensor.test_traffic_delay-entry] @@ -2375,7 +2671,9 @@ 'aliases': set({ }), 'area_id': None, - 'capabilities': None, + 'capabilities': dict({ + 'state_class': , + }), 'config_entry_id': , 'device_class': None, 'device_id': , @@ -2392,7 +2690,7 @@ 'name': None, 'options': dict({ }), - 'original_device_class': None, + 'original_device_class': , 'original_icon': None, 'original_name': 'Traffic delay', 'platform': 'teslemetry', @@ -2400,13 +2698,16 @@ 'supported_features': 0, 'translation_key': 'drive_state_active_route_traffic_minutes_delay', 'unique_id': 'VINVINVIN-drive_state_active_route_traffic_minutes_delay', - 'unit_of_measurement': None, + 'unit_of_measurement': , }) # --- # name: test_sensors[sensor.test_traffic_delay-state] 
StateSnapshot({ 'attributes': ReadOnlyDict({ + 'device_class': 'duration', 'friendly_name': 'Test Traffic delay', + 'state_class': , + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.test_traffic_delay', @@ -2419,7 +2720,10 @@ # name: test_sensors[sensor.test_traffic_delay-statealt] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'device_class': 'duration', 'friendly_name': 'Test Traffic delay', + 'state_class': , + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.test_traffic_delay', @@ -2434,7 +2738,9 @@ 'aliases': set({ }), 'area_id': None, - 'capabilities': None, + 'capabilities': dict({ + 'state_class': , + }), 'config_entry_id': , 'device_class': None, 'device_id': , @@ -2451,7 +2757,7 @@ 'name': None, 'options': dict({ }), - 'original_device_class': None, + 'original_device_class': , 'original_icon': None, 'original_name': 'Usable battery level', 'platform': 'teslemetry', @@ -2459,13 +2765,16 @@ 'supported_features': 0, 'translation_key': 'charge_state_usable_battery_level', 'unique_id': 'VINVINVIN-charge_state_usable_battery_level', - 'unit_of_measurement': None, + 'unit_of_measurement': '%', }) # --- # name: test_sensors[sensor.test_usable_battery_level-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'device_class': 'battery', 'friendly_name': 'Test Usable battery level', + 'state_class': , + 'unit_of_measurement': '%', }), 'context': , 'entity_id': 'sensor.test_usable_battery_level', @@ -2478,7 +2787,10 @@ # name: test_sensors[sensor.test_usable_battery_level-statealt] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'device_class': 'battery', 'friendly_name': 'Test Usable battery level', + 'state_class': , + 'unit_of_measurement': '%', }), 'context': , 'entity_id': 'sensor.test_usable_battery_level', From 09ed0aa399443656f064bdb94c7042b6994705df Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Tue, 16 Apr 2024 10:13:47 -0500 Subject: [PATCH 296/426] Bump httpcore to 1.0.5 (#115672) Fixes missing handling of EndOfStream errors --- homeassistant/package_constraints.txt | 2 +- script/gen_requirements_all.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index 13a28e34d28..49c7cf4a992 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -107,7 +107,7 @@ regex==2021.8.28 # requirements so we can directly link HA versions to these library versions. anyio==4.3.0 h11==0.14.0 -httpcore==1.0.4 +httpcore==1.0.5 # Ensure we have a hyperframe version that works in Python 3.10 # 5.2.0 fixed a collections abc deprecation diff --git a/script/gen_requirements_all.py b/script/gen_requirements_all.py index 9a9ff6821c7..1423ce92b89 100755 --- a/script/gen_requirements_all.py +++ b/script/gen_requirements_all.py @@ -100,7 +100,7 @@ regex==2021.8.28 # requirements so we can directly link HA versions to these library versions. anyio==4.3.0 h11==0.14.0 -httpcore==1.0.4 +httpcore==1.0.5 # Ensure we have a hyperframe version that works in Python 3.10 # 5.2.0 fixed a collections abc deprecation From 630763ad9ee3e443d41eee1155a9c7a69980c002 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Tue, 16 Apr 2024 01:30:51 -0500 Subject: [PATCH 297/426] Bump sqlparse to 0.5.0 (#115681) fixes https://github.com/home-assistant/core/security/dependabot/54 fixes https://github.com/home-assistant/core/security/dependabot/55 --- homeassistant/components/sql/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/sql/manifest.json b/homeassistant/components/sql/manifest.json index dd44af89237..30d071f25af 100644 --- a/homeassistant/components/sql/manifest.json +++ b/homeassistant/components/sql/manifest.json @@ -5,5 +5,5 @@ "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/sql", "iot_class": "local_polling", - "requirements": ["SQLAlchemy==2.0.29", "sqlparse==0.4.4"] + "requirements": ["SQLAlchemy==2.0.29", "sqlparse==0.5.0"] } diff --git a/requirements_all.txt b/requirements_all.txt index 0c1725ca089..cbe6ea8d74a 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2595,7 +2595,7 @@ spiderpy==1.6.1 spotipy==2.23.0 # homeassistant.components.sql -sqlparse==0.4.4 +sqlparse==0.5.0 # homeassistant.components.srp_energy srpenergy==1.3.6 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index ced79b46ae8..930aa27bcfb 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1999,7 +1999,7 @@ spiderpy==1.6.1 spotipy==2.23.0 # homeassistant.components.sql -sqlparse==0.4.4 +sqlparse==0.5.0 # homeassistant.components.srp_energy srpenergy==1.3.6 From 8207fc29d28b7ef4f9865774f87ac3cb3ac7df79 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Wed, 17 Apr 2024 02:10:06 -0500 Subject: [PATCH 298/426] Bump aiohttp to 3.9.5 (#115727) changelog: https://github.com/aio-libs/aiohttp/compare/v3.9.4...v3.9.5 --- homeassistant/package_constraints.txt | 2 +- pyproject.toml | 2 +- requirements.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index 49c7cf4a992..b2f55381f4d 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -4,7 +4,7 @@ aiodhcpwatcher==1.0.0 aiodiscover==2.0.0 aiohttp-fast-url-dispatcher==0.3.0 aiohttp-zlib-ng==0.3.1 -aiohttp==3.9.4 +aiohttp==3.9.5 aiohttp_cors==0.7.0 astral==2.2 async-interrupt==1.1.1 diff --git a/pyproject.toml b/pyproject.toml index 3a206a3f7fa..3cb894e2342 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -23,7 +23,7 @@ classifiers = [ ] requires-python = ">=3.12.0" dependencies = [ - "aiohttp==3.9.4", + "aiohttp==3.9.5", "aiohttp_cors==0.7.0", "aiohttp-fast-url-dispatcher==0.3.0", "aiohttp-zlib-ng==0.3.1", diff --git a/requirements.txt b/requirements.txt index 5635c4912e9..38bea26a8b6 100644 --- a/requirements.txt +++ b/requirements.txt @@ -3,7 +3,7 @@ -c homeassistant/package_constraints.txt # Home Assistant Core -aiohttp==3.9.4 +aiohttp==3.9.5 aiohttp_cors==0.7.0 aiohttp-fast-url-dispatcher==0.3.0 aiohttp-zlib-ng==0.3.1 From db31a526e5dcb1a90afc4fa6c605bb3c3c070e82 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Wed, 17 Apr 2024 14:23:47 +0200 Subject: [PATCH 299/426] Bump renault-api to 0.2.2 (#115738) --- homeassistant/components/renault/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/renault/manifest.json b/homeassistant/components/renault/manifest.json index 98e1c8b1e7c..9891c838950 100644 --- a/homeassistant/components/renault/manifest.json +++ 
b/homeassistant/components/renault/manifest.json @@ -8,5 +8,5 @@ "iot_class": "cloud_polling", "loggers": ["renault_api"], "quality_scale": "platinum", - "requirements": ["renault-api==0.2.1"] + "requirements": ["renault-api==0.2.2"] } diff --git a/requirements_all.txt b/requirements_all.txt index cbe6ea8d74a..5a728ac8429 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2429,7 +2429,7 @@ refoss-ha==1.2.0 regenmaschine==2024.03.0 # homeassistant.components.renault -renault-api==0.2.1 +renault-api==0.2.2 # homeassistant.components.renson renson-endura-delta==1.7.1 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 930aa27bcfb..5faf622a9c4 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1875,7 +1875,7 @@ refoss-ha==1.2.0 regenmaschine==2024.03.0 # homeassistant.components.renault -renault-api==0.2.1 +renault-api==0.2.2 # homeassistant.components.renson renson-endura-delta==1.7.1 From c4b504ce395bd694ded715a19986be210ed2607e Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Wed, 17 Apr 2024 15:00:10 +0200 Subject: [PATCH 300/426] Fix homeworks import flow (#115761) --- .../components/homeworks/config_flow.py | 10 +----- .../components/homeworks/test_config_flow.py | 32 +------------------ 2 files changed, 2 insertions(+), 40 deletions(-) diff --git a/homeassistant/components/homeworks/config_flow.py b/homeassistant/components/homeworks/config_flow.py index b2fe4e0e022..e54bbc61141 100644 --- a/homeassistant/components/homeworks/config_flow.py +++ b/homeassistant/components/homeworks/config_flow.py @@ -565,15 +565,7 @@ class HomeworksConfigFlowHandler(ConfigFlow, domain=DOMAIN): CONF_KEYPADS: [ { CONF_ADDR: keypad[CONF_ADDR], - CONF_BUTTONS: [ - { - CONF_LED: button[CONF_LED], - CONF_NAME: button[CONF_NAME], - CONF_NUMBER: button[CONF_NUMBER], - CONF_RELEASE_DELAY: button[CONF_RELEASE_DELAY], - } - for button in keypad[CONF_BUTTONS] - ], + CONF_BUTTONS: [], CONF_NAME: keypad[CONF_NAME], } for keypad in 
config[CONF_KEYPADS] diff --git a/tests/components/homeworks/test_config_flow.py b/tests/components/homeworks/test_config_flow.py index 4bdb5938f1c..a4159c9b693 100644 --- a/tests/components/homeworks/test_config_flow.py +++ b/tests/components/homeworks/test_config_flow.py @@ -9,7 +9,6 @@ from homeassistant.components.binary_sensor import DOMAIN as BINARY_SENSOR_DOMAI from homeassistant.components.button import DOMAIN as BUTTON_DOMAIN from homeassistant.components.homeworks.const import ( CONF_ADDR, - CONF_BUTTONS, CONF_DIMMERS, CONF_INDEX, CONF_KEYPADS, @@ -161,26 +160,6 @@ async def test_import_flow( { CONF_ADDR: "[02:08:02:01]", CONF_NAME: "Foyer Keypad", - CONF_BUTTONS: [ - { - CONF_NAME: "Morning", - CONF_NUMBER: 1, - CONF_LED: True, - CONF_RELEASE_DELAY: None, - }, - { - CONF_NAME: "Relax", - CONF_NUMBER: 2, - CONF_LED: True, - CONF_RELEASE_DELAY: None, - }, - { - CONF_NAME: "Dim up", - CONF_NUMBER: 3, - CONF_LED: False, - CONF_RELEASE_DELAY: 0.2, - }, - ], } ], }, @@ -207,16 +186,7 @@ async def test_import_flow( "keypads": [ { "addr": "[02:08:02:01]", - "buttons": [ - { - "led": True, - "name": "Morning", - "number": 1, - "release_delay": None, - }, - {"led": True, "name": "Relax", "number": 2, "release_delay": None}, - {"led": False, "name": "Dim up", "number": 3, "release_delay": 0.2}, - ], + "buttons": [], "name": "Foyer Keypad", } ], From 851a5497b4533a02081e9fc3eb5e7279ea024d3f Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Wed, 17 Apr 2024 15:04:37 +0200 Subject: [PATCH 301/426] Allow [##:##:##] type keypad address in homeworks (#115762) Allow [##:##:##] type keypad address --- homeassistant/components/homeworks/config_flow.py | 2 +- tests/components/homeworks/test_config_flow.py | 14 +++++++++----- 2 files changed, 10 insertions(+), 6 deletions(-) diff --git a/homeassistant/components/homeworks/config_flow.py b/homeassistant/components/homeworks/config_flow.py index e54bbc61141..b9515c306d6 100644 --- 
a/homeassistant/components/homeworks/config_flow.py +++ b/homeassistant/components/homeworks/config_flow.py @@ -93,7 +93,7 @@ BUTTON_EDIT = { } -validate_addr = cv.matches_regex(r"\[\d\d:\d\d:\d\d:\d\d\]") +validate_addr = cv.matches_regex(r"\[(?:\d\d:)?\d\d:\d\d:\d\d\]") async def validate_add_controller( diff --git a/tests/components/homeworks/test_config_flow.py b/tests/components/homeworks/test_config_flow.py index a4159c9b693..a66e743fcd6 100644 --- a/tests/components/homeworks/test_config_flow.py +++ b/tests/components/homeworks/test_config_flow.py @@ -544,8 +544,12 @@ async def test_options_add_remove_light_flow( ) +@pytest.mark.parametrize("keypad_address", ["[02:08:03:01]", "[02:08:03]"]) async def test_options_add_remove_keypad_flow( - hass: HomeAssistant, mock_config_entry: MockConfigEntry, mock_homeworks: MagicMock + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_homeworks: MagicMock, + keypad_address: str, ) -> None: """Test options flow to add and remove a keypad.""" mock_config_entry.add_to_hass(hass) @@ -566,7 +570,7 @@ async def test_options_add_remove_keypad_flow( result = await hass.config_entries.options.async_configure( result["flow_id"], user_input={ - CONF_ADDR: "[02:08:03:01]", + CONF_ADDR: keypad_address, CONF_NAME: "Hall Keypad", }, ) @@ -592,7 +596,7 @@ async def test_options_add_remove_keypad_flow( ], "name": "Foyer Keypad", }, - {"addr": "[02:08:03:01]", "buttons": [], "name": "Hall Keypad"}, + {"addr": keypad_address, "buttons": [], "name": "Hall Keypad"}, ], "port": 1234, } @@ -612,7 +616,7 @@ async def test_options_add_remove_keypad_flow( assert result["step_id"] == "remove_keypad" assert result["data_schema"].schema["index"].options == { "0": "Foyer Keypad ([02:08:02:01])", - "1": "Hall Keypad ([02:08:03:01])", + "1": f"Hall Keypad ({keypad_address})", } result = await hass.config_entries.options.async_configure( @@ -625,7 +629,7 @@ async def test_options_add_remove_keypad_flow( {"addr": "[02:08:01:01]", "name": 
"Foyer Sconces", "rate": 1.0}, ], "host": "192.168.0.1", - "keypads": [{"addr": "[02:08:03:01]", "buttons": [], "name": "Hall Keypad"}], + "keypads": [{"addr": keypad_address, "buttons": [], "name": "Hall Keypad"}], "port": 1234, } await hass.async_block_till_done() From 40884473030cd82267dcb4678a4fca500a8274cf Mon Sep 17 00:00:00 2001 From: Simone Chemelli Date: Fri, 19 Apr 2024 16:45:19 +0200 Subject: [PATCH 302/426] Add missing media_player features to Samsung TV (#115788) * add missing features * fix snapshot --- .../components/samsungtv/media_player.py | 16 +++++++++------- .../samsungtv/snapshots/test_init.ambr | 4 ++-- 2 files changed, 11 insertions(+), 9 deletions(-) diff --git a/homeassistant/components/samsungtv/media_player.py b/homeassistant/components/samsungtv/media_player.py index 36715c44a9b..ff347431a4a 100644 --- a/homeassistant/components/samsungtv/media_player.py +++ b/homeassistant/components/samsungtv/media_player.py @@ -46,15 +46,17 @@ from .triggers.turn_on import async_get_turn_on_trigger SOURCES = {"TV": "KEY_TV", "HDMI": "KEY_HDMI"} SUPPORT_SAMSUNGTV = ( - MediaPlayerEntityFeature.PAUSE - | MediaPlayerEntityFeature.VOLUME_STEP - | MediaPlayerEntityFeature.VOLUME_MUTE - | MediaPlayerEntityFeature.PREVIOUS_TRACK - | MediaPlayerEntityFeature.SELECT_SOURCE - | MediaPlayerEntityFeature.NEXT_TRACK - | MediaPlayerEntityFeature.TURN_OFF + MediaPlayerEntityFeature.NEXT_TRACK + | MediaPlayerEntityFeature.PAUSE | MediaPlayerEntityFeature.PLAY | MediaPlayerEntityFeature.PLAY_MEDIA + | MediaPlayerEntityFeature.PREVIOUS_TRACK + | MediaPlayerEntityFeature.SELECT_SOURCE + | MediaPlayerEntityFeature.STOP + | MediaPlayerEntityFeature.TURN_OFF + | MediaPlayerEntityFeature.VOLUME_MUTE + | MediaPlayerEntityFeature.VOLUME_SET + | MediaPlayerEntityFeature.VOLUME_STEP ) diff --git a/tests/components/samsungtv/snapshots/test_init.ambr b/tests/components/samsungtv/snapshots/test_init.ambr index 404b9a6b3af..1b8cf4c999d 100644 --- 
a/tests/components/samsungtv/snapshots/test_init.ambr +++ b/tests/components/samsungtv/snapshots/test_init.ambr @@ -9,7 +9,7 @@ 'TV', 'HDMI', ]), - 'supported_features': , + 'supported_features': , }), 'context': , 'entity_id': 'media_player.any', @@ -51,7 +51,7 @@ 'original_name': None, 'platform': 'samsungtv', 'previous_unique_id': None, - 'supported_features': , + 'supported_features': , 'translation_key': None, 'unique_id': 'sample-entry-id', 'unit_of_measurement': None, From 6464218e5974f6c9ad276e1caae7b7a8bdad4176 Mon Sep 17 00:00:00 2001 From: Robert Svensson Date: Fri, 19 Apr 2024 06:36:43 +0200 Subject: [PATCH 303/426] Bump aiounifi to v75 (#115819) --- homeassistant/components/unifi/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/unifi/manifest.json b/homeassistant/components/unifi/manifest.json index 05dc2189908..305400a4b9d 100644 --- a/homeassistant/components/unifi/manifest.json +++ b/homeassistant/components/unifi/manifest.json @@ -8,7 +8,7 @@ "iot_class": "local_push", "loggers": ["aiounifi"], "quality_scale": "platinum", - "requirements": ["aiounifi==74"], + "requirements": ["aiounifi==75"], "ssdp": [ { "manufacturer": "Ubiquiti Networks", diff --git a/requirements_all.txt b/requirements_all.txt index 5a728ac8429..ea29da2a855 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -392,7 +392,7 @@ aiotankerkoenig==0.4.1 aiotractive==0.5.6 # homeassistant.components.unifi -aiounifi==74 +aiounifi==75 # homeassistant.components.vlc_telnet aiovlc==0.1.0 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 5faf622a9c4..8411f66e540 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -365,7 +365,7 @@ aiotankerkoenig==0.4.1 aiotractive==0.5.6 # homeassistant.components.unifi -aiounifi==74 +aiounifi==75 # homeassistant.components.vlc_telnet aiovlc==0.1.0 From 
32f82d480f03f89355489ddc2feee38b7b0e3153 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Fri, 19 Apr 2024 11:24:54 -0500 Subject: [PATCH 304/426] Ensure scripts with timeouts of zero timeout immediately (#115830) --- homeassistant/helpers/script.py | 25 ++++- tests/helpers/test_script.py | 178 ++++++++++++++++++++++++++++++++ 2 files changed, 198 insertions(+), 5 deletions(-) diff --git a/homeassistant/helpers/script.py b/homeassistant/helpers/script.py index b4e02e0e4ad..2b0eb90827e 100644 --- a/homeassistant/helpers/script.py +++ b/homeassistant/helpers/script.py @@ -656,6 +656,12 @@ class _ScriptRun: # check if condition already okay if condition.async_template(self._hass, wait_template, self._variables, False): self._variables["wait"]["completed"] = True + self._changed() + return + + if timeout == 0: + self._changed() + self._async_handle_timeout() return futures, timeout_handle, timeout_future = self._async_futures_with_timeout( @@ -1085,6 +1091,11 @@ class _ScriptRun: self._variables["wait"] = {"remaining": timeout, "trigger": None} trace_set_result(wait=self._variables["wait"]) + if timeout == 0: + self._changed() + self._async_handle_timeout() + return + futures, timeout_handle, timeout_future = self._async_futures_with_timeout( timeout ) @@ -1115,6 +1126,14 @@ class _ScriptRun: futures, timeout_handle, timeout_future, remove_triggers ) + def _async_handle_timeout(self) -> None: + """Handle timeout.""" + self._variables["wait"]["remaining"] = 0.0 + if not self._action.get(CONF_CONTINUE_ON_TIMEOUT, True): + self._log(_TIMEOUT_MSG) + trace_set_result(wait=self._variables["wait"], timeout=True) + raise _AbortScript from TimeoutError() + async def _async_wait_with_optional_timeout( self, futures: list[asyncio.Future[None]], @@ -1125,11 +1144,7 @@ class _ScriptRun: try: await asyncio.wait(futures, return_when=asyncio.FIRST_COMPLETED) if timeout_future and timeout_future.done(): - self._variables["wait"]["remaining"] = 0.0 - if not 
self._action.get(CONF_CONTINUE_ON_TIMEOUT, True): - self._log(_TIMEOUT_MSG) - trace_set_result(wait=self._variables["wait"], timeout=True) - raise _AbortScript from TimeoutError() + self._async_handle_timeout() finally: if timeout_future and not timeout_future.done() and timeout_handle: timeout_handle.cancel() diff --git a/tests/helpers/test_script.py b/tests/helpers/test_script.py index 409b3639d43..16db9fb7b05 100644 --- a/tests/helpers/test_script.py +++ b/tests/helpers/test_script.py @@ -1311,6 +1311,184 @@ async def test_wait_timeout( assert_action_trace(expected_trace) +@pytest.mark.parametrize( + "timeout_param", [0, "{{ 0 }}", {"minutes": 0}, {"minutes": "{{ 0 }}"}] +) +async def test_wait_trigger_with_zero_timeout( + hass: HomeAssistant, caplog: pytest.LogCaptureFixture, timeout_param: int | str +) -> None: + """Test the wait trigger with zero timeout option.""" + event = "test_event" + events = async_capture_events(hass, event) + action = { + "wait_for_trigger": { + "platform": "state", + "entity_id": "switch.test", + "to": "off", + } + } + action["timeout"] = timeout_param + action["continue_on_timeout"] = True + sequence = cv.SCRIPT_SCHEMA([action, {"event": event}]) + sequence = await script.async_validate_actions_config(hass, sequence) + script_obj = script.Script(hass, sequence, "Test Name", "test_domain") + wait_started_flag = async_watch_for_action(script_obj, "wait") + hass.states.async_set("switch.test", "on") + hass.async_create_task(script_obj.async_run(context=Context())) + + try: + await asyncio.wait_for(wait_started_flag.wait(), 1) + except (AssertionError, TimeoutError): + await script_obj.async_stop() + raise + + assert not script_obj.is_running + assert len(events) == 1 + assert "(timeout: 0:00:00)" in caplog.text + + variable_wait = {"wait": {"trigger": None, "remaining": 0.0}} + expected_trace = { + "0": [ + { + "result": variable_wait, + "variables": variable_wait, + } + ], + "1": [{"result": {"event": "test_event", "event_data": 
{}}}], + } + assert_action_trace(expected_trace) + + +@pytest.mark.parametrize( + "timeout_param", [0, "{{ 0 }}", {"minutes": 0}, {"minutes": "{{ 0 }}"}] +) +async def test_wait_trigger_matches_with_zero_timeout( + hass: HomeAssistant, caplog: pytest.LogCaptureFixture, timeout_param: int | str +) -> None: + """Test the wait trigger that matches with zero timeout option.""" + event = "test_event" + events = async_capture_events(hass, event) + action = { + "wait_for_trigger": { + "platform": "state", + "entity_id": "switch.test", + "to": "off", + } + } + action["timeout"] = timeout_param + action["continue_on_timeout"] = True + sequence = cv.SCRIPT_SCHEMA([action, {"event": event}]) + sequence = await script.async_validate_actions_config(hass, sequence) + script_obj = script.Script(hass, sequence, "Test Name", "test_domain") + wait_started_flag = async_watch_for_action(script_obj, "wait") + hass.states.async_set("switch.test", "off") + hass.async_create_task(script_obj.async_run(context=Context())) + + try: + await asyncio.wait_for(wait_started_flag.wait(), 1) + except (AssertionError, TimeoutError): + await script_obj.async_stop() + raise + + assert not script_obj.is_running + assert len(events) == 1 + assert "(timeout: 0:00:00)" in caplog.text + + variable_wait = {"wait": {"trigger": None, "remaining": 0.0}} + expected_trace = { + "0": [ + { + "result": variable_wait, + "variables": variable_wait, + } + ], + "1": [{"result": {"event": "test_event", "event_data": {}}}], + } + assert_action_trace(expected_trace) + + +@pytest.mark.parametrize( + "timeout_param", [0, "{{ 0 }}", {"minutes": 0}, {"minutes": "{{ 0 }}"}] +) +async def test_wait_template_with_zero_timeout( + hass: HomeAssistant, caplog: pytest.LogCaptureFixture, timeout_param: int | str +) -> None: + """Test the wait template with zero timeout option.""" + event = "test_event" + events = async_capture_events(hass, event) + action = {"wait_template": "{{ states.switch.test.state == 'off' }}"} + 
action["timeout"] = timeout_param + action["continue_on_timeout"] = True + sequence = cv.SCRIPT_SCHEMA([action, {"event": event}]) + sequence = await script.async_validate_actions_config(hass, sequence) + script_obj = script.Script(hass, sequence, "Test Name", "test_domain") + wait_started_flag = async_watch_for_action(script_obj, "wait") + hass.states.async_set("switch.test", "on") + hass.async_create_task(script_obj.async_run(context=Context())) + + try: + await asyncio.wait_for(wait_started_flag.wait(), 1) + except (AssertionError, TimeoutError): + await script_obj.async_stop() + raise + + assert not script_obj.is_running + assert len(events) == 1 + assert "(timeout: 0:00:00)" in caplog.text + variable_wait = {"wait": {"completed": False, "remaining": 0.0}} + expected_trace = { + "0": [ + { + "result": variable_wait, + "variables": variable_wait, + } + ], + "1": [{"result": {"event": "test_event", "event_data": {}}}], + } + assert_action_trace(expected_trace) + + +@pytest.mark.parametrize( + "timeout_param", [0, "{{ 0 }}", {"minutes": 0}, {"minutes": "{{ 0 }}"}] +) +async def test_wait_template_matches_with_zero_timeout( + hass: HomeAssistant, caplog: pytest.LogCaptureFixture, timeout_param: int | str +) -> None: + """Test the wait template that matches with zero timeout option.""" + event = "test_event" + events = async_capture_events(hass, event) + action = {"wait_template": "{{ states.switch.test.state == 'off' }}"} + action["timeout"] = timeout_param + action["continue_on_timeout"] = True + sequence = cv.SCRIPT_SCHEMA([action, {"event": event}]) + sequence = await script.async_validate_actions_config(hass, sequence) + script_obj = script.Script(hass, sequence, "Test Name", "test_domain") + wait_started_flag = async_watch_for_action(script_obj, "wait") + hass.states.async_set("switch.test", "off") + hass.async_create_task(script_obj.async_run(context=Context())) + + try: + await asyncio.wait_for(wait_started_flag.wait(), 1) + except (AssertionError, 
TimeoutError): + await script_obj.async_stop() + raise + + assert not script_obj.is_running + assert len(events) == 1 + assert "(timeout: 0:00:00)" in caplog.text + variable_wait = {"wait": {"completed": True, "remaining": 0.0}} + expected_trace = { + "0": [ + { + "result": variable_wait, + "variables": variable_wait, + } + ], + "1": [{"result": {"event": "test_event", "event_data": {}}}], + } + assert_action_trace(expected_trace) + + @pytest.mark.parametrize( ("continue_on_timeout", "n_events"), [(False, 0), (True, 1), (None, 1)] ) From 13ed2d291931c44bfa5116e13377a32464a237de Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?St=C3=A5le=20Stor=C3=B8=20Hauknes?= Date: Tue, 23 Apr 2024 15:53:31 +0200 Subject: [PATCH 305/426] Fix KeyError error when fetching sensors (Airthings) (#115844) --- homeassistant/components/airthings/sensor.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/homeassistant/components/airthings/sensor.py b/homeassistant/components/airthings/sensor.py index fc91d816aca..f0a3dc5be8f 100644 --- a/homeassistant/components/airthings/sensor.py +++ b/homeassistant/components/airthings/sensor.py @@ -157,3 +157,11 @@ class AirthingsHeaterEnergySensor( def native_value(self) -> StateType: """Return the value reported by the sensor.""" return self.coordinator.data[self._id].sensors[self.entity_description.key] # type: ignore[no-any-return] + + @property + def available(self) -> bool: + """Check if device and sensor is available in data.""" + return ( + super().available + and self.entity_description.key in self.coordinator.data[self._id].sensors + ) From b8b2f6427a0c841c17b320f7b79b591350ecf785 Mon Sep 17 00:00:00 2001 From: jjlawren Date: Sat, 20 Apr 2024 05:36:03 -0500 Subject: [PATCH 306/426] Bump plexapi to 4.15.12 (#115872) --- homeassistant/components/plex/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/plex/manifest.json 
b/homeassistant/components/plex/manifest.json index 85362371715..ff0ab39b150 100644 --- a/homeassistant/components/plex/manifest.json +++ b/homeassistant/components/plex/manifest.json @@ -8,7 +8,7 @@ "iot_class": "local_push", "loggers": ["plexapi", "plexwebsocket"], "requirements": [ - "PlexAPI==4.15.11", + "PlexAPI==4.15.12", "plexauth==0.0.6", "plexwebsocket==0.0.14" ], diff --git a/requirements_all.txt b/requirements_all.txt index ea29da2a855..4a4e9c94c69 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -45,7 +45,7 @@ Mastodon.py==1.8.1 Pillow==10.3.0 # homeassistant.components.plex -PlexAPI==4.15.11 +PlexAPI==4.15.12 # homeassistant.components.progettihwsw ProgettiHWSW==0.1.3 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 8411f66e540..4acfa310c94 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -39,7 +39,7 @@ HATasmota==0.8.0 Pillow==10.3.0 # homeassistant.components.plex -PlexAPI==4.15.11 +PlexAPI==4.15.12 # homeassistant.components.progettihwsw ProgettiHWSW==0.1.3 From c9c7c7803e6097c0d555032d73e36ef34b1556e8 Mon Sep 17 00:00:00 2001 From: Allen Porter Date: Sun, 21 Apr 2024 15:52:47 -0700 Subject: [PATCH 307/426] Bump ical to 8.0.0 (#115907) Co-authored-by: J. 
Nick Koston --- homeassistant/components/google/manifest.json | 2 +- homeassistant/components/local_calendar/manifest.json | 2 +- homeassistant/components/local_todo/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/homeassistant/components/google/manifest.json b/homeassistant/components/google/manifest.json index 00561cb5fd6..ac43dc58953 100644 --- a/homeassistant/components/google/manifest.json +++ b/homeassistant/components/google/manifest.json @@ -7,5 +7,5 @@ "documentation": "https://www.home-assistant.io/integrations/calendar.google", "iot_class": "cloud_polling", "loggers": ["googleapiclient"], - "requirements": ["gcal-sync==6.0.4", "oauth2client==4.1.3", "ical==7.0.3"] + "requirements": ["gcal-sync==6.0.4", "oauth2client==4.1.3", "ical==8.0.0"] } diff --git a/homeassistant/components/local_calendar/manifest.json b/homeassistant/components/local_calendar/manifest.json index 1c13970503d..b1c7d6a3a34 100644 --- a/homeassistant/components/local_calendar/manifest.json +++ b/homeassistant/components/local_calendar/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/local_calendar", "iot_class": "local_polling", "loggers": ["ical"], - "requirements": ["ical==7.0.3"] + "requirements": ["ical==8.0.0"] } diff --git a/homeassistant/components/local_todo/manifest.json b/homeassistant/components/local_todo/manifest.json index 3bcb8af9f43..44c76a56a8f 100644 --- a/homeassistant/components/local_todo/manifest.json +++ b/homeassistant/components/local_todo/manifest.json @@ -5,5 +5,5 @@ "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/local_todo", "iot_class": "local_polling", - "requirements": ["ical==7.0.3"] + "requirements": ["ical==8.0.0"] } diff --git a/requirements_all.txt b/requirements_all.txt index 4a4e9c94c69..2228c9d1bd6 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1118,7 +1118,7 
@@ ibmiotf==0.3.4 # homeassistant.components.google # homeassistant.components.local_calendar # homeassistant.components.local_todo -ical==7.0.3 +ical==8.0.0 # homeassistant.components.ping icmplib==3.0 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 4acfa310c94..be285822e63 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -908,7 +908,7 @@ ibeacon-ble==1.2.0 # homeassistant.components.google # homeassistant.components.local_calendar # homeassistant.components.local_todo -ical==7.0.3 +ical==8.0.0 # homeassistant.components.ping icmplib==3.0 From 036b6fca25d7339ae17f2285a810dea7889f0ef9 Mon Sep 17 00:00:00 2001 From: Michael <35783820+mib1185@users.noreply.github.com> Date: Sun, 21 Apr 2024 11:44:58 +0200 Subject: [PATCH 308/426] Fix geo location attributes of Tankerkoenig sensors (#115914) * geo location attributes needs to be float * make mypy happy --- homeassistant/components/tankerkoenig/sensor.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/tankerkoenig/sensor.py b/homeassistant/components/tankerkoenig/sensor.py index f2fdc2c45b7..33476e75262 100644 --- a/homeassistant/components/tankerkoenig/sensor.py +++ b/homeassistant/components/tankerkoenig/sensor.py @@ -91,7 +91,7 @@ class FuelPriceSensor(TankerkoenigCoordinatorEntity, SensorEntity): self._fuel_type = fuel_type self._attr_translation_key = fuel_type self._attr_unique_id = f"{station.id}_{fuel_type}" - attrs = { + attrs: dict[str, int | str | float | None] = { ATTR_BRAND: station.brand, ATTR_FUEL_TYPE: fuel_type, ATTR_STATION_NAME: station.name, @@ -102,8 +102,8 @@ class FuelPriceSensor(TankerkoenigCoordinatorEntity, SensorEntity): } if coordinator.show_on_map: - attrs[ATTR_LATITUDE] = str(station.lat) - attrs[ATTR_LONGITUDE] = str(station.lng) + attrs[ATTR_LATITUDE] = station.lat + attrs[ATTR_LONGITUDE] = station.lng self._attr_extra_state_attributes = attrs @property From 
b521acb72404bb57b3422978baf4d216f0ecb011 Mon Sep 17 00:00:00 2001 From: Raj Laud <50647620+rajlaud@users.noreply.github.com> Date: Mon, 22 Apr 2024 11:46:12 -0400 Subject: [PATCH 309/426] Use start helper in squeezebox for server discovery (#115978) --- homeassistant/components/squeezebox/media_player.py | 9 ++------- 1 file changed, 2 insertions(+), 7 deletions(-) diff --git a/homeassistant/components/squeezebox/media_player.py b/homeassistant/components/squeezebox/media_player.py index 007d880a263..d9478b6747d 100644 --- a/homeassistant/components/squeezebox/media_player.py +++ b/homeassistant/components/squeezebox/media_player.py @@ -28,7 +28,6 @@ from homeassistant.const import ( CONF_PASSWORD, CONF_PORT, CONF_USERNAME, - EVENT_HOMEASSISTANT_START, ) from homeassistant.core import HomeAssistant, callback from homeassistant.helpers import ( @@ -44,6 +43,7 @@ from homeassistant.helpers.dispatcher import ( ) from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.event import async_call_later +from homeassistant.helpers.start import async_at_start from homeassistant.util.dt import utcnow from .browse_media import ( @@ -207,12 +207,7 @@ async def async_setup_entry( platform.async_register_entity_service(SERVICE_UNSYNC, None, "async_unsync") # Start server discovery task if not already running - if hass.is_running: - hass.async_create_task(start_server_discovery(hass)) - else: - hass.bus.async_listen_once( - EVENT_HOMEASSISTANT_START, start_server_discovery(hass) - ) + config_entry.async_on_unload(async_at_start(hass, start_server_discovery)) class SqueezeBoxEntity(MediaPlayerEntity): From 4d551d68c6f8e86380b91c30671d4a7d13ab9ae1 Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Tue, 23 Apr 2024 20:12:21 +0200 Subject: [PATCH 310/426] Bump version to 2024.4.4 --- homeassistant/const.py | 2 +- pyproject.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/homeassistant/const.py b/homeassistant/const.py 
index ecfc1c6259c..892d16ba008 100644 --- a/homeassistant/const.py +++ b/homeassistant/const.py @@ -18,7 +18,7 @@ from .util.signal_type import SignalType APPLICATION_NAME: Final = "HomeAssistant" MAJOR_VERSION: Final = 2024 MINOR_VERSION: Final = 4 -PATCH_VERSION: Final = "3" +PATCH_VERSION: Final = "4" __short_version__: Final = f"{MAJOR_VERSION}.{MINOR_VERSION}" __version__: Final = f"{__short_version__}.{PATCH_VERSION}" REQUIRED_PYTHON_VER: Final[tuple[int, int, int]] = (3, 12, 0) diff --git a/pyproject.toml b/pyproject.toml index 3cb894e2342..b6206f107f7 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta" [project] name = "homeassistant" -version = "2024.4.3" +version = "2024.4.4" license = {text = "Apache-2.0"} description = "Open-source home automation platform running on Python 3." readme = "README.rst" From 0ed48c844d1ae3e809f3615b5687617d501d90a7 Mon Sep 17 00:00:00 2001 From: Markus Jacobsen Date: Tue, 23 Apr 2024 21:06:06 +0200 Subject: [PATCH 311/426] Bump mozart-api to 3.4.1.8.5 (#113745) --- .../components/bang_olufsen/__init__.py | 27 ++++++++++++------- .../components/bang_olufsen/manifest.json | 2 +- .../components/bang_olufsen/media_player.py | 4 ++- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 5 files changed, 23 insertions(+), 14 deletions(-) diff --git a/homeassistant/components/bang_olufsen/__init__.py b/homeassistant/components/bang_olufsen/__init__.py index 2488c2e64f5..07b9d0befe1 100644 --- a/homeassistant/components/bang_olufsen/__init__.py +++ b/homeassistant/components/bang_olufsen/__init__.py @@ -4,7 +4,11 @@ from __future__ import annotations from dataclasses import dataclass -from aiohttp.client_exceptions import ClientConnectorError +from aiohttp.client_exceptions import ( + ClientConnectorError, + ClientOSError, + ServerTimeoutError, +) from mozart_api.exceptions import ApiException from mozart_api.mozart_client import MozartClient @@ -44,12 +48,18 @@ async def 
async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: model=entry.data[CONF_MODEL], ) - client = MozartClient(host=entry.data[CONF_HOST], websocket_reconnect=True) + client = MozartClient(host=entry.data[CONF_HOST]) - # Check connection and try to initialize it. + # Check API and WebSocket connection try: - await client.get_battery_state(_request_timeout=3) - except (ApiException, ClientConnectorError, TimeoutError) as error: + await client.check_device_connection(True) + except* ( + ClientConnectorError, + ClientOSError, + ServerTimeoutError, + ApiException, + TimeoutError, + ) as error: await client.close_api_client() raise ConfigEntryNotReady(f"Unable to connect to {entry.title}") from error @@ -61,11 +71,8 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: client, ) - # Check and start WebSocket connection - if not await client.connect_notifications(remote_control=True): - raise ConfigEntryNotReady( - f"Unable to connect to {entry.title} WebSocket notification channel" - ) + # Start WebSocket connection + await client.connect_notifications(remote_control=True, reconnect=True) await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) diff --git a/homeassistant/components/bang_olufsen/manifest.json b/homeassistant/components/bang_olufsen/manifest.json index 3c920a99d7f..f2b31293227 100644 --- a/homeassistant/components/bang_olufsen/manifest.json +++ b/homeassistant/components/bang_olufsen/manifest.json @@ -6,6 +6,6 @@ "documentation": "https://www.home-assistant.io/integrations/bang_olufsen", "integration_type": "device", "iot_class": "local_push", - "requirements": ["mozart-api==3.2.1.150.6"], + "requirements": ["mozart-api==3.4.1.8.5"], "zeroconf": ["_bangolufsen._tcp.local."] } diff --git a/homeassistant/components/bang_olufsen/media_player.py b/homeassistant/components/bang_olufsen/media_player.py index 935c057efc8..9f55790d711 100644 --- a/homeassistant/components/bang_olufsen/media_player.py +++ 
b/homeassistant/components/bang_olufsen/media_player.py @@ -363,7 +363,9 @@ class BangOlufsenMediaPlayer(BangOlufsenEntity, MediaPlayerEntity): def is_volume_muted(self) -> bool | None: """Boolean if volume is currently muted.""" if self._volume.muted and self._volume.muted.muted: - return self._volume.muted.muted + # The any return here is side effect of pydantic v2 compatibility + # This will be fixed in the future. + return self._volume.muted.muted # type: ignore[no-any-return] return None @property diff --git a/requirements_all.txt b/requirements_all.txt index 240606435ba..f212a8675e8 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1332,7 +1332,7 @@ motionblindsble==0.0.9 motioneye-client==0.3.14 # homeassistant.components.bang_olufsen -mozart-api==3.2.1.150.6 +mozart-api==3.4.1.8.5 # homeassistant.components.mullvad mullvad-api==1.0.0 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 51161b1afd3..8f318a24b5e 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1074,7 +1074,7 @@ motionblindsble==0.0.9 motioneye-client==0.3.14 # homeassistant.components.bang_olufsen -mozart-api==3.2.1.150.6 +mozart-api==3.4.1.8.5 # homeassistant.components.mullvad mullvad-api==1.0.0 From 61cf7e851b7dd5555b63a9d986706eed4da37200 Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Tue, 23 Apr 2024 21:13:32 +0200 Subject: [PATCH 312/426] Update pipdeptree to 2.17.0 (#116049) --- requirements_test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements_test.txt b/requirements_test.txt index e42a94091ad..10812a87c6e 100644 --- a/requirements_test.txt +++ b/requirements_test.txt @@ -16,7 +16,7 @@ pre-commit==3.7.0 pydantic==1.10.12 pylint==3.1.0 pylint-per-file-ignores==1.3.2 -pipdeptree==2.16.1 +pipdeptree==2.17.0 pytest-asyncio==0.23.6 pytest-aiohttp==1.0.5 pytest-cov==5.0.0 From 991e479dacdf00fff033d89ecb9179531a7b38ec Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Tue, 23 Apr 2024 21:26:00 
+0200 Subject: [PATCH 313/426] Update coverage to 7.5.0 (#116048) --- requirements_test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements_test.txt b/requirements_test.txt index 10812a87c6e..233c8c1534e 100644 --- a/requirements_test.txt +++ b/requirements_test.txt @@ -8,7 +8,7 @@ -c homeassistant/package_constraints.txt -r requirements_test_pre_commit.txt astroid==3.1.0 -coverage==7.4.4 +coverage==7.5.0 freezegun==1.4.0 mock-open==1.4.0 mypy-dev==1.10.0a3 From 46ec8a85b60b63a62e70e5c4f52bd9a5cf4b4244 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Tue, 23 Apr 2024 21:31:08 +0200 Subject: [PATCH 314/426] Pass the job type when setting up homekit state change listeners (#116038) --- homeassistant/components/homekit/accessories.py | 8 +++++++- homeassistant/components/homekit/type_cameras.py | 3 +++ homeassistant/components/homekit/type_covers.py | 9 ++++++++- homeassistant/components/homekit/type_humidifiers.py | 9 ++++++++- 4 files changed, 26 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/homekit/accessories.py b/homeassistant/components/homekit/accessories.py index f2e1a26b3de..40e86efe6a9 100644 --- a/homeassistant/components/homekit/accessories.py +++ b/homeassistant/components/homekit/accessories.py @@ -46,6 +46,7 @@ from homeassistant.core import ( Context, Event, EventStateChangedData, + HassJobType, HomeAssistant, State, callback as ha_callback, @@ -436,7 +437,10 @@ class HomeAccessory(Accessory): # type: ignore[misc] self._update_available_from_state(state) self._subscriptions.append( async_track_state_change_event( - self.hass, [self.entity_id], self.async_update_event_state_callback + self.hass, + [self.entity_id], + self.async_update_event_state_callback, + job_type=HassJobType.Callback, ) ) @@ -456,6 +460,7 @@ class HomeAccessory(Accessory): # type: ignore[misc] self.hass, [self.linked_battery_sensor], self.async_update_linked_battery_callback, + job_type=HassJobType.Callback, ) ) elif state is 
not None: @@ -468,6 +473,7 @@ class HomeAccessory(Accessory): # type: ignore[misc] self.hass, [self.linked_battery_charging_sensor], self.async_update_linked_battery_charging_callback, + job_type=HassJobType.Callback, ) ) elif battery_charging_state is None and state is not None: diff --git a/homeassistant/components/homekit/type_cameras.py b/homeassistant/components/homekit/type_cameras.py index d14fef8eabf..4f05bfbd687 100644 --- a/homeassistant/components/homekit/type_cameras.py +++ b/homeassistant/components/homekit/type_cameras.py @@ -20,6 +20,7 @@ from homeassistant.const import STATE_ON from homeassistant.core import ( Event, EventStateChangedData, + HassJobType, HomeAssistant, State, callback, @@ -272,6 +273,7 @@ class Camera(HomeAccessory, PyhapCamera): # type: ignore[misc] self.hass, [self.linked_motion_sensor], self._async_update_motion_state_event, + job_type=HassJobType.Callback, ) ) @@ -282,6 +284,7 @@ class Camera(HomeAccessory, PyhapCamera): # type: ignore[misc] self.hass, [self.linked_doorbell_sensor], self._async_update_doorbell_state_event, + job_type=HassJobType.Callback, ) ) diff --git a/homeassistant/components/homekit/type_covers.py b/homeassistant/components/homekit/type_covers.py index d14713b5f05..29dda418665 100644 --- a/homeassistant/components/homekit/type_covers.py +++ b/homeassistant/components/homekit/type_covers.py @@ -34,7 +34,13 @@ from homeassistant.const import ( STATE_OPEN, STATE_OPENING, ) -from homeassistant.core import Event, EventStateChangedData, State, callback +from homeassistant.core import ( + Event, + EventStateChangedData, + HassJobType, + State, + callback, +) from homeassistant.helpers.event import async_track_state_change_event from .accessories import TYPES, HomeAccessory @@ -136,6 +142,7 @@ class GarageDoorOpener(HomeAccessory): self.hass, [self.linked_obstruction_sensor], self._async_update_obstruction_event, + job_type=HassJobType.Callback, ) ) diff --git a/homeassistant/components/homekit/type_humidifiers.py 
b/homeassistant/components/homekit/type_humidifiers.py index 1fca441e800..5bdf5950f18 100644 --- a/homeassistant/components/homekit/type_humidifiers.py +++ b/homeassistant/components/homekit/type_humidifiers.py @@ -25,7 +25,13 @@ from homeassistant.const import ( SERVICE_TURN_ON, STATE_ON, ) -from homeassistant.core import Event, EventStateChangedData, State, callback +from homeassistant.core import ( + Event, + EventStateChangedData, + HassJobType, + State, + callback, +) from homeassistant.helpers.event import async_track_state_change_event from .accessories import TYPES, HomeAccessory @@ -184,6 +190,7 @@ class HumidifierDehumidifier(HomeAccessory): self.hass, [self.linked_humidity_sensor], self.async_update_current_humidity_event, + job_type=HassJobType.Callback, ) ) From 3e0a45eee267b923db76715f2d238c586e32ba56 Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Tue, 23 Apr 2024 21:36:36 +0200 Subject: [PATCH 315/426] Update requests_mock to 1.12.1 (#116050) --- requirements_test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements_test.txt b/requirements_test.txt index 233c8c1534e..5470bc2a49d 100644 --- a/requirements_test.txt +++ b/requirements_test.txt @@ -29,7 +29,7 @@ pytest-unordered==0.6.0 pytest-picked==0.5.0 pytest-xdist==3.5.0 pytest==8.1.1 -requests-mock==1.11.0 +requests-mock==1.12.1 respx==0.21.0 syrupy==4.6.1 tqdm==4.66.2 From 8bf3c87336f9d04602c0bff3feb5021eedae2ff0 Mon Sep 17 00:00:00 2001 From: Robert Svensson Date: Tue, 23 Apr 2024 21:45:20 +0200 Subject: [PATCH 316/426] Breakout heartbeat monitor and poe command queue in UniFi (#112529) * Split out entity helper functionality to own class * Split out heartbeat to own class * Break out poe command * Make more parts private * Make more things private and simplify naming * Sort initialize * Fix ruff --- .../components/unifi/device_tracker.py | 12 +- .../components/unifi/hub/entity_helper.py | 156 ++++++++++++++++++ homeassistant/components/unifi/hub/hub.py | 110 
++++-------- homeassistant/components/unifi/sensor.py | 4 +- homeassistant/components/unifi/switch.py | 2 +- 5 files changed, 193 insertions(+), 91 deletions(-) create mode 100644 homeassistant/components/unifi/hub/entity_helper.py diff --git a/homeassistant/components/unifi/device_tracker.py b/homeassistant/components/unifi/device_tracker.py index a41d1942536..dc48b9c31fe 100644 --- a/homeassistant/components/unifi/device_tracker.py +++ b/homeassistant/components/unifi/device_tracker.py @@ -240,7 +240,7 @@ class UnifiScannerEntity(UnifiEntity[HandlerT, ApiItemT], ScannerEntity): self._ignore_events = False self._is_connected = description.is_connected_fn(self.hub, self._obj_id) if self.is_connected: - self.hub.async_heartbeat( + self.hub.update_heartbeat( self.unique_id, dt_util.utcnow() + description.heartbeat_timedelta_fn(self.hub, self._obj_id), @@ -301,12 +301,12 @@ class UnifiScannerEntity(UnifiEntity[HandlerT, ApiItemT], ScannerEntity): # From unifi.entity.async_signal_reachable_callback # Controller connection state has changed and entity is unavailable # Cancel heartbeat - self.hub.async_heartbeat(self.unique_id) + self.hub.remove_heartbeat(self.unique_id) return if is_connected := description.is_connected_fn(self.hub, self._obj_id): self._is_connected = is_connected - self.hub.async_heartbeat( + self.hub.update_heartbeat( self.unique_id, dt_util.utcnow() + description.heartbeat_timedelta_fn(self.hub, self._obj_id), @@ -319,12 +319,12 @@ class UnifiScannerEntity(UnifiEntity[HandlerT, ApiItemT], ScannerEntity): return if event.key in self._event_is_on: - self.hub.async_heartbeat(self.unique_id) + self.hub.remove_heartbeat(self.unique_id) self._is_connected = True self.async_write_ha_state() return - self.hub.async_heartbeat( + self.hub.update_heartbeat( self.unique_id, dt_util.utcnow() + self.entity_description.heartbeat_timedelta_fn(self.hub, self._obj_id), @@ -344,7 +344,7 @@ class UnifiScannerEntity(UnifiEntity[HandlerT, ApiItemT], ScannerEntity): async 
def async_will_remove_from_hass(self) -> None: """Disconnect object when removed.""" await super().async_will_remove_from_hass() - self.hub.async_heartbeat(self.unique_id) + self.hub.remove_heartbeat(self.unique_id) @property def extra_state_attributes(self) -> Mapping[str, Any] | None: diff --git a/homeassistant/components/unifi/hub/entity_helper.py b/homeassistant/components/unifi/hub/entity_helper.py new file mode 100644 index 00000000000..c4bcf237386 --- /dev/null +++ b/homeassistant/components/unifi/hub/entity_helper.py @@ -0,0 +1,156 @@ +"""UniFi Network entity helper.""" + +from __future__ import annotations + +from datetime import datetime, timedelta + +import aiounifi +from aiounifi.models.device import DeviceSetPoePortModeRequest + +from homeassistant.core import CALLBACK_TYPE, HomeAssistant, callback +from homeassistant.helpers.dispatcher import async_dispatcher_send +from homeassistant.helpers.event import async_call_later, async_track_time_interval +import homeassistant.util.dt as dt_util + + +class UnifiEntityHelper: + """UniFi Network integration handling platforms for entity registration.""" + + def __init__(self, hass: HomeAssistant, api: aiounifi.Controller) -> None: + """Initialize the UniFi entity loader.""" + self.hass = hass + self.api = api + + self._device_command = UnifiDeviceCommand(hass, api) + self._heartbeat = UnifiEntityHeartbeat(hass) + + @callback + def reset(self) -> None: + """Cancel timers.""" + self._device_command.reset() + self._heartbeat.reset() + + @callback + def initialize(self) -> None: + """Initialize entity helper.""" + self._heartbeat.initialize() + + @property + def signal_heartbeat(self) -> str: + """Event to signal new heartbeat missed.""" + return self._heartbeat.signal + + @callback + def update_heartbeat(self, unique_id: str, heartbeat_expire_time: datetime) -> None: + """Update device time in heartbeat monitor.""" + self._heartbeat.update(unique_id, heartbeat_expire_time) + + @callback + def 
remove_heartbeat(self, unique_id: str) -> None: + """Update device time in heartbeat monitor.""" + self._heartbeat.remove(unique_id) + + @callback + def queue_poe_port_command( + self, device_id: str, port_idx: int, poe_mode: str + ) -> None: + """Queue commands to execute them together per device.""" + self._device_command.queue_poe_command(device_id, port_idx, poe_mode) + + +class UnifiEntityHeartbeat: + """UniFi entity heartbeat monitor.""" + + CHECK_HEARTBEAT_INTERVAL = timedelta(seconds=1) + + def __init__(self, hass: HomeAssistant) -> None: + """Initialize the heartbeat monitor.""" + self.hass = hass + + self._cancel_heartbeat_check: CALLBACK_TYPE | None = None + self._heartbeat_time: dict[str, datetime] = {} + + @callback + def reset(self) -> None: + """Cancel timers.""" + if self._cancel_heartbeat_check: + self._cancel_heartbeat_check() + self._cancel_heartbeat_check = None + + @callback + def initialize(self) -> None: + """Initialize heartbeat monitor.""" + self._cancel_heartbeat_check = async_track_time_interval( + self.hass, self._check_for_stale, self.CHECK_HEARTBEAT_INTERVAL + ) + + @property + def signal(self) -> str: + """Event to signal new heartbeat missed.""" + return "unifi-heartbeat-missed" + + @callback + def update(self, unique_id: str, heartbeat_expire_time: datetime) -> None: + """Update device time in heartbeat monitor.""" + self._heartbeat_time[unique_id] = heartbeat_expire_time + + @callback + def remove(self, unique_id: str) -> None: + """Remove device from heartbeat monitor.""" + self._heartbeat_time.pop(unique_id, None) + + @callback + def _check_for_stale(self, *_: datetime) -> None: + """Check for any devices scheduled to be marked disconnected.""" + now = dt_util.utcnow() + + unique_ids_to_remove = [] + for unique_id, heartbeat_expire_time in self._heartbeat_time.items(): + if now > heartbeat_expire_time: + async_dispatcher_send(self.hass, f"{self.signal}_{unique_id}") + unique_ids_to_remove.append(unique_id) + + for unique_id in 
unique_ids_to_remove: + del self._heartbeat_time[unique_id] + + +class UnifiDeviceCommand: + """UniFi Device command helper class.""" + + COMMAND_DELAY = 5 + + def __init__(self, hass: HomeAssistant, api: aiounifi.Controller) -> None: + """Initialize device command helper.""" + self.hass = hass + self.api = api + + self._command_queue: dict[str, dict[int, str]] = {} + self._cancel_command: CALLBACK_TYPE | None = None + + @callback + def reset(self) -> None: + """Cancel timers.""" + if self._cancel_command: + self._cancel_command() + self._cancel_command = None + + @callback + def queue_poe_command(self, device_id: str, port_idx: int, poe_mode: str) -> None: + """Queue commands to execute them together per device.""" + self.reset() + + device_queue = self._command_queue.setdefault(device_id, {}) + device_queue[port_idx] = poe_mode + + async def _command(now: datetime) -> None: + """Execute previously queued commands.""" + queue = self._command_queue.copy() + self._command_queue.clear() + for device_id, device_commands in queue.items(): + device = self.api.devices[device_id] + commands = list(device_commands.items()) + await self.api.request( + DeviceSetPoePortModeRequest.create(device, targets=commands) + ) + + self._cancel_command = async_call_later(self.hass, self.COMMAND_DELAY, _command) diff --git a/homeassistant/components/unifi/hub/hub.py b/homeassistant/components/unifi/hub/hub.py index df91584f267..f8c1f2517a2 100644 --- a/homeassistant/components/unifi/hub/hub.py +++ b/homeassistant/components/unifi/hub/hub.py @@ -2,13 +2,12 @@ from __future__ import annotations -from datetime import datetime, timedelta +from datetime import datetime import aiounifi -from aiounifi.models.device import DeviceSetPoePortModeRequest from homeassistant.config_entries import ConfigEntry -from homeassistant.core import CALLBACK_TYPE, Event, HomeAssistant, callback +from homeassistant.core import Event, HomeAssistant, callback from homeassistant.helpers import device_registry as dr 
from homeassistant.helpers.device_registry import ( DeviceEntry, @@ -16,16 +15,13 @@ from homeassistant.helpers.device_registry import ( DeviceInfo, ) from homeassistant.helpers.dispatcher import async_dispatcher_send -from homeassistant.helpers.event import async_call_later, async_track_time_interval -import homeassistant.util.dt as dt_util from ..const import ATTR_MANUFACTURER, CONF_SITE_ID, DOMAIN as UNIFI_DOMAIN, PLATFORMS from .config import UnifiConfig +from .entity_helper import UnifiEntityHelper from .entity_loader import UnifiEntityLoader from .websocket import UnifiWebsocket -CHECK_HEARTBEAT_INTERVAL = timedelta(seconds=1) - class UnifiHub: """Manages a single UniFi Network instance.""" @@ -38,17 +34,12 @@ class UnifiHub: self.api = api self.config = UnifiConfig.from_config_entry(config_entry) self.entity_loader = UnifiEntityLoader(self) + self._entity_helper = UnifiEntityHelper(hass, api) self.websocket = UnifiWebsocket(hass, api, self.signal_reachable) self.site = config_entry.data[CONF_SITE_ID] self.is_admin = False - self._cancel_heartbeat_check: CALLBACK_TYPE | None = None - self._heartbeat_time: dict[str, datetime] = {} - - self.poe_command_queue: dict[str, dict[int, str]] = {} - self._cancel_poe_command: CALLBACK_TYPE | None = None - @callback @staticmethod def get_hub(hass: HomeAssistant, config_entry: ConfigEntry) -> UnifiHub: @@ -61,6 +52,28 @@ class UnifiHub: """Websocket connection state.""" return self.websocket.available + @property + def signal_heartbeat_missed(self) -> str: + """Event to signal new heartbeat missed.""" + return self._entity_helper.signal_heartbeat + + @callback + def update_heartbeat(self, unique_id: str, heartbeat_expire_time: datetime) -> None: + """Update device time in heartbeat monitor.""" + self._entity_helper.update_heartbeat(unique_id, heartbeat_expire_time) + + @callback + def remove_heartbeat(self, unique_id: str) -> None: + """Update device time in heartbeat monitor.""" + 
self._entity_helper.remove_heartbeat(unique_id) + + @callback + def queue_poe_port_command( + self, device_id: str, port_idx: int, poe_mode: str + ) -> None: + """Queue commands to execute them together per device.""" + self._entity_helper.queue_poe_port_command(device_id, port_idx, poe_mode) + @property def signal_reachable(self) -> str: """Integration specific event to signal a change in connection status.""" @@ -71,77 +84,16 @@ class UnifiHub: """Event specific per UniFi entry to signal new options.""" return f"unifi-options-{self.config.entry.entry_id}" - @property - def signal_heartbeat_missed(self) -> str: - """Event specific per UniFi device tracker to signal new heartbeat missed.""" - return "unifi-heartbeat-missed" - async def initialize(self) -> None: """Set up a UniFi Network instance.""" await self.entity_loader.initialize() + self._entity_helper.initialize() assert self.config.entry.unique_id is not None self.is_admin = self.api.sites[self.config.entry.unique_id].role == "admin" self.config.entry.add_update_listener(self.async_config_entry_updated) - self._cancel_heartbeat_check = async_track_time_interval( - self.hass, self._async_check_for_stale, CHECK_HEARTBEAT_INTERVAL - ) - - @callback - def async_heartbeat( - self, unique_id: str, heartbeat_expire_time: datetime | None = None - ) -> None: - """Signal when a device has fresh home state.""" - if heartbeat_expire_time is not None: - self._heartbeat_time[unique_id] = heartbeat_expire_time - return - - if unique_id in self._heartbeat_time: - del self._heartbeat_time[unique_id] - - @callback - def _async_check_for_stale(self, *_: datetime) -> None: - """Check for any devices scheduled to be marked disconnected.""" - now = dt_util.utcnow() - - unique_ids_to_remove = [] - for unique_id, heartbeat_expire_time in self._heartbeat_time.items(): - if now > heartbeat_expire_time: - async_dispatcher_send( - self.hass, f"{self.signal_heartbeat_missed}_{unique_id}" - ) - unique_ids_to_remove.append(unique_id) - - 
for unique_id in unique_ids_to_remove: - del self._heartbeat_time[unique_id] - - @callback - def async_queue_poe_port_command( - self, device_id: str, port_idx: int, poe_mode: str - ) -> None: - """Queue commands to execute them together per device.""" - if self._cancel_poe_command: - self._cancel_poe_command() - self._cancel_poe_command = None - - device_queue = self.poe_command_queue.setdefault(device_id, {}) - device_queue[port_idx] = poe_mode - - async def async_execute_command(now: datetime) -> None: - """Execute previously queued commands.""" - queue = self.poe_command_queue.copy() - self.poe_command_queue.clear() - for device_id, device_commands in queue.items(): - device = self.api.devices[device_id] - commands = list(device_commands.items()) - await self.api.request( - DeviceSetPoePortModeRequest.create(device, targets=commands) - ) - - self._cancel_poe_command = async_call_later(self.hass, 5, async_execute_command) - @property def device_info(self) -> DeviceInfo: """UniFi Network device info.""" @@ -205,12 +157,6 @@ class UnifiHub: if not unload_ok: return False - if self._cancel_heartbeat_check: - self._cancel_heartbeat_check() - self._cancel_heartbeat_check = None - - if self._cancel_poe_command: - self._cancel_poe_command() - self._cancel_poe_command = None + self._entity_helper.reset() return True diff --git a/homeassistant/components/unifi/sensor.py b/homeassistant/components/unifi/sensor.py index cec87b36416..17b3cae93fd 100644 --- a/homeassistant/components/unifi/sensor.py +++ b/homeassistant/components/unifi/sensor.py @@ -460,7 +460,7 @@ class UnifiSensorEntity(UnifiEntity[HandlerT, ApiItemT], SensorEntity): if description.is_connected_fn is not None: # Send heartbeat if client is connected if description.is_connected_fn(self.hub, self._obj_id): - self.hub.async_heartbeat( + self.hub.update_heartbeat( self._attr_unique_id, dt_util.utcnow() + self.hub.config.option_detection_time, ) @@ -485,4 +485,4 @@ class UnifiSensorEntity(UnifiEntity[HandlerT, 
ApiItemT], SensorEntity): if self.entity_description.is_connected_fn is not None: # Remove heartbeat registration - self.hub.async_heartbeat(self._attr_unique_id) + self.hub.remove_heartbeat(self._attr_unique_id) diff --git a/homeassistant/components/unifi/switch.py b/homeassistant/components/unifi/switch.py index 6e073a655a5..45357dd67d2 100644 --- a/homeassistant/components/unifi/switch.py +++ b/homeassistant/components/unifi/switch.py @@ -147,7 +147,7 @@ async def async_poe_port_control_fn(hub: UnifiHub, obj_id: str, target: bool) -> port = hub.api.ports[obj_id] on_state = "auto" if port.raw["poe_caps"] != 8 else "passthrough" state = on_state if target else "off" - hub.async_queue_poe_port_command(mac, int(index), state) + hub.queue_poe_port_command(mac, int(index), state) async def async_port_forward_control_fn( From bb2bd086bc12785a98b643f1c2e4c1f9bccb635b Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Tue, 23 Apr 2024 21:52:55 +0200 Subject: [PATCH 317/426] Add missing adapter data to Bluetooth config entry titles (#115930) --- homeassistant/components/bluetooth/__init__.py | 5 +++++ .../components/bluetooth/config_flow.py | 10 +--------- homeassistant/components/bluetooth/util.py | 18 +++++++++++++++++- tests/components/bluetooth/test_config_flow.py | 8 ++------ tests/components/bluetooth/test_init.py | 13 +++++++++++++ 5 files changed, 38 insertions(+), 16 deletions(-) diff --git a/homeassistant/components/bluetooth/__init__.py b/homeassistant/components/bluetooth/__init__.py index 560fb0663a8..4768d58379a 100644 --- a/homeassistant/components/bluetooth/__init__.py +++ b/homeassistant/components/bluetooth/__init__.py @@ -86,6 +86,7 @@ from .manager import HomeAssistantBluetoothManager from .match import BluetoothCallbackMatcher, IntegrationMatcher from .models import BluetoothCallback, BluetoothChange from .storage import BluetoothStorage +from .util import adapter_title if TYPE_CHECKING: from homeassistant.helpers.typing import ConfigType @@ -332,6 
+333,10 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: ) from err adapters = await manager.async_get_bluetooth_adapters() details = adapters[adapter] + if entry.title == address: + hass.config_entries.async_update_entry( + entry, title=adapter_title(adapter, details) + ) slots: int = details.get(ADAPTER_CONNECTION_SLOTS) or DEFAULT_CONNECTION_SLOTS entry.async_on_unload(async_register_scanner(hass, scanner, connection_slots=slots)) await async_update_device(hass, entry, adapter, details) diff --git a/homeassistant/components/bluetooth/config_flow.py b/homeassistant/components/bluetooth/config_flow.py index 87038d48151..90d2624fb0f 100644 --- a/homeassistant/components/bluetooth/config_flow.py +++ b/homeassistant/components/bluetooth/config_flow.py @@ -12,7 +12,6 @@ from bluetooth_adapters import ( AdapterDetails, adapter_human_name, adapter_model, - adapter_unique_name, get_adapters, ) import voluptuous as vol @@ -28,6 +27,7 @@ from homeassistant.helpers.typing import DiscoveryInfoType from . 
import models from .const import CONF_ADAPTER, CONF_DETAILS, CONF_PASSIVE, DOMAIN +from .util import adapter_title OPTIONS_SCHEMA = vol.Schema( { @@ -47,14 +47,6 @@ def adapter_display_info(adapter: str, details: AdapterDetails) -> str: return f"{name} {manufacturer} {model}" -def adapter_title(adapter: str, details: AdapterDetails) -> str: - """Return the adapter title.""" - unique_name = adapter_unique_name(adapter, details[ADAPTER_ADDRESS]) - model = adapter_model(details) - manufacturer = details[ADAPTER_MANUFACTURER] or "Unknown" - return f"{manufacturer} {model} ({unique_name})" - - class BluetoothConfigFlow(ConfigFlow, domain=DOMAIN): """Config flow for Bluetooth.""" diff --git a/homeassistant/components/bluetooth/util.py b/homeassistant/components/bluetooth/util.py index 0faac9a8613..8c7ad13294a 100644 --- a/homeassistant/components/bluetooth/util.py +++ b/homeassistant/components/bluetooth/util.py @@ -2,7 +2,14 @@ from __future__ import annotations -from bluetooth_adapters import BluetoothAdapters +from bluetooth_adapters import ( + ADAPTER_ADDRESS, + ADAPTER_MANUFACTURER, + ADAPTER_PRODUCT, + AdapterDetails, + BluetoothAdapters, + adapter_unique_name, +) from bluetooth_data_tools import monotonic_time_coarse from homeassistant.core import callback @@ -69,3 +76,12 @@ def async_load_history_from_system( connectable_loaded_history[address] = service_info return all_loaded_history, connectable_loaded_history + + +@callback +def adapter_title(adapter: str, details: AdapterDetails) -> str: + """Return the adapter title.""" + unique_name = adapter_unique_name(adapter, details[ADAPTER_ADDRESS]) + model = details.get(ADAPTER_PRODUCT, "Unknown") + manufacturer = details[ADAPTER_MANUFACTURER] or "Unknown" + return f"{manufacturer} {model} ({unique_name})" diff --git a/tests/components/bluetooth/test_config_flow.py b/tests/components/bluetooth/test_config_flow.py index d044be76e6d..33474280ec4 100644 --- a/tests/components/bluetooth/test_config_flow.py +++ 
b/tests/components/bluetooth/test_config_flow.py @@ -99,9 +99,7 @@ async def test_async_step_user_linux_one_adapter( result["flow_id"], user_input={} ) assert result2["type"] is FlowResultType.CREATE_ENTRY - assert ( - result2["title"] == "ACME Bluetooth Adapter 5.0 (cc01:aa01) (00:00:00:00:00:01)" - ) + assert result2["title"] == "ACME Bluetooth Adapter 5.0 (00:00:00:00:00:01)" assert result2["data"] == {} assert len(mock_setup_entry.mock_calls) == 1 @@ -144,9 +142,7 @@ async def test_async_step_user_linux_two_adapters( result["flow_id"], user_input={CONF_ADAPTER: "hci1"} ) assert result2["type"] is FlowResultType.CREATE_ENTRY - assert ( - result2["title"] == "ACME Bluetooth Adapter 5.0 (cc01:aa01) (00:00:00:00:00:02)" - ) + assert result2["title"] == "ACME Bluetooth Adapter 5.0 (00:00:00:00:00:02)" assert result2["data"] == {} assert len(mock_setup_entry.mock_calls) == 1 diff --git a/tests/components/bluetooth/test_init.py b/tests/components/bluetooth/test_init.py index 82fa0341966..8c26745d541 100644 --- a/tests/components/bluetooth/test_init.py +++ b/tests/components/bluetooth/test_init.py @@ -3173,3 +3173,16 @@ async def test_haos_9_or_later( registry = async_get_issue_registry(hass) issue = registry.async_get_issue(DOMAIN, "haos_outdated") assert issue is None + + +async def test_title_updated_if_mac_address( + hass: HomeAssistant, mock_bleak_scanner_start: MagicMock, one_adapter: None +) -> None: + """Test the title is updated if it is the mac address.""" + entry = MockConfigEntry( + domain="bluetooth", title="00:00:00:00:00:01", unique_id="00:00:00:00:00:01" + ) + entry.add_to_hass(hass) + await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + assert entry.title == "ACME Bluetooth Adapter 5.0 (00:00:00:00:00:01)" From b5bd25d4fb1e1e7f68a5799b9d73c87c4c59361d Mon Sep 17 00:00:00 2001 From: Joost Lekkerkerker Date: Tue, 23 Apr 2024 21:54:13 +0200 Subject: [PATCH 318/426] Add entity translations to totalconnect (#115950) 
--- .../totalconnect/alarm_control_panel.py | 23 ++- .../components/totalconnect/binary_sensor.py | 9 +- .../components/totalconnect/entity.py | 10 +- .../components/totalconnect/strings.json | 7 + .../snapshots/test_alarm_control_panel.ambr | 12 +- .../snapshots/test_binary_sensor.ambr | 174 +++++++++--------- .../totalconnect/test_binary_sensor.py | 14 +- 7 files changed, 128 insertions(+), 121 deletions(-) diff --git a/homeassistant/components/totalconnect/alarm_control_panel.py b/homeassistant/components/totalconnect/alarm_control_panel.py index b0ad2f19069..1de9db1d319 100644 --- a/homeassistant/components/totalconnect/alarm_control_panel.py +++ b/homeassistant/components/totalconnect/alarm_control_panel.py @@ -92,17 +92,17 @@ class TotalConnectAlarm(TotalConnectLocationEntity, AlarmControlPanelEntity): Add _# for partition 2 and beyond. """ if partition_id == 1: - self._attr_name = self.device.name + self._attr_name = None self._attr_unique_id = str(location.location_id) else: - self._attr_name = f"{self.device.name} partition {partition_id}" + self._attr_translation_key = "partition" + self._attr_translation_placeholders = {"partition_id": str(partition_id)} self._attr_unique_id = f"{location.location_id}_{partition_id}" @property def state(self) -> str | None: """Return the state of the device.""" attr = { - "location_name": self.name, "location_id": self._location.location_id, "partition": self._partition_id, "ac_loss": self._location.ac_loss, @@ -112,6 +112,11 @@ class TotalConnectAlarm(TotalConnectLocationEntity, AlarmControlPanelEntity): "triggered_zone": None, } + if self._partition_id == 1: + attr["location_name"] = self.device.name + else: + attr["location_name"] = f"{self.device.name} partition {self._partition_id}" + state: str | None = None if self._partition.arming_state.is_disarmed(): state = STATE_ALARM_DISARMED @@ -152,7 +157,7 @@ class TotalConnectAlarm(TotalConnectLocationEntity, AlarmControlPanelEntity): ) from error except 
BadResultCodeError as error: raise HomeAssistantError( - f"TotalConnect failed to disarm {self.name}." + f"TotalConnect failed to disarm {self.device.name}." ) from error await self.coordinator.async_request_refresh() @@ -171,7 +176,7 @@ class TotalConnectAlarm(TotalConnectLocationEntity, AlarmControlPanelEntity): ) from error except BadResultCodeError as error: raise HomeAssistantError( - f"TotalConnect failed to arm home {self.name}." + f"TotalConnect failed to arm home {self.device.name}." ) from error await self.coordinator.async_request_refresh() @@ -190,7 +195,7 @@ class TotalConnectAlarm(TotalConnectLocationEntity, AlarmControlPanelEntity): ) from error except BadResultCodeError as error: raise HomeAssistantError( - f"TotalConnect failed to arm away {self.name}." + f"TotalConnect failed to arm away {self.device.name}." ) from error await self.coordinator.async_request_refresh() @@ -209,7 +214,7 @@ class TotalConnectAlarm(TotalConnectLocationEntity, AlarmControlPanelEntity): ) from error except BadResultCodeError as error: raise HomeAssistantError( - f"TotalConnect failed to arm night {self.name}." + f"TotalConnect failed to arm night {self.device.name}." ) from error await self.coordinator.async_request_refresh() @@ -228,7 +233,7 @@ class TotalConnectAlarm(TotalConnectLocationEntity, AlarmControlPanelEntity): ) from error except BadResultCodeError as error: raise HomeAssistantError( - f"TotalConnect failed to arm home instant {self.name}." + f"TotalConnect failed to arm home instant {self.device.name}." ) from error await self.coordinator.async_request_refresh() @@ -247,7 +252,7 @@ class TotalConnectAlarm(TotalConnectLocationEntity, AlarmControlPanelEntity): ) from error except BadResultCodeError as error: raise HomeAssistantError( - f"TotalConnect failed to arm away instant {self.name}." + f"TotalConnect failed to arm away instant {self.device.name}." 
) from error await self.coordinator.async_request_refresh() diff --git a/homeassistant/components/totalconnect/binary_sensor.py b/homeassistant/components/totalconnect/binary_sensor.py index 9ff25e07d03..85461805124 100644 --- a/homeassistant/components/totalconnect/binary_sensor.py +++ b/homeassistant/components/totalconnect/binary_sensor.py @@ -54,7 +54,7 @@ def get_security_zone_device_class(zone: TotalConnectZone) -> BinarySensorDevice SECURITY_BINARY_SENSOR = TotalConnectZoneBinarySensorEntityDescription( key=ZONE, - name="", + name=None, device_class_fn=get_security_zone_device_class, is_on_fn=lambda zone: zone.is_faulted() or zone.is_triggered(), ) @@ -64,14 +64,12 @@ NO_BUTTON_BINARY_SENSORS: tuple[TotalConnectZoneBinarySensorEntityDescription, . key=LOW_BATTERY, device_class=BinarySensorDeviceClass.BATTERY, entity_category=EntityCategory.DIAGNOSTIC, - name=" low battery", is_on_fn=lambda zone: zone.is_low_battery(), ), TotalConnectZoneBinarySensorEntityDescription( key=TAMPER, device_class=BinarySensorDeviceClass.TAMPER, entity_category=EntityCategory.DIAGNOSTIC, - name=f" {TAMPER}", is_on_fn=lambda zone: zone.is_tampered(), ), ) @@ -89,21 +87,18 @@ LOCATION_BINARY_SENSORS: tuple[TotalConnectAlarmBinarySensorEntityDescription, . 
key=LOW_BATTERY, device_class=BinarySensorDeviceClass.BATTERY, entity_category=EntityCategory.DIAGNOSTIC, - name=" low battery", is_on_fn=lambda location: location.is_low_battery(), ), TotalConnectAlarmBinarySensorEntityDescription( key=TAMPER, device_class=BinarySensorDeviceClass.TAMPER, entity_category=EntityCategory.DIAGNOSTIC, - name=f" {TAMPER}", is_on_fn=lambda location: location.is_cover_tampered(), ), TotalConnectAlarmBinarySensorEntityDescription( key=POWER, device_class=BinarySensorDeviceClass.POWER, entity_category=EntityCategory.DIAGNOSTIC, - name=f" {POWER}", is_on_fn=lambda location: location.is_ac_loss(), ), ) @@ -161,7 +156,6 @@ class TotalConnectZoneBinarySensor(TotalConnectZoneEntity, BinarySensorEntity): """Initialize the TotalConnect status.""" super().__init__(coordinator, zone, location_id, entity_description.key) self.entity_description = entity_description - self._attr_name = f"{zone.description}{entity_description.name}" self._attr_extra_state_attributes = { "zone_id": zone.zoneid, "location_id": location_id, @@ -195,7 +189,6 @@ class TotalConnectAlarmBinarySensor(TotalConnectLocationEntity, BinarySensorEnti """Initialize the TotalConnect alarm device binary sensor.""" super().__init__(coordinator, location) self.entity_description = entity_description - self._attr_name = f"{self.device.name}{entity_description.name}" self._attr_unique_id = f"{location.location_id}_{entity_description.key}" self._attr_extra_state_attributes = { "location_id": location.location_id, diff --git a/homeassistant/components/totalconnect/entity.py b/homeassistant/components/totalconnect/entity.py index deef0c5aa2a..a18ffc14df5 100644 --- a/homeassistant/components/totalconnect/entity.py +++ b/homeassistant/components/totalconnect/entity.py @@ -12,6 +12,8 @@ from . 
import DOMAIN, TotalConnectDataUpdateCoordinator class TotalConnectEntity(CoordinatorEntity[TotalConnectDataUpdateCoordinator]): """Represent a TotalConnect entity.""" + _attr_has_entity_name = True + class TotalConnectLocationEntity(TotalConnectEntity): """Represent a TotalConnect location.""" @@ -24,11 +26,11 @@ class TotalConnectLocationEntity(TotalConnectEntity): """Initialize the TotalConnect location.""" super().__init__(coordinator) self._location = location - self.device = location.devices[location.security_device_id] + self.device = device = location.devices[location.security_device_id] self._attr_device_info = DeviceInfo( - identifiers={(DOMAIN, self.device.serial_number)}, - name=self.device.name, - serial_number=self.device.serial_number, + identifiers={(DOMAIN, device.serial_number)}, + name=device.name, + serial_number=device.serial_number, ) diff --git a/homeassistant/components/totalconnect/strings.json b/homeassistant/components/totalconnect/strings.json index 922962c9866..03656b60084 100644 --- a/homeassistant/components/totalconnect/strings.json +++ b/homeassistant/components/totalconnect/strings.json @@ -49,5 +49,12 @@ "name": "Arm home instant", "description": "Arms Home with zero entry delay." 
} + }, + "entity": { + "alarm_control_panel": { + "partition": { + "name": "Partition {partition_id}" + } + } } } diff --git a/tests/components/totalconnect/snapshots/test_alarm_control_panel.ambr b/tests/components/totalconnect/snapshots/test_alarm_control_panel.ambr index 4dc6b576ba3..8261cd74859 100644 --- a/tests/components/totalconnect/snapshots/test_alarm_control_panel.ambr +++ b/tests/components/totalconnect/snapshots/test_alarm_control_panel.ambr @@ -12,7 +12,7 @@ 'domain': 'alarm_control_panel', 'entity_category': None, 'entity_id': 'alarm_control_panel.test', - 'has_entity_name': False, + 'has_entity_name': True, 'hidden_by': None, 'icon': None, 'id': , @@ -23,7 +23,7 @@ }), 'original_device_class': None, 'original_icon': None, - 'original_name': 'test', + 'original_name': None, 'platform': 'totalconnect', 'previous_unique_id': None, 'supported_features': , @@ -70,7 +70,7 @@ 'domain': 'alarm_control_panel', 'entity_category': None, 'entity_id': 'alarm_control_panel.test_partition_2', - 'has_entity_name': False, + 'has_entity_name': True, 'hidden_by': None, 'icon': None, 'id': , @@ -81,11 +81,11 @@ }), 'original_device_class': None, 'original_icon': None, - 'original_name': 'test partition 2', + 'original_name': 'Partition 2', 'platform': 'totalconnect', 'previous_unique_id': None, 'supported_features': , - 'translation_key': None, + 'translation_key': 'partition', 'unique_id': '123456_2', 'unit_of_measurement': None, }) @@ -98,7 +98,7 @@ 'code_arm_required': True, 'code_format': None, 'cover_tampered': False, - 'friendly_name': 'test partition 2', + 'friendly_name': 'test Partition 2', 'location_id': '123456', 'location_name': 'test partition 2', 'low_battery': False, diff --git a/tests/components/totalconnect/snapshots/test_binary_sensor.ambr b/tests/components/totalconnect/snapshots/test_binary_sensor.ambr index a79f609488d..54089c6f192 100644 --- a/tests/components/totalconnect/snapshots/test_binary_sensor.ambr +++ 
b/tests/components/totalconnect/snapshots/test_binary_sensor.ambr @@ -12,7 +12,7 @@ 'domain': 'binary_sensor', 'entity_category': None, 'entity_id': 'binary_sensor.fire', - 'has_entity_name': False, + 'has_entity_name': True, 'hidden_by': None, 'icon': None, 'id': , @@ -23,7 +23,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'Fire', + 'original_name': None, 'platform': 'totalconnect', 'previous_unique_id': None, 'supported_features': 0, @@ -49,7 +49,7 @@ 'state': 'off', }) # --- -# name: test_entity_registry[binary_sensor.fire_low_battery-entry] +# name: test_entity_registry[binary_sensor.fire_battery-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -61,8 +61,8 @@ 'disabled_by': None, 'domain': 'binary_sensor', 'entity_category': , - 'entity_id': 'binary_sensor.fire_low_battery', - 'has_entity_name': False, + 'entity_id': 'binary_sensor.fire_battery', + 'has_entity_name': True, 'hidden_by': None, 'icon': None, 'id': , @@ -73,7 +73,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'Fire low battery', + 'original_name': 'Battery', 'platform': 'totalconnect', 'previous_unique_id': None, 'supported_features': 0, @@ -82,17 +82,17 @@ 'unit_of_measurement': None, }) # --- -# name: test_entity_registry[binary_sensor.fire_low_battery-state] +# name: test_entity_registry[binary_sensor.fire_battery-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'battery', - 'friendly_name': 'Fire low battery', + 'friendly_name': 'Fire Battery', 'location_id': '123456', 'partition': '1', 'zone_id': '2', }), 'context': , - 'entity_id': 'binary_sensor.fire_low_battery', + 'entity_id': 'binary_sensor.fire_battery', 'last_changed': , 'last_reported': , 'last_updated': , @@ -112,7 +112,7 @@ 'domain': 'binary_sensor', 'entity_category': , 'entity_id': 'binary_sensor.fire_tamper', - 'has_entity_name': False, + 'has_entity_name': True, 'hidden_by': None, 'icon': None, 'id': , @@ -123,7 +123,7 @@ }), 
'original_device_class': , 'original_icon': None, - 'original_name': 'Fire tamper', + 'original_name': 'Tamper', 'platform': 'totalconnect', 'previous_unique_id': None, 'supported_features': 0, @@ -136,7 +136,7 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'tamper', - 'friendly_name': 'Fire tamper', + 'friendly_name': 'Fire Tamper', 'location_id': '123456', 'partition': '1', 'zone_id': '2', @@ -162,7 +162,7 @@ 'domain': 'binary_sensor', 'entity_category': None, 'entity_id': 'binary_sensor.gas', - 'has_entity_name': False, + 'has_entity_name': True, 'hidden_by': None, 'icon': None, 'id': , @@ -173,7 +173,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'Gas', + 'original_name': None, 'platform': 'totalconnect', 'previous_unique_id': None, 'supported_features': 0, @@ -199,7 +199,7 @@ 'state': 'off', }) # --- -# name: test_entity_registry[binary_sensor.gas_low_battery-entry] +# name: test_entity_registry[binary_sensor.gas_battery-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -211,8 +211,8 @@ 'disabled_by': None, 'domain': 'binary_sensor', 'entity_category': , - 'entity_id': 'binary_sensor.gas_low_battery', - 'has_entity_name': False, + 'entity_id': 'binary_sensor.gas_battery', + 'has_entity_name': True, 'hidden_by': None, 'icon': None, 'id': , @@ -223,7 +223,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'Gas low battery', + 'original_name': 'Battery', 'platform': 'totalconnect', 'previous_unique_id': None, 'supported_features': 0, @@ -232,17 +232,17 @@ 'unit_of_measurement': None, }) # --- -# name: test_entity_registry[binary_sensor.gas_low_battery-state] +# name: test_entity_registry[binary_sensor.gas_battery-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'battery', - 'friendly_name': 'Gas low battery', + 'friendly_name': 'Gas Battery', 'location_id': '123456', 'partition': '1', 'zone_id': '3', }), 'context': , - 'entity_id': 
'binary_sensor.gas_low_battery', + 'entity_id': 'binary_sensor.gas_battery', 'last_changed': , 'last_reported': , 'last_updated': , @@ -262,7 +262,7 @@ 'domain': 'binary_sensor', 'entity_category': , 'entity_id': 'binary_sensor.gas_tamper', - 'has_entity_name': False, + 'has_entity_name': True, 'hidden_by': None, 'icon': None, 'id': , @@ -273,7 +273,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'Gas tamper', + 'original_name': 'Tamper', 'platform': 'totalconnect', 'previous_unique_id': None, 'supported_features': 0, @@ -286,7 +286,7 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'tamper', - 'friendly_name': 'Gas tamper', + 'friendly_name': 'Gas Tamper', 'location_id': '123456', 'partition': '1', 'zone_id': '3', @@ -312,7 +312,7 @@ 'domain': 'binary_sensor', 'entity_category': None, 'entity_id': 'binary_sensor.medical', - 'has_entity_name': False, + 'has_entity_name': True, 'hidden_by': None, 'icon': None, 'id': , @@ -323,7 +323,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'Medical', + 'original_name': None, 'platform': 'totalconnect', 'previous_unique_id': None, 'supported_features': 0, @@ -362,7 +362,7 @@ 'domain': 'binary_sensor', 'entity_category': None, 'entity_id': 'binary_sensor.motion', - 'has_entity_name': False, + 'has_entity_name': True, 'hidden_by': None, 'icon': None, 'id': , @@ -373,7 +373,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'Motion', + 'original_name': None, 'platform': 'totalconnect', 'previous_unique_id': None, 'supported_features': 0, @@ -399,7 +399,7 @@ 'state': 'off', }) # --- -# name: test_entity_registry[binary_sensor.motion_low_battery-entry] +# name: test_entity_registry[binary_sensor.motion_battery-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -411,8 +411,8 @@ 'disabled_by': None, 'domain': 'binary_sensor', 'entity_category': , - 'entity_id': 'binary_sensor.motion_low_battery', - 'has_entity_name': False, 
+ 'entity_id': 'binary_sensor.motion_battery', + 'has_entity_name': True, 'hidden_by': None, 'icon': None, 'id': , @@ -423,7 +423,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'Motion low battery', + 'original_name': 'Battery', 'platform': 'totalconnect', 'previous_unique_id': None, 'supported_features': 0, @@ -432,17 +432,17 @@ 'unit_of_measurement': None, }) # --- -# name: test_entity_registry[binary_sensor.motion_low_battery-state] +# name: test_entity_registry[binary_sensor.motion_battery-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'battery', - 'friendly_name': 'Motion low battery', + 'friendly_name': 'Motion Battery', 'location_id': '123456', 'partition': '1', 'zone_id': '4', }), 'context': , - 'entity_id': 'binary_sensor.motion_low_battery', + 'entity_id': 'binary_sensor.motion_battery', 'last_changed': , 'last_reported': , 'last_updated': , @@ -462,7 +462,7 @@ 'domain': 'binary_sensor', 'entity_category': , 'entity_id': 'binary_sensor.motion_tamper', - 'has_entity_name': False, + 'has_entity_name': True, 'hidden_by': None, 'icon': None, 'id': , @@ -473,7 +473,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'Motion tamper', + 'original_name': 'Tamper', 'platform': 'totalconnect', 'previous_unique_id': None, 'supported_features': 0, @@ -486,7 +486,7 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'tamper', - 'friendly_name': 'Motion tamper', + 'friendly_name': 'Motion Tamper', 'location_id': '123456', 'partition': '1', 'zone_id': '4', @@ -512,7 +512,7 @@ 'domain': 'binary_sensor', 'entity_category': None, 'entity_id': 'binary_sensor.security', - 'has_entity_name': False, + 'has_entity_name': True, 'hidden_by': None, 'icon': None, 'id': , @@ -523,7 +523,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'Security', + 'original_name': None, 'platform': 'totalconnect', 'previous_unique_id': None, 'supported_features': 0, @@ -549,7 
+549,7 @@ 'state': 'on', }) # --- -# name: test_entity_registry[binary_sensor.security_low_battery-entry] +# name: test_entity_registry[binary_sensor.security_battery-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -561,8 +561,8 @@ 'disabled_by': None, 'domain': 'binary_sensor', 'entity_category': , - 'entity_id': 'binary_sensor.security_low_battery', - 'has_entity_name': False, + 'entity_id': 'binary_sensor.security_battery', + 'has_entity_name': True, 'hidden_by': None, 'icon': None, 'id': , @@ -573,7 +573,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'Security low battery', + 'original_name': 'Battery', 'platform': 'totalconnect', 'previous_unique_id': None, 'supported_features': 0, @@ -582,17 +582,17 @@ 'unit_of_measurement': None, }) # --- -# name: test_entity_registry[binary_sensor.security_low_battery-state] +# name: test_entity_registry[binary_sensor.security_battery-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'battery', - 'friendly_name': 'Security low battery', + 'friendly_name': 'Security Battery', 'location_id': '123456', 'partition': '1', 'zone_id': '1', }), 'context': , - 'entity_id': 'binary_sensor.security_low_battery', + 'entity_id': 'binary_sensor.security_battery', 'last_changed': , 'last_reported': , 'last_updated': , @@ -612,7 +612,7 @@ 'domain': 'binary_sensor', 'entity_category': , 'entity_id': 'binary_sensor.security_tamper', - 'has_entity_name': False, + 'has_entity_name': True, 'hidden_by': None, 'icon': None, 'id': , @@ -623,7 +623,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'Security tamper', + 'original_name': 'Tamper', 'platform': 'totalconnect', 'previous_unique_id': None, 'supported_features': 0, @@ -636,7 +636,7 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'tamper', - 'friendly_name': 'Security tamper', + 'friendly_name': 'Security Tamper', 'location_id': '123456', 'partition': '1', 'zone_id': '1', @@ -662,7 
+662,7 @@ 'domain': 'binary_sensor', 'entity_category': None, 'entity_id': 'binary_sensor.temperature', - 'has_entity_name': False, + 'has_entity_name': True, 'hidden_by': None, 'icon': None, 'id': , @@ -673,7 +673,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'Temperature', + 'original_name': None, 'platform': 'totalconnect', 'previous_unique_id': None, 'supported_features': 0, @@ -699,7 +699,7 @@ 'state': 'off', }) # --- -# name: test_entity_registry[binary_sensor.temperature_low_battery-entry] +# name: test_entity_registry[binary_sensor.temperature_battery-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -711,8 +711,8 @@ 'disabled_by': None, 'domain': 'binary_sensor', 'entity_category': , - 'entity_id': 'binary_sensor.temperature_low_battery', - 'has_entity_name': False, + 'entity_id': 'binary_sensor.temperature_battery', + 'has_entity_name': True, 'hidden_by': None, 'icon': None, 'id': , @@ -723,7 +723,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'Temperature low battery', + 'original_name': 'Battery', 'platform': 'totalconnect', 'previous_unique_id': None, 'supported_features': 0, @@ -732,17 +732,17 @@ 'unit_of_measurement': None, }) # --- -# name: test_entity_registry[binary_sensor.temperature_low_battery-state] +# name: test_entity_registry[binary_sensor.temperature_battery-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'battery', - 'friendly_name': 'Temperature low battery', + 'friendly_name': 'Temperature Battery', 'location_id': '123456', 'partition': '1', 'zone_id': 7, }), 'context': , - 'entity_id': 'binary_sensor.temperature_low_battery', + 'entity_id': 'binary_sensor.temperature_battery', 'last_changed': , 'last_reported': , 'last_updated': , @@ -762,7 +762,7 @@ 'domain': 'binary_sensor', 'entity_category': , 'entity_id': 'binary_sensor.temperature_tamper', - 'has_entity_name': False, + 'has_entity_name': True, 'hidden_by': None, 'icon': None, 'id': , @@ 
-773,7 +773,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'Temperature tamper', + 'original_name': 'Tamper', 'platform': 'totalconnect', 'previous_unique_id': None, 'supported_features': 0, @@ -786,7 +786,7 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'tamper', - 'friendly_name': 'Temperature tamper', + 'friendly_name': 'Temperature Tamper', 'location_id': '123456', 'partition': '1', 'zone_id': 7, @@ -799,7 +799,7 @@ 'state': 'off', }) # --- -# name: test_entity_registry[binary_sensor.test_low_battery-entry] +# name: test_entity_registry[binary_sensor.test_battery-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -811,8 +811,8 @@ 'disabled_by': None, 'domain': 'binary_sensor', 'entity_category': , - 'entity_id': 'binary_sensor.test_low_battery', - 'has_entity_name': False, + 'entity_id': 'binary_sensor.test_battery', + 'has_entity_name': True, 'hidden_by': None, 'icon': None, 'id': , @@ -823,7 +823,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'test low battery', + 'original_name': 'Battery', 'platform': 'totalconnect', 'previous_unique_id': None, 'supported_features': 0, @@ -832,15 +832,15 @@ 'unit_of_measurement': None, }) # --- -# name: test_entity_registry[binary_sensor.test_low_battery-state] +# name: test_entity_registry[binary_sensor.test_battery-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'battery', - 'friendly_name': 'test low battery', + 'friendly_name': 'test Battery', 'location_id': '123456', }), 'context': , - 'entity_id': 'binary_sensor.test_low_battery', + 'entity_id': 'binary_sensor.test_battery', 'last_changed': , 'last_reported': , 'last_updated': , @@ -860,7 +860,7 @@ 'domain': 'binary_sensor', 'entity_category': , 'entity_id': 'binary_sensor.test_power', - 'has_entity_name': False, + 'has_entity_name': True, 'hidden_by': None, 'icon': None, 'id': , @@ -871,7 +871,7 @@ }), 'original_device_class': , 'original_icon': None, - 
'original_name': 'test power', + 'original_name': 'Power', 'platform': 'totalconnect', 'previous_unique_id': None, 'supported_features': 0, @@ -884,7 +884,7 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power', - 'friendly_name': 'test power', + 'friendly_name': 'test Power', 'location_id': '123456', }), 'context': , @@ -908,7 +908,7 @@ 'domain': 'binary_sensor', 'entity_category': , 'entity_id': 'binary_sensor.test_tamper', - 'has_entity_name': False, + 'has_entity_name': True, 'hidden_by': None, 'icon': None, 'id': , @@ -919,7 +919,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'test tamper', + 'original_name': 'Tamper', 'platform': 'totalconnect', 'previous_unique_id': None, 'supported_features': 0, @@ -932,7 +932,7 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'tamper', - 'friendly_name': 'test tamper', + 'friendly_name': 'test Tamper', 'location_id': '123456', }), 'context': , @@ -956,7 +956,7 @@ 'domain': 'binary_sensor', 'entity_category': None, 'entity_id': 'binary_sensor.unknown', - 'has_entity_name': False, + 'has_entity_name': True, 'hidden_by': None, 'icon': None, 'id': , @@ -967,7 +967,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'Unknown', + 'original_name': None, 'platform': 'totalconnect', 'previous_unique_id': None, 'supported_features': 0, @@ -993,7 +993,7 @@ 'state': 'off', }) # --- -# name: test_entity_registry[binary_sensor.unknown_low_battery-entry] +# name: test_entity_registry[binary_sensor.unknown_battery-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -1005,8 +1005,8 @@ 'disabled_by': None, 'domain': 'binary_sensor', 'entity_category': , - 'entity_id': 'binary_sensor.unknown_low_battery', - 'has_entity_name': False, + 'entity_id': 'binary_sensor.unknown_battery', + 'has_entity_name': True, 'hidden_by': None, 'icon': None, 'id': , @@ -1017,7 +1017,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 
'Unknown low battery', + 'original_name': 'Battery', 'platform': 'totalconnect', 'previous_unique_id': None, 'supported_features': 0, @@ -1026,17 +1026,17 @@ 'unit_of_measurement': None, }) # --- -# name: test_entity_registry[binary_sensor.unknown_low_battery-state] +# name: test_entity_registry[binary_sensor.unknown_battery-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'battery', - 'friendly_name': 'Unknown low battery', + 'friendly_name': 'Unknown Battery', 'location_id': '123456', 'partition': '1', 'zone_id': '6', }), 'context': , - 'entity_id': 'binary_sensor.unknown_low_battery', + 'entity_id': 'binary_sensor.unknown_battery', 'last_changed': , 'last_reported': , 'last_updated': , @@ -1056,7 +1056,7 @@ 'domain': 'binary_sensor', 'entity_category': , 'entity_id': 'binary_sensor.unknown_tamper', - 'has_entity_name': False, + 'has_entity_name': True, 'hidden_by': None, 'icon': None, 'id': , @@ -1067,7 +1067,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'Unknown tamper', + 'original_name': 'Tamper', 'platform': 'totalconnect', 'previous_unique_id': None, 'supported_features': 0, @@ -1080,7 +1080,7 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'tamper', - 'friendly_name': 'Unknown tamper', + 'friendly_name': 'Unknown Tamper', 'location_id': '123456', 'partition': '1', 'zone_id': '6', diff --git a/tests/components/totalconnect/test_binary_sensor.py b/tests/components/totalconnect/test_binary_sensor.py index 1a8a65391f5..dc433129ac8 100644 --- a/tests/components/totalconnect/test_binary_sensor.py +++ b/tests/components/totalconnect/test_binary_sensor.py @@ -17,9 +17,9 @@ from .common import RESPONSE_DISARMED, ZONE_NORMAL, setup_platform from tests.common import snapshot_platform ZONE_ENTITY_ID = "binary_sensor.security" -ZONE_LOW_BATTERY_ID = "binary_sensor.security_low_battery" +ZONE_LOW_BATTERY_ID = "binary_sensor.security_battery" ZONE_TAMPER_ID = "binary_sensor.security_tamper" 
-PANEL_BATTERY_ID = "binary_sensor.test_low_battery" +PANEL_BATTERY_ID = "binary_sensor.test_battery" PANEL_TAMPER_ID = "binary_sensor.test_tamper" PANEL_POWER_ID = "binary_sensor.test_power" @@ -49,7 +49,7 @@ async def test_state_and_attributes(hass: HomeAssistant) -> None: ) assert state.attributes.get("device_class") == BinarySensorDeviceClass.DOOR - state = hass.states.get(f"{ZONE_ENTITY_ID}_low_battery") + state = hass.states.get(f"{ZONE_ENTITY_ID}_battery") assert state.state == STATE_OFF state = hass.states.get(f"{ZONE_ENTITY_ID}_tamper") assert state.state == STATE_OFF @@ -58,7 +58,7 @@ async def test_state_and_attributes(hass: HomeAssistant) -> None: state = hass.states.get("binary_sensor.fire") assert state.state == STATE_OFF assert state.attributes.get("device_class") == BinarySensorDeviceClass.SMOKE - state = hass.states.get("binary_sensor.fire_low_battery") + state = hass.states.get("binary_sensor.fire_battery") assert state.state == STATE_ON state = hass.states.get("binary_sensor.fire_tamper") assert state.state == STATE_OFF @@ -67,7 +67,7 @@ async def test_state_and_attributes(hass: HomeAssistant) -> None: state = hass.states.get("binary_sensor.gas") assert state.state == STATE_OFF assert state.attributes.get("device_class") == BinarySensorDeviceClass.GAS - state = hass.states.get("binary_sensor.gas_low_battery") + state = hass.states.get("binary_sensor.gas_battery") assert state.state == STATE_OFF state = hass.states.get("binary_sensor.gas_tamper") assert state.state == STATE_ON @@ -76,7 +76,7 @@ async def test_state_and_attributes(hass: HomeAssistant) -> None: state = hass.states.get("binary_sensor.unknown") assert state.state == STATE_OFF assert state.attributes.get("device_class") == BinarySensorDeviceClass.DOOR - state = hass.states.get("binary_sensor.unknown_low_battery") + state = hass.states.get("binary_sensor.unknown_battery") assert state.state == STATE_OFF state = hass.states.get("binary_sensor.unknown_tamper") assert state.state == 
STATE_OFF @@ -85,7 +85,7 @@ async def test_state_and_attributes(hass: HomeAssistant) -> None: state = hass.states.get("binary_sensor.temperature") assert state.state == STATE_OFF assert state.attributes.get("device_class") == BinarySensorDeviceClass.PROBLEM - state = hass.states.get("binary_sensor.temperature_low_battery") + state = hass.states.get("binary_sensor.temperature_battery") assert state.state == STATE_OFF state = hass.states.get("binary_sensor.temperature_tamper") assert state.state == STATE_OFF From d08bb96d00f0377180e09152f3d30c95f5cbf7c2 Mon Sep 17 00:00:00 2001 From: Jan Bouwhuis Date: Tue, 23 Apr 2024 21:59:03 +0200 Subject: [PATCH 319/426] Deprecate Unify Circuit integration (#115528) Co-authored-by: TheJulianJES --- homeassistant/components/circuit/__init__.py | 12 ++++++++++++ homeassistant/components/circuit/strings.json | 8 ++++++++ 2 files changed, 20 insertions(+) create mode 100644 homeassistant/components/circuit/strings.json diff --git a/homeassistant/components/circuit/__init__.py b/homeassistant/components/circuit/__init__.py index f71babad3d5..7e7d0eda76e 100644 --- a/homeassistant/components/circuit/__init__.py +++ b/homeassistant/components/circuit/__init__.py @@ -5,6 +5,7 @@ import voluptuous as vol from homeassistant.const import CONF_NAME, CONF_URL, Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import config_validation as cv, discovery +import homeassistant.helpers.issue_registry as ir from homeassistant.helpers.typing import ConfigType DOMAIN = "circuit" @@ -26,6 +27,17 @@ CONFIG_SCHEMA = vol.Schema( async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Set up the Unify Circuit component.""" + ir.async_create_issue( + hass, + DOMAIN, + "service_removal", + breaks_in_ha_version="2024.7.0", + is_fixable=False, + is_persistent=True, + severity=ir.IssueSeverity.WARNING, + translation_key="service_removal", + translation_placeholders={"integration": "Unify Circuit", "domain": 
DOMAIN}, + ) webhooks = config[DOMAIN][CONF_WEBHOOK] for webhook_conf in webhooks: diff --git a/homeassistant/components/circuit/strings.json b/homeassistant/components/circuit/strings.json new file mode 100644 index 00000000000..b9cb852d5b9 --- /dev/null +++ b/homeassistant/components/circuit/strings.json @@ -0,0 +1,8 @@ +{ + "issues": { + "service_removal": { + "title": "The {integration} integration is being removed", + "description": "The {integration} integration will be removed, as the service is no longer maintained.\n\n\n\nRemove the `{domain}` configuration from your configuration.yaml file and restart Home Assistant to fix this issue." + } + } +} From fd08b7281ec06ceb1d48f4916601c4d30de78ef0 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Tue, 23 Apr 2024 22:07:16 +0200 Subject: [PATCH 320/426] Convert solaredge to asyncio with aiosolaredge (#115599) --- CODEOWNERS | 4 +-- .../components/solaredge/__init__.py | 14 ++++---- .../components/solaredge/config_flow.py | 19 +++++----- .../components/solaredge/coordinator.py | 35 ++++++++----------- .../components/solaredge/manifest.json | 6 ++-- homeassistant/components/solaredge/sensor.py | 6 ++-- requirements_all.txt | 6 ++-- requirements_test_all.txt | 6 ++-- .../components/solaredge/test_config_flow.py | 15 ++++---- .../components/solaredge/test_coordinator.py | 18 +++++----- 10 files changed, 65 insertions(+), 64 deletions(-) diff --git a/CODEOWNERS b/CODEOWNERS index 5dcf4b3df81..c8a391fd7dc 100644 --- a/CODEOWNERS +++ b/CODEOWNERS @@ -1286,8 +1286,8 @@ build.json @home-assistant/supervisor /tests/components/snmp/ @nmaggioni /homeassistant/components/snooz/ @AustinBrunkhorst /tests/components/snooz/ @AustinBrunkhorst -/homeassistant/components/solaredge/ @frenck -/tests/components/solaredge/ @frenck +/homeassistant/components/solaredge/ @frenck @bdraco +/tests/components/solaredge/ @frenck @bdraco /homeassistant/components/solaredge_local/ @drobtravels @scheric /homeassistant/components/solarlog/ 
@Ernst79 /tests/components/solarlog/ @Ernst79 diff --git a/homeassistant/components/solaredge/__init__.py b/homeassistant/components/solaredge/__init__.py index 69e02c1875c..64f76372e91 100644 --- a/homeassistant/components/solaredge/__init__.py +++ b/homeassistant/components/solaredge/__init__.py @@ -4,13 +4,14 @@ from __future__ import annotations import socket -from requests.exceptions import ConnectTimeout, HTTPError -from solaredge import Solaredge +from aiohttp import ClientError +from aiosolaredge import SolarEdge from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_API_KEY, Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady +from homeassistant.helpers.aiohttp_client import async_get_clientsession import homeassistant.helpers.config_validation as cv from .const import CONF_SITE_ID, DATA_API_CLIENT, DOMAIN, LOGGER @@ -22,13 +23,12 @@ PLATFORMS = [Platform.SENSOR] async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Set up SolarEdge from a config entry.""" - api = Solaredge(entry.data[CONF_API_KEY]) + session = async_get_clientsession(hass) + api = SolarEdge(entry.data[CONF_API_KEY], session) try: - response = await hass.async_add_executor_job( - api.get_details, entry.data[CONF_SITE_ID] - ) - except (ConnectTimeout, HTTPError, socket.gaierror) as ex: + response = await api.get_details(entry.data[CONF_SITE_ID]) + except (TimeoutError, ClientError, socket.gaierror) as ex: LOGGER.error("Could not retrieve details from SolarEdge API") raise ConfigEntryNotReady from ex diff --git a/homeassistant/components/solaredge/config_flow.py b/homeassistant/components/solaredge/config_flow.py index b75af866549..6235e22400f 100644 --- a/homeassistant/components/solaredge/config_flow.py +++ b/homeassistant/components/solaredge/config_flow.py @@ -2,15 +2,17 @@ from __future__ import annotations +import socket from typing import Any -from 
requests.exceptions import ConnectTimeout, HTTPError -import solaredge +from aiohttp import ClientError +import aiosolaredge import voluptuous as vol from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_API_KEY, CONF_NAME from homeassistant.core import callback +from homeassistant.helpers.aiohttp_client import async_get_clientsession from homeassistant.util import slugify from .const import CONF_SITE_ID, DEFAULT_NAME, DOMAIN @@ -38,15 +40,16 @@ class SolarEdgeConfigFlow(ConfigFlow, domain=DOMAIN): """Return True if site_id exists in configuration.""" return site_id in self._async_current_site_ids() - def _check_site(self, site_id: str, api_key: str) -> bool: + async def _async_check_site(self, site_id: str, api_key: str) -> bool: """Check if we can connect to the soleredge api service.""" - api = solaredge.Solaredge(api_key) + session = async_get_clientsession(self.hass) + api = aiosolaredge.SolarEdge(api_key, session) try: - response = api.get_details(site_id) + response = await api.get_details(site_id) if response["details"]["status"].lower() != "active": self._errors[CONF_SITE_ID] = "site_not_active" return False - except (ConnectTimeout, HTTPError): + except (TimeoutError, ClientError, socket.gaierror): self._errors[CONF_SITE_ID] = "could_not_connect" return False except KeyError: @@ -66,9 +69,7 @@ class SolarEdgeConfigFlow(ConfigFlow, domain=DOMAIN): else: site = user_input[CONF_SITE_ID] api = user_input[CONF_API_KEY] - can_connect = await self.hass.async_add_executor_job( - self._check_site, site, api - ) + can_connect = await self._async_check_site(site, api) if can_connect: return self.async_create_entry( title=name, data={CONF_SITE_ID: site, CONF_API_KEY: api} diff --git a/homeassistant/components/solaredge/coordinator.py b/homeassistant/components/solaredge/coordinator.py index d2da99820d7..0c264c1c514 100644 --- a/homeassistant/components/solaredge/coordinator.py +++ 
b/homeassistant/components/solaredge/coordinator.py @@ -6,7 +6,7 @@ from abc import ABC, abstractmethod from datetime import date, datetime, timedelta from typing import Any -from solaredge import Solaredge +from aiosolaredge import SolarEdge from stringcase import snakecase from homeassistant.core import HomeAssistant, callback @@ -27,7 +27,7 @@ class SolarEdgeDataService(ABC): coordinator: DataUpdateCoordinator[None] - def __init__(self, hass: HomeAssistant, api: Solaredge, site_id: str) -> None: + def __init__(self, hass: HomeAssistant, api: SolarEdge, site_id: str) -> None: """Initialize the data object.""" self.api = api self.site_id = site_id @@ -54,12 +54,8 @@ class SolarEdgeDataService(ABC): """Update interval.""" @abstractmethod - def update(self) -> None: - """Update data in executor.""" - async def async_update_data(self) -> None: """Update data.""" - await self.hass.async_add_executor_job(self.update) class SolarEdgeOverviewDataService(SolarEdgeDataService): @@ -70,10 +66,10 @@ class SolarEdgeOverviewDataService(SolarEdgeDataService): """Update interval.""" return OVERVIEW_UPDATE_DELAY - def update(self) -> None: + async def async_update_data(self) -> None: """Update the data from the SolarEdge Monitoring API.""" try: - data = self.api.get_overview(self.site_id) + data = await self.api.get_overview(self.site_id) overview = data["overview"] except KeyError as ex: raise UpdateFailed("Missing overview data, skipping update") from ex @@ -113,11 +109,11 @@ class SolarEdgeDetailsDataService(SolarEdgeDataService): """Update interval.""" return DETAILS_UPDATE_DELAY - def update(self) -> None: + async def async_update_data(self) -> None: """Update the data from the SolarEdge Monitoring API.""" try: - data = self.api.get_details(self.site_id) + data = await self.api.get_details(self.site_id) details = data["details"] except KeyError as ex: raise UpdateFailed("Missing details data, skipping update") from ex @@ -157,10 +153,10 @@ class 
SolarEdgeInventoryDataService(SolarEdgeDataService): """Update interval.""" return INVENTORY_UPDATE_DELAY - def update(self) -> None: + async def async_update_data(self) -> None: """Update the data from the SolarEdge Monitoring API.""" try: - data = self.api.get_inventory(self.site_id) + data = await self.api.get_inventory(self.site_id) inventory = data["Inventory"] except KeyError as ex: raise UpdateFailed("Missing inventory data, skipping update") from ex @@ -178,7 +174,7 @@ class SolarEdgeInventoryDataService(SolarEdgeDataService): class SolarEdgeEnergyDetailsService(SolarEdgeDataService): """Get and update the latest power flow data.""" - def __init__(self, hass: HomeAssistant, api: Solaredge, site_id: str) -> None: + def __init__(self, hass: HomeAssistant, api: SolarEdge, site_id: str) -> None: """Initialize the power flow data service.""" super().__init__(hass, api, site_id) @@ -189,17 +185,16 @@ class SolarEdgeEnergyDetailsService(SolarEdgeDataService): """Update interval.""" return ENERGY_DETAILS_DELAY - def update(self) -> None: + async def async_update_data(self) -> None: """Update the data from the SolarEdge Monitoring API.""" try: now = datetime.now() today = date.today() midnight = datetime.combine(today, datetime.min.time()) - data = self.api.get_energy_details( + data = await self.api.get_energy_details( self.site_id, midnight, - now.strftime("%Y-%m-%d %H:%M:%S"), - meters=None, + now, time_unit="DAY", ) energy_details = data["energyDetails"] @@ -239,7 +234,7 @@ class SolarEdgeEnergyDetailsService(SolarEdgeDataService): class SolarEdgePowerFlowDataService(SolarEdgeDataService): """Get and update the latest power flow data.""" - def __init__(self, hass: HomeAssistant, api: Solaredge, site_id: str) -> None: + def __init__(self, hass: HomeAssistant, api: SolarEdge, site_id: str) -> None: """Initialize the power flow data service.""" super().__init__(hass, api, site_id) @@ -250,10 +245,10 @@ class SolarEdgePowerFlowDataService(SolarEdgeDataService): 
"""Update interval.""" return POWER_FLOW_UPDATE_DELAY - def update(self) -> None: + async def async_update_data(self) -> None: """Update the data from the SolarEdge Monitoring API.""" try: - data = self.api.get_current_power_flow(self.site_id) + data = await self.api.get_current_power_flow(self.site_id) power_flow = data["siteCurrentPowerFlow"] except KeyError as ex: raise UpdateFailed("Missing power flow data, skipping update") from ex diff --git a/homeassistant/components/solaredge/manifest.json b/homeassistant/components/solaredge/manifest.json index 22759b1be7c..02f96c0211f 100644 --- a/homeassistant/components/solaredge/manifest.json +++ b/homeassistant/components/solaredge/manifest.json @@ -1,7 +1,7 @@ { "domain": "solaredge", "name": "SolarEdge", - "codeowners": ["@frenck"], + "codeowners": ["@frenck", "@bdraco"], "config_flow": true, "dhcp": [ { @@ -12,6 +12,6 @@ "documentation": "https://www.home-assistant.io/integrations/solaredge", "integration_type": "device", "iot_class": "cloud_polling", - "loggers": ["solaredge"], - "requirements": ["solaredge==0.0.2", "stringcase==1.2.0"] + "loggers": ["aiosolaredge"], + "requirements": ["aiosolaredge==0.2.0", "stringcase==1.2.0"] } diff --git a/homeassistant/components/solaredge/sensor.py b/homeassistant/components/solaredge/sensor.py index 5ec65a3b9a5..b3345d5dc86 100644 --- a/homeassistant/components/solaredge/sensor.py +++ b/homeassistant/components/solaredge/sensor.py @@ -5,7 +5,7 @@ from __future__ import annotations from dataclasses import dataclass from typing import Any -from solaredge import Solaredge +from aiosolaredge import SolarEdge from homeassistant.components.sensor import ( SensorDeviceClass, @@ -205,7 +205,7 @@ async def async_setup_entry( ) -> None: """Add an solarEdge entry.""" # Add the needed sensors to hass - api: Solaredge = hass.data[DOMAIN][entry.entry_id][DATA_API_CLIENT] + api: SolarEdge = hass.data[DOMAIN][entry.entry_id][DATA_API_CLIENT] sensor_factory = SolarEdgeSensorFactory(hass, 
entry.data[CONF_SITE_ID], api) for service in sensor_factory.all_services: @@ -223,7 +223,7 @@ async def async_setup_entry( class SolarEdgeSensorFactory: """Factory which creates sensors based on the sensor_key.""" - def __init__(self, hass: HomeAssistant, site_id: str, api: Solaredge) -> None: + def __init__(self, hass: HomeAssistant, site_id: str, api: SolarEdge) -> None: """Initialize the factory.""" details = SolarEdgeDetailsDataService(hass, api, site_id) diff --git a/requirements_all.txt b/requirements_all.txt index f212a8675e8..d51dc0225ed 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -367,6 +367,9 @@ aioskybell==22.7.0 # homeassistant.components.slimproto aioslimproto==3.0.0 +# homeassistant.components.solaredge +aiosolaredge==0.2.0 + # homeassistant.components.steamist aiosteamist==0.3.2 @@ -2574,9 +2577,6 @@ soco==0.30.3 # homeassistant.components.solaredge_local solaredge-local==0.2.3 -# homeassistant.components.solaredge -solaredge==0.0.2 - # homeassistant.components.solax solax==3.1.0 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 8f318a24b5e..7b04e7cf037 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -340,6 +340,9 @@ aioskybell==22.7.0 # homeassistant.components.slimproto aioslimproto==3.0.0 +# homeassistant.components.solaredge +aiosolaredge==0.2.0 + # homeassistant.components.steamist aiosteamist==0.3.2 @@ -1990,9 +1993,6 @@ snapcast==2.3.6 # homeassistant.components.sonos soco==0.30.3 -# homeassistant.components.solaredge -solaredge==0.0.2 - # homeassistant.components.solax solax==3.1.0 diff --git a/tests/components/solaredge/test_config_flow.py b/tests/components/solaredge/test_config_flow.py index 9ff605a871d..759a4d6b421 100644 --- a/tests/components/solaredge/test_config_flow.py +++ b/tests/components/solaredge/test_config_flow.py @@ -1,9 +1,9 @@ """Tests for the SolarEdge config flow.""" -from unittest.mock import Mock, patch +from unittest.mock import AsyncMock, Mock, 
patch +from aiohttp import ClientError import pytest -from requests.exceptions import ConnectTimeout, HTTPError from homeassistant.components.solaredge.const import CONF_SITE_ID, DEFAULT_NAME, DOMAIN from homeassistant.config_entries import SOURCE_IGNORE, SOURCE_USER @@ -22,8 +22,11 @@ API_KEY = "a1b2c3d4e5f6g7h8" def mock_controller(): """Mock a successful Solaredge API.""" api = Mock() - api.get_details.return_value = {"details": {"status": "active"}} - with patch("solaredge.Solaredge", return_value=api): + api.get_details = AsyncMock(return_value={"details": {"status": "active"}}) + with patch( + "homeassistant.components.solaredge.config_flow.aiosolaredge.SolarEdge", + return_value=api, + ): yield api @@ -117,7 +120,7 @@ async def test_asserts(hass: HomeAssistant, test_api: Mock) -> None: assert result.get("errors") == {CONF_SITE_ID: "invalid_api_key"} # test with ConnectionTimeout - test_api.get_details.side_effect = ConnectTimeout() + test_api.get_details = AsyncMock(side_effect=TimeoutError()) result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_USER}, @@ -127,7 +130,7 @@ async def test_asserts(hass: HomeAssistant, test_api: Mock) -> None: assert result.get("errors") == {CONF_SITE_ID: "could_not_connect"} # test with HTTPError - test_api.get_details.side_effect = HTTPError() + test_api.get_details = AsyncMock(side_effect=ClientError()) result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_USER}, diff --git a/tests/components/solaredge/test_coordinator.py b/tests/components/solaredge/test_coordinator.py index b1496d18d93..7a6b3af1cde 100644 --- a/tests/components/solaredge/test_coordinator.py +++ b/tests/components/solaredge/test_coordinator.py @@ -1,6 +1,6 @@ """Tests for the SolarEdge coordinator services.""" -from unittest.mock import patch +from unittest.mock import AsyncMock, patch from freezegun.api import FrozenDateTimeFactory import pytest @@ -25,7 +25,7 @@ def 
enable_all_entities(entity_registry_enabled_by_default): """Make sure all entities are enabled.""" -@patch("homeassistant.components.solaredge.Solaredge") +@patch("homeassistant.components.solaredge.SolarEdge") async def test_solaredgeoverviewdataservice_energy_values_validity( mock_solaredge, hass: HomeAssistant, freezer: FrozenDateTimeFactory ) -> None: @@ -35,7 +35,9 @@ async def test_solaredgeoverviewdataservice_energy_values_validity( title=DEFAULT_NAME, data={CONF_NAME: DEFAULT_NAME, CONF_SITE_ID: SITE_ID, CONF_API_KEY: API_KEY}, ) - mock_solaredge().get_details.return_value = {"details": {"status": "active"}} + mock_solaredge().get_details = AsyncMock( + return_value={"details": {"status": "active"}} + ) mock_config_entry.add_to_hass(hass) await hass.config_entries.async_setup(mock_config_entry.entry_id) @@ -50,7 +52,7 @@ async def test_solaredgeoverviewdataservice_energy_values_validity( "currentPower": {"power": 0.0}, } } - mock_solaredge().get_overview.return_value = mock_overview_data + mock_solaredge().get_overview = AsyncMock(return_value=mock_overview_data) freezer.tick(OVERVIEW_UPDATE_DELAY) async_fire_time_changed(hass) await hass.async_block_till_done(wait_background_tasks=True) @@ -60,7 +62,7 @@ async def test_solaredgeoverviewdataservice_energy_values_validity( # Invalid energy values, lifeTimeData energy is lower than last year, month or day. 
mock_overview_data["overview"]["lifeTimeData"]["energy"] = 0 - mock_solaredge().get_overview.return_value = mock_overview_data + mock_solaredge().get_overview = AsyncMock(return_value=mock_overview_data) freezer.tick(OVERVIEW_UPDATE_DELAY) async_fire_time_changed(hass) await hass.async_block_till_done(wait_background_tasks=True) @@ -71,7 +73,7 @@ async def test_solaredgeoverviewdataservice_energy_values_validity( # New valid energy values update mock_overview_data["overview"]["lifeTimeData"]["energy"] = 100001 - mock_solaredge().get_overview.return_value = mock_overview_data + mock_solaredge().get_overview = AsyncMock(return_value=mock_overview_data) freezer.tick(OVERVIEW_UPDATE_DELAY) async_fire_time_changed(hass) await hass.async_block_till_done(wait_background_tasks=True) @@ -82,7 +84,7 @@ async def test_solaredgeoverviewdataservice_energy_values_validity( # Invalid energy values, lastYearData energy is lower than last month or day. mock_overview_data["overview"]["lastYearData"]["energy"] = 0 - mock_solaredge().get_overview.return_value = mock_overview_data + mock_solaredge().get_overview = AsyncMock(return_value=mock_overview_data) freezer.tick(OVERVIEW_UPDATE_DELAY) async_fire_time_changed(hass) await hass.async_block_till_done(wait_background_tasks=True) @@ -100,7 +102,7 @@ async def test_solaredgeoverviewdataservice_energy_values_validity( mock_overview_data["overview"]["lastYearData"]["energy"] = 0.0 mock_overview_data["overview"]["lastMonthData"]["energy"] = 0.0 mock_overview_data["overview"]["lastDayData"]["energy"] = 0.0 - mock_solaredge().get_overview.return_value = mock_overview_data + mock_solaredge().get_overview = AsyncMock(return_value=mock_overview_data) freezer.tick(OVERVIEW_UPDATE_DELAY) async_fire_time_changed(hass) await hass.async_block_till_done(wait_background_tasks=True) From a33aacfcaa109257ea5c83d39531a1efa9485e4a Mon Sep 17 00:00:00 2001 From: Nalin Mahajan Date: Tue, 23 Apr 2024 15:10:16 -0500 Subject: [PATCH 321/426] Add Retry for C4 
API due to flakiness (#113857) Co-authored-by: nalin29 --- homeassistant/components/control4/__init__.py | 26 ++++++++++++++----- homeassistant/components/control4/const.py | 2 ++ 2 files changed, 22 insertions(+), 6 deletions(-) diff --git a/homeassistant/components/control4/__init__.py b/homeassistant/components/control4/__init__.py index b8d195fcb05..4b24ac6bf77 100644 --- a/homeassistant/components/control4/__init__.py +++ b/homeassistant/components/control4/__init__.py @@ -30,6 +30,7 @@ from homeassistant.helpers.update_coordinator import ( ) from .const import ( + API_RETRY_TIMES, CONF_ACCOUNT, CONF_CONFIG_LISTENER, CONF_CONTROLLER_UNIQUE_ID, @@ -47,6 +48,18 @@ _LOGGER = logging.getLogger(__name__) PLATFORMS = [Platform.LIGHT, Platform.MEDIA_PLAYER] +async def call_c4_api_retry(func, *func_args): + """Call C4 API function and retry on failure.""" + for i in range(API_RETRY_TIMES): + try: + output = await func(*func_args) + return output + except client_exceptions.ClientError as exception: + _LOGGER.error("Error connecting to Control4 account API: %s", exception) + if i == API_RETRY_TIMES - 1: + raise ConfigEntryNotReady(exception) from exception + + async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Set up Control4 from a config entry.""" hass.data.setdefault(DOMAIN, {}) @@ -74,18 +87,19 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: controller_unique_id = config[CONF_CONTROLLER_UNIQUE_ID] entry_data[CONF_CONTROLLER_UNIQUE_ID] = controller_unique_id - director_token_dict = await account.getDirectorBearerToken(controller_unique_id) - director_session = aiohttp_client.async_get_clientsession(hass, verify_ssl=False) + director_token_dict = await call_c4_api_retry( + account.getDirectorBearerToken, controller_unique_id + ) + director_session = aiohttp_client.async_get_clientsession(hass, verify_ssl=False) director = C4Director( config[CONF_HOST], director_token_dict[CONF_TOKEN], director_session ) 
entry_data[CONF_DIRECTOR] = director - # Add Control4 controller to device registry - controller_href = (await account.getAccountControllers())["href"] - entry_data[CONF_DIRECTOR_SW_VERSION] = await account.getControllerOSVersion( - controller_href + controller_href = (await call_c4_api_retry(account.getAccountControllers))["href"] + entry_data[CONF_DIRECTOR_SW_VERSION] = await call_c4_api_retry( + account.getControllerOSVersion, controller_href ) _, model, mac_address = controller_unique_id.split("_", 3) diff --git a/homeassistant/components/control4/const.py b/homeassistant/components/control4/const.py index f8d939e1ac5..57074c00108 100644 --- a/homeassistant/components/control4/const.py +++ b/homeassistant/components/control4/const.py @@ -5,6 +5,8 @@ DOMAIN = "control4" DEFAULT_SCAN_INTERVAL = 5 MIN_SCAN_INTERVAL = 1 +API_RETRY_TIMES = 5 + CONF_ACCOUNT = "account" CONF_DIRECTOR = "director" CONF_DIRECTOR_SW_VERSION = "director_sw_version" From f249a9ba4bdf54f561108a58297b28ded1ce1b1c Mon Sep 17 00:00:00 2001 From: Brett Adams Date: Wed, 24 Apr 2024 06:11:41 +1000 Subject: [PATCH 322/426] Add API scope checks to Teslemetry (#113640) --- .../components/teslemetry/__init__.py | 8 +++-- .../components/teslemetry/climate.py | 23 +++++++++++++- homeassistant/components/teslemetry/entity.py | 7 ++++- homeassistant/components/teslemetry/models.py | 2 ++ tests/components/teslemetry/conftest.py | 18 ++++++++++- tests/components/teslemetry/const.py | 18 +++++++++++ tests/components/teslemetry/test_climate.py | 31 +++++++++++++++++-- 7 files changed, 99 insertions(+), 8 deletions(-) diff --git a/homeassistant/components/teslemetry/__init__.py b/homeassistant/components/teslemetry/__init__.py index 084d51ff31b..45fd1eee327 100644 --- a/homeassistant/components/teslemetry/__init__.py +++ b/homeassistant/components/teslemetry/__init__.py @@ -4,6 +4,7 @@ import asyncio from typing import Final from tesla_fleet_api import EnergySpecific, Teslemetry, VehicleSpecific +from 
tesla_fleet_api.const import Scope from tesla_fleet_api.exceptions import ( InvalidToken, SubscriptionRequired, @@ -37,6 +38,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: access_token=access_token, ) try: + scopes = (await teslemetry.metadata())["scopes"] products = (await teslemetry.products())["response"] except InvalidToken as e: raise ConfigEntryAuthFailed from e @@ -49,7 +51,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: vehicles: list[TeslemetryVehicleData] = [] energysites: list[TeslemetryEnergyData] = [] for product in products: - if "vin" in product: + if "vin" in product and Scope.VEHICLE_DEVICE_DATA in scopes: vin = product["vin"] api = VehicleSpecific(teslemetry.vehicle, vin) coordinator = TeslemetryVehicleDataCoordinator(hass, api) @@ -60,7 +62,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: vin=vin, ) ) - elif "energy_site_id" in product: + elif "energy_site_id" in product and Scope.ENERGY_DEVICE_DATA in scopes: site_id = product["energy_site_id"] api = EnergySpecific(teslemetry.energy, site_id) energysites.append( @@ -86,7 +88,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: # Setup Platforms hass.data.setdefault(DOMAIN, {})[entry.entry_id] = TeslemetryData( - vehicles, energysites + vehicles, energysites, scopes ) await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) diff --git a/homeassistant/components/teslemetry/climate.py b/homeassistant/components/teslemetry/climate.py index 0835785d194..4c1c05570ab 100644 --- a/homeassistant/components/teslemetry/climate.py +++ b/homeassistant/components/teslemetry/climate.py @@ -4,6 +4,8 @@ from __future__ import annotations from typing import Any +from tesla_fleet_api.const import Scope + from homeassistant.components.climate import ( ClimateEntity, ClimateEntityFeature, @@ -17,6 +19,7 @@ from homeassistant.helpers.entity_platform import 
AddEntitiesCallback from .const import DOMAIN, TeslemetryClimateSide from .context import handle_command from .entity import TeslemetryVehicleEntity +from .models import TeslemetryVehicleData async def async_setup_entry( @@ -26,7 +29,7 @@ async def async_setup_entry( data = hass.data[DOMAIN][entry.entry_id] async_add_entities( - TeslemetryClimateEntity(vehicle, TeslemetryClimateSide.DRIVER) + TeslemetryClimateEntity(vehicle, TeslemetryClimateSide.DRIVER, data.scopes) for vehicle in data.vehicles ) @@ -48,6 +51,22 @@ class TeslemetryClimateEntity(TeslemetryVehicleEntity, ClimateEntity): _attr_preset_modes = ["off", "keep", "dog", "camp"] _enable_turn_on_off_backwards_compatibility = False + def __init__( + self, + data: TeslemetryVehicleData, + side: TeslemetryClimateSide, + scopes: Scope, + ) -> None: + """Initialize the climate.""" + self.scoped = Scope.VEHICLE_CMDS in scopes + if not self.scoped: + self._attr_supported_features = ClimateEntityFeature(0) + + super().__init__( + data, + side, + ) + @property def hvac_mode(self) -> HVACMode | None: """Return hvac operation ie. 
heat, cool mode.""" @@ -82,6 +101,7 @@ class TeslemetryClimateEntity(TeslemetryVehicleEntity, ClimateEntity): async def async_turn_on(self) -> None: """Set the climate state to on.""" + self.raise_for_scope() with handle_command(): await self.wake_up_if_asleep() await self.api.auto_conditioning_start() @@ -89,6 +109,7 @@ class TeslemetryClimateEntity(TeslemetryVehicleEntity, ClimateEntity): async def async_turn_off(self) -> None: """Set the climate state to off.""" + self.raise_for_scope() with handle_command(): await self.wake_up_if_asleep() await self.api.auto_conditioning_stop() diff --git a/homeassistant/components/teslemetry/entity.py b/homeassistant/components/teslemetry/entity.py index eda3d26f341..d67a1bd1770 100644 --- a/homeassistant/components/teslemetry/entity.py +++ b/homeassistant/components/teslemetry/entity.py @@ -5,7 +5,7 @@ from typing import Any from tesla_fleet_api.exceptions import TeslaFleetError -from homeassistant.exceptions import HomeAssistantError +from homeassistant.exceptions import HomeAssistantError, ServiceValidationError from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.update_coordinator import CoordinatorEntity @@ -83,6 +83,11 @@ class TeslemetryVehicleEntity(CoordinatorEntity[TeslemetryVehicleDataCoordinator self.coordinator.data[key] = value self.async_write_ha_state() + def raise_for_scope(self): + """Raise an error if a scope is not available.""" + if not self.scoped: + raise ServiceValidationError("Missing required scope") + class TeslemetryEnergyEntity(CoordinatorEntity[TeslemetryEnergyDataCoordinator]): """Parent class for Teslemetry Energy Entities.""" diff --git a/homeassistant/components/teslemetry/models.py b/homeassistant/components/teslemetry/models.py index d6f15e2e932..615156e6fdc 100644 --- a/homeassistant/components/teslemetry/models.py +++ b/homeassistant/components/teslemetry/models.py @@ -6,6 +6,7 @@ import asyncio from dataclasses import dataclass from tesla_fleet_api 
import EnergySpecific, VehicleSpecific +from tesla_fleet_api.const import Scope from .coordinator import ( TeslemetryEnergyDataCoordinator, @@ -19,6 +20,7 @@ class TeslemetryData: vehicles: list[TeslemetryVehicleData] energysites: list[TeslemetryEnergyData] + scopes: list[Scope] @dataclass diff --git a/tests/components/teslemetry/conftest.py b/tests/components/teslemetry/conftest.py index f252787b37c..9040ec96a03 100644 --- a/tests/components/teslemetry/conftest.py +++ b/tests/components/teslemetry/conftest.py @@ -7,7 +7,23 @@ from unittest.mock import patch import pytest -from .const import LIVE_STATUS, PRODUCTS, RESPONSE_OK, VEHICLE_DATA, WAKE_UP_ONLINE +from .const import ( + LIVE_STATUS, + METADATA, + PRODUCTS, + RESPONSE_OK, + VEHICLE_DATA, + WAKE_UP_ONLINE, +) + + +@pytest.fixture(autouse=True) +def mock_metadata(): + """Mock Tesla Fleet Api metadata method.""" + with patch( + "homeassistant.components.teslemetry.Teslemetry.metadata", return_value=METADATA + ) as mock_products: + yield mock_products @pytest.fixture(autouse=True) diff --git a/tests/components/teslemetry/const.py b/tests/components/teslemetry/const.py index 776cc231a5c..96e9ead8912 100644 --- a/tests/components/teslemetry/const.py +++ b/tests/components/teslemetry/const.py @@ -16,3 +16,21 @@ VEHICLE_DATA_ALT = load_json_object_fixture("vehicle_data_alt.json", DOMAIN) LIVE_STATUS = load_json_object_fixture("live_status.json", DOMAIN) RESPONSE_OK = {"response": {}, "error": None} + +METADATA = { + "region": "NA", + "scopes": [ + "openid", + "offline_access", + "user_data", + "vehicle_device_data", + "vehicle_cmds", + "vehicle_charging_cmds", + "energy_device_data", + "energy_cmds", + ], +} +METADATA_NOSCOPE = { + "region": "NA", + "scopes": ["openid", "offline_access", "vehicle_device_data"], +} diff --git a/tests/components/teslemetry/test_climate.py b/tests/components/teslemetry/test_climate.py index e83e9d648cd..a05bc07b305 100644 --- a/tests/components/teslemetry/test_climate.py +++ 
b/tests/components/teslemetry/test_climate.py @@ -22,11 +22,11 @@ from homeassistant.components.climate import ( from homeassistant.components.teslemetry.coordinator import SYNC_INTERVAL from homeassistant.const import ATTR_ENTITY_ID, Platform from homeassistant.core import HomeAssistant -from homeassistant.exceptions import HomeAssistantError +from homeassistant.exceptions import HomeAssistantError, ServiceValidationError from homeassistant.helpers import entity_registry as er from . import assert_entities, setup_platform -from .const import WAKE_UP_ASLEEP, WAKE_UP_ONLINE +from .const import METADATA_NOSCOPE, WAKE_UP_ASLEEP, WAKE_UP_ONLINE from tests.common import async_fire_time_changed @@ -176,3 +176,30 @@ async def test_asleep_or_offline( ) await hass.async_block_till_done() mock_wake_up.assert_called_once() + + +async def test_climate_noscope( + hass: HomeAssistant, + mock_metadata, +) -> None: + """Test that climate commands are rejected when the required scope is missing.""" + mock_metadata.return_value = METADATA_NOSCOPE + + await setup_platform(hass, [Platform.CLIMATE]) + entity_id = "climate.test_climate" + + with pytest.raises(ServiceValidationError): + await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_SET_HVAC_MODE, + {ATTR_ENTITY_ID: [entity_id], ATTR_HVAC_MODE: HVACMode.HEAT_COOL}, + blocking=True, + ) + + with pytest.raises(HomeAssistantError): + await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_SET_TEMPERATURE, + {ATTR_ENTITY_ID: [entity_id], ATTR_TEMPERATURE: 20}, + blocking=True, + ) From 2c7a1ddb1d79973f6a0321615f04fe180ace39f5 Mon Sep 17 00:00:00 2001 From: Bouwe Westerdijk <11290930+bouwew@users.noreply.github.com> Date: Tue, 23 Apr 2024 22:13:09 +0200 Subject: [PATCH 323/426] Bump plugwise to v0.37.2 (#115989) --- homeassistant/components/plugwise/__init__.py | 10 ++++++++- .../components/plugwise/binary_sensor.py | 4 ++-- .../components/plugwise/manifest.json | 2 +- .../components/plugwise/strings.json | 2 +- requirements_all.txt | 2 +- 
requirements_test_all.txt | 2 +- .../anna_heatpump_heating/all_data.json | 2 +- .../m_anna_heatpump_cooling/all_data.json | 2 +- .../m_anna_heatpump_idle/all_data.json | 2 +- .../fixtures/stretch_v31/all_data.json | 1 - tests/components/plugwise/test_init.py | 21 +++++++++++++++---- 11 files changed, 35 insertions(+), 15 deletions(-) diff --git a/homeassistant/components/plugwise/__init__.py b/homeassistant/components/plugwise/__init__.py index 28389ffa357..3140e518688 100644 --- a/homeassistant/components/plugwise/__init__.py +++ b/homeassistant/components/plugwise/__init__.py @@ -49,8 +49,16 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: def async_migrate_entity_entry(entry: er.RegistryEntry) -> dict[str, Any] | None: """Migrate Plugwise entity entries. - - Migrates unique ID from old relay switches to the new unique ID + - Migrates old unique ID's from old binary_sensors and switches to the new unique ID's """ + if entry.domain == Platform.BINARY_SENSOR and entry.unique_id.endswith( + "-slave_boiler_state" + ): + return { + "new_unique_id": entry.unique_id.replace( + "-slave_boiler_state", "-secondary_boiler_state" + ) + } if entry.domain == Platform.SWITCH and entry.unique_id.endswith("-plug"): return {"new_unique_id": entry.unique_id.replace("-plug", "-relay")} diff --git a/homeassistant/components/plugwise/binary_sensor.py b/homeassistant/components/plugwise/binary_sensor.py index d32ae94160f..01ebc736dbe 100644 --- a/homeassistant/components/plugwise/binary_sensor.py +++ b/homeassistant/components/plugwise/binary_sensor.py @@ -64,8 +64,8 @@ BINARY_SENSORS: tuple[PlugwiseBinarySensorEntityDescription, ...] 
= ( entity_category=EntityCategory.DIAGNOSTIC, ), PlugwiseBinarySensorEntityDescription( - key="slave_boiler_state", - translation_key="slave_boiler_state", + key="secondary_boiler_state", + translation_key="secondary_boiler_state", entity_category=EntityCategory.DIAGNOSTIC, ), PlugwiseBinarySensorEntityDescription( diff --git a/homeassistant/components/plugwise/manifest.json b/homeassistant/components/plugwise/manifest.json index 888f813760a..1eb1cf6e8b6 100644 --- a/homeassistant/components/plugwise/manifest.json +++ b/homeassistant/components/plugwise/manifest.json @@ -7,6 +7,6 @@ "integration_type": "hub", "iot_class": "local_polling", "loggers": ["plugwise"], - "requirements": ["plugwise==0.37.1"], + "requirements": ["plugwise==0.37.2"], "zeroconf": ["_plugwise._tcp.local."] } diff --git a/homeassistant/components/plugwise/strings.json b/homeassistant/components/plugwise/strings.json index 7d26f5a624c..ef2d6458441 100644 --- a/homeassistant/components/plugwise/strings.json +++ b/homeassistant/components/plugwise/strings.json @@ -48,7 +48,7 @@ "cooling_state": { "name": "[%key:component::climate::entity_component::_::state_attributes::hvac_action::state::cooling%]" }, - "slave_boiler_state": { + "secondary_boiler_state": { "name": "Secondary boiler state" }, "plugwise_notification": { diff --git a/requirements_all.txt b/requirements_all.txt index d51dc0225ed..b2c21c1239d 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1548,7 +1548,7 @@ plexauth==0.0.6 plexwebsocket==0.0.14 # homeassistant.components.plugwise -plugwise==0.37.1 +plugwise==0.37.2 # homeassistant.components.plum_lightpad plumlightpad==0.0.11 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 7b04e7cf037..664467ea0a5 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1225,7 +1225,7 @@ plexauth==0.0.6 plexwebsocket==0.0.14 # homeassistant.components.plugwise -plugwise==0.37.1 +plugwise==0.37.2 # homeassistant.components.plum_lightpad 
plumlightpad==0.0.11 diff --git a/tests/components/plugwise/fixtures/anna_heatpump_heating/all_data.json b/tests/components/plugwise/fixtures/anna_heatpump_heating/all_data.json index d655f95c79b..d496edb4149 100644 --- a/tests/components/plugwise/fixtures/anna_heatpump_heating/all_data.json +++ b/tests/components/plugwise/fixtures/anna_heatpump_heating/all_data.json @@ -25,7 +25,7 @@ "dhw_state": false, "flame_state": false, "heating_state": true, - "slave_boiler_state": false + "secondary_boiler_state": false }, "dev_class": "heater_central", "location": "a57efe5f145f498c9be62a9b63626fbf", diff --git a/tests/components/plugwise/fixtures/m_anna_heatpump_cooling/all_data.json b/tests/components/plugwise/fixtures/m_anna_heatpump_cooling/all_data.json index 92c95f6c5a9..ef7af8a362b 100644 --- a/tests/components/plugwise/fixtures/m_anna_heatpump_cooling/all_data.json +++ b/tests/components/plugwise/fixtures/m_anna_heatpump_cooling/all_data.json @@ -25,7 +25,7 @@ "dhw_state": false, "flame_state": false, "heating_state": false, - "slave_boiler_state": false + "secondary_boiler_state": false }, "dev_class": "heater_central", "location": "a57efe5f145f498c9be62a9b63626fbf", diff --git a/tests/components/plugwise/fixtures/m_anna_heatpump_idle/all_data.json b/tests/components/plugwise/fixtures/m_anna_heatpump_idle/all_data.json index be400b9bc98..8f2e6a75f3f 100644 --- a/tests/components/plugwise/fixtures/m_anna_heatpump_idle/all_data.json +++ b/tests/components/plugwise/fixtures/m_anna_heatpump_idle/all_data.json @@ -25,7 +25,7 @@ "dhw_state": false, "flame_state": false, "heating_state": false, - "slave_boiler_state": false + "secondary_boiler_state": false }, "dev_class": "heater_central", "location": "a57efe5f145f498c9be62a9b63626fbf", diff --git a/tests/components/plugwise/fixtures/stretch_v31/all_data.json b/tests/components/plugwise/fixtures/stretch_v31/all_data.json index f42cde65b39..a875324fc13 100644 --- 
a/tests/components/plugwise/fixtures/stretch_v31/all_data.json +++ b/tests/components/plugwise/fixtures/stretch_v31/all_data.json @@ -136,7 +136,6 @@ "gateway": { "gateway_id": "0000aaaa0000aaaa0000aaaa0000aa00", "item_count": 83, - "notifications": {}, "smile_name": "Stretch" } } diff --git a/tests/components/plugwise/test_init.py b/tests/components/plugwise/test_init.py index 4eb0b2cb56a..b206b36be89 100644 --- a/tests/components/plugwise/test_init.py +++ b/tests/components/plugwise/test_init.py @@ -12,9 +12,8 @@ from plugwise.exceptions import ( import pytest from homeassistant.components.plugwise.const import DOMAIN -from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN -from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN from homeassistant.config_entries import ConfigEntryState +from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er @@ -22,6 +21,9 @@ from tests.common import MockConfigEntry HEATER_ID = "1cbf783bb11e4a7c8a6843dee3a86927" # Opentherm device_id for migration PLUG_ID = "cd0ddb54ef694e11ac18ed1cbce5dbbd" # VCR device_id for migration +SECONDARY_ID = ( + "1cbf783bb11e4a7c8a6843dee3a86927" # Heater_central device_id for migration +) async def test_load_unload_config_entry( @@ -77,7 +79,7 @@ async def test_gateway_config_entry_not_ready( [ ( { - "domain": SENSOR_DOMAIN, + "domain": Platform.SENSOR, "platform": DOMAIN, "unique_id": f"{HEATER_ID}-outdoor_temperature", "suggested_object_id": f"{HEATER_ID}-outdoor_temperature", @@ -118,7 +120,18 @@ async def test_migrate_unique_id_temperature( [ ( { - "domain": SWITCH_DOMAIN, + "domain": Platform.BINARY_SENSOR, + "platform": DOMAIN, + "unique_id": f"{SECONDARY_ID}-slave_boiler_state", + "suggested_object_id": f"{SECONDARY_ID}-slave_boiler_state", + "disabled_by": None, + }, + f"{SECONDARY_ID}-slave_boiler_state", + f"{SECONDARY_ID}-secondary_boiler_state", + ), + ( + { + "domain": 
Platform.SWITCH, "platform": DOMAIN, "unique_id": f"{PLUG_ID}-plug", "suggested_object_id": f"{PLUG_ID}-plug", From f1fa33483e499344ffca230f17ecd357f45eda3a Mon Sep 17 00:00:00 2001 From: Thomas55555 <59625598+Thomas55555@users.noreply.github.com> Date: Tue, 23 Apr 2024 22:23:44 +0200 Subject: [PATCH 324/426] Bump aioautomower to 2024.4.3 (#114500) --- .../husqvarna_automower/device_tracker.py | 6 ++++ .../husqvarna_automower/manifest.json | 2 +- .../components/husqvarna_automower/select.py | 7 +++-- .../components/husqvarna_automower/sensor.py | 3 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- .../snapshots/test_diagnostics.ambr | 4 ++- .../snapshots/test_sensor.ambr | 2 +- .../husqvarna_automower/test_sensor.py | 28 +++++++++++++++++-- 9 files changed, 45 insertions(+), 11 deletions(-) diff --git a/homeassistant/components/husqvarna_automower/device_tracker.py b/homeassistant/components/husqvarna_automower/device_tracker.py index a32fd8758bd..780d1da76fb 100644 --- a/homeassistant/components/husqvarna_automower/device_tracker.py +++ b/homeassistant/components/husqvarna_automower/device_tracker.py @@ -1,5 +1,7 @@ """Creates the device tracker entity for the mower.""" +from typing import TYPE_CHECKING + from homeassistant.components.device_tracker import SourceType, TrackerEntity from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant @@ -44,9 +46,13 @@ class AutomowerDeviceTrackerEntity(AutomowerBaseEntity, TrackerEntity): @property def latitude(self) -> float: """Return latitude value of the device.""" + if TYPE_CHECKING: + assert self.mower_attributes.positions is not None return self.mower_attributes.positions[0].latitude @property def longitude(self) -> float: """Return longitude value of the device.""" + if TYPE_CHECKING: + assert self.mower_attributes.positions is not None return self.mower_attributes.positions[0].longitude diff --git a/homeassistant/components/husqvarna_automower/manifest.json 
b/homeassistant/components/husqvarna_automower/manifest.json index e4536ee594d..147c6dfb6d5 100644 --- a/homeassistant/components/husqvarna_automower/manifest.json +++ b/homeassistant/components/husqvarna_automower/manifest.json @@ -7,5 +7,5 @@ "documentation": "https://www.home-assistant.io/integrations/husqvarna_automower", "iot_class": "cloud_push", "loggers": ["aioautomower"], - "requirements": ["aioautomower==2024.3.4"] + "requirements": ["aioautomower==2024.4.3"] } diff --git a/homeassistant/components/husqvarna_automower/select.py b/homeassistant/components/husqvarna_automower/select.py index e4376a1bca5..67aac4a2046 100644 --- a/homeassistant/components/husqvarna_automower/select.py +++ b/homeassistant/components/husqvarna_automower/select.py @@ -1,6 +1,7 @@ """Creates a select entity for the headlight of the mower.""" import logging +from typing import cast from aioautomower.exceptions import ApiException from aioautomower.model import HeadlightModes @@ -58,12 +59,14 @@ class AutomowerSelectEntity(AutomowerControlEntity, SelectEntity): @property def current_option(self) -> str: """Return the current option for the entity.""" - return self.mower_attributes.headlight.mode.lower() + return cast(HeadlightModes, self.mower_attributes.headlight.mode).lower() async def async_select_option(self, option: str) -> None: """Change the selected option.""" try: - await self.coordinator.api.set_headlight_mode(self.mower_id, option.upper()) + await self.coordinator.api.set_headlight_mode( + self.mower_id, cast(HeadlightModes, option.upper()) + ) except ApiException as exception: raise HomeAssistantError( f"Command couldn't be sent to the command queue: {exception}" diff --git a/homeassistant/components/husqvarna_automower/sensor.py b/homeassistant/components/husqvarna_automower/sensor.py index 10aec9b1536..6840708ed42 100644 --- a/homeassistant/components/husqvarna_automower/sensor.py +++ b/homeassistant/components/husqvarna_automower/sensor.py @@ -18,7 +18,6 @@ from 
homeassistant.const import PERCENTAGE, EntityCategory, UnitOfLength, UnitOf from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import StateType -from homeassistant.util import dt as dt_util from .const import DOMAIN from .coordinator import AutomowerDataUpdateCoordinator @@ -298,7 +297,7 @@ SENSOR_TYPES: tuple[AutomowerSensorEntityDescription, ...] = ( key="next_start_timestamp", translation_key="next_start_timestamp", device_class=SensorDeviceClass.TIMESTAMP, - value_fn=lambda data: dt_util.as_local(data.planner.next_start_datetime), + value_fn=lambda data: data.planner.next_start_datetime, ), AutomowerSensorEntityDescription( key="error", diff --git a/requirements_all.txt b/requirements_all.txt index b2c21c1239d..df688e6e00f 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -204,7 +204,7 @@ aioaseko==0.1.1 aioasuswrt==1.4.0 # homeassistant.components.husqvarna_automower -aioautomower==2024.3.4 +aioautomower==2024.4.3 # homeassistant.components.azure_devops aioazuredevops==2.0.0 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 664467ea0a5..60e54a81780 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -183,7 +183,7 @@ aioaseko==0.1.1 aioasuswrt==1.4.0 # homeassistant.components.husqvarna_automower -aioautomower==2024.3.4 +aioautomower==2024.4.3 # homeassistant.components.azure_devops aioazuredevops==2.0.0 diff --git a/tests/components/husqvarna_automower/snapshots/test_diagnostics.ambr b/tests/components/husqvarna_automower/snapshots/test_diagnostics.ambr index aea65005fc4..ee951986062 100644 --- a/tests/components/husqvarna_automower/snapshots/test_diagnostics.ambr +++ b/tests/components/husqvarna_automower/snapshots/test_diagnostics.ambr @@ -50,12 +50,14 @@ 'activity': 'PARKED_IN_CS', 'error_code': 0, 'error_datetime': None, + 'error_datetime_naive': None, 'error_key': None, 'mode': 'MAIN_AREA', 'state': 
'RESTRICTED', }), 'planner': dict({ - 'next_start_datetime': '2023-06-05T19:00:00', + 'next_start_datetime': '2023-06-05T19:00:00+00:00', + 'next_start_datetime_naive': '2023-06-05T19:00:00', 'override': dict({ 'action': 'NOT_ACTIVE', }), diff --git a/tests/components/husqvarna_automower/snapshots/test_sensor.ambr b/tests/components/husqvarna_automower/snapshots/test_sensor.ambr index fda9c900240..7d4533afe72 100644 --- a/tests/components/husqvarna_automower/snapshots/test_sensor.ambr +++ b/tests/components/husqvarna_automower/snapshots/test_sensor.ambr @@ -548,7 +548,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '2023-06-06T02:00:00+00:00', + 'state': '2023-06-05T19:00:00+00:00', }) # --- # name: test_sensor[sensor.test_mower_1_number_of_charging_cycles-entry] diff --git a/tests/components/husqvarna_automower/test_sensor.py b/tests/components/husqvarna_automower/test_sensor.py index f54ce9c6275..2c0661f82cb 100644 --- a/tests/components/husqvarna_automower/test_sensor.py +++ b/tests/components/husqvarna_automower/test_sensor.py @@ -10,7 +10,7 @@ from syrupy import SnapshotAssertion from homeassistant.components.husqvarna_automower.const import DOMAIN from homeassistant.components.husqvarna_automower.coordinator import SCAN_INTERVAL -from homeassistant.const import Platform +from homeassistant.const import STATE_UNKNOWN, Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er @@ -46,7 +46,7 @@ async def test_sensor_unknown_states( async_fire_time_changed(hass) await hass.async_block_till_done() state = hass.states.get("sensor.test_mower_1_mode") - assert state.state == "unknown" + assert state.state == STATE_UNKNOWN async def test_cutting_blade_usage_time_sensor( @@ -63,6 +63,30 @@ async def test_cutting_blade_usage_time_sensor( assert state.state == "0.034" +async def test_next_start_sensor( + hass: HomeAssistant, + mock_automower_client: AsyncMock, + mock_config_entry: MockConfigEntry, 
+ freezer: FrozenDateTimeFactory, +) -> None: + """Test if this sensor is only added, if data is available.""" + await setup_integration(hass, mock_config_entry) + state = hass.states.get("sensor.test_mower_1_next_start") + assert state is not None + assert state.state == "2023-06-05T19:00:00+00:00" + + values = mower_list_to_dictionary_dataclass( + load_json_value_fixture("mower.json", DOMAIN) + ) + values[TEST_MOWER_ID].planner.next_start_datetime = None + mock_automower_client.get_status.return_value = values + freezer.tick(SCAN_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done() + state = hass.states.get("sensor.test_mower_1_next_start") + assert state.state == STATE_UNKNOWN + + @pytest.mark.parametrize( ("sensor_to_test"), [ From 8f1761343ea418d477614ad65295c14fbb88bf82 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Tue, 23 Apr 2024 22:24:36 +0200 Subject: [PATCH 325/426] Only work out job type once when setting up dispatcher (#116030) --- homeassistant/helpers/dispatcher.py | 13 +++++++++++-- homeassistant/util/logging.py | 30 ++++++++++++++++++----------- 2 files changed, 30 insertions(+), 13 deletions(-) diff --git a/homeassistant/helpers/dispatcher.py b/homeassistant/helpers/dispatcher.py index c1194c7da01..52d57e9cf08 100644 --- a/homeassistant/helpers/dispatcher.py +++ b/homeassistant/helpers/dispatcher.py @@ -7,7 +7,12 @@ from functools import partial import logging from typing import Any, TypeVarTuple, overload -from homeassistant.core import HassJob, HomeAssistant, callback +from homeassistant.core import ( + HassJob, + HomeAssistant, + callback, + get_hassjob_callable_job_type, +) from homeassistant.loader import bind_hass from homeassistant.util.async_ import run_callback_threadsafe from homeassistant.util.logging import catch_log_exception @@ -161,9 +166,13 @@ def _generate_job( signal: SignalType[*_Ts] | str, target: Callable[[*_Ts], Any] | Callable[..., Any] ) -> HassJob[..., None | Coroutine[Any, Any, None]]: 
"""Generate a HassJob for a signal and target.""" + job_type = get_hassjob_callable_job_type(target) return HassJob( - catch_log_exception(target, partial(_format_err, signal, target)), + catch_log_exception( + target, partial(_format_err, signal, target), job_type=job_type + ), f"dispatcher {signal}", + job_type=job_type, ) diff --git a/homeassistant/util/logging.py b/homeassistant/util/logging.py index 8709186face..ab163578846 100644 --- a/homeassistant/util/logging.py +++ b/homeassistant/util/logging.py @@ -2,7 +2,6 @@ from __future__ import annotations -import asyncio from collections.abc import Callable, Coroutine from functools import partial, wraps import inspect @@ -12,7 +11,12 @@ import queue import traceback from typing import Any, TypeVar, TypeVarTuple, cast, overload -from homeassistant.core import HomeAssistant, callback, is_callback +from homeassistant.core import ( + HassJobType, + HomeAssistant, + callback, + get_hassjob_callable_job_type, +) _T = TypeVar("_T") _Ts = TypeVarTuple("_Ts") @@ -129,34 +133,38 @@ def _callback_wrapper( @overload def catch_log_exception( - func: Callable[[*_Ts], Coroutine[Any, Any, Any]], format_err: Callable[[*_Ts], Any] + func: Callable[[*_Ts], Coroutine[Any, Any, Any]], + format_err: Callable[[*_Ts], Any], + job_type: HassJobType | None = None, ) -> Callable[[*_Ts], Coroutine[Any, Any, None]]: ... @overload def catch_log_exception( - func: Callable[[*_Ts], Any], format_err: Callable[[*_Ts], Any] + func: Callable[[*_Ts], Any], + format_err: Callable[[*_Ts], Any], + job_type: HassJobType | None = None, ) -> Callable[[*_Ts], None] | Callable[[*_Ts], Coroutine[Any, Any, None]]: ... def catch_log_exception( - func: Callable[[*_Ts], Any], format_err: Callable[[*_Ts], Any] + func: Callable[[*_Ts], Any], + format_err: Callable[[*_Ts], Any], + job_type: HassJobType | None = None, ) -> Callable[[*_Ts], None] | Callable[[*_Ts], Coroutine[Any, Any, None]]: """Decorate a function func to catch and log exceptions. 
If func is a coroutine function, a coroutine function will be returned. If func is a callback, a callback will be returned. """ - # Check for partials to properly determine if coroutine function - check_func = func - while isinstance(check_func, partial): - check_func = check_func.func # type: ignore[unreachable] # false positive + if job_type is None: + job_type = get_hassjob_callable_job_type(func) - if asyncio.iscoroutinefunction(check_func): + if job_type is HassJobType.Coroutinefunction: async_func = cast(Callable[[*_Ts], Coroutine[Any, Any, None]], func) return wraps(async_func)(partial(_async_wrapper, async_func, format_err)) # type: ignore[return-value] - if is_callback(check_func): + if job_type is HassJobType.Callback: return wraps(func)(partial(_callback_wrapper, func, format_err)) # type: ignore[return-value] return wraps(func)(partial(_sync_wrapper, func, format_err)) # type: ignore[return-value] From 0c583bb1d902f2bbbb7eac4b5dfcef7818182043 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Tue, 23 Apr 2024 22:25:35 +0200 Subject: [PATCH 326/426] Fix ruff complaints in control4 (#116058) --- homeassistant/components/control4/__init__.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/homeassistant/components/control4/__init__.py b/homeassistant/components/control4/__init__.py index 4b24ac6bf77..86a13de1ac8 100644 --- a/homeassistant/components/control4/__init__.py +++ b/homeassistant/components/control4/__init__.py @@ -52,8 +52,7 @@ async def call_c4_api_retry(func, *func_args): """Call C4 API function and retry on failure.""" for i in range(API_RETRY_TIMES): try: - output = await func(*func_args) - return output + return await func(*func_args) except client_exceptions.ClientError as exception: _LOGGER.error("Error connecting to Control4 account API: %s", exception) if i == API_RETRY_TIMES - 1: From 31d11b2362af06e9083edef633703af050834bd6 Mon Sep 17 00:00:00 2001 From: Jan Bouwhuis Date: Tue, 23 Apr 2024 22:26:01 +0200 Subject: [PATCH 327/426] Add re-auth flow for MQTT broker username and password (#116011) Co-authored-by: J. 
Nick Koston --- homeassistant/components/mqtt/client.py | 8 ++ homeassistant/components/mqtt/config_flow.py | 61 +++++++++++- homeassistant/components/mqtt/strings.json | 14 +++ tests/components/mqtt/test_config_flow.py | 97 ++++++++++++++++++++ tests/components/mqtt/test_init.py | 18 ++++ 5 files changed, 197 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/mqtt/client.py b/homeassistant/components/mqtt/client.py index 9a344e13023..133991ade16 100644 --- a/homeassistant/components/mqtt/client.py +++ b/homeassistant/components/mqtt/client.py @@ -895,10 +895,18 @@ class MQTT: import paho.mqtt.client as mqtt if result_code != mqtt.CONNACK_ACCEPTED: + if result_code in ( + mqtt.CONNACK_REFUSED_BAD_USERNAME_PASSWORD, + mqtt.CONNACK_REFUSED_NOT_AUTHORIZED, + ): + self._should_reconnect = False + self.hass.async_create_task(self.async_disconnect()) + self.config_entry.async_start_reauth(self.hass) _LOGGER.error( "Unable to connect to the MQTT broker: %s", mqtt.connack_string(result_code), ) + self._async_connection_result(False) return self.connected = True diff --git a/homeassistant/components/mqtt/config_flow.py b/homeassistant/components/mqtt/config_flow.py index 5bf0c9c1879..8168b997fa6 100644 --- a/homeassistant/components/mqtt/config_flow.py +++ b/homeassistant/components/mqtt/config_flow.py @@ -3,7 +3,7 @@ from __future__ import annotations from collections import OrderedDict -from collections.abc import Callable +from collections.abc import Callable, Mapping import queue from ssl import PROTOCOL_TLS_CLIENT, SSLContext, SSLError from types import MappingProxyType @@ -158,13 +158,23 @@ CERT_UPLOAD_SELECTOR = FileSelector( ) KEY_UPLOAD_SELECTOR = FileSelector(FileSelectorConfig(accept=".key,application/pkcs8")) +REAUTH_SCHEMA = vol.Schema( + { + vol.Required(CONF_USERNAME): TEXT_SELECTOR, + vol.Required(CONF_PASSWORD): PASSWORD_SELECTOR, + } +) +PWD_NOT_CHANGED = "__**password_not_changed**__" + class FlowHandler(ConfigFlow, domain=DOMAIN): 
"""Handle a config flow.""" VERSION = 1 + entry: ConfigEntry | None _hassio_discovery: dict[str, Any] | None = None + _reauth_config_entry: ConfigEntry | None = None @staticmethod @callback @@ -183,6 +193,55 @@ class FlowHandler(ConfigFlow, domain=DOMAIN): return await self.async_step_broker() + async def async_step_reauth( + self, entry_data: Mapping[str, Any] + ) -> ConfigFlowResult: + """Handle re-authentication with Aladdin Connect.""" + + self.entry = self.hass.config_entries.async_get_entry(self.context["entry_id"]) + return await self.async_step_reauth_confirm() + + async def async_step_reauth_confirm( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Confirm re-authentication with MQTT broker.""" + errors: dict[str, str] = {} + + assert self.entry is not None + if user_input: + password_changed = ( + user_password := user_input[CONF_PASSWORD] + ) != PWD_NOT_CHANGED + entry_password = self.entry.data.get(CONF_PASSWORD) + password = user_password if password_changed else entry_password + new_entry_data = { + **self.entry.data, + CONF_USERNAME: user_input.get(CONF_USERNAME), + CONF_PASSWORD: password, + } + if await self.hass.async_add_executor_job( + try_connection, + new_entry_data, + ): + return self.async_update_reload_and_abort( + self.entry, data=new_entry_data + ) + + errors["base"] = "invalid_auth" + + schema = self.add_suggested_values_to_schema( + REAUTH_SCHEMA, + { + CONF_USERNAME: self.entry.data.get(CONF_USERNAME), + CONF_PASSWORD: PWD_NOT_CHANGED, + }, + ) + return self.async_show_form( + step_id="reauth_confirm", + data_schema=schema, + errors=errors, + ) + async def async_step_broker( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: diff --git a/homeassistant/components/mqtt/strings.json b/homeassistant/components/mqtt/strings.json index 2bd47db63bc..fc5f0bc4970 100644 --- a/homeassistant/components/mqtt/strings.json +++ b/homeassistant/components/mqtt/strings.json @@ -68,10 +68,23 @@ 
"data_description": { "discovery": "Option to enable MQTT automatic discovery." } + }, + "reauth_confirm": { + "title": "Re-authentication required with the MQTT broker", + "description": "The MQTT broker reported an authentication error. Please confirm the brokers correct usernname and password.", + "data": { + "username": "[%key:common::config_flow::data::username%]", + "password": "[%key:common::config_flow::data::password%]" + }, + "data_description": { + "username": "[%key:component::mqtt::config::step::broker::data_description::username%]", + "password": "[%key:component::mqtt::config::step::broker::data_description::password%]" + } } }, "abort": { "already_configured": "[%key:common::config_flow::abort::already_configured_service%]", + "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]", "single_instance_allowed": "[%key:common::config_flow::abort::single_instance_allowed%]" }, "error": { @@ -84,6 +97,7 @@ "bad_client_cert_key": "Client certificate and private key are not a valid pair", "bad_ws_headers": "Supply valid HTTP headers as a JSON object", "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", + "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]", "invalid_inclusion": "The client certificate and private key must be configurered together" } }, diff --git a/tests/components/mqtt/test_config_flow.py b/tests/components/mqtt/test_config_flow.py index bbba791137a..56d19506a66 100644 --- a/tests/components/mqtt/test_config_flow.py +++ b/tests/components/mqtt/test_config_flow.py @@ -14,6 +14,7 @@ import voluptuous as vol from homeassistant import config_entries from homeassistant.components import mqtt from homeassistant.components.hassio import HassioServiceInfo +from homeassistant.components.mqtt.config_flow import PWD_NOT_CHANGED from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -1060,6 +1061,102 @@ async def test_skipping_advanced_options( 
assert result["step_id"] == step_id +@pytest.mark.parametrize( + ("test_input", "user_input", "new_password"), + [ + ( + { + mqtt.CONF_BROKER: "test-broker", + mqtt.CONF_USERNAME: "username", + mqtt.CONF_PASSWORD: "verysecret", + }, + { + mqtt.CONF_USERNAME: "username", + mqtt.CONF_PASSWORD: "newpassword", + }, + "newpassword", + ), + ( + { + mqtt.CONF_BROKER: "test-broker", + mqtt.CONF_USERNAME: "username", + mqtt.CONF_PASSWORD: "verysecret", + }, + { + mqtt.CONF_USERNAME: "username", + mqtt.CONF_PASSWORD: PWD_NOT_CHANGED, + }, + "verysecret", + ), + ], +) +async def test_step_reauth( + hass: HomeAssistant, + mqtt_mock_entry: MqttMockHAClientGenerator, + mqtt_client_mock: MqttMockPahoClient, + mock_try_connection: MagicMock, + mock_reload_after_entry_update: MagicMock, + test_input: dict[str, Any], + user_input: dict[str, Any], + new_password: str, +) -> None: + """Test that the reauth step works.""" + + # Prepare the config entry + config_entry = hass.config_entries.async_entries(mqtt.DOMAIN)[0] + hass.config_entries.async_update_entry( + config_entry, + data=test_input, + ) + await mqtt_mock_entry() + + # Start reauth flow + config_entry.async_start_reauth(hass) + await hass.async_block_till_done() + + flows = hass.config_entries.flow.async_progress() + assert len(flows) == 1 + result = flows[0] + assert result["step_id"] == "reauth_confirm" + assert result["context"]["source"] == "reauth" + + # Show the form + result = await hass.config_entries.flow.async_init( + mqtt.DOMAIN, + context={ + "source": config_entries.SOURCE_REAUTH, + "entry_id": config_entry.entry_id, + }, + data=config_entry.data, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reauth_confirm" + + result = await hass.config_entries.flow.async_configure(result["flow_id"]) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reauth_confirm" + + # Simulate re-auth fails + mock_try_connection.return_value = False + result = await 
hass.config_entries.flow.async_configure( + result["flow_id"], user_input=user_input + ) + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"base": "invalid_auth"} + + # Simulate re-auth succeeds + mock_try_connection.return_value = True + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input=user_input + ) + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reauth_successful" + + assert len(hass.config_entries.async_entries()) == 1 + assert config_entry.data.get(mqtt.CONF_PASSWORD) == new_password + await hass.async_block_till_done() + + async def test_options_user_connection_fails( hass: HomeAssistant, mock_try_connection_time_out: MagicMock ) -> None: diff --git a/tests/components/mqtt/test_init.py b/tests/components/mqtt/test_init.py index 7bb43568b30..9d135b89f36 100644 --- a/tests/components/mqtt/test_init.py +++ b/tests/components/mqtt/test_init.py @@ -2046,6 +2046,24 @@ async def test_logs_error_if_no_connect_broker( ) +@pytest.mark.parametrize("return_code", [4, 5]) +async def test_triggers_reauth_flow_if_auth_fails( + hass: HomeAssistant, + caplog: pytest.LogCaptureFixture, + mqtt_mock_entry: MqttMockHAClientGenerator, + mqtt_client_mock: MqttMockPahoClient, + return_code: int, +) -> None: + """Test re-auth is triggered if authentication is failing.""" + await mqtt_mock_entry() + # test with rc = 4 -> CONNACK_REFUSED_NOT_AUTHORIZED and 5 -> CONNACK_REFUSED_BAD_USERNAME_PASSWORD + mqtt_client_mock.on_connect(mqtt_client_mock, None, None, return_code) + await hass.async_block_till_done() + flows = hass.config_entries.flow.async_progress() + assert len(flows) == 1 + assert flows[0]["context"]["source"] == "reauth" + + @patch("homeassistant.components.mqtt.client.TIMEOUT_ACK", 0.3) async def test_handle_mqtt_on_callback( hass: HomeAssistant, From a22c221722216338d23f1a47bd3716f8518cd390 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Tue, 23 Apr 2024 22:28:31 +0200 Subject: [PATCH 328/426] Rename bus._async_fire to bus.async_fire_internal (#116027) --- .../components/automation/__init__.py | 5 ++- homeassistant/core.py | 39 +++++++++++-------- homeassistant/data_entry_flow.py | 2 +- homeassistant/helpers/script.py | 2 +- homeassistant/setup.py | 4 +- 5 files changed, 32 insertions(+), 20 deletions(-) diff --git a/homeassistant/components/automation/__init__.py b/homeassistant/components/automation/__init__.py index 89a2817e236..fa242ac1557 100644 --- a/homeassistant/components/automation/__init__.py +++ b/homeassistant/components/automation/__init__.py @@ -707,7 +707,10 @@ class AutomationEntity(BaseAutomationEntity, RestoreEntity): @callback def started_action() -> None: - self.hass.bus.async_fire( + # This is always a callback from a coro so there is no + # risk of this running in a thread which allows us to use + # async_fire_internal + self.hass.bus.async_fire_internal( EVENT_AUTOMATION_TRIGGERED, event_data, context=trigger_context ) diff --git a/homeassistant/core.py b/homeassistant/core.py index 8471d2c4dcc..01329806e61 100644 --- a/homeassistant/core.py +++ b/homeassistant/core.py @@ -506,8 +506,8 @@ class HomeAssistant: setattr(self.loop, "_thread_ident", threading.get_ident()) self.set_state(CoreState.starting) - self.bus.async_fire(EVENT_CORE_CONFIG_UPDATE) - self.bus.async_fire(EVENT_HOMEASSISTANT_START) + self.bus.async_fire_internal(EVENT_CORE_CONFIG_UPDATE) + self.bus.async_fire_internal(EVENT_HOMEASSISTANT_START) if not self._tasks: pending: set[asyncio.Future[Any]] | None = None @@ -540,8 +540,8 @@ class HomeAssistant: return self.set_state(CoreState.running) - self.bus.async_fire(EVENT_CORE_CONFIG_UPDATE) - self.bus.async_fire(EVENT_HOMEASSISTANT_STARTED) + self.bus.async_fire_internal(EVENT_CORE_CONFIG_UPDATE) + self.bus.async_fire_internal(EVENT_HOMEASSISTANT_STARTED) def add_job( self, target: Callable[[*_Ts], Any] | Coroutine[Any, Any, Any], *args: *_Ts 
@@ -1115,7 +1115,7 @@ class HomeAssistant: self.exit_code = exit_code self.set_state(CoreState.stopping) - self.bus.async_fire(EVENT_HOMEASSISTANT_STOP) + self.bus.async_fire_internal(EVENT_HOMEASSISTANT_STOP) try: async with self.timeout.async_timeout(STOP_STAGE_SHUTDOWN_TIMEOUT): await self.async_block_till_done() @@ -1128,7 +1128,7 @@ class HomeAssistant: # Stage 3 - Final write self.set_state(CoreState.final_write) - self.bus.async_fire(EVENT_HOMEASSISTANT_FINAL_WRITE) + self.bus.async_fire_internal(EVENT_HOMEASSISTANT_FINAL_WRITE) try: async with self.timeout.async_timeout(FINAL_WRITE_STAGE_SHUTDOWN_TIMEOUT): await self.async_block_till_done() @@ -1141,7 +1141,7 @@ class HomeAssistant: # Stage 4 - Close self.set_state(CoreState.not_running) - self.bus.async_fire(EVENT_HOMEASSISTANT_CLOSE) + self.bus.async_fire_internal(EVENT_HOMEASSISTANT_CLOSE) # Make a copy of running_tasks since a task can finish # while we are awaiting canceled tasks to get their result @@ -1390,7 +1390,7 @@ class _OneTimeListener(Generic[_DataT]): return f"<_OneTimeListener {self.listener_job.target}>" -# Empty list, used by EventBus._async_fire +# Empty list, used by EventBus.async_fire_internal EMPTY_LIST: list[Any] = [] @@ -1455,10 +1455,12 @@ class EventBus: raise MaxLengthExceeded( event_type, "event_type", MAX_LENGTH_EVENT_EVENT_TYPE ) - return self._async_fire(event_type, event_data, origin, context, time_fired) + return self.async_fire_internal( + event_type, event_data, origin, context, time_fired + ) @callback - def _async_fire( + def async_fire_internal( self, event_type: EventType[_DataT] | str, event_data: _DataT | None = None, @@ -1466,7 +1468,12 @@ class EventBus: context: Context | None = None, time_fired: float | None = None, ) -> None: - """Fire an event. + """Fire an event, for internal use only. + + This method is intended to only be used by core internally + and should not be considered a stable API. 
We will make + breaking change to this function in the future and it + should not be used in integrations. This method must be run in the event loop. """ @@ -2112,7 +2119,7 @@ class StateMachine: "old_state": old_state, "new_state": None, } - self._bus._async_fire( # pylint: disable=protected-access + self._bus.async_fire_internal( EVENT_STATE_CHANGED, state_changed_data, context=context, @@ -2225,7 +2232,7 @@ class StateMachine: # mypy does not understand this is only possible if old_state is not None old_last_reported = old_state.last_reported # type: ignore[union-attr] old_state.last_reported = now # type: ignore[union-attr] - self._bus._async_fire( # pylint: disable=protected-access + self._bus.async_fire_internal( EVENT_STATE_REPORTED, { "entity_id": entity_id, @@ -2268,7 +2275,7 @@ class StateMachine: "old_state": old_state, "new_state": state, } - self._bus._async_fire( # pylint: disable=protected-access + self._bus.async_fire_internal( EVENT_STATE_CHANGED, state_changed_data, context=context, @@ -2622,7 +2629,7 @@ class ServiceRegistry: domain, service, processed_data, context, return_response ) - self._hass.bus._async_fire( # pylint: disable=protected-access + self._hass.bus.async_fire_internal( EVENT_CALL_SERVICE, { ATTR_DOMAIN: domain, @@ -2948,7 +2955,7 @@ class Config: self._update(source=ConfigSource.STORAGE, **kwargs) await self._async_store() - self.hass.bus.async_fire(EVENT_CORE_CONFIG_UPDATE, kwargs) + self.hass.bus.async_fire_internal(EVENT_CORE_CONFIG_UPDATE, kwargs) _raise_issue_if_historic_currency(self.hass, self.currency) _raise_issue_if_no_country(self.hass, self.country) diff --git a/homeassistant/data_entry_flow.py b/homeassistant/data_entry_flow.py index 7e7019681af..f628879a7fd 100644 --- a/homeassistant/data_entry_flow.py +++ b/homeassistant/data_entry_flow.py @@ -442,7 +442,7 @@ class FlowManager(abc.ABC, Generic[_FlowResultT, _HandlerT]): ) ): # Tell frontend to reload the flow state. 
- self.hass.bus.async_fire( + self.hass.bus.async_fire_internal( EVENT_DATA_ENTRY_FLOW_PROGRESSED, {"handler": flow.handler, "flow_id": flow_id, "refresh": True}, ) diff --git a/homeassistant/helpers/script.py b/homeassistant/helpers/script.py index 62c781ae629..d925bf215ab 100644 --- a/homeassistant/helpers/script.py +++ b/homeassistant/helpers/script.py @@ -784,7 +784,7 @@ class _ScriptRun: ) trace_set_result(event=self._action[CONF_EVENT], event_data=event_data) - self._hass.bus.async_fire( + self._hass.bus.async_fire_internal( self._action[CONF_EVENT], event_data, context=self._context ) diff --git a/homeassistant/setup.py b/homeassistant/setup.py index 5772fce6955..fab70e31d9d 100644 --- a/homeassistant/setup.py +++ b/homeassistant/setup.py @@ -459,7 +459,9 @@ async def _async_setup_component( # Cleanup hass.data[DATA_SETUP].pop(domain, None) - hass.bus.async_fire(EVENT_COMPONENT_LOADED, EventComponentLoaded(component=domain)) + hass.bus.async_fire_internal( + EVENT_COMPONENT_LOADED, EventComponentLoaded(component=domain) + ) return True From a45040af145fcd10f252c8b4637ad3442a119e48 Mon Sep 17 00:00:00 2001 From: Joost Lekkerkerker Date: Tue, 23 Apr 2024 22:30:20 +0200 Subject: [PATCH 329/426] Add entity translations to 17track (#116022) --- .../components/seventeentrack/icons.json | 30 +++++++++ .../components/seventeentrack/sensor.py | 51 ++++++++------- .../components/seventeentrack/strings.json | 28 ++++++++ .../components/seventeentrack/test_sensor.py | 64 +++++-------------- 4 files changed, 101 insertions(+), 72 deletions(-) create mode 100644 homeassistant/components/seventeentrack/icons.json diff --git a/homeassistant/components/seventeentrack/icons.json b/homeassistant/components/seventeentrack/icons.json new file mode 100644 index 00000000000..05323a69743 --- /dev/null +++ b/homeassistant/components/seventeentrack/icons.json @@ -0,0 +1,30 @@ +{ + "entity": { + "sensor": { + "not_found": { + "default": "mdi:package" + }, + "in_transit": { + 
"default": "mdi:package" + }, + "expired": { + "default": "mdi:package" + }, + "ready_to_be_picked_up": { + "default": "mdi:package" + }, + "undelivered": { + "default": "mdi:package" + }, + "delivered": { + "default": "mdi:package" + }, + "returned": { + "default": "mdi:package" + }, + "package": { + "default": "mdi:package" + } + } + } +} diff --git a/homeassistant/components/seventeentrack/sensor.py b/homeassistant/components/seventeentrack/sensor.py index cbad01d0b0a..acc8471c030 100644 --- a/homeassistant/components/seventeentrack/sensor.py +++ b/homeassistant/components/seventeentrack/sensor.py @@ -18,6 +18,7 @@ from homeassistant.const import ( from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant, callback from homeassistant.data_entry_flow import FlowResultType from homeassistant.helpers import config_validation as cv, entity_registry as er +from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType, StateType @@ -38,7 +39,6 @@ from .const import ( CONF_SHOW_ARCHIVED, CONF_SHOW_DELIVERED, DOMAIN, - ENTITY_ID_TEMPLATE, LOGGER, NOTIFICATION_DELIVERED_MESSAGE, NOTIFICATION_DELIVERED_TITLE, @@ -150,7 +150,7 @@ async def async_setup_entry( ) async_add_entities( - SeventeenTrackSummarySensor(status, summary_data["status_name"], coordinator) + SeventeenTrackSummarySensor(status, coordinator) for status, summary_data in coordinator.data.summary.items() ) @@ -161,26 +161,37 @@ async def async_setup_entry( ) -class SeventeenTrackSummarySensor( - CoordinatorEntity[SeventeenTrackCoordinator], SensorEntity -): - """Define a summary sensor.""" +class SeventeenTrackSensor(CoordinatorEntity[SeventeenTrackCoordinator], SensorEntity): + """Define a 17Track sensor.""" _attr_attribution = ATTRIBUTION - _attr_icon = 
"mdi:package" + _attr_has_entity_name = True + + def __init__(self, coordinator: SeventeenTrackCoordinator) -> None: + """Initialize the sensor.""" + super().__init__(coordinator) + self._attr_device_info = DeviceInfo( + identifiers={(DOMAIN, coordinator.account_id)}, + entry_type=DeviceEntryType.SERVICE, + name="17Track", + ) + + +class SeventeenTrackSummarySensor(SeventeenTrackSensor): + """Define a summary sensor.""" + _attr_native_unit_of_measurement = "packages" def __init__( self, status: str, - status_name: str, coordinator: SeventeenTrackCoordinator, ) -> None: """Initialize the sensor.""" super().__init__(coordinator) self._status = status - self._attr_name = f"Seventeentrack Packages {status_name}" - self._attr_unique_id = f"summary_{coordinator.account_id}_{self._status}" + self._attr_translation_key = status + self._attr_unique_id = f"summary_{coordinator.account_id}_{status}" @property def available(self) -> bool: @@ -211,13 +222,10 @@ class SeventeenTrackSummarySensor( } -class SeventeenTrackPackageSensor( - CoordinatorEntity[SeventeenTrackCoordinator], SensorEntity -): +class SeventeenTrackPackageSensor(SeventeenTrackSensor): """Define an individual package sensor.""" - _attr_attribution = ATTRIBUTION - _attr_icon = "mdi:package" + _attr_translation_key = "package" def __init__( self, @@ -228,24 +236,19 @@ class SeventeenTrackPackageSensor( super().__init__(coordinator) self._tracking_number = tracking_number self._previous_status = coordinator.data.live_packages[tracking_number].status - self.entity_id = ENTITY_ID_TEMPLATE.format(tracking_number) self._attr_unique_id = UNIQUE_ID_TEMPLATE.format( coordinator.account_id, tracking_number ) + package = coordinator.data.live_packages[tracking_number] + if not (name := package.friendly_name): + name = tracking_number + self._attr_translation_placeholders = {"name": name} @property def available(self) -> bool: """Return whether the entity is available.""" return self._tracking_number in 
self.coordinator.data.live_packages - @property - def name(self) -> str: - """Return the name.""" - package = self.coordinator.data.live_packages.get(self._tracking_number) - if package is None or not (name := package.friendly_name): - name = self._tracking_number - return f"Seventeentrack Package: {name}" - @property def native_value(self) -> StateType: """Return the state.""" diff --git a/homeassistant/components/seventeentrack/strings.json b/homeassistant/components/seventeentrack/strings.json index 39ddb5ef8ef..8d91f926d50 100644 --- a/homeassistant/components/seventeentrack/strings.json +++ b/homeassistant/components/seventeentrack/strings.json @@ -38,5 +38,33 @@ "title": "The 17Track YAML configuration import request failed due to invalid authentication", "description": "Configuring 17Track using YAML is being removed but there were invalid credentials provided while importing your existing configuration.\nSetup will not proceed.\n\nVerify that your 17Track credentials are correct and restart Home Assistant to attempt the import again.\n\nAlternatively, you may remove the 17Track configuration from your YAML configuration entirely, restart Home Assistant, and add the 17Track integration manually." 
} + }, + "entity": { + "sensor": { + "not_found": { + "name": "Not found" + }, + "in_transit": { + "name": "In transit" + }, + "expired": { + "name": "Expired" + }, + "ready_to_be_picked_up": { + "name": "Ready to be picked up" + }, + "undelivered": { + "name": "Undelivered" + }, + "delivered": { + "name": "Delivered" + }, + "returned": { + "name": "Returned" + }, + "package": { + "name": "Package {name}" + } + } } } diff --git a/tests/components/seventeentrack/test_sensor.py b/tests/components/seventeentrack/test_sensor.py index 27de64ca89f..31fc5deec24 100644 --- a/tests/components/seventeentrack/test_sensor.py +++ b/tests/components/seventeentrack/test_sensor.py @@ -75,7 +75,7 @@ async def test_add_package( mock_seventeentrack.return_value.profile.packages.return_value = [package] await init_integration(hass, mock_config_entry) - assert hass.states.get("sensor.seventeentrack_package_456") + assert hass.states.get("sensor.17track_package_friendly_name_1") assert len(hass.states.async_entity_ids()) == DEFAULT_SUMMARY_LENGTH + 1 package2 = get_package( @@ -89,7 +89,7 @@ async def test_add_package( await goto_future(hass, freezer) - assert hass.states.get("sensor.seventeentrack_package_789") is not None + assert hass.states.get("sensor.17track_package_friendly_name_1") is not None assert len(hass.states.async_entity_ids()) == DEFAULT_SUMMARY_LENGTH + 2 @@ -103,9 +103,9 @@ async def test_add_package_default_friendly_name( mock_seventeentrack.return_value.profile.packages.return_value = [package] await init_integration(hass, mock_config_entry) - state_456 = hass.states.get("sensor.seventeentrack_package_456") + state_456 = hass.states.get("sensor.17track_package_456") assert state_456 is not None - assert state_456.attributes["friendly_name"] == "Seventeentrack Package: 456" + assert state_456.attributes["friendly_name"] == "17Track Package 456" assert len(hass.states.async_entity_ids()) == DEFAULT_SUMMARY_LENGTH + 1 @@ -132,16 +132,16 @@ async def 
test_remove_package( await init_integration(hass, mock_config_entry) - assert hass.states.get("sensor.seventeentrack_package_456") is not None - assert hass.states.get("sensor.seventeentrack_package_789") is not None + assert hass.states.get("sensor.17track_package_friendly_name_1") is not None + assert hass.states.get("sensor.17track_package_friendly_name_2") is not None assert len(hass.states.async_entity_ids()) == DEFAULT_SUMMARY_LENGTH + 2 mock_seventeentrack.return_value.profile.packages.return_value = [package2] await goto_future(hass, freezer) - assert hass.states.get("sensor.seventeentrack_package_456") is None - assert hass.states.get("sensor.seventeentrack_package_789") is not None + assert hass.states.get("sensor.17track_package_friendly_name_1") is None + assert hass.states.get("sensor.17track_package_friendly_name_2") is not None assert len(hass.states.async_entity_ids()) == DEFAULT_SUMMARY_LENGTH + 1 @@ -157,35 +157,7 @@ async def test_package_error( mock_seventeentrack.return_value.profile.summary.return_value = {} await init_integration(hass, mock_config_entry) - assert hass.states.get("sensor.seventeentrack_package_456") is None - - -async def test_friendly_name_changed( - hass: HomeAssistant, - freezer: FrozenDateTimeFactory, - mock_seventeentrack: AsyncMock, - mock_config_entry: MockConfigEntry, -) -> None: - """Test friendly name change.""" - package = get_package() - mock_seventeentrack.return_value.profile.packages.return_value = [package] - - await init_integration(hass, mock_config_entry) - - assert hass.states.get("sensor.seventeentrack_package_456") is not None - assert len(hass.states.async_entity_ids()) == DEFAULT_SUMMARY_LENGTH + 1 - - package = get_package(friendly_name="friendly name 2") - mock_seventeentrack.return_value.profile.packages.return_value = [package] - - await goto_future(hass, freezer) - - assert hass.states.get("sensor.seventeentrack_package_456") is not None - entity = 
hass.data["entity_components"]["sensor"].get_entity( - "sensor.seventeentrack_package_456" - ) - assert entity.name == "Seventeentrack Package: friendly name 2" - assert len(hass.states.async_entity_ids()) == DEFAULT_SUMMARY_LENGTH + 1 + assert hass.states.get("sensor.17track_package_friendly_name_1") is None async def test_delivered_not_shown( @@ -204,7 +176,7 @@ async def test_delivered_not_shown( await init_integration(hass, mock_config_entry_with_default_options) await goto_future(hass, freezer) - assert hass.states.get("sensor.seventeentrack_package_456") is None + assert hass.states.get("sensor.17track_package_friendly_name_1") is None persistent_notification_mock.create.assert_called() @@ -222,7 +194,7 @@ async def test_delivered_shown( ) as persistent_notification_mock: await init_integration(hass, mock_config_entry) - assert hass.states.get("sensor.seventeentrack_package_456") is not None + assert hass.states.get("sensor.17track_package_friendly_name_1") is not None assert len(hass.states.async_entity_ids()) == DEFAULT_SUMMARY_LENGTH + 1 persistent_notification_mock.create.assert_not_called() @@ -239,7 +211,7 @@ async def test_becomes_delivered_not_shown_notification( await init_integration(hass, mock_config_entry_with_default_options) - assert hass.states.get("sensor.seventeentrack_package_456") is not None + assert hass.states.get("sensor.17track_package_friendly_name_1") is not None assert len(hass.states.async_entity_ids()) == DEFAULT_SUMMARY_LENGTH + 1 package_delivered = get_package(status=40) @@ -268,9 +240,7 @@ async def test_summary_correctly_updated( assert len(hass.states.async_entity_ids()) == DEFAULT_SUMMARY_LENGTH + 1 - state_ready_picked = hass.states.get( - "sensor.seventeentrack_packages_ready_to_be_picked_up" - ) + state_ready_picked = hass.states.get("sensor.17track_ready_to_be_picked_up") assert state_ready_picked is not None assert len(state_ready_picked.attributes["packages"]) == 1 @@ -283,9 +253,7 @@ async def 
test_summary_correctly_updated( for state in hass.states.async_all(): assert state.state == "1" - state_ready_picked = hass.states.get( - "sensor.seventeentrack_packages_ready_to_be_picked_up" - ) + state_ready_picked = hass.states.get("sensor.17track_ready_to_be_picked_up") assert state_ready_picked is not None assert len(state_ready_picked.attributes["packages"]) == 0 @@ -323,9 +291,9 @@ async def test_utc_timestamp( await init_integration(hass, mock_config_entry) - assert hass.states.get("sensor.seventeentrack_package_456") is not None + assert hass.states.get("sensor.17track_package_friendly_name_1") is not None assert len(hass.states.async_entity_ids()) == DEFAULT_SUMMARY_LENGTH + 1 - state_456 = hass.states.get("sensor.seventeentrack_package_456") + state_456 = hass.states.get("sensor.17track_package_friendly_name_1") assert state_456 is not None assert str(state_456.attributes.get("timestamp")) == "2020-08-10 03:32:00+00:00" From 0f60b404dfbf84cf5b32fa66a4ee633ca4c95e50 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Tue, 23 Apr 2024 22:50:31 +0200 Subject: [PATCH 330/426] Fix husqvarna_automower typing (#116060) --- .../components/husqvarna_automower/number.py | 15 ++++++++++++--- 1 file changed, 12 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/husqvarna_automower/number.py b/homeassistant/components/husqvarna_automower/number.py index 8745b93479d..e2e617b427b 100644 --- a/homeassistant/components/husqvarna_automower/number.py +++ b/homeassistant/components/husqvarna_automower/number.py @@ -3,7 +3,7 @@ from collections.abc import Awaitable, Callable from dataclasses import dataclass import logging -from typing import Any +from typing import TYPE_CHECKING, Any from aioautomower.exceptions import ApiException from aioautomower.model import MowerAttributes @@ -12,7 +12,7 @@ from aioautomower.session import AutomowerSession from homeassistant.components.number import NumberEntity, NumberEntityDescription from homeassistant.config_entries import ConfigEntry from homeassistant.const import EntityCategory -from homeassistant.core import HomeAssistant +from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddEntitiesCallback @@ -32,6 +32,15 @@ class AutomowerNumberEntityDescription(NumberEntityDescription): set_value_fn: Callable[[AutomowerSession, str, float], Awaitable[Any]] +@callback +def _async_get_cutting_height(data: MowerAttributes) -> int: + """Return the cutting height.""" + if TYPE_CHECKING: + # Sensor does not get created if it is None + assert data.cutting_height is not None + return data.cutting_height + + NUMBER_TYPES: tuple[AutomowerNumberEntityDescription, ...] = ( AutomowerNumberEntityDescription( key="cutting_height", @@ -41,7 +50,7 @@ NUMBER_TYPES: tuple[AutomowerNumberEntityDescription, ...] 
= ( native_min_value=1, native_max_value=9, exists_fn=lambda data: data.cutting_height is not None, - value_fn=lambda data: data.cutting_height, + value_fn=_async_get_cutting_height, set_value_fn=lambda session, mower_id, cheight: session.set_cutting_height( mower_id, int(cheight) ), From 8d2813fb8b0f83a2c69b41e821ef0f1b3c222713 Mon Sep 17 00:00:00 2001 From: Jan Bouwhuis Date: Tue, 23 Apr 2024 22:53:13 +0200 Subject: [PATCH 331/426] Migrate legacy Ecobee notify service (#115592) * Migrate legacy Ecobee notify service * Correct comment * Update homeassistant/components/ecobee/notify.py Co-authored-by: Joost Lekkerkerker * Use version to check latest entry being used * Use 6 months of deprecation * Add repair flow tests * Only allow migrate_notify fix flow * Simplify repair flow * Use ecobee data to refrence entry * Make entry attrubute puiblic * Use hass.data ro retrieve entry. * Only register issue when legacy service when it is use * Remove backslash * Use ws_client.send_json_auto_id * Cleanup * Import domain from notify integration * Apply suggestions from code review Co-authored-by: Joost Lekkerkerker * Update dependencies * Use Issue_registry fixture * remove `update_before_add` flag * Update homeassistant/components/ecobee/notify.py Co-authored-by: Joost Lekkerkerker * Update homeassistant/components/ecobee/notify.py * Update tests/components/ecobee/conftest.py Co-authored-by: Joost Lekkerkerker * Fix typo and import --------- Co-authored-by: Joost Lekkerkerker --- homeassistant/components/ecobee/__init__.py | 6 +- homeassistant/components/ecobee/const.py | 1 + homeassistant/components/ecobee/manifest.json | 1 + homeassistant/components/ecobee/notify.py | 57 ++++++++++++- homeassistant/components/ecobee/repairs.py | 37 +++++++++ homeassistant/components/ecobee/strings.json | 13 +++ tests/components/ecobee/common.py | 10 ++- tests/components/ecobee/conftest.py | 9 ++- tests/components/ecobee/test_notify.py | 57 +++++++++++++ 
tests/components/ecobee/test_repairs.py | 79 +++++++++++++++++++ 10 files changed, 259 insertions(+), 11 deletions(-) create mode 100644 homeassistant/components/ecobee/repairs.py create mode 100644 tests/components/ecobee/test_notify.py create mode 100644 tests/components/ecobee/test_repairs.py diff --git a/homeassistant/components/ecobee/__init__.py b/homeassistant/components/ecobee/__init__.py index 8083d0efcb4..6f032fbaae9 100644 --- a/homeassistant/components/ecobee/__init__.py +++ b/homeassistant/components/ecobee/__init__.py @@ -73,6 +73,8 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) + # The legacy Ecobee notify.notify service is deprecated + # was with HA Core 2024.5.0 and will be removed with HA core 2024.11.0 hass.async_create_task( discovery.async_load_platform( hass, @@ -97,7 +99,7 @@ class EcobeeData: ) -> None: """Initialize the Ecobee data object.""" self._hass = hass - self._entry = entry + self.entry = entry self.ecobee = Ecobee( config={ECOBEE_API_KEY: api_key, ECOBEE_REFRESH_TOKEN: refresh_token} ) @@ -117,7 +119,7 @@ class EcobeeData: _LOGGER.debug("Refreshing ecobee tokens and updating config entry") if await self._hass.async_add_executor_job(self.ecobee.refresh_tokens): self._hass.config_entries.async_update_entry( - self._entry, + self.entry, data={ CONF_API_KEY: self.ecobee.config[ECOBEE_API_KEY], CONF_REFRESH_TOKEN: self.ecobee.config[ECOBEE_REFRESH_TOKEN], diff --git a/homeassistant/components/ecobee/const.py b/homeassistant/components/ecobee/const.py index e20acb5cfca..0eed0ab67f9 100644 --- a/homeassistant/components/ecobee/const.py +++ b/homeassistant/components/ecobee/const.py @@ -46,6 +46,7 @@ PLATFORMS = [ Platform.BINARY_SENSOR, Platform.CLIMATE, Platform.HUMIDIFIER, + Platform.NOTIFY, Platform.NUMBER, Platform.SENSOR, Platform.WEATHER, diff --git a/homeassistant/components/ecobee/manifest.json 
b/homeassistant/components/ecobee/manifest.json index f3f5b59a36f..7e461230600 100644 --- a/homeassistant/components/ecobee/manifest.json +++ b/homeassistant/components/ecobee/manifest.json @@ -3,6 +3,7 @@ "name": "ecobee", "codeowners": [], "config_flow": true, + "dependencies": ["http", "repairs"], "documentation": "https://www.home-assistant.io/integrations/ecobee", "homekit": { "models": ["EB", "ecobee*"] diff --git a/homeassistant/components/ecobee/notify.py b/homeassistant/components/ecobee/notify.py index b2f6ccb05c8..787130c403f 100644 --- a/homeassistant/components/ecobee/notify.py +++ b/homeassistant/components/ecobee/notify.py @@ -2,11 +2,23 @@ from __future__ import annotations -from homeassistant.components.notify import ATTR_TARGET, BaseNotificationService +from functools import partial +from typing import Any + +from homeassistant.components.notify import ( + ATTR_TARGET, + BaseNotificationService, + NotifyEntity, +) +from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType +from . 
import Ecobee, EcobeeData from .const import DOMAIN +from .entity import EcobeeBaseEntity +from .repairs import migrate_notify_issue def get_service( @@ -18,18 +30,25 @@ def get_service( if discovery_info is None: return None - data = hass.data[DOMAIN] + data: EcobeeData = hass.data[DOMAIN] return EcobeeNotificationService(data.ecobee) class EcobeeNotificationService(BaseNotificationService): """Implement the notification service for the Ecobee thermostat.""" - def __init__(self, ecobee): + def __init__(self, ecobee: Ecobee) -> None: """Initialize the service.""" self.ecobee = ecobee - def send_message(self, message="", **kwargs): + async def async_send_message(self, message: str = "", **kwargs: Any) -> None: + """Send a message and raise issue.""" + migrate_notify_issue(self.hass) + await self.hass.async_add_executor_job( + partial(self.send_message, message, **kwargs) + ) + + def send_message(self, message: str = "", **kwargs: Any) -> None: """Send a message.""" targets = kwargs.get(ATTR_TARGET) @@ -39,3 +58,33 @@ class EcobeeNotificationService(BaseNotificationService): for target in targets: thermostat_index = int(target) self.ecobee.send_message(thermostat_index, message) + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up the ecobee thermostat.""" + data: EcobeeData = hass.data[DOMAIN] + async_add_entities( + EcobeeNotifyEntity(data, index) for index in range(len(data.ecobee.thermostats)) + ) + + +class EcobeeNotifyEntity(EcobeeBaseEntity, NotifyEntity): + """Implement the notification entity for the Ecobee thermostat.""" + + _attr_name = None + _attr_has_entity_name = True + + def __init__(self, data: EcobeeData, thermostat_index: int) -> None: + """Initialize the thermostat.""" + super().__init__(data, thermostat_index) + self._attr_unique_id = ( + f"{self.thermostat["identifier"]}_notify_{thermostat_index}" + ) + + def send_message(self, message: str) -> 
None: + """Send a message.""" + self.data.ecobee.send_message(self.thermostat_index, message) diff --git a/homeassistant/components/ecobee/repairs.py b/homeassistant/components/ecobee/repairs.py new file mode 100644 index 00000000000..66474730b2f --- /dev/null +++ b/homeassistant/components/ecobee/repairs.py @@ -0,0 +1,37 @@ +"""Repairs support for Ecobee.""" + +from __future__ import annotations + +from homeassistant.components.notify import DOMAIN as NOTIFY_DOMAIN +from homeassistant.components.repairs import RepairsFlow +from homeassistant.components.repairs.issue_handler import ConfirmRepairFlow +from homeassistant.core import HomeAssistant, callback +from homeassistant.helpers import issue_registry as ir + +from .const import DOMAIN + + +@callback +def migrate_notify_issue(hass: HomeAssistant) -> None: + """Ensure an issue is registered.""" + ir.async_create_issue( + hass, + DOMAIN, + "migrate_notify", + breaks_in_ha_version="2024.11.0", + issue_domain=NOTIFY_DOMAIN, + is_fixable=True, + is_persistent=True, + translation_key="migrate_notify", + severity=ir.IssueSeverity.WARNING, + ) + + +async def async_create_fix_flow( + hass: HomeAssistant, + issue_id: str, + data: dict[str, str | int | float | None] | None, +) -> RepairsFlow: + """Create flow.""" + assert issue_id == "migrate_notify" + return ConfirmRepairFlow() diff --git a/homeassistant/components/ecobee/strings.json b/homeassistant/components/ecobee/strings.json index b1d1df65417..1d64b6d6b94 100644 --- a/homeassistant/components/ecobee/strings.json +++ b/homeassistant/components/ecobee/strings.json @@ -163,5 +163,18 @@ } } } + }, + "issues": { + "migrate_notify": { + "title": "Migration of Ecobee notify service", + "fix_flow": { + "step": { + "confirm": { + "description": "The Ecobee `notify` service has been migrated. A new `notify` entity per Thermostat is available now.\n\nUpdate any automations to use the new `notify.send_message` exposed by these new entities. 
When this is done, fix this issue and restart Home Assistant.", + "title": "Disable legacy Ecobee notify service" + } + } + } + } } } diff --git a/tests/components/ecobee/common.py b/tests/components/ecobee/common.py index 60f17c3618d..423b0eee320 100644 --- a/tests/components/ecobee/common.py +++ b/tests/components/ecobee/common.py @@ -4,14 +4,19 @@ from unittest.mock import patch from homeassistant.components.ecobee.const import CONF_REFRESH_TOKEN, DOMAIN from homeassistant.const import CONF_API_KEY +from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component from tests.common import MockConfigEntry -async def setup_platform(hass, platform) -> MockConfigEntry: +async def setup_platform( + hass: HomeAssistant, + platform: str, +) -> MockConfigEntry: """Set up the ecobee platform.""" mock_entry = MockConfigEntry( + title=DOMAIN, domain=DOMAIN, data={ CONF_API_KEY: "ABC123", @@ -22,7 +27,6 @@ async def setup_platform(hass, platform) -> MockConfigEntry: with patch("homeassistant.components.ecobee.const.PLATFORMS", [platform]): assert await async_setup_component(hass, DOMAIN, {}) - - await hass.async_block_till_done() + await hass.async_block_till_done() return mock_entry diff --git a/tests/components/ecobee/conftest.py b/tests/components/ecobee/conftest.py index 952c2f3fba3..27d5a949c58 100644 --- a/tests/components/ecobee/conftest.py +++ b/tests/components/ecobee/conftest.py @@ -1,12 +1,13 @@ """Fixtures for tests.""" +from collections.abc import Generator from unittest.mock import MagicMock, patch import pytest from homeassistant.components.ecobee import ECOBEE_API_KEY, ECOBEE_REFRESH_TOKEN -from tests.common import load_fixture +from tests.common import load_fixture, load_json_object_fixture @pytest.fixture(autouse=True) @@ -23,11 +24,15 @@ def requests_mock_fixture(requests_mock): @pytest.fixture -def mock_ecobee(): +def mock_ecobee() -> Generator[None, MagicMock]: """Mock an Ecobee object.""" ecobee = MagicMock() 
ecobee.request_pin.return_value = True ecobee.refresh_tokens.return_value = True + ecobee.thermostats = load_json_object_fixture("ecobee-data.json", "ecobee")[ + "thermostatList" + ] + ecobee.get_thermostat = lambda index: ecobee.thermostats[index] ecobee.config = {ECOBEE_API_KEY: "mocked_key", ECOBEE_REFRESH_TOKEN: "mocked_token"} with patch("homeassistant.components.ecobee.Ecobee", return_value=ecobee): diff --git a/tests/components/ecobee/test_notify.py b/tests/components/ecobee/test_notify.py new file mode 100644 index 00000000000..c66f04c752a --- /dev/null +++ b/tests/components/ecobee/test_notify.py @@ -0,0 +1,57 @@ +"""Test Ecobee notify service.""" + +from unittest.mock import MagicMock + +from homeassistant.components.ecobee import DOMAIN +from homeassistant.components.notify import ( + DOMAIN as NOTIFY_DOMAIN, + SERVICE_SEND_MESSAGE, +) +from homeassistant.core import HomeAssistant +from homeassistant.helpers import issue_registry as ir + +from .common import setup_platform + +THERMOSTAT_ID = 0 + + +async def test_notify_entity_service( + hass: HomeAssistant, + mock_ecobee: MagicMock, +) -> None: + """Test the notify entity service.""" + await setup_platform(hass, NOTIFY_DOMAIN) + + entity_id = "notify.ecobee" + state = hass.states.get(entity_id) + assert state is not None + assert hass.services.has_service(NOTIFY_DOMAIN, SERVICE_SEND_MESSAGE) + await hass.services.async_call( + NOTIFY_DOMAIN, + SERVICE_SEND_MESSAGE, + service_data={"entity_id": entity_id, "message": "It is too cold!"}, + blocking=True, + ) + await hass.async_block_till_done() + mock_ecobee.send_message.assert_called_with(THERMOSTAT_ID, "It is too cold!") + + +async def test_legacy_notify_service( + hass: HomeAssistant, + mock_ecobee: MagicMock, + issue_registry: ir.IssueRegistry, +) -> None: + """Test the legacy notify service.""" + await setup_platform(hass, NOTIFY_DOMAIN) + + assert hass.services.has_service(NOTIFY_DOMAIN, DOMAIN) + await hass.services.async_call( + NOTIFY_DOMAIN, + 
DOMAIN, + service_data={"message": "It is too cold!", "target": THERMOSTAT_ID}, + blocking=True, + ) + await hass.async_block_till_done() + mock_ecobee.send_message.assert_called_with(THERMOSTAT_ID, "It is too cold!") + mock_ecobee.send_message.reset_mock() + assert len(issue_registry.issues) == 1 diff --git a/tests/components/ecobee/test_repairs.py b/tests/components/ecobee/test_repairs.py new file mode 100644 index 00000000000..19fdc6f7bba --- /dev/null +++ b/tests/components/ecobee/test_repairs.py @@ -0,0 +1,79 @@ +"""Test repairs for Ecobee integration.""" + +from http import HTTPStatus +from unittest.mock import MagicMock + +from homeassistant.components.ecobee import DOMAIN +from homeassistant.components.notify import DOMAIN as NOTIFY_DOMAIN +from homeassistant.components.repairs.issue_handler import ( + async_process_repairs_platforms, +) +from homeassistant.components.repairs.websocket_api import ( + RepairsFlowIndexView, + RepairsFlowResourceView, +) +from homeassistant.core import HomeAssistant +from homeassistant.helpers import issue_registry as ir + +from .common import setup_platform + +from tests.typing import ClientSessionGenerator + +THERMOSTAT_ID = 0 + + +async def test_ecobee_repair_flow( + hass: HomeAssistant, + mock_ecobee: MagicMock, + hass_client: ClientSessionGenerator, + issue_registry: ir.IssueRegistry, +) -> None: + """Test the ecobee notify service repair flow is triggered.""" + await setup_platform(hass, NOTIFY_DOMAIN) + await async_process_repairs_platforms(hass) + + http_client = await hass_client() + + # Simulate legacy service being used + assert hass.services.has_service(NOTIFY_DOMAIN, DOMAIN) + await hass.services.async_call( + NOTIFY_DOMAIN, + DOMAIN, + service_data={"message": "It is too cold!", "target": THERMOSTAT_ID}, + blocking=True, + ) + await hass.async_block_till_done() + mock_ecobee.send_message.assert_called_with(THERMOSTAT_ID, "It is too cold!") + mock_ecobee.send_message.reset_mock() + + # Assert the issue is present 
+ assert issue_registry.async_get_issue( + domain=DOMAIN, + issue_id="migrate_notify", + ) + assert len(issue_registry.issues) == 1 + + url = RepairsFlowIndexView.url + resp = await http_client.post( + url, json={"handler": DOMAIN, "issue_id": "migrate_notify"} + ) + assert resp.status == HTTPStatus.OK + data = await resp.json() + + flow_id = data["flow_id"] + assert data["step_id"] == "confirm" + + url = RepairsFlowResourceView.url.format(flow_id=flow_id) + resp = await http_client.post(url) + assert resp.status == HTTPStatus.OK + data = await resp.json() + assert data["type"] == "create_entry" + # Test confirm step in repair flow + await hass.async_block_till_done() + + # Assert the issue is no longer present + assert not issue_registry.async_get_issue( + domain=DOMAIN, + issue_id="migrate_notify", + ) + assert len(issue_registry.issues) == 0 From 72ed16c3e08a4311a5dbe5a46d3f6bacecee394d Mon Sep 17 00:00:00 2001 From: Jan Bouwhuis Date: Tue, 23 Apr 2024 23:20:34 +0200 Subject: [PATCH 332/426] Update quality scale mqtt integration to platinum (#116059) --- homeassistant/components/mqtt/manifest.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/homeassistant/components/mqtt/manifest.json b/homeassistant/components/mqtt/manifest.json index 5f923868270..34370c82507 100644 --- a/homeassistant/components/mqtt/manifest.json +++ b/homeassistant/components/mqtt/manifest.json @@ -6,6 +6,6 @@ "dependencies": ["file_upload", "http"], "documentation": "https://www.home-assistant.io/integrations/mqtt", "iot_class": "local_push", - "quality_scale": "gold", + "quality_scale": "platinum", "requirements": ["paho-mqtt==1.6.1"] } From 35db2e41015b8d861e92f3da191b36f3c1cb3810 Mon Sep 17 00:00:00 2001 From: Michael <35783820+mib1185@users.noreply.github.com> Date: Tue, 23 Apr 2024 23:42:17 +0200 Subject: [PATCH 333/426] Complete test coverage for Tankerkonig (#115920) * complete tests * update snapshots after rebase --- .coveragerc | 5 -- 
tests/components/tankerkoenig/conftest.py | 24 +------ tests/components/tankerkoenig/const.py | 48 ++++++++++++++ .../snapshots/test_binary_sensor.ambr | 9 +++ .../tankerkoenig/snapshots/test_sensor.ambr | 52 +++++++++++++++ .../tankerkoenig/test_binary_sensor.py | 25 +++++++ .../tankerkoenig/test_config_flow.py | 22 +++++-- .../tankerkoenig/test_coordinator.py | 45 ++++++++++++- tests/components/tankerkoenig/test_sensor.py | 65 +++++++++++++++++++ 9 files changed, 262 insertions(+), 33 deletions(-) create mode 100644 tests/components/tankerkoenig/snapshots/test_binary_sensor.ambr create mode 100644 tests/components/tankerkoenig/snapshots/test_sensor.ambr create mode 100644 tests/components/tankerkoenig/test_binary_sensor.py create mode 100644 tests/components/tankerkoenig/test_sensor.py diff --git a/.coveragerc b/.coveragerc index e4fe305a3bf..9eb32f7cda8 100644 --- a/.coveragerc +++ b/.coveragerc @@ -1408,11 +1408,6 @@ omit = homeassistant/components/tado/water_heater.py homeassistant/components/tami4/button.py homeassistant/components/tank_utility/sensor.py - homeassistant/components/tankerkoenig/__init__.py - homeassistant/components/tankerkoenig/binary_sensor.py - homeassistant/components/tankerkoenig/coordinator.py - homeassistant/components/tankerkoenig/entity.py - homeassistant/components/tankerkoenig/sensor.py homeassistant/components/tapsaff/binary_sensor.py homeassistant/components/tautulli/__init__.py homeassistant/components/tautulli/coordinator.py diff --git a/tests/components/tankerkoenig/conftest.py b/tests/components/tankerkoenig/conftest.py index 4400082a45f..1a3dcb6f991 100644 --- a/tests/components/tankerkoenig/conftest.py +++ b/tests/components/tankerkoenig/conftest.py @@ -6,20 +6,11 @@ from unittest.mock import AsyncMock, patch import pytest from homeassistant.components.tankerkoenig import DOMAIN -from homeassistant.components.tankerkoenig.const import CONF_FUEL_TYPES, CONF_STATIONS -from homeassistant.const import ( - CONF_API_KEY, - 
CONF_LATITUDE, - CONF_LOCATION, - CONF_LONGITUDE, - CONF_NAME, - CONF_RADIUS, - CONF_SHOW_ON_MAP, -) +from homeassistant.const import CONF_SHOW_ON_MAP from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component -from .const import NEARBY_STATIONS, PRICES, STATION +from .const import CONFIG_DATA, NEARBY_STATIONS, PRICES, STATION from tests.common import MockConfigEntry @@ -55,16 +46,7 @@ async def mock_config_entry(hass: HomeAssistant) -> MockConfigEntry: options={ CONF_SHOW_ON_MAP: True, }, - data={ - CONF_NAME: "Home", - CONF_API_KEY: "269534f6-xxxx-xxxx-xxxx-yyyyzzzzxxxx", - CONF_FUEL_TYPES: ["e5"], - CONF_LOCATION: {CONF_LATITUDE: 51.0, CONF_LONGITUDE: 13.0}, - CONF_RADIUS: 2.0, - CONF_STATIONS: [ - "3bcd61da-xxxx-xxxx-xxxx-19d5523a7ae8", - ], - }, + data=CONFIG_DATA, ) diff --git a/tests/components/tankerkoenig/const.py b/tests/components/tankerkoenig/const.py index 9ec64eb79a9..2c28753a7f3 100644 --- a/tests/components/tankerkoenig/const.py +++ b/tests/components/tankerkoenig/const.py @@ -2,6 +2,16 @@ from aiotankerkoenig import PriceInfo, Station, Status +from homeassistant.components.tankerkoenig.const import CONF_FUEL_TYPES, CONF_STATIONS +from homeassistant.const import ( + CONF_API_KEY, + CONF_LATITUDE, + CONF_LOCATION, + CONF_LONGITUDE, + CONF_NAME, + CONF_RADIUS, +) + NEARBY_STATIONS = [ Station( id="3bcd61da-xxxx-xxxx-xxxx-19d5523a7ae8", @@ -49,6 +59,25 @@ STATION = Station( state="xxXX", ) +STATION_MISSING_FUELTYPE = Station( + id="3bcd61da-xxxx-xxxx-xxxx-19d5523a7ae8", + name="Station ABC", + brand="Station", + street="Somewhere Street", + house_number="1", + post_code=1234, + place="Somewhere", + opening_times=[], + overrides=[], + whole_day=True, + is_open=True, + e5=1.719, + e10=1.659, + lat=51.1, + lng=13.1, + state="xxXX", +) + PRICES = { "3bcd61da-xxxx-xxxx-xxxx-19d5523a7ae8": PriceInfo( status=Status.OPEN, @@ -57,3 +86,22 @@ PRICES = { diesel=1.659, ), } + +PRICES_MISSING_FUELTYPE = { + 
"3bcd61da-xxxx-xxxx-xxxx-19d5523a7ae8": PriceInfo( + status=Status.OPEN, + e5=1.719, + e10=1.659, + ), +} + +CONFIG_DATA = { + CONF_NAME: "Home", + CONF_API_KEY: "269534f6-xxxx-xxxx-xxxx-yyyyzzzzxxxx", + CONF_FUEL_TYPES: ["e5"], + CONF_LOCATION: {CONF_LATITUDE: 51.0, CONF_LONGITUDE: 13.0}, + CONF_RADIUS: 2.0, + CONF_STATIONS: [ + "3bcd61da-xxxx-xxxx-xxxx-19d5523a7ae8", + ], +} diff --git a/tests/components/tankerkoenig/snapshots/test_binary_sensor.ambr b/tests/components/tankerkoenig/snapshots/test_binary_sensor.ambr new file mode 100644 index 00000000000..6b454820b05 --- /dev/null +++ b/tests/components/tankerkoenig/snapshots/test_binary_sensor.ambr @@ -0,0 +1,9 @@ +# serializer version: 1 +# name: test_binary_sensor + ReadOnlyDict({ + 'device_class': 'door', + 'friendly_name': 'Station Somewhere Street 1 Status', + 'latitude': 51.1, + 'longitude': 13.1, + }) +# --- diff --git a/tests/components/tankerkoenig/snapshots/test_sensor.ambr b/tests/components/tankerkoenig/snapshots/test_sensor.ambr new file mode 100644 index 00000000000..ec9a72e141d --- /dev/null +++ b/tests/components/tankerkoenig/snapshots/test_sensor.ambr @@ -0,0 +1,52 @@ +# serializer version: 1 +# name: test_sensor + ReadOnlyDict({ + 'attribution': 'Data provided by https://www.tankerkoenig.de', + 'brand': 'Station', + 'city': 'Somewhere', + 'friendly_name': 'Station Somewhere Street 1 Super E10', + 'fuel_type': , + 'house_number': '1', + 'latitude': 51.1, + 'longitude': 13.1, + 'postcode': 1234, + 'state_class': , + 'station_name': 'Station ABC', + 'street': 'Somewhere Street', + 'unit_of_measurement': '€', + }) +# --- +# name: test_sensor.1 + ReadOnlyDict({ + 'attribution': 'Data provided by https://www.tankerkoenig.de', + 'brand': 'Station', + 'city': 'Somewhere', + 'friendly_name': 'Station Somewhere Street 1 Super', + 'fuel_type': , + 'house_number': '1', + 'latitude': 51.1, + 'longitude': 13.1, + 'postcode': 1234, + 'state_class': , + 'station_name': 'Station ABC', + 'street': 'Somewhere 
Street', + 'unit_of_measurement': '€', + }) +# --- +# name: test_sensor.2 + ReadOnlyDict({ + 'attribution': 'Data provided by https://www.tankerkoenig.de', + 'brand': 'Station', + 'city': 'Somewhere', + 'friendly_name': 'Station Somewhere Street 1 Diesel', + 'fuel_type': , + 'house_number': '1', + 'latitude': 51.1, + 'longitude': 13.1, + 'postcode': 1234, + 'state_class': , + 'station_name': 'Station ABC', + 'street': 'Somewhere Street', + 'unit_of_measurement': '€', + }) +# --- diff --git a/tests/components/tankerkoenig/test_binary_sensor.py b/tests/components/tankerkoenig/test_binary_sensor.py new file mode 100644 index 00000000000..c103f2d26ff --- /dev/null +++ b/tests/components/tankerkoenig/test_binary_sensor.py @@ -0,0 +1,25 @@ +"""Tests for the Tankerkoening integration.""" + +from __future__ import annotations + +import pytest +from syrupy import SnapshotAssertion + +from homeassistant.const import STATE_ON +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + + +@pytest.mark.usefixtures("setup_integration") +async def test_binary_sensor( + hass: HomeAssistant, + config_entry: MockConfigEntry, + snapshot: SnapshotAssertion, +) -> None: + """Test the tankerkoenig binary sensors.""" + + state = hass.states.get("binary_sensor.station_somewhere_street_1_status") + assert state + assert state.state == STATE_ON + assert state.attributes == snapshot diff --git a/tests/components/tankerkoenig/test_config_flow.py b/tests/components/tankerkoenig/test_config_flow.py index b255491cb31..022b49fd3f8 100644 --- a/tests/components/tankerkoenig/test_config_flow.py +++ b/tests/components/tankerkoenig/test_config_flow.py @@ -1,6 +1,6 @@ """Tests for Tankerkoenig config flow.""" -from unittest.mock import patch +from unittest.mock import AsyncMock, patch from aiotankerkoenig.exceptions import TankerkoenigInvalidKeyError @@ -21,6 +21,7 @@ from homeassistant.const import ( ) from homeassistant.core import HomeAssistant from 
homeassistant.data_entry_flow import FlowResultType +from homeassistant.setup import async_setup_component from .const import NEARBY_STATIONS @@ -208,7 +209,7 @@ async def test_reauth(hass: HomeAssistant, config_entry: MockConfigEntry) -> Non assert entry.data[CONF_API_KEY] == "269534f6-aaaa-bbbb-cccc-yyyyzzzzxxxx" -async def test_options_flow(hass: HomeAssistant) -> None: +async def test_options_flow(hass: HomeAssistant, tankerkoenig: AsyncMock) -> None: """Test options flow.""" mock_config = MockConfigEntry( @@ -218,10 +219,17 @@ async def test_options_flow(hass: HomeAssistant) -> None: unique_id=f"{DOMAIN}_{MOCK_USER_DATA[CONF_LOCATION][CONF_LATITUDE]}_{MOCK_USER_DATA[CONF_LOCATION][CONF_LONGITUDE]}", ) mock_config.add_to_hass(hass) + assert await async_setup_component(hass, DOMAIN, {}) + await hass.async_block_till_done() - with patch( - "homeassistant.components.tankerkoenig.config_flow.Tankerkoenig.nearby_stations", - return_value=NEARBY_STATIONS, + with ( + patch( + "homeassistant.components.tankerkoenig.config_flow.Tankerkoenig.nearby_stations", + return_value=NEARBY_STATIONS, + ), + patch( + "homeassistant.config_entries.ConfigEntries.async_reload" + ) as mock_async_reload, ): result = await hass.config_entries.options.async_init(mock_config.entry_id) assert result["type"] is FlowResultType.FORM @@ -237,6 +245,10 @@ async def test_options_flow(hass: HomeAssistant) -> None: assert result["type"] is FlowResultType.CREATE_ENTRY assert not mock_config.options[CONF_SHOW_ON_MAP] + await hass.async_block_till_done() + + assert mock_async_reload.call_count == 1 + async def test_options_flow_error(hass: HomeAssistant) -> None: """Test options flow.""" diff --git a/tests/components/tankerkoenig/test_coordinator.py b/tests/components/tankerkoenig/test_coordinator.py index 1e8991f3f9c..3ba0dc31c5f 100644 --- a/tests/components/tankerkoenig/test_coordinator.py +++ b/tests/components/tankerkoenig/test_coordinator.py @@ -15,14 +15,20 @@ import pytest from 
homeassistant.components.binary_sensor import DOMAIN as BINARY_SENSOR_DOMAIN from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN -from homeassistant.components.tankerkoenig.const import DEFAULT_SCAN_INTERVAL, DOMAIN +from homeassistant.components.tankerkoenig.const import ( + CONF_STATIONS, + DEFAULT_SCAN_INTERVAL, + DOMAIN, +) from homeassistant.config_entries import ConfigEntryState -from homeassistant.const import ATTR_ID, STATE_UNAVAILABLE +from homeassistant.const import ATTR_ID, CONF_SHOW_ON_MAP, STATE_UNAVAILABLE from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util +from .const import CONFIG_DATA + from tests.common import MockConfigEntry, async_fire_time_changed @@ -190,3 +196,38 @@ async def test_automatic_registry_cleanup( len(dr.async_entries_for_config_entry(device_registry, config_entry.entry_id)) == 1 ) + + +async def test_many_stations_warning( + hass: HomeAssistant, tankerkoenig: AsyncMock, caplog: pytest.LogCaptureFixture +) -> None: + """Test the warning about morethan 10 selected stations.""" + mock_config = MockConfigEntry( + domain=DOMAIN, + data={ + **CONFIG_DATA, + CONF_STATIONS: [ + "3bcd61da-xxxx-xxxx-xxxx-19d5523a7ae8", + "36b4b812-xxxx-xxxx-xxxx-c51735325858", + "54e2b642-xxxx-xxxx-xxxx-87cd4e9867f1", + "11b5c130-xxxx-xxxx-xxxx-856b8489b528", + "a9137924-xxxx-xxxx-xxxx-7029d7eb073f", + "57c6d275-xxxx-xxxx-xxxx-7f6ad9e6d638", + "bbc3c3a2-xxxx-xxxx-xxxx-840cc3d496b6", + "1db63dd9-xxxx-xxxx-xxxx-a889b53cbc65", + "18d7262e-xxxx-xxxx-xxxx-4a61ad302e14", + "a8041aa3-xxxx-xxxx-xxxx-7c6b180e5a40", + "739aa0eb-xxxx-xxxx-xxxx-a3d7b6c8a42f", + "9ad9fb26-xxxx-xxxx-xxxx-84e6a02b3096", + "74267867-xxxx-xxxx-xxxx-74ce3d45882c", + "86657222-xxxx-xxxx-xxxx-a2b795ab3cf9", + ], + }, + options={CONF_SHOW_ON_MAP: True}, + unique_id="51.0_13.0", + ) + mock_config.add_to_hass(hass) + 
assert await async_setup_component(hass, DOMAIN, {}) + await hass.async_block_till_done() + + assert "Found more than 10 stations to check" in caplog.text diff --git a/tests/components/tankerkoenig/test_sensor.py b/tests/components/tankerkoenig/test_sensor.py new file mode 100644 index 00000000000..788c1de7021 --- /dev/null +++ b/tests/components/tankerkoenig/test_sensor.py @@ -0,0 +1,65 @@ +"""Tests for the Tankerkoening integration.""" + +from __future__ import annotations + +from unittest.mock import AsyncMock + +import pytest +from syrupy import SnapshotAssertion + +from homeassistant.components.tankerkoenig import DOMAIN +from homeassistant.core import HomeAssistant +from homeassistant.setup import async_setup_component + +from .const import PRICES_MISSING_FUELTYPE, STATION_MISSING_FUELTYPE + +from tests.common import MockConfigEntry + + +@pytest.mark.usefixtures("setup_integration") +async def test_sensor( + hass: HomeAssistant, + tankerkoenig: AsyncMock, + config_entry: MockConfigEntry, + snapshot: SnapshotAssertion, +) -> None: + """Test the tankerkoenig sensors.""" + + state = hass.states.get("sensor.station_somewhere_street_1_super_e10") + assert state + assert state.state == "1.659" + assert state.attributes == snapshot + + state = hass.states.get("sensor.station_somewhere_street_1_super") + assert state + assert state.state == "1.719" + assert state.attributes == snapshot + + state = hass.states.get("sensor.station_somewhere_street_1_diesel") + assert state + assert state.state == "1.659" + assert state.attributes == snapshot + + +async def test_sensor_missing_fueltype( + hass: HomeAssistant, + tankerkoenig: AsyncMock, + config_entry: MockConfigEntry, +) -> None: + """Test the tankerkoenig sensors.""" + tankerkoenig.station_details.return_value = STATION_MISSING_FUELTYPE + tankerkoenig.prices.return_value = PRICES_MISSING_FUELTYPE + + config_entry.add_to_hass(hass) + + assert await async_setup_component(hass, DOMAIN, {}) + await 
hass.async_block_till_done() + + state = hass.states.get("sensor.station_somewhere_street_1_super_e10") + assert state + + state = hass.states.get("sensor.station_somewhere_street_1_super") + assert state + + state = hass.states.get("sensor.station_somewhere_street_1_diesel") + assert not state From 62dadc47ff442ceea8f741c0eb054175cb1e2a34 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 24 Apr 2024 00:02:31 +0200 Subject: [PATCH 334/426] Bump github/codeql-action from 3.25.1 to 3.25.2 (#116016) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/codeql.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml index 6a366a7ab8d..d1393c97462 100644 --- a/.github/workflows/codeql.yml +++ b/.github/workflows/codeql.yml @@ -24,11 +24,11 @@ jobs: uses: actions/checkout@v4.1.3 - name: Initialize CodeQL - uses: github/codeql-action/init@v3.25.1 + uses: github/codeql-action/init@v3.25.2 with: languages: python - name: Perform CodeQL Analysis - uses: github/codeql-action/analyze@v3.25.1 + uses: github/codeql-action/analyze@v3.25.2 with: category: "/language:python" From f9c2cd73f555d8e9e98f327d8cee6e86ce367a04 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Wed, 24 Apr 2024 02:19:18 +0200 Subject: [PATCH 335/426] Fix non-thread-safe operations in media_extractor (#116065) --- homeassistant/components/media_extractor/__init__.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/media_extractor/__init__.py b/homeassistant/components/media_extractor/__init__.py index 139acf06cf6..56b768c26a2 100644 --- a/homeassistant/components/media_extractor/__init__.py +++ b/homeassistant/components/media_extractor/__init__.py @@ -55,7 +55,7 @@ CONFIG_SCHEMA = vol.Schema( ) -def setup(hass: HomeAssistant, config: ConfigType) -> bool: +async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Set up the media extractor service.""" async def extract_media_url(call: ServiceCall) -> ServiceResponse: @@ -114,7 +114,7 @@ def setup(hass: HomeAssistant, config: ConfigType) -> bool: supports_response=SupportsResponse.ONLY, ) - hass.services.register( + hass.services.async_register( DOMAIN, SERVICE_PLAY_MEDIA, play_media, From e3016b131a28ffb44144afc37ce0f1770e899fba Mon Sep 17 00:00:00 2001 From: David Friedland Date: Tue, 23 Apr 2024 18:22:03 -0700 Subject: [PATCH 336/426] Add Event support to ESPHome components (#116061) Co-authored-by: Jesse Hills <3060199+jesserockz@users.noreply.github.com> --- .../components/esphome/entry_data.py | 5 +- homeassistant/components/esphome/event.py | 48 +++++++++++++++++++ tests/components/esphome/test_event.py | 38 +++++++++++++++ 3 files changed, 90 insertions(+), 1 deletion(-) create mode 100644 homeassistant/components/esphome/event.py create mode 100644 tests/components/esphome/test_event.py diff --git a/homeassistant/components/esphome/entry_data.py b/homeassistant/components/esphome/entry_data.py index a840fc3a17e..7316c09cc5e 100644 --- a/homeassistant/components/esphome/entry_data.py +++ b/homeassistant/components/esphome/entry_data.py @@ -24,6 +24,8 @@ from aioesphomeapi import ( DeviceInfo, EntityInfo, EntityState, + 
Event, + EventInfo, FanInfo, LightInfo, LockInfo, @@ -70,6 +72,7 @@ INFO_TYPE_TO_PLATFORM: dict[type[EntityInfo], Platform] = { CoverInfo: Platform.COVER, DateInfo: Platform.DATE, DateTimeInfo: Platform.DATETIME, + EventInfo: Platform.EVENT, FanInfo: Platform.FAN, LightInfo: Platform.LIGHT, LockInfo: Platform.LOCK, @@ -345,7 +348,7 @@ class RuntimeEntryData: if ( current_state == state and subscription_key not in stale_state - and state_type is not CameraState + and state_type not in (CameraState, Event) and not ( state_type is SensorState and (platform_info := self.info.get(SensorInfo)) diff --git a/homeassistant/components/esphome/event.py b/homeassistant/components/esphome/event.py new file mode 100644 index 00000000000..3c7331beba0 --- /dev/null +++ b/homeassistant/components/esphome/event.py @@ -0,0 +1,48 @@ +"""Support for ESPHome event components.""" + +from __future__ import annotations + +from aioesphomeapi import EntityInfo, Event, EventInfo + +from homeassistant.components.event import EventDeviceClass, EventEntity +from homeassistant.config_entries import ConfigEntry +from homeassistant.core import HomeAssistant, callback +from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.util.enum import try_parse_enum + +from .entity import EsphomeEntity, platform_async_setup_entry + + +async def async_setup_entry( + hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback +) -> None: + """Set up ESPHome event based on a config entry.""" + await platform_async_setup_entry( + hass, + entry, + async_add_entities, + info_type=EventInfo, + entity_type=EsphomeEvent, + state_type=Event, + ) + + +class EsphomeEvent(EsphomeEntity[EventInfo, Event], EventEntity): + """An event implementation for ESPHome.""" + + @callback + def _on_static_info_update(self, static_info: EntityInfo) -> None: + """Set attrs from static info.""" + super()._on_static_info_update(static_info) + static_info = self._static_info + if 
event_types := static_info.event_types: + self._attr_event_types = event_types + self._attr_device_class = try_parse_enum( + EventDeviceClass, static_info.device_class + ) + + @callback + def _on_state_update(self) -> None: + self._update_state_from_entry_data() + self._trigger_event(self._state.event_type) + self.async_write_ha_state() diff --git a/tests/components/esphome/test_event.py b/tests/components/esphome/test_event.py new file mode 100644 index 00000000000..c17dc4d98a9 --- /dev/null +++ b/tests/components/esphome/test_event.py @@ -0,0 +1,38 @@ +"""Test ESPHome Events.""" + +from aioesphomeapi import APIClient, Event, EventInfo +import pytest + +from homeassistant.components.event import EventDeviceClass +from homeassistant.core import HomeAssistant + + +@pytest.mark.freeze_time("2024-04-24 00:00:00+00:00") +async def test_generic_event_entity( + hass: HomeAssistant, + mock_client: APIClient, + mock_generic_device_entry, +) -> None: + """Test a generic event entity.""" + entity_info = [ + EventInfo( + object_id="myevent", + key=1, + name="my event", + unique_id="my_event", + event_types=["type1", "type2"], + device_class=EventDeviceClass.BUTTON, + ) + ] + states = [Event(key=1, event_type="type1")] + user_service = [] + await mock_generic_device_entry( + mock_client=mock_client, + entity_info=entity_info, + user_service=user_service, + states=states, + ) + state = hass.states.get("event.test_myevent") + assert state is not None + assert state.state == "2024-04-24T00:00:00.000+00:00" + assert state.attributes["event_type"] == "type1" From f2336a5a3abff6c6ed17baf17c5c931631ec5420 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Wed, 24 Apr 2024 03:31:44 +0200 Subject: [PATCH 337/426] Fix non-thread-safe operation in harmony (#116070) Fix unsafe thread operation in harmony https://github.com/home-assistant/core/actions/runs/8808429751/job/24177716644?pr=116066 --- homeassistant/components/harmony/entity.py | 13 +++++++++---- homeassistant/components/harmony/remote.py | 2 +- 2 files changed, 10 insertions(+), 5 deletions(-) diff --git a/homeassistant/components/harmony/entity.py b/homeassistant/components/harmony/entity.py index 99b5744e0ed..8bfa9fbad4d 100644 --- a/homeassistant/components/harmony/entity.py +++ b/homeassistant/components/harmony/entity.py @@ -6,6 +6,7 @@ from collections.abc import Callable from datetime import datetime import logging +from homeassistant.core import callback from homeassistant.helpers.entity import Entity from homeassistant.helpers.event import async_call_later @@ -38,7 +39,7 @@ class HarmonyEntity(Entity): _LOGGER.debug("%s: connected to the HUB", self._data.name) self.async_write_ha_state() - self._clear_disconnection_delay() + self._async_clear_disconnection_delay() async def async_got_disconnected(self, _: str | None = None) -> None: """Notification that we're disconnected from the HUB.""" @@ -46,15 +47,19 @@ class HarmonyEntity(Entity): # We're going to wait for 10 seconds before announcing we're # unavailable, this to allow a reconnection to happen. 
self._unsub_mark_disconnected = async_call_later( - self.hass, TIME_MARK_DISCONNECTED, self._mark_disconnected_if_unavailable + self.hass, + TIME_MARK_DISCONNECTED, + self._async_mark_disconnected_if_unavailable, ) - def _clear_disconnection_delay(self) -> None: + @callback + def _async_clear_disconnection_delay(self) -> None: if self._unsub_mark_disconnected: self._unsub_mark_disconnected() self._unsub_mark_disconnected = None - def _mark_disconnected_if_unavailable(self, _: datetime) -> None: + @callback + def _async_mark_disconnected_if_unavailable(self, _: datetime) -> None: self._unsub_mark_disconnected = None if not self.available: # Still disconnected. Let the state engine know. diff --git a/homeassistant/components/harmony/remote.py b/homeassistant/components/harmony/remote.py index c6b2e9be718..0c9bdcb9c6e 100644 --- a/homeassistant/components/harmony/remote.py +++ b/homeassistant/components/harmony/remote.py @@ -138,7 +138,7 @@ class HarmonyRemote(HarmonyEntity, RemoteEntity, RestoreEntity): _LOGGER.debug("%s: Harmony Hub added", self._data.name) - self.async_on_remove(self._clear_disconnection_delay) + self.async_on_remove(self._async_clear_disconnection_delay) self._setup_callbacks() self.async_on_remove( From b1b8b8ba00c5e2a55e1b65fd1c434cb2d89659ed Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Wed, 24 Apr 2024 03:32:07 +0200 Subject: [PATCH 338/426] Fix non-thread-safe operations in wake_on_lan (#116069) Fix unsafe thread operations in wake_on_lan https://github.com/home-assistant/core/actions/runs/8808429751/job/24177715837?pr=116066 --- homeassistant/components/wake_on_lan/switch.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/wake_on_lan/switch.py b/homeassistant/components/wake_on_lan/switch.py index a0b54fd8db0..e5c3a055310 100644 --- a/homeassistant/components/wake_on_lan/switch.py +++ b/homeassistant/components/wake_on_lan/switch.py @@ -129,7 +129,7 @@ class WolSwitch(SwitchEntity): if self._attr_assumed_state: self._state = True - self.async_write_ha_state() + self.schedule_update_ha_state() def turn_off(self, **kwargs: Any) -> None: """Turn the device off if an off action is present.""" @@ -138,7 +138,7 @@ class WolSwitch(SwitchEntity): if self._attr_assumed_state: self._state = False - self.async_write_ha_state() + self.schedule_update_ha_state() def update(self) -> None: """Check if device is on and update the state. Only called if assumed state is false.""" From 9d54aa205be26e172e89ed40d5be520ecf8c1caf Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Wed, 24 Apr 2024 03:33:19 +0200 Subject: [PATCH 339/426] Fix non-thread-safe operations in html5 (#116068) Fix non thread-safe calls in html5 https://github.com/home-assistant/core/actions/runs/8808425552/job/24177668764?pr=116055 --- homeassistant/components/html5/notify.py | 4 +- tests/components/html5/test_notify.py | 232 ++++++++++++----------- 2 files changed, 122 insertions(+), 114 deletions(-) diff --git a/homeassistant/components/html5/notify.py b/homeassistant/components/html5/notify.py index 782340dffa6..6049f8e2434 100644 --- a/homeassistant/components/html5/notify.py +++ b/homeassistant/components/html5/notify.py @@ -165,7 +165,7 @@ HTML5_SHOWNOTIFICATION_PARAMETERS = ( ) -def get_service( +async def async_get_service( hass: HomeAssistant, config: ConfigType, discovery_info: DiscoveryInfoType | None = None, @@ -173,7 +173,7 @@ def get_service( """Get the HTML5 push notification service.""" json_path = hass.config.path(REGISTRATIONS_FILE) - registrations = _load_config(json_path) + registrations = await hass.async_add_executor_job(_load_config, json_path) vapid_pub_key = config[ATTR_VAPID_PUB_KEY] vapid_prv_key = config[ATTR_VAPID_PRV_KEY] diff --git a/tests/components/html5/test_notify.py b/tests/components/html5/test_notify.py index 6763708cc38..ec14b38cd69 100644 --- a/tests/components/html5/test_notify.py +++ b/tests/components/html5/test_notify.py @@ -2,7 +2,7 @@ from http import HTTPStatus import json -from unittest.mock import MagicMock, mock_open, patch +from unittest.mock import mock_open, patch from aiohttp.hdrs import AUTHORIZATION @@ -83,166 +83,174 @@ async def mock_client(hass, hass_client, registrations=None): return await hass_client() -class TestHtml5Notify: - """Tests for HTML5 notify platform.""" +async def test_get_service_with_no_json(hass: HomeAssistant): + """Test empty json file.""" + await async_setup_component(hass, "http", {}) + m = mock_open() + with patch("homeassistant.util.json.open", m, create=True): + 
service = await html5.async_get_service(hass, VAPID_CONF) - def test_get_service_with_no_json(self): - """Test empty json file.""" - hass = MagicMock() + assert service is not None - m = mock_open() - with patch("homeassistant.util.json.open", m, create=True): - service = html5.get_service(hass, VAPID_CONF) - assert service is not None +@patch("homeassistant.components.html5.notify.WebPusher") +async def test_dismissing_message(mock_wp, hass: HomeAssistant): + """Test dismissing message.""" + await async_setup_component(hass, "http", {}) + mock_wp().send().status_code = 201 - @patch("homeassistant.components.html5.notify.WebPusher") - def test_dismissing_message(self, mock_wp): - """Test dismissing message.""" - hass = MagicMock() - mock_wp().send().status_code = 201 + data = {"device": SUBSCRIPTION_1} - data = {"device": SUBSCRIPTION_1} + m = mock_open(read_data=json.dumps(data)) + with patch("homeassistant.util.json.open", m, create=True): + service = await html5.async_get_service(hass, VAPID_CONF) + service.hass = hass - m = mock_open(read_data=json.dumps(data)) - with patch("homeassistant.util.json.open", m, create=True): - service = html5.get_service(hass, VAPID_CONF) + assert service is not None - assert service is not None + await service.async_dismiss(target=["device", "non_existing"], data={"tag": "test"}) - service.dismiss(target=["device", "non_existing"], data={"tag": "test"}) + assert len(mock_wp.mock_calls) == 4 - assert len(mock_wp.mock_calls) == 4 + # WebPusher constructor + assert mock_wp.mock_calls[2][1][0] == SUBSCRIPTION_1["subscription"] - # WebPusher constructor - assert mock_wp.mock_calls[2][1][0] == SUBSCRIPTION_1["subscription"] + # Call to send + payload = json.loads(mock_wp.mock_calls[3][2]["data"]) - # Call to send - payload = json.loads(mock_wp.mock_calls[3][2]["data"]) + assert payload["dismiss"] is True + assert payload["tag"] == "test" - assert payload["dismiss"] is True - assert payload["tag"] == "test" - 
@patch("homeassistant.components.html5.notify.WebPusher") - def test_sending_message(self, mock_wp): - """Test sending message.""" - hass = MagicMock() - mock_wp().send().status_code = 201 +@patch("homeassistant.components.html5.notify.WebPusher") +async def test_sending_message(mock_wp, hass: HomeAssistant): + """Test sending message.""" + await async_setup_component(hass, "http", {}) + mock_wp().send().status_code = 201 - data = {"device": SUBSCRIPTION_1} + data = {"device": SUBSCRIPTION_1} - m = mock_open(read_data=json.dumps(data)) - with patch("homeassistant.util.json.open", m, create=True): - service = html5.get_service(hass, VAPID_CONF) + m = mock_open(read_data=json.dumps(data)) + with patch("homeassistant.util.json.open", m, create=True): + service = await html5.async_get_service(hass, VAPID_CONF) + service.hass = hass - assert service is not None + assert service is not None - service.send_message( - "Hello", target=["device", "non_existing"], data={"icon": "beer.png"} - ) + await service.async_send_message( + "Hello", target=["device", "non_existing"], data={"icon": "beer.png"} + ) - assert len(mock_wp.mock_calls) == 4 + assert len(mock_wp.mock_calls) == 4 - # WebPusher constructor - assert mock_wp.mock_calls[2][1][0] == SUBSCRIPTION_1["subscription"] + # WebPusher constructor + assert mock_wp.mock_calls[2][1][0] == SUBSCRIPTION_1["subscription"] - # Call to send - payload = json.loads(mock_wp.mock_calls[3][2]["data"]) + # Call to send + payload = json.loads(mock_wp.mock_calls[3][2]["data"]) - assert payload["body"] == "Hello" - assert payload["icon"] == "beer.png" + assert payload["body"] == "Hello" + assert payload["icon"] == "beer.png" - @patch("homeassistant.components.html5.notify.WebPusher") - def test_fcm_key_include(self, mock_wp): - """Test if the FCM header is included.""" - hass = MagicMock() - mock_wp().send().status_code = 201 - data = {"chrome": SUBSCRIPTION_5} +@patch("homeassistant.components.html5.notify.WebPusher") +async def 
test_fcm_key_include(mock_wp, hass: HomeAssistant): + """Test if the FCM header is included.""" + await async_setup_component(hass, "http", {}) + mock_wp().send().status_code = 201 - m = mock_open(read_data=json.dumps(data)) - with patch("homeassistant.util.json.open", m, create=True): - service = html5.get_service(hass, VAPID_CONF) + data = {"chrome": SUBSCRIPTION_5} - assert service is not None + m = mock_open(read_data=json.dumps(data)) + with patch("homeassistant.util.json.open", m, create=True): + service = await html5.async_get_service(hass, VAPID_CONF) + service.hass = hass - service.send_message("Hello", target=["chrome"]) + assert service is not None - assert len(mock_wp.mock_calls) == 4 - # WebPusher constructor - assert mock_wp.mock_calls[2][1][0] == SUBSCRIPTION_5["subscription"] + await service.async_send_message("Hello", target=["chrome"]) - # Get the keys passed to the WebPusher's send method - assert mock_wp.mock_calls[3][2]["headers"]["Authorization"] is not None + assert len(mock_wp.mock_calls) == 4 + # WebPusher constructor + assert mock_wp.mock_calls[2][1][0] == SUBSCRIPTION_5["subscription"] - @patch("homeassistant.components.html5.notify.WebPusher") - def test_fcm_send_with_unknown_priority(self, mock_wp): - """Test if the gcm_key is only included for GCM endpoints.""" - hass = MagicMock() - mock_wp().send().status_code = 201 + # Get the keys passed to the WebPusher's send method + assert mock_wp.mock_calls[3][2]["headers"]["Authorization"] is not None - data = {"chrome": SUBSCRIPTION_5} - m = mock_open(read_data=json.dumps(data)) - with patch("homeassistant.util.json.open", m, create=True): - service = html5.get_service(hass, VAPID_CONF) +@patch("homeassistant.components.html5.notify.WebPusher") +async def test_fcm_send_with_unknown_priority(mock_wp, hass: HomeAssistant): + """Test if the gcm_key is only included for GCM endpoints.""" + await async_setup_component(hass, "http", {}) + mock_wp().send().status_code = 201 - assert service is not 
None + data = {"chrome": SUBSCRIPTION_5} - service.send_message("Hello", target=["chrome"], priority="undefined") + m = mock_open(read_data=json.dumps(data)) + with patch("homeassistant.util.json.open", m, create=True): + service = await html5.async_get_service(hass, VAPID_CONF) + service.hass = hass - assert len(mock_wp.mock_calls) == 4 - # WebPusher constructor - assert mock_wp.mock_calls[2][1][0] == SUBSCRIPTION_5["subscription"] + assert service is not None - # Get the keys passed to the WebPusher's send method - assert mock_wp.mock_calls[3][2]["headers"]["priority"] == "normal" + await service.async_send_message("Hello", target=["chrome"], priority="undefined") - @patch("homeassistant.components.html5.notify.WebPusher") - def test_fcm_no_targets(self, mock_wp): - """Test if the gcm_key is only included for GCM endpoints.""" - hass = MagicMock() - mock_wp().send().status_code = 201 + assert len(mock_wp.mock_calls) == 4 + # WebPusher constructor + assert mock_wp.mock_calls[2][1][0] == SUBSCRIPTION_5["subscription"] - data = {"chrome": SUBSCRIPTION_5} + # Get the keys passed to the WebPusher's send method + assert mock_wp.mock_calls[3][2]["headers"]["priority"] == "normal" - m = mock_open(read_data=json.dumps(data)) - with patch("homeassistant.util.json.open", m, create=True): - service = html5.get_service(hass, VAPID_CONF) - assert service is not None +@patch("homeassistant.components.html5.notify.WebPusher") +async def test_fcm_no_targets(mock_wp, hass: HomeAssistant): + """Test if the gcm_key is only included for GCM endpoints.""" + await async_setup_component(hass, "http", {}) + mock_wp().send().status_code = 201 - service.send_message("Hello") + data = {"chrome": SUBSCRIPTION_5} - assert len(mock_wp.mock_calls) == 4 - # WebPusher constructor - assert mock_wp.mock_calls[2][1][0] == SUBSCRIPTION_5["subscription"] + m = mock_open(read_data=json.dumps(data)) + with patch("homeassistant.util.json.open", m, create=True): + service = await 
html5.async_get_service(hass, VAPID_CONF) + service.hass = hass - # Get the keys passed to the WebPusher's send method - assert mock_wp.mock_calls[3][2]["headers"]["priority"] == "normal" + assert service is not None - @patch("homeassistant.components.html5.notify.WebPusher") - def test_fcm_additional_data(self, mock_wp): - """Test if the gcm_key is only included for GCM endpoints.""" - hass = MagicMock() - mock_wp().send().status_code = 201 + await service.async_send_message("Hello") - data = {"chrome": SUBSCRIPTION_5} + assert len(mock_wp.mock_calls) == 4 + # WebPusher constructor + assert mock_wp.mock_calls[2][1][0] == SUBSCRIPTION_5["subscription"] - m = mock_open(read_data=json.dumps(data)) - with patch("homeassistant.util.json.open", m, create=True): - service = html5.get_service(hass, VAPID_CONF) + # Get the keys passed to the WebPusher's send method + assert mock_wp.mock_calls[3][2]["headers"]["priority"] == "normal" - assert service is not None - service.send_message("Hello", data={"mykey": "myvalue"}) +@patch("homeassistant.components.html5.notify.WebPusher") +async def test_fcm_additional_data(mock_wp, hass: HomeAssistant): + """Test if the gcm_key is only included for GCM endpoints.""" + await async_setup_component(hass, "http", {}) + mock_wp().send().status_code = 201 - assert len(mock_wp.mock_calls) == 4 - # WebPusher constructor - assert mock_wp.mock_calls[2][1][0] == SUBSCRIPTION_5["subscription"] + data = {"chrome": SUBSCRIPTION_5} - # Get the keys passed to the WebPusher's send method - assert mock_wp.mock_calls[3][2]["headers"]["priority"] == "normal" + m = mock_open(read_data=json.dumps(data)) + with patch("homeassistant.util.json.open", m, create=True): + service = await html5.async_get_service(hass, VAPID_CONF) + service.hass = hass + + assert service is not None + + await service.async_send_message("Hello", data={"mykey": "myvalue"}) + + assert len(mock_wp.mock_calls) == 4 + # WebPusher constructor + assert mock_wp.mock_calls[2][1][0] == 
SUBSCRIPTION_5["subscription"] + + # Get the keys passed to the WebPusher's send method + assert mock_wp.mock_calls[3][2]["headers"]["priority"] == "normal" async def test_registering_new_device_view( From 53a179088fe2b04804ac1f547333f2f65aea551b Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Wed, 24 Apr 2024 03:36:05 +0200 Subject: [PATCH 340/426] Add debug mode to catch unsafe thread operations using core helpers (#115390) * adjust * adjust * fixes * one more * test * debug * move to config * cover * Update homeassistant/core.py * set debug from RuntimeConfig * reduce * fix message * raise * Update homeassistant/core.py * Update homeassistant/core.py * no flood check for raise * cover --- homeassistant/bootstrap.py | 2 ++ homeassistant/config.py | 5 ++++ homeassistant/const.py | 1 + homeassistant/core.py | 20 +++++++++++++- homeassistant/helpers/dispatcher.py | 3 +++ homeassistant/helpers/entity.py | 2 ++ homeassistant/helpers/frame.py | 25 +++++++++++++---- homeassistant/helpers/template.py | 2 ++ homeassistant/runner.py | 1 + homeassistant/util/async_.py | 3 +-- tests/helpers/test_dispatcher.py | 21 +++++++++++++++ tests/helpers/test_entity.py | 21 +++++++++++++++ tests/helpers/test_frame.py | 42 +++++++++++++++++++++++++++++ tests/helpers/test_template.py | 17 ++++++++++++ tests/test_bootstrap.py | 16 ++++++++++- tests/test_config.py | 2 ++ tests/test_core.py | 20 ++++++++++++++ tests/util/test_async.py | 4 ++- 18 files changed, 197 insertions(+), 10 deletions(-) diff --git a/homeassistant/bootstrap.py b/homeassistant/bootstrap.py index afb364e6d2f..10ba0392f15 100644 --- a/homeassistant/bootstrap.py +++ b/homeassistant/bootstrap.py @@ -253,6 +253,8 @@ async def async_setup_hass( runtime_config.log_no_color, ) + if runtime_config.debug: + hass.config.debug = True hass.config.safe_mode = runtime_config.safe_mode hass.config.skip_pip = runtime_config.skip_pip hass.config.skip_pip_packages = runtime_config.skip_pip_packages diff --git 
a/homeassistant/config.py b/homeassistant/config.py index 61b346944fa..abb29f6a1a1 100644 --- a/homeassistant/config.py +++ b/homeassistant/config.py @@ -39,6 +39,7 @@ from .const import ( CONF_CUSTOMIZE, CONF_CUSTOMIZE_DOMAIN, CONF_CUSTOMIZE_GLOB, + CONF_DEBUG, CONF_ELEVATION, CONF_EXTERNAL_URL, CONF_ID, @@ -391,6 +392,7 @@ CORE_CONFIG_SCHEMA = vol.All( vol.Optional(CONF_CURRENCY): _validate_currency, vol.Optional(CONF_COUNTRY): cv.country, vol.Optional(CONF_LANGUAGE): cv.language, + vol.Optional(CONF_DEBUG): cv.boolean, } ), _filter_bad_internal_external_urls, @@ -899,6 +901,9 @@ async def async_process_ha_core_config(hass: HomeAssistant, config: dict) -> Non if key in config: setattr(hac, attr, config[key]) + if config.get(CONF_DEBUG): + hac.debug = True + _raise_issue_if_legacy_templates(hass, config.get(CONF_LEGACY_TEMPLATES)) _raise_issue_if_historic_currency(hass, hass.config.currency) _raise_issue_if_no_country(hass, hass.config.country) diff --git a/homeassistant/const.py b/homeassistant/const.py index 58a1c92ea72..ba83eca58d8 100644 --- a/homeassistant/const.py +++ b/homeassistant/const.py @@ -296,6 +296,7 @@ CONF_WHILE: Final = "while" CONF_WHITELIST: Final = "whitelist" CONF_ALLOWLIST_EXTERNAL_DIRS: Final = "allowlist_external_dirs" LEGACY_CONF_WHITELIST_EXTERNAL_DIRS: Final = "whitelist_external_dirs" +CONF_DEBUG: Final = "debug" CONF_XY: Final = "xy" CONF_ZONE: Final = "zone" diff --git a/homeassistant/core.py b/homeassistant/core.py index 01329806e61..75460ea5759 100644 --- a/homeassistant/core.py +++ b/homeassistant/core.py @@ -429,6 +429,20 @@ class HomeAssistant: max_workers=1, thread_name_prefix="ImportExecutor" ) + def verify_event_loop_thread(self, what: str) -> None: + """Report and raise if we are not running in the event loop thread.""" + if ( + loop_thread_ident := self.loop.__dict__.get("_thread_ident") + ) and loop_thread_ident != threading.get_ident(): + from .helpers import frame # pylint: disable=import-outside-toplevel + + # frame is 
a circular import, so we import it here + frame.report( + f"calls {what} from a thread", + error_if_core=True, + error_if_integration=True, + ) + @property def _active_tasks(self) -> set[asyncio.Future[Any]]: """Return all active tasks. @@ -503,7 +517,6 @@ class HomeAssistant: This method is a coroutine. """ _LOGGER.info("Starting Home Assistant") - setattr(self.loop, "_thread_ident", threading.get_ident()) self.set_state(CoreState.starting) self.bus.async_fire_internal(EVENT_CORE_CONFIG_UPDATE) @@ -1451,6 +1464,9 @@ class EventBus: This method must be run in the event loop. """ + if self._hass.config.debug: + self._hass.verify_event_loop_thread("async_fire") + if len(event_type) > MAX_LENGTH_EVENT_EVENT_TYPE: raise MaxLengthExceeded( event_type, "event_type", MAX_LENGTH_EVENT_EVENT_TYPE @@ -2749,6 +2765,7 @@ class Config: self.elevation: int = 0 """Elevation (always in meters regardless of the unit system).""" + self.debug: bool = False self.location_name: str = "Home" self.time_zone: str = "UTC" self.units: UnitSystem = METRIC_SYSTEM @@ -2889,6 +2906,7 @@ class Config: "country": self.country, "language": self.language, "safe_mode": self.safe_mode, + "debug": self.debug, } def set_time_zone(self, time_zone_str: str) -> None: diff --git a/homeassistant/helpers/dispatcher.py b/homeassistant/helpers/dispatcher.py index 52d57e9cf08..aa8176a1b83 100644 --- a/homeassistant/helpers/dispatcher.py +++ b/homeassistant/helpers/dispatcher.py @@ -199,6 +199,9 @@ def async_dispatcher_send( This method must be run in the event loop. 
""" + if hass.config.debug: + hass.verify_event_loop_thread("async_dispatcher_send") + if (maybe_dispatchers := hass.data.get(DATA_DISPATCHER)) is None: return dispatchers: _DispatcherDataType[*_Ts] = maybe_dispatchers diff --git a/homeassistant/helpers/entity.py b/homeassistant/helpers/entity.py index 086def8a8be..40b145727a1 100644 --- a/homeassistant/helpers/entity.py +++ b/homeassistant/helpers/entity.py @@ -971,6 +971,8 @@ class Entity( """Write the state to the state machine.""" if self.hass is None: raise RuntimeError(f"Attribute hass is None for {self}") + if self.hass.config.debug: + self.hass.verify_event_loop_thread("async_write_ha_state") # The check for self.platform guards against integrations not using an # EntityComponent and can be removed in HA Core 2024.1 diff --git a/homeassistant/helpers/frame.py b/homeassistant/helpers/frame.py index d86fec3de43..068a12c0598 100644 --- a/homeassistant/helpers/frame.py +++ b/homeassistant/helpers/frame.py @@ -136,6 +136,7 @@ def report( error_if_core: bool = True, level: int = logging.WARNING, log_custom_component_only: bool = False, + error_if_integration: bool = False, ) -> None: """Report incorrect usage. @@ -153,14 +154,19 @@ def report( _LOGGER.warning(msg, stack_info=True) return - if not log_custom_component_only or integration_frame.custom_integration: - _report_integration(what, integration_frame, level) + if ( + error_if_integration + or not log_custom_component_only + or integration_frame.custom_integration + ): + _report_integration(what, integration_frame, level, error_if_integration) def _report_integration( what: str, integration_frame: IntegrationFrame, level: int = logging.WARNING, + error: bool = False, ) -> None: """Report incorrect usage in an integration. 
@@ -168,7 +174,7 @@ def _report_integration( """ # Keep track of integrations already reported to prevent flooding key = f"{integration_frame.filename}:{integration_frame.line_number}" - if key in _REPORTED_INTEGRATIONS: + if not error and key in _REPORTED_INTEGRATIONS: return _REPORTED_INTEGRATIONS.add(key) @@ -180,11 +186,11 @@ def _report_integration( integration_domain=integration_frame.integration, module=integration_frame.module, ) - + integration_type = "custom " if integration_frame.custom_integration else "" _LOGGER.log( level, "Detected that %sintegration '%s' %s at %s, line %s: %s, please %s", - "custom " if integration_frame.custom_integration else "", + integration_type, integration_frame.integration, what, integration_frame.relative_filename, @@ -192,6 +198,15 @@ def _report_integration( integration_frame.line, report_issue, ) + if not error: + return + raise RuntimeError( + f"Detected that {integration_type}integration " + f"'{integration_frame.integration}' {what} at " + f"{integration_frame.relative_filename}, line " + f"{integration_frame.line_number}: {integration_frame.line}. " + f"Please {report_issue}." 
+ ) def warn_use(func: _CallableT, what: str) -> _CallableT: diff --git a/homeassistant/helpers/template.py b/homeassistant/helpers/template.py index a1ba1279292..24baab96a4e 100644 --- a/homeassistant/helpers/template.py +++ b/homeassistant/helpers/template.py @@ -695,6 +695,8 @@ class Template: **kwargs: Any, ) -> RenderInfo: """Render the template and collect an entity filter.""" + if self.hass and self.hass.config.debug: + self.hass.verify_event_loop_thread("async_render_to_info") self._renders += 1 assert self.hass and _render_info.get() is None diff --git a/homeassistant/runner.py b/homeassistant/runner.py index f036c7d6322..4e2326d4ea7 100644 --- a/homeassistant/runner.py +++ b/homeassistant/runner.py @@ -107,6 +107,7 @@ class HassEventLoopPolicy(asyncio.DefaultEventLoopPolicy): def new_event_loop(self) -> asyncio.AbstractEventLoop: """Get the event loop.""" loop: asyncio.AbstractEventLoop = super().new_event_loop() + setattr(loop, "_thread_ident", threading.get_ident()) loop.set_exception_handler(_async_loop_exception_handler) if self.debug: loop.set_debug(True) diff --git a/homeassistant/util/async_.py b/homeassistant/util/async_.py index 0cf9fc992c5..19c20207e1d 100644 --- a/homeassistant/util/async_.py +++ b/homeassistant/util/async_.py @@ -52,8 +52,7 @@ def run_callback_threadsafe( Return a concurrent.futures.Future to access the result. 
""" - ident = loop.__dict__.get("_thread_ident") - if ident is not None and ident == threading.get_ident(): + if (ident := loop.__dict__.get("_thread_ident")) and ident == threading.get_ident(): raise RuntimeError("Cannot be called from within the event loop") future: concurrent.futures.Future[_T] = concurrent.futures.Future() diff --git a/tests/helpers/test_dispatcher.py b/tests/helpers/test_dispatcher.py index 149231a9368..d9a79cc6a7a 100644 --- a/tests/helpers/test_dispatcher.py +++ b/tests/helpers/test_dispatcher.py @@ -239,3 +239,24 @@ async def test_dispatcher_add_dispatcher(hass: HomeAssistant) -> None: async_dispatcher_send(hass, "test", 5) assert calls == [3, 4, 4, 5, 5] + + +async def test_thread_safety_checks(hass: HomeAssistant) -> None: + """Test dispatcher thread safety checks.""" + hass.config.debug = True + calls = [] + + @callback + def _dispatcher(data): + calls.append(data) + + async_dispatcher_connect(hass, "test", _dispatcher) + + with pytest.raises( + RuntimeError, + match="Detected code that calls async_dispatcher_send from a thread.", + ): + await hass.async_add_executor_job(async_dispatcher_send, hass, "test", 3) + + async_dispatcher_send(hass, "test", 4) + assert calls == [4] diff --git a/tests/helpers/test_entity.py b/tests/helpers/test_entity.py index 690592a850b..349c065f9b5 100644 --- a/tests/helpers/test_entity.py +++ b/tests/helpers/test_entity.py @@ -2594,3 +2594,24 @@ async def test_get_hassjob_type(hass: HomeAssistant) -> None: assert ent_1.get_hassjob_type("update") is HassJobType.Executor assert ent_1.get_hassjob_type("async_update") is HassJobType.Coroutinefunction assert ent_1.get_hassjob_type("update_callback") is HassJobType.Callback + + +async def test_async_write_ha_state_thread_safety(hass: HomeAssistant) -> None: + """Test async_write_ha_state thread safety.""" + hass.config.debug = True + + ent = entity.Entity() + ent.entity_id = "test.any" + ent.hass = hass + ent.async_write_ha_state() + assert 
hass.states.get(ent.entity_id) + + ent2 = entity.Entity() + ent2.entity_id = "test.any2" + ent2.hass = hass + with pytest.raises( + RuntimeError, + match="Detected code that calls async_write_ha_state from a thread.", + ): + await hass.async_add_executor_job(ent2.async_write_ha_state) + assert not hass.states.get(ent2.entity_id) diff --git a/tests/helpers/test_frame.py b/tests/helpers/test_frame.py index fe215264f59..904bed965c8 100644 --- a/tests/helpers/test_frame.py +++ b/tests/helpers/test_frame.py @@ -205,3 +205,45 @@ async def test_report_missing_integration_frame( frame.report(what, error_if_core=False, log_custom_component_only=True) assert caplog.text == "" + + +@pytest.mark.parametrize("run_count", [1, 2]) +# Run this twice to make sure the flood check does not +# kick in when error_if_integration=True +async def test_report_error_if_integration( + caplog: pytest.LogCaptureFixture, run_count: int +) -> None: + """Test RuntimeError is raised if error_if_integration is set.""" + frames = extract_stack_to_frame( + [ + Mock( + filename="/home/paulus/homeassistant/core.py", + lineno="23", + line="do_something()", + ), + Mock( + filename="/home/paulus/homeassistant/components/hue/light.py", + lineno="23", + line="self.light.is_on", + ), + Mock( + filename="/home/paulus/aiohue/lights.py", + lineno="2", + line="something()", + ), + ] + ) + with ( + patch( + "homeassistant.helpers.frame.get_current_frame", + return_value=frames, + ), + pytest.raises( + RuntimeError, + match=( + "Detected that integration 'hue' did a bad" + " thing at homeassistant/components/hue/light.py" + ), + ), + ): + frame.report("did a bad thing", error_if_integration=True) diff --git a/tests/helpers/test_template.py b/tests/helpers/test_template.py index ec5b76964f7..f55a94d7283 100644 --- a/tests/helpers/test_template.py +++ b/tests/helpers/test_template.py @@ -5757,3 +5757,20 @@ async def test_label_areas( info = render_to_info(hass, f"{{{{ '{label.name}' | label_areas }}}}") 
assert_result_info(info, [master_bedroom.id]) assert info.rate_limit is None + + +async def test_template_thread_safety_checks(hass: HomeAssistant) -> None: + """Test template thread safety checks.""" + hass.states.async_set("sensor.test", "23") + template_str = "{{ states('sensor.test') }}" + template_obj = template.Template(template_str, None) + template_obj.hass = hass + hass.config.debug = True + + with pytest.raises( + RuntimeError, + match="Detected code that calls async_render_to_info from a thread.", + ): + await hass.async_add_executor_job(template_obj.async_render_to_info) + + assert template_obj.async_render_to_info().result() == 23 diff --git a/tests/test_bootstrap.py b/tests/test_bootstrap.py index 12eb52c06f4..6b96fb43d1f 100644 --- a/tests/test_bootstrap.py +++ b/tests/test_bootstrap.py @@ -13,7 +13,7 @@ import pytest from homeassistant import bootstrap, loader, runner import homeassistant.config as config_util from homeassistant.config_entries import HANDLERS, ConfigEntry -from homeassistant.const import SIGNAL_BOOTSTRAP_INTEGRATIONS +from homeassistant.const import CONF_DEBUG, SIGNAL_BOOTSTRAP_INTEGRATIONS from homeassistant.core import CoreState, HomeAssistant, async_get_hass, callback from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.dispatcher import async_dispatcher_connect @@ -112,6 +112,16 @@ async def test_empty_setup(hass: HomeAssistant) -> None: assert domain in hass.config.components, domain +@pytest.mark.parametrize("load_registries", [False]) +async def test_config_does_not_turn_off_debug(hass: HomeAssistant) -> None: + """Test that config does not turn off debug if its turned on by runtime config.""" + # Mock that its turned on from RuntimeConfig + hass.config.debug = True + + await bootstrap.async_from_config_dict({CONF_DEBUG: False}, hass) + assert hass.config.debug is True + + @pytest.mark.parametrize("load_registries", [False]) async def test_preload_translations(hass: HomeAssistant) -> None: 
"""Test translations are preloaded for all frontend deps and base platforms.""" @@ -599,6 +609,7 @@ async def test_setup_hass( log_no_color=log_no_color, skip_pip=True, recovery_mode=False, + debug=True, ), ) @@ -619,6 +630,9 @@ async def test_setup_hass( assert len(mock_ensure_config_exists.mock_calls) == 1 assert len(mock_process_ha_config_upgrade.mock_calls) == 1 + # debug in RuntimeConfig should set it it in hass.config + assert hass.config.debug is True + assert hass == async_get_hass() diff --git a/tests/test_config.py b/tests/test_config.py index defd6a1018b..58529fb0057 100644 --- a/tests/test_config.py +++ b/tests/test_config.py @@ -857,6 +857,7 @@ async def test_loading_configuration(hass: HomeAssistant) -> None: "internal_url": "http://example.local", "media_dirs": {"mymedia": "/usr"}, "legacy_templates": True, + "debug": True, "currency": "EUR", "country": "SE", "language": "sv", @@ -877,6 +878,7 @@ async def test_loading_configuration(hass: HomeAssistant) -> None: assert hass.config.media_dirs == {"mymedia": "/usr"} assert hass.config.config_source is ConfigSource.YAML assert hass.config.legacy_templates is True + assert hass.config.debug is True assert hass.config.currency == "EUR" assert hass.config.country == "SE" assert hass.config.language == "sv" diff --git a/tests/test_core.py b/tests/test_core.py index 30665619fcd..2f5276eec87 100644 --- a/tests/test_core.py +++ b/tests/test_core.py @@ -1990,6 +1990,7 @@ async def test_config_as_dict() -> None: "country": None, "language": "en", "safe_mode": False, + "debug": False, } assert expected == config.as_dict() @@ -3439,3 +3440,22 @@ async def test_top_level_components(hass: HomeAssistant) -> None: hass.config.components.remove("homeassistant.scene") with pytest.raises(NotImplementedError): hass.config.components.discard("homeassistant") + + +async def test_debug_mode_defaults_to_off(hass: HomeAssistant) -> None: + """Test debug mode defaults to off.""" + assert not hass.config.debug + + +async def 
test_async_fire_thread_safety(hass: HomeAssistant) -> None: + """Test async_fire thread safety.""" + hass.config.debug = True + + events = async_capture_events(hass, "test_event") + hass.bus.async_fire("test_event") + with pytest.raises( + RuntimeError, match="Detected code that calls async_fire from a thread." + ): + await hass.async_add_executor_job(hass.bus.async_fire, "test_event") + + assert len(events) == 1 diff --git a/tests/util/test_async.py b/tests/util/test_async.py index 157becc4b01..ac927b1375a 100644 --- a/tests/util/test_async.py +++ b/tests/util/test_async.py @@ -76,7 +76,8 @@ async def test_run_callback_threadsafe(hass: HomeAssistant) -> None: nonlocal it_ran it_ran = True - assert hasync.run_callback_threadsafe(hass.loop, callback) + with patch.dict(hass.loop.__dict__, {"_thread_ident": -1}): + assert hasync.run_callback_threadsafe(hass.loop, callback) assert it_ran is False # Verify that async_block_till_done will flush @@ -95,6 +96,7 @@ async def test_callback_is_always_scheduled(hass: HomeAssistant) -> None: hasync.shutdown_run_callback_threadsafe(hass.loop) with ( + patch.dict(hass.loop.__dict__, {"_thread_ident": -1}), patch.object(hass.loop, "call_soon_threadsafe") as mock_call_soon_threadsafe, pytest.raises(RuntimeError), ): From 4a59ee978cbb2eff56bf50254950e8b68baf06f8 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Wed, 24 Apr 2024 06:41:55 +0200 Subject: [PATCH 341/426] Always do thread safety checks when calling async_fire (#116055) --- homeassistant/core.py | 18 ++++++++++-------- tests/test_core.py | 2 -- 2 files changed, 10 insertions(+), 10 deletions(-) diff --git a/homeassistant/core.py b/homeassistant/core.py index 75460ea5759..189dc2f9d8a 100644 --- a/homeassistant/core.py +++ b/homeassistant/core.py @@ -1407,6 +1407,12 @@ class _OneTimeListener(Generic[_DataT]): EMPTY_LIST: list[Any] = [] +def _verify_event_type_length_or_raise(event_type: EventType[_DataT] | str) -> None: + """Verify the length of the event type and raise if too long.""" + if len(event_type) > MAX_LENGTH_EVENT_EVENT_TYPE: + raise MaxLengthExceeded(event_type, "event_type", MAX_LENGTH_EVENT_EVENT_TYPE) + + class EventBus: """Allow the firing of and listening for events.""" @@ -1447,8 +1453,9 @@ class EventBus: context: Context | None = None, ) -> None: """Fire an event.""" + _verify_event_type_length_or_raise(event_type) self._hass.loop.call_soon_threadsafe( - self.async_fire, event_type, event_data, origin, context + self.async_fire_internal, event_type, event_data, origin, context ) @callback @@ -1464,13 +1471,8 @@ class EventBus: This method must be run in the event loop. 
""" - if self._hass.config.debug: - self._hass.verify_event_loop_thread("async_fire") - - if len(event_type) > MAX_LENGTH_EVENT_EVENT_TYPE: - raise MaxLengthExceeded( - event_type, "event_type", MAX_LENGTH_EVENT_EVENT_TYPE - ) + _verify_event_type_length_or_raise(event_type) + self._hass.verify_event_loop_thread("async_fire") return self.async_fire_internal( event_type, event_data, origin, context, time_fired ) diff --git a/tests/test_core.py b/tests/test_core.py index 2f5276eec87..6bab89bca85 100644 --- a/tests/test_core.py +++ b/tests/test_core.py @@ -3449,8 +3449,6 @@ async def test_debug_mode_defaults_to_off(hass: HomeAssistant) -> None: async def test_async_fire_thread_safety(hass: HomeAssistant) -> None: """Test async_fire thread safety.""" - hass.config.debug = True - events = async_capture_events(hass, "test_event") hass.bus.async_fire("test_event") with pytest.raises( From b37f7b1ff0a6f9e28ad154ef210cb19e51e02572 Mon Sep 17 00:00:00 2001 From: Sid <27780930+autinerd@users.noreply.github.com> Date: Wed, 24 Apr 2024 07:23:24 +0200 Subject: [PATCH 342/426] Enable Ruff RUF019 (#115396) * Enable Ruff RUF019 * fix tado tests * review comments --- homeassistant/components/bluesound/media_player.py | 2 +- homeassistant/components/isy994/light.py | 7 ++----- homeassistant/components/tado/__init__.py | 4 ++-- homeassistant/components/velbus/__init__.py | 2 +- homeassistant/components/xiaomi_miio/remote.py | 4 ++-- homeassistant/helpers/config_validation.py | 4 ++-- pyproject.toml | 1 + 7 files changed, 11 insertions(+), 13 deletions(-) diff --git a/homeassistant/components/bluesound/media_player.py b/homeassistant/components/bluesound/media_player.py index cb6f013dbf8..6c63067a1c1 100644 --- a/homeassistant/components/bluesound/media_player.py +++ b/homeassistant/components/bluesound/media_player.py @@ -934,7 +934,7 @@ class BluesoundPlayer(MediaPlayerEntity): selected_source = items[0] url = 
f"Play?url={selected_source['url']}&preset_id&image={selected_source['image']}" - if "is_raw_url" in selected_source and selected_source["is_raw_url"]: + if selected_source.get("is_raw_url"): url = selected_source["url"] return await self.send_bluesound_command(url) diff --git a/homeassistant/components/isy994/light.py b/homeassistant/components/isy994/light.py index 69701534840..b9b269d9ca3 100644 --- a/homeassistant/components/isy994/light.py +++ b/homeassistant/components/isy994/light.py @@ -114,8 +114,5 @@ class ISYLightEntity(ISYNodeEntity, LightEntity, RestoreEntity): if not (last_state := await self.async_get_last_state()): return - if ( - ATTR_LAST_BRIGHTNESS in last_state.attributes - and last_state.attributes[ATTR_LAST_BRIGHTNESS] - ): - self._last_brightness = last_state.attributes[ATTR_LAST_BRIGHTNESS] + if last_brightness := last_state.attributes.get(ATTR_LAST_BRIGHTNESS): + self._last_brightness = last_brightness diff --git a/homeassistant/components/tado/__init__.py b/homeassistant/components/tado/__init__.py index 5ab7a6f67b8..8f69ccdaffb 100644 --- a/homeassistant/components/tado/__init__.py +++ b/homeassistant/components/tado/__init__.py @@ -221,7 +221,7 @@ class TadoConnector: # Errors are planned to be converted to exceptions # in PyTado library, so this can be removed - if "errors" in mobile_devices and mobile_devices["errors"]: + if isinstance(mobile_devices, dict) and mobile_devices.get("errors"): _LOGGER.error( "Error for home ID %s while updating mobile devices: %s", self.home_id, @@ -256,7 +256,7 @@ class TadoConnector: # Errors are planned to be converted to exceptions # in PyTado library, so this can be removed - if "errors" in devices and devices["errors"]: + if isinstance(devices, dict) and devices.get("errors"): _LOGGER.error( "Error for home ID %s while updating devices: %s", self.home_id, diff --git a/homeassistant/components/velbus/__init__.py b/homeassistant/components/velbus/__init__.py index ea03c4b15f1..479b7f02024 100644 --- 
a/homeassistant/components/velbus/__init__.py +++ b/homeassistant/components/velbus/__init__.py @@ -145,7 +145,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Handle a clear cache service call.""" # clear the cache with suppress(FileNotFoundError): - if CONF_ADDRESS in call.data and call.data[CONF_ADDRESS]: + if call.data.get(CONF_ADDRESS): await hass.async_add_executor_job( os.unlink, hass.config.path( diff --git a/homeassistant/components/xiaomi_miio/remote.py b/homeassistant/components/xiaomi_miio/remote.py index cd3b3192520..5baaf614b01 100644 --- a/homeassistant/components/xiaomi_miio/remote.py +++ b/homeassistant/components/xiaomi_miio/remote.py @@ -138,8 +138,8 @@ async def async_setup_platform( message = await hass.async_add_executor_job(device.read, slot) _LOGGER.debug("Message received from device: '%s'", message) - if "code" in message and message["code"]: - log_msg = "Received command is: {}".format(message["code"]) + if code := message.get("code"): + log_msg = f"Received command is: {code}" _LOGGER.info(log_msg) persistent_notification.async_create( hass, log_msg, title="Xiaomi Miio Remote" diff --git a/homeassistant/helpers/config_validation.py b/homeassistant/helpers/config_validation.py index 38287eb6722..bf20a2d7f5f 100644 --- a/homeassistant/helpers/config_validation.py +++ b/homeassistant/helpers/config_validation.py @@ -1106,7 +1106,7 @@ def empty_config_schema(domain: str) -> Callable[[dict], dict]: """Return a config schema which logs if there are configuration parameters.""" def validator(config: dict) -> dict: - if domain in config and config[domain]: + if config_domain := config.get(domain): get_integration_logger(__name__).error( ( "The %s integration does not support any configuration parameters, " @@ -1114,7 +1114,7 @@ def empty_config_schema(domain: str) -> Callable[[dict], dict]: "configuration." 
), domain, - config[domain], + config_domain, ) return config diff --git a/pyproject.toml b/pyproject.toml index d3487d50a17..7e3038f6ee2 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -705,6 +705,7 @@ select = [ "RUF006", # Store a reference to the return value of asyncio.create_task "RUF013", # PEP 484 prohibits implicit Optional "RUF018", # Avoid assignment expressions in assert statements + "RUF019", # Unnecessary key check before dictionary access # "RUF100", # Unused `noqa` directive; temporarily every now and then to clean them up "S102", # Use of exec detected "S103", # bad-file-permissions From f115525137765b1fae8e3b5106bb7c62fd2f27f4 Mon Sep 17 00:00:00 2001 From: Matthias Alphart Date: Wed, 24 Apr 2024 07:51:02 +0200 Subject: [PATCH 343/426] Migrate KNX notify service to entity platform (#115665) --- homeassistant/components/knx/__init__.py | 6 +- homeassistant/components/knx/manifest.json | 2 +- homeassistant/components/knx/notify.py | 59 ++++++++-- homeassistant/components/knx/repairs.py | 36 ++++++ homeassistant/components/knx/schema.py | 1 + homeassistant/components/knx/strings.json | 13 +++ tests/components/knx/test_notify.py | 129 +++++++++++++++------ tests/components/knx/test_repairs.py | 84 ++++++++++++++ 8 files changed, 281 insertions(+), 49 deletions(-) create mode 100644 homeassistant/components/knx/repairs.py create mode 100644 tests/components/knx/test_repairs.py diff --git a/homeassistant/components/knx/__init__.py b/homeassistant/components/knx/__init__.py index c84d53d6039..da68dc36a6d 100644 --- a/homeassistant/components/knx/__init__.py +++ b/homeassistant/components/knx/__init__.py @@ -197,11 +197,11 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: [ platform for platform in SUPPORTED_PLATFORMS - if platform in config and platform not in (Platform.SENSOR, Platform.NOTIFY) + if platform in config and platform is not Platform.SENSOR ], ) - # set up notify platform, no entry support for notify 
component yet + # set up notify service for backwards compatibility - remove 2024.11 if NotifySchema.PLATFORM in config: hass.async_create_task( discovery.async_load_platform( @@ -232,7 +232,7 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: platform for platform in SUPPORTED_PLATFORMS if platform in hass.data[DATA_KNX_CONFIG] - and platform not in (Platform.SENSOR, Platform.NOTIFY) + and platform is not Platform.SENSOR ], ], ) diff --git a/homeassistant/components/knx/manifest.json b/homeassistant/components/knx/manifest.json index af0c6b8d01c..77f3db3f9f3 100644 --- a/homeassistant/components/knx/manifest.json +++ b/homeassistant/components/knx/manifest.json @@ -4,7 +4,7 @@ "after_dependencies": ["panel_custom"], "codeowners": ["@Julius2342", "@farmio", "@marvin-w"], "config_flow": true, - "dependencies": ["file_upload", "websocket_api"], + "dependencies": ["file_upload", "repairs", "websocket_api"], "documentation": "https://www.home-assistant.io/integrations/knx", "integration_type": "hub", "iot_class": "local_push", diff --git a/homeassistant/components/knx/notify.py b/homeassistant/components/knx/notify.py index 74ae86dc5d0..e208e4fd646 100644 --- a/homeassistant/components/knx/notify.py +++ b/homeassistant/components/knx/notify.py @@ -1,4 +1,4 @@ -"""Support for KNX/IP notification services.""" +"""Support for KNX/IP notifications.""" from __future__ import annotations @@ -7,13 +7,16 @@ from typing import Any from xknx import XKNX from xknx.devices import Notification as XknxNotification -from homeassistant.components.notify import BaseNotificationService -from homeassistant.const import CONF_NAME, CONF_TYPE +from homeassistant import config_entries +from homeassistant.components.notify import BaseNotificationService, NotifyEntity +from homeassistant.const import CONF_ENTITY_CATEGORY, CONF_NAME, CONF_TYPE, Platform from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import 
AddEntitiesCallback from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType from .const import DATA_KNX_CONFIG, DOMAIN, KNX_ADDRESS -from .schema import NotifySchema +from .knx_entity import KnxEntity +from .repairs import migrate_notify_issue async def async_get_service( @@ -25,16 +28,11 @@ async def async_get_service( if discovery_info is None: return None - if platform_config := hass.data[DATA_KNX_CONFIG].get(NotifySchema.PLATFORM): + if platform_config := hass.data[DATA_KNX_CONFIG].get(Platform.NOTIFY): xknx: XKNX = hass.data[DOMAIN].xknx notification_devices = [ - XknxNotification( - xknx, - name=device_config[CONF_NAME], - group_address=device_config[KNX_ADDRESS], - value_type=device_config[CONF_TYPE], - ) + _create_notification_instance(xknx, device_config) for device_config in platform_config ] return KNXNotificationService(notification_devices) @@ -59,6 +57,7 @@ class KNXNotificationService(BaseNotificationService): async def async_send_message(self, message: str = "", **kwargs: Any) -> None: """Send a notification to knx bus.""" + migrate_notify_issue(self.hass) if "target" in kwargs: await self._async_send_to_device(message, kwargs["target"]) else: @@ -74,3 +73,41 @@ class KNXNotificationService(BaseNotificationService): for device in self.devices: if device.name in names: await device.set(message) + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: config_entries.ConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up notify(s) for KNX platform.""" + xknx: XKNX = hass.data[DOMAIN].xknx + config: list[ConfigType] = hass.data[DATA_KNX_CONFIG][Platform.NOTIFY] + + async_add_entities(KNXNotify(xknx, entity_config) for entity_config in config) + + +def _create_notification_instance(xknx: XKNX, config: ConfigType) -> XknxNotification: + """Return a KNX Notification to be used within XKNX.""" + return XknxNotification( + xknx, + name=config[CONF_NAME], + group_address=config[KNX_ADDRESS], + 
value_type=config[CONF_TYPE], + ) + + +class KNXNotify(NotifyEntity, KnxEntity): + """Representation of a KNX notification entity.""" + + _device: XknxNotification + + def __init__(self, xknx: XKNX, config: ConfigType) -> None: + """Initialize a KNX notification.""" + super().__init__(_create_notification_instance(xknx, config)) + self._attr_entity_category = config.get(CONF_ENTITY_CATEGORY) + self._attr_unique_id = str(self._device.remote_value.group_address) + + async def async_send_message(self, message: str) -> None: + """Send a notification to knx bus.""" + await self._device.set(message) diff --git a/homeassistant/components/knx/repairs.py b/homeassistant/components/knx/repairs.py new file mode 100644 index 00000000000..f0a92850d36 --- /dev/null +++ b/homeassistant/components/knx/repairs.py @@ -0,0 +1,36 @@ +"""Repairs support for KNX.""" + +from __future__ import annotations + +from homeassistant.components.repairs import ConfirmRepairFlow, RepairsFlow +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant, callback +from homeassistant.helpers import issue_registry as ir + +from .const import DOMAIN + + +@callback +def migrate_notify_issue(hass: HomeAssistant) -> None: + """Create issue for notify service deprecation.""" + ir.async_create_issue( + hass, + DOMAIN, + "migrate_notify", + breaks_in_ha_version="2024.11.0", + issue_domain=Platform.NOTIFY.value, + is_fixable=True, + is_persistent=True, + translation_key="migrate_notify", + severity=ir.IssueSeverity.WARNING, + ) + + +async def async_create_fix_flow( + hass: HomeAssistant, + issue_id: str, + data: dict[str, str | int | float | None] | None, +) -> RepairsFlow: + """Create flow.""" + assert issue_id == "migrate_notify" + return ConfirmRepairFlow() diff --git a/homeassistant/components/knx/schema.py b/homeassistant/components/knx/schema.py index 39670b4f92b..462605c3985 100644 --- a/homeassistant/components/knx/schema.py +++ b/homeassistant/components/knx/schema.py @@ 
-750,6 +750,7 @@ class NotifySchema(KNXPlatformSchema): vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, vol.Optional(CONF_TYPE, default="latin_1"): string_type_validator, vol.Required(KNX_ADDRESS): ga_validator, + vol.Optional(CONF_ENTITY_CATEGORY): ENTITY_CATEGORIES_SCHEMA, } ) diff --git a/homeassistant/components/knx/strings.json b/homeassistant/components/knx/strings.json index 39b96dddf8f..a69ba106ffd 100644 --- a/homeassistant/components/knx/strings.json +++ b/homeassistant/components/knx/strings.json @@ -384,5 +384,18 @@ "name": "[%key:common::action::reload%]", "description": "Reloads the KNX integration." } + }, + "issues": { + "migrate_notify": { + "title": "Migration of KNX notify service", + "fix_flow": { + "step": { + "confirm": { + "description": "The KNX `notify` service has been migrated. New `notify` entities are available now.\n\nUpdate any automations to use the new `notify.send_message` exposed by these new entities. When this is done, fix this issue and restart Home Assistant.", + "title": "Disable legacy KNX notify service" + } + } + } + } } } diff --git a/tests/components/knx/test_notify.py b/tests/components/knx/test_notify.py index d843c460c34..94f2d579fc8 100644 --- a/tests/components/knx/test_notify.py +++ b/tests/components/knx/test_notify.py @@ -1,5 +1,6 @@ """Test KNX notify.""" +from homeassistant.components import notify from homeassistant.components.knx.const import KNX_ADDRESS from homeassistant.components.knx.schema import NotifySchema from homeassistant.const import CONF_NAME, CONF_TYPE @@ -8,7 +9,9 @@ from homeassistant.core import HomeAssistant from .conftest import KNXTestKit -async def test_notify_simple(hass: HomeAssistant, knx: KNXTestKit) -> None: +async def test_legacy_notify_service_simple( + hass: HomeAssistant, knx: KNXTestKit +) -> None: """Test KNX notify can send to one device.""" await knx.setup_integration( { @@ -26,22 +29,7 @@ async def test_notify_simple(hass: HomeAssistant, knx: KNXTestKit) -> None: 
await knx.assert_write( "1/0/0", - ( - 0x49, - 0x20, - 0x6C, - 0x6F, - 0x76, - 0x65, - 0x20, - 0x4B, - 0x4E, - 0x58, - 0x0, - 0x0, - 0x0, - 0x0, - ), + (73, 32, 108, 111, 118, 101, 32, 75, 78, 88, 0, 0, 0, 0), ) await hass.services.async_call( @@ -56,26 +44,11 @@ async def test_notify_simple(hass: HomeAssistant, knx: KNXTestKit) -> None: await knx.assert_write( "1/0/0", - ( - 0x49, - 0x20, - 0x6C, - 0x6F, - 0x76, - 0x65, - 0x20, - 0x4B, - 0x4E, - 0x58, - 0x2C, - 0x20, - 0x62, - 0x75, - ), + (73, 32, 108, 111, 118, 101, 32, 75, 78, 88, 44, 32, 98, 117), ) -async def test_notify_multiple_sends_to_all_with_different_encodings( +async def test_legacy_notify_service_multiple_sends_to_all_with_different_encodings( hass: HomeAssistant, knx: KNXTestKit ) -> None: """Test KNX notify `type` configuration.""" @@ -110,3 +83,91 @@ async def test_notify_multiple_sends_to_all_with_different_encodings( "1/0/1", (71, 228, 110, 115, 101, 102, 252, 223, 99, 104, 101, 110, 0, 0), ) + + +async def test_notify_simple(hass: HomeAssistant, knx: KNXTestKit) -> None: + """Test KNX notify can send to one device.""" + await knx.setup_integration( + { + NotifySchema.PLATFORM: { + CONF_NAME: "test", + KNX_ADDRESS: "1/0/0", + } + } + ) + + await hass.services.async_call( + notify.DOMAIN, + notify.SERVICE_SEND_MESSAGE, + { + "entity_id": "notify.test", + notify.ATTR_MESSAGE: "I love KNX", + }, + ) + await knx.assert_write( + "1/0/0", + (73, 32, 108, 111, 118, 101, 32, 75, 78, 88, 0, 0, 0, 0), + ) + + await hass.services.async_call( + notify.DOMAIN, + notify.SERVICE_SEND_MESSAGE, + { + "entity_id": "notify.test", + notify.ATTR_MESSAGE: "I love KNX, but this text is too long for KNX, poor KNX", + }, + ) + await knx.assert_write( + "1/0/0", + (73, 32, 108, 111, 118, 101, 32, 75, 78, 88, 44, 32, 98, 117), + ) + + +async def test_notify_multiple_sends_with_different_encodings( + hass: HomeAssistant, knx: KNXTestKit +) -> None: + """Test KNX notify `type` configuration.""" + await 
knx.setup_integration( + { + NotifySchema.PLATFORM: [ + { + CONF_NAME: "ASCII", + KNX_ADDRESS: "1/0/0", + CONF_TYPE: "string", + }, + { + CONF_NAME: "Latin-1", + KNX_ADDRESS: "1/0/1", + CONF_TYPE: "latin_1", + }, + ] + } + ) + message = {notify.ATTR_MESSAGE: "Gänsefüßchen"} + + await hass.services.async_call( + notify.DOMAIN, + notify.SERVICE_SEND_MESSAGE, + { + "entity_id": "notify.ascii", + **message, + }, + ) + await knx.assert_write( + "1/0/0", + # "G?nsef??chen" + (71, 63, 110, 115, 101, 102, 63, 63, 99, 104, 101, 110, 0, 0), + ) + + await hass.services.async_call( + notify.DOMAIN, + notify.SERVICE_SEND_MESSAGE, + { + "entity_id": "notify.latin_1", + **message, + }, + ) + await knx.assert_write( + "1/0/1", + (71, 228, 110, 115, 101, 102, 252, 223, 99, 104, 101, 110, 0, 0), + ) diff --git a/tests/components/knx/test_repairs.py b/tests/components/knx/test_repairs.py new file mode 100644 index 00000000000..4ad06e0addb --- /dev/null +++ b/tests/components/knx/test_repairs.py @@ -0,0 +1,84 @@ +"""Test repairs for KNX integration.""" + +from http import HTTPStatus + +from homeassistant.components.knx.const import DOMAIN, KNX_ADDRESS +from homeassistant.components.knx.schema import NotifySchema +from homeassistant.components.notify import DOMAIN as NOTIFY_DOMAIN +from homeassistant.components.repairs.websocket_api import ( + RepairsFlowIndexView, + RepairsFlowResourceView, +) +from homeassistant.const import CONF_NAME +from homeassistant.core import HomeAssistant +import homeassistant.helpers.issue_registry as ir + +from .conftest import KNXTestKit + +from tests.typing import ClientSessionGenerator + + +async def test_knx_notify_service_issue( + hass: HomeAssistant, + knx: KNXTestKit, + hass_client: ClientSessionGenerator, + issue_registry: ir.IssueRegistry, +) -> None: + """Test the legacy notify service still works before migration and repair flow is triggered.""" + await knx.setup_integration( + { + NotifySchema.PLATFORM: { + CONF_NAME: "test", + KNX_ADDRESS: 
"1/0/0", + } + } + ) + http_client = await hass_client() + + # Assert no issue is present + assert len(issue_registry.issues) == 0 + + # Simulate legacy service being used + assert hass.services.has_service(NOTIFY_DOMAIN, NOTIFY_DOMAIN) + await hass.services.async_call( + NOTIFY_DOMAIN, + NOTIFY_DOMAIN, + service_data={"message": "It is too cold!", "target": "test"}, + blocking=True, + ) + await knx.assert_write( + "1/0/0", + (73, 116, 32, 105, 115, 32, 116, 111, 111, 32, 99, 111, 108, 100), + ) + + # Assert the issue is present + assert len(issue_registry.issues) == 1 + assert issue_registry.async_get_issue( + domain=DOMAIN, + issue_id="migrate_notify", + ) + + # Test confirm step in repair flow + resp = await http_client.post( + RepairsFlowIndexView.url, + json={"handler": DOMAIN, "issue_id": "migrate_notify"}, + ) + assert resp.status == HTTPStatus.OK + data = await resp.json() + + flow_id = data["flow_id"] + assert data["step_id"] == "confirm" + + resp = await http_client.post( + RepairsFlowResourceView.url.format(flow_id=flow_id), + ) + assert resp.status == HTTPStatus.OK + data = await resp.json() + assert data["type"] == "create_entry" + + # Assert the issue is no longer present + assert not issue_registry.async_get_issue( + domain=DOMAIN, + issue_id="migrate_notify", + ) + assert len(issue_registry.issues) == 0 From d8cca482b3726f2ad9f0914f1efbc14754d90a70 Mon Sep 17 00:00:00 2001 From: Michael <35783820+mib1185@users.noreply.github.com> Date: Wed, 24 Apr 2024 07:52:14 +0200 Subject: [PATCH 344/426] Add reconfigure flow to AVM Fritz!Tools (#116057) add reconfigure flow --- homeassistant/components/fritz/config_flow.py | 84 ++++++++- homeassistant/components/fritz/strings.json | 16 +- tests/components/fritz/test_config_flow.py | 177 +++++++++++++++++- 3 files changed, 270 insertions(+), 7 deletions(-) diff --git a/homeassistant/components/fritz/config_flow.py b/homeassistant/components/fritz/config_flow.py index 1cfa3af39fb..fdafd486b29 100644 --- 
a/homeassistant/components/fritz/config_flow.py +++ b/homeassistant/components/fritz/config_flow.py @@ -138,6 +138,12 @@ class FritzBoxToolsFlowHandler(ConfigFlow, domain=DOMAIN): }, ) + def _determine_port(self, user_input: dict[str, Any]) -> int: + """Determine port from user_input.""" + if port := user_input.get(CONF_PORT): + return int(port) + return DEFAULT_HTTPS_PORT if user_input[CONF_SSL] else DEFAULT_HTTP_PORT + async def async_step_ssdp( self, discovery_info: ssdp.SsdpServiceInfo ) -> ConfigFlowResult: @@ -189,7 +195,7 @@ class FritzBoxToolsFlowHandler(ConfigFlow, domain=DOMAIN): self._username = user_input[CONF_USERNAME] self._password = user_input[CONF_PASSWORD] self._use_tls = user_input[CONF_SSL] - self._port = DEFAULT_HTTPS_PORT if self._use_tls else DEFAULT_HTTP_PORT + self._port = self._determine_port(user_input) error = await self.hass.async_add_executor_job(self.fritz_tools_init) @@ -252,10 +258,7 @@ class FritzBoxToolsFlowHandler(ConfigFlow, domain=DOMAIN): self._password = user_input[CONF_PASSWORD] self._use_tls = user_input[CONF_SSL] - if (port := user_input.get(CONF_PORT)) is None: - self._port = DEFAULT_HTTPS_PORT if self._use_tls else DEFAULT_HTTP_PORT - else: - self._port = port + self._port = self._determine_port(user_input) if not (error := await self.hass.async_add_executor_job(self.fritz_tools_init)): self._name = self._model @@ -329,6 +332,77 @@ class FritzBoxToolsFlowHandler(ConfigFlow, domain=DOMAIN): await self.hass.config_entries.async_reload(self._entry.entry_id) return self.async_abort(reason="reauth_successful") + async def async_step_reconfigure(self, _: Mapping[str, Any]) -> ConfigFlowResult: + """Handle reconfigure flow .""" + self._entry = self.hass.config_entries.async_get_entry(self.context["entry_id"]) + assert self._entry + self._host = self._entry.data[CONF_HOST] + self._port = self._entry.data[CONF_PORT] + self._username = self._entry.data[CONF_USERNAME] + self._password = self._entry.data[CONF_PASSWORD] + 
self._use_tls = self._entry.data.get(CONF_SSL, DEFAULT_SSL) + + return await self.async_step_reconfigure_confirm() + + def _show_setup_form_reconfigure_confirm( + self, user_input: dict[str, Any], errors: dict[str, str] | None = None + ) -> ConfigFlowResult: + """Show the reconfigure form to the user.""" + advanced_data_schema = {} + if self.show_advanced_options: + advanced_data_schema = { + vol.Optional(CONF_PORT, default=user_input[CONF_PORT]): vol.Coerce(int), + } + + return self.async_show_form( + step_id="reconfigure_confirm", + data_schema=vol.Schema( + { + vol.Required(CONF_HOST, default=user_input[CONF_HOST]): str, + **advanced_data_schema, + vol.Required(CONF_SSL, default=user_input[CONF_SSL]): bool, + } + ), + description_placeholders={"host": self._host}, + errors=errors or {}, + ) + + async def async_step_reconfigure_confirm( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle reconfigure flow.""" + if user_input is None: + return self._show_setup_form_reconfigure_confirm( + { + CONF_HOST: self._host, + CONF_PORT: self._port, + CONF_SSL: self._use_tls, + } + ) + + self._host = user_input[CONF_HOST] + self._use_tls = user_input[CONF_SSL] + self._port = self._determine_port(user_input) + + if error := await self.hass.async_add_executor_job(self.fritz_tools_init): + return self._show_setup_form_reconfigure_confirm( + user_input={**user_input, CONF_PORT: self._port}, errors={"base": error} + ) + + assert isinstance(self._entry, ConfigEntry) + self.hass.config_entries.async_update_entry( + self._entry, + data={ + CONF_HOST: self._host, + CONF_PASSWORD: self._password, + CONF_PORT: self._port, + CONF_USERNAME: self._username, + CONF_SSL: self._use_tls, + }, + ) + await self.hass.config_entries.async_reload(self._entry.entry_id) + return self.async_abort(reason="reconfigure_successful") + class FritzBoxToolsOptionsFlowHandler(OptionsFlowWithConfigEntry): """Handle an options flow.""" diff --git 
a/homeassistant/components/fritz/strings.json b/homeassistant/components/fritz/strings.json index 4899edb6938..a96c3b8ac28 100644 --- a/homeassistant/components/fritz/strings.json +++ b/homeassistant/components/fritz/strings.json @@ -18,6 +18,19 @@ "password": "[%key:common::config_flow::data::password%]" } }, + "reconfigure_confirm": { + "title": "Updating FRITZ!Box Tools - configuration", + "description": "Update FRITZ!Box Tools configuration for: {host}.", + "data": { + "host": "[%key:common::config_flow::data::host%]", + "port": "[%key:common::config_flow::data::port%]", + "ssl": "[%key:common::config_flow::data::ssl%]" + }, + "data_description": { + "host": "The hostname or IP address of your FRITZ!Box router.", + "port": "Leave it empty to use the default port." + } + }, "user": { "title": "[%key:component::fritz::config::step::confirm::title%]", "description": "Set up FRITZ!Box Tools to control your FRITZ!Box.\nMinimum needed: username, password.", @@ -38,7 +51,8 @@ "already_in_progress": "[%key:common::config_flow::abort::already_in_progress%]", "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", "ignore_ip6_link_local": "IPv6 link local address is not supported.", - "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]" + "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]", + "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]" }, "error": { "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", diff --git a/tests/components/fritz/test_config_flow.py b/tests/components/fritz/test_config_flow.py index 64bf3cd9064..f87fbe722cd 100644 --- a/tests/components/fritz/test_config_flow.py +++ b/tests/components/fritz/test_config_flow.py @@ -23,7 +23,12 @@ from homeassistant.components.fritz.const import ( FRITZ_AUTH_EXCEPTIONS, ) from homeassistant.components.ssdp import ATTR_UPNP_UDN -from homeassistant.config_entries import 
SOURCE_REAUTH, SOURCE_SSDP, SOURCE_USER +from homeassistant.config_entries import ( + SOURCE_REAUTH, + SOURCE_RECONFIGURE, + SOURCE_SSDP, + SOURCE_USER, +) from homeassistant.const import ( CONF_HOST, CONF_PASSWORD, @@ -405,6 +410,176 @@ async def test_reauth_not_successful( assert result["errors"]["base"] == error +@pytest.mark.parametrize( + ("show_advanced_options", "user_input", "expected_config"), + [ + ( + True, + {CONF_HOST: "host_a", CONF_PORT: 49000, CONF_SSL: False}, + {CONF_HOST: "host_a", CONF_PORT: 49000, CONF_SSL: False}, + ), + ( + True, + {CONF_HOST: "host_a", CONF_PORT: 49443, CONF_SSL: True}, + {CONF_HOST: "host_a", CONF_PORT: 49443, CONF_SSL: True}, + ), + ( + True, + {CONF_HOST: "host_a", CONF_PORT: 12345, CONF_SSL: True}, + {CONF_HOST: "host_a", CONF_PORT: 12345, CONF_SSL: True}, + ), + ( + False, + {CONF_HOST: "host_b", CONF_SSL: False}, + {CONF_HOST: "host_b", CONF_PORT: 49000, CONF_SSL: False}, + ), + ( + False, + {CONF_HOST: "host_b", CONF_SSL: True}, + {CONF_HOST: "host_b", CONF_PORT: 49443, CONF_SSL: True}, + ), + ], +) +async def test_reconfigure_successful( + hass: HomeAssistant, + fc_class_mock, + mock_get_source_ip, + show_advanced_options: bool, + user_input: dict, + expected_config: dict, +) -> None: + """Test starting a reconfigure flow.""" + + mock_config = MockConfigEntry(domain=DOMAIN, data=MOCK_USER_DATA) + mock_config.add_to_hass(hass) + + with ( + patch( + "homeassistant.components.fritz.config_flow.FritzConnection", + side_effect=fc_class_mock, + ), + patch( + "homeassistant.components.fritz.common.FritzBoxTools._update_device_info", + return_value=MOCK_FIRMWARE_INFO, + ), + patch( + "homeassistant.components.fritz.async_setup_entry", + ) as mock_setup_entry, + patch( + "requests.get", + ) as mock_request_get, + patch( + "requests.post", + ) as mock_request_post, + ): + mock_request_get.return_value.status_code = 200 + mock_request_get.return_value.content = MOCK_REQUEST + mock_request_post.return_value.status_code = 200 + 
mock_request_post.return_value.text = MOCK_REQUEST + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": SOURCE_RECONFIGURE, + "entry_id": mock_config.entry_id, + "show_advanced_options": show_advanced_options, + }, + data=mock_config.data, + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reconfigure_confirm" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input=user_input, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reconfigure_successful" + assert mock_config.data == { + **expected_config, + CONF_USERNAME: "fake_user", + CONF_PASSWORD: "fake_pass", + } + + assert mock_setup_entry.called + + +async def test_reconfigure_not_successful( + hass: HomeAssistant, + fc_class_mock, + mock_get_source_ip, +) -> None: + """Test starting a reconfigure flow but no connection found.""" + + mock_config = MockConfigEntry(domain=DOMAIN, data=MOCK_USER_DATA) + mock_config.add_to_hass(hass) + + with ( + patch( + "homeassistant.components.fritz.config_flow.FritzConnection", + side_effect=[FritzConnectionException, fc_class_mock], + ), + patch( + "homeassistant.components.fritz.common.FritzBoxTools._update_device_info", + return_value=MOCK_FIRMWARE_INFO, + ), + patch( + "homeassistant.components.fritz.async_setup_entry", + ), + patch( + "requests.get", + ) as mock_request_get, + patch( + "requests.post", + ) as mock_request_post, + ): + mock_request_get.return_value.status_code = 200 + mock_request_get.return_value.content = MOCK_REQUEST + mock_request_post.return_value.status_code = 200 + mock_request_post.return_value.text = MOCK_REQUEST + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_RECONFIGURE, "entry_id": mock_config.entry_id}, + data=mock_config.data, + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reconfigure_confirm" + + result = await 
hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_HOST: "fake_host", + CONF_SSL: False, + }, + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reconfigure_confirm" + assert result["errors"]["base"] == ERROR_CANNOT_CONNECT + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_HOST: "fake_host", + CONF_SSL: False, + }, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reconfigure_successful" + assert mock_config.data == { + CONF_HOST: "fake_host", + CONF_PASSWORD: "fake_pass", + CONF_USERNAME: "fake_user", + CONF_PORT: 49000, + CONF_SSL: False, + } + + async def test_ssdp_already_configured( hass: HomeAssistant, fc_class_mock, mock_get_source_ip ) -> None: From 44208a5be0eed3429fb03a5606f16556fb65f683 Mon Sep 17 00:00:00 2001 From: osohotwateriot <102795312+osohotwateriot@users.noreply.github.com> Date: Wed, 24 Apr 2024 09:19:26 +0300 Subject: [PATCH 345/426] Add OSO Energy sensors (#108226) * Add OSO Energy sensors * Fix comments * Fixes after review * Fix sensor names and translations * Fixes after review * Fix validation errors * Fixes after review * Remove profile sensor --- .coveragerc | 1 + .../components/osoenergy/__init__.py | 24 ++- homeassistant/components/osoenergy/sensor.py | 151 ++++++++++++++++++ .../components/osoenergy/strings.json | 49 +++++- .../components/osoenergy/water_heater.py | 52 +++--- 5 files changed, 241 insertions(+), 36 deletions(-) create mode 100644 homeassistant/components/osoenergy/sensor.py diff --git a/.coveragerc b/.coveragerc index 9eb32f7cda8..6f382bcb780 100644 --- a/.coveragerc +++ b/.coveragerc @@ -986,6 +986,7 @@ omit = homeassistant/components/orvibo/switch.py homeassistant/components/osoenergy/__init__.py homeassistant/components/osoenergy/const.py + homeassistant/components/osoenergy/sensor.py homeassistant/components/osoenergy/water_heater.py 
homeassistant/components/osramlightify/light.py homeassistant/components/otp/sensor.py diff --git a/homeassistant/components/osoenergy/__init__.py b/homeassistant/components/osoenergy/__init__.py index 48ea01e8bb8..20ff22cea23 100644 --- a/homeassistant/components/osoenergy/__init__.py +++ b/homeassistant/components/osoenergy/__init__.py @@ -16,18 +16,25 @@ from homeassistant.const import CONF_API_KEY, Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady from homeassistant.helpers import aiohttp_client +from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity import Entity from .const import DOMAIN -_T = TypeVar( - "_T", OSOEnergyBinarySensorData, OSOEnergySensorData, OSOEnergyWaterHeaterData +_OSOEnergyT = TypeVar( + "_OSOEnergyT", + OSOEnergyBinarySensorData, + OSOEnergySensorData, + OSOEnergyWaterHeaterData, ) +MANUFACTURER = "OSO Energy" PLATFORMS = [ + Platform.SENSOR, Platform.WATER_HEATER, ] PLATFORM_LOOKUP = { + Platform.SENSOR: "sensor", Platform.WATER_HEATER: "water_heater", } @@ -70,13 +77,18 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: return unload_ok -class OSOEnergyEntity(Entity, Generic[_T]): +class OSOEnergyEntity(Entity, Generic[_OSOEnergyT]): """Initiate OSO Energy Base Class.""" _attr_has_entity_name = True - def __init__(self, osoenergy: OSOEnergy, osoenergy_device: _T) -> None: + def __init__(self, osoenergy: OSOEnergy, entity_data: _OSOEnergyT) -> None: """Initialize the instance.""" self.osoenergy = osoenergy - self.device = osoenergy_device - self._attr_unique_id = osoenergy_device.device_id + self.entity_data = entity_data + self._attr_device_info = DeviceInfo( + identifiers={(DOMAIN, entity_data.device_id)}, + manufacturer=MANUFACTURER, + model=entity_data.device_type, + name=entity_data.device_name, + ) diff --git a/homeassistant/components/osoenergy/sensor.py 
b/homeassistant/components/osoenergy/sensor.py new file mode 100644 index 00000000000..0be6ad83281 --- /dev/null +++ b/homeassistant/components/osoenergy/sensor.py @@ -0,0 +1,151 @@ +"""Support for OSO Energy sensors.""" + +from collections.abc import Callable +from dataclasses import dataclass + +from apyosoenergyapi import OSOEnergy +from apyosoenergyapi.helper.const import OSOEnergySensorData + +from homeassistant.components.sensor import ( + SensorDeviceClass, + SensorEntity, + SensorEntityDescription, + SensorStateClass, +) +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import UnitOfEnergy, UnitOfPower, UnitOfVolume +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.typing import StateType + +from . import OSOEnergyEntity +from .const import DOMAIN + + +@dataclass(frozen=True, kw_only=True) +class OSOEnergySensorEntityDescription(SensorEntityDescription): + """Class describing OSO Energy heater sensor entities.""" + + value_fn: Callable[[OSOEnergy], StateType] + + +SENSOR_TYPES: dict[str, OSOEnergySensorEntityDescription] = { + "heater_mode": OSOEnergySensorEntityDescription( + key="heater_mode", + translation_key="heater_mode", + device_class=SensorDeviceClass.ENUM, + options=[ + "auto", + "manual", + "off", + "legionella", + "powersave", + "extraenergy", + "voltage", + "ffr", + ], + value_fn=lambda entity_data: entity_data.state.lower(), + ), + "optimization_mode": OSOEnergySensorEntityDescription( + key="optimization_mode", + translation_key="optimization_mode", + device_class=SensorDeviceClass.ENUM, + options=["off", "oso", "gridcompany", "smartcompany", "advanced"], + value_fn=lambda entity_data: entity_data.state.lower(), + ), + "power_load": OSOEnergySensorEntityDescription( + key="power_load", + device_class=SensorDeviceClass.POWER, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfPower.KILO_WATT, 
+ value_fn=lambda entity_data: entity_data.state, + ), + "tapping_capacity": OSOEnergySensorEntityDescription( + key="tapping_capacity", + translation_key="tapping_capacity", + device_class=SensorDeviceClass.ENERGY, + native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, + value_fn=lambda entity_data: entity_data.state, + ), + "capacity_mixed_water_40": OSOEnergySensorEntityDescription( + key="capacity_mixed_water_40", + translation_key="capacity_mixed_water_40", + device_class=SensorDeviceClass.VOLUME, + native_unit_of_measurement=UnitOfVolume.LITERS, + value_fn=lambda entity_data: entity_data.state, + ), + "v40_min": OSOEnergySensorEntityDescription( + key="v40_min", + translation_key="v40_min", + device_class=SensorDeviceClass.VOLUME, + native_unit_of_measurement=UnitOfVolume.LITERS, + value_fn=lambda entity_data: entity_data.state, + ), + "v40_level_min": OSOEnergySensorEntityDescription( + key="v40_level_min", + translation_key="v40_level_min", + device_class=SensorDeviceClass.VOLUME, + native_unit_of_measurement=UnitOfVolume.LITERS, + value_fn=lambda entity_data: entity_data.state, + ), + "v40_level_max": OSOEnergySensorEntityDescription( + key="v40_level_max", + translation_key="v40_level_max", + device_class=SensorDeviceClass.VOLUME, + native_unit_of_measurement=UnitOfVolume.LITERS, + value_fn=lambda entity_data: entity_data.state, + ), + "volume": OSOEnergySensorEntityDescription( + key="volume", + device_class=SensorDeviceClass.VOLUME, + native_unit_of_measurement=UnitOfVolume.LITERS, + value_fn=lambda entity_data: entity_data.state, + ), +} + + +async def async_setup_entry( + hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback +) -> None: + """Set up OSO Energy sensor.""" + osoenergy = hass.data[DOMAIN][entry.entry_id] + devices = osoenergy.session.device_list.get("sensor") + entities = [] + if devices: + for dev in devices: + sensor_type = dev.osoEnergyType.lower() + if sensor_type in SENSOR_TYPES: + entities.append( + 
OSOEnergySensor(osoenergy, SENSOR_TYPES[sensor_type], dev) + ) + + async_add_entities(entities, True) + + +class OSOEnergySensor(OSOEnergyEntity[OSOEnergySensorData], SensorEntity): + """OSO Energy Sensor Entity.""" + + entity_description: OSOEnergySensorEntityDescription + + def __init__( + self, + instance: OSOEnergy, + description: OSOEnergySensorEntityDescription, + entity_data: OSOEnergySensorData, + ) -> None: + """Initialize the OSO Energy sensor.""" + super().__init__(instance, entity_data) + + device_id = entity_data.device_id + self._attr_unique_id = f"{device_id}_{description.key}" + self.entity_description = description + + @property + def native_value(self) -> StateType: + """Return the state of the sensor.""" + return self.entity_description.value_fn(self.entity_data) + + async def async_update(self) -> None: + """Update all data for OSO Energy.""" + await self.osoenergy.session.update_data() + self.entity_data = await self.osoenergy.sensor.get_sensor(self.entity_data) diff --git a/homeassistant/components/osoenergy/strings.json b/homeassistant/components/osoenergy/strings.json index a45482bf030..5313f1d6565 100644 --- a/homeassistant/components/osoenergy/strings.json +++ b/homeassistant/components/osoenergy/strings.json @@ -17,13 +17,56 @@ } }, "error": { - "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", - "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]", - "unknown": "[%key:common::config_flow::error::unknown%]" + "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]" }, "abort": { "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]" } + }, + "entity": { + "sensor": { + "tapping_capacity": { + "name": "Tapping capacity" + }, + "capacity_mixed_water_40": { + "name": "Capacity mixed water 40°C" + }, + "v40_min": { + "name": "Mixed water at 40°C" + }, + "v40_level_min": { + "name": "Minimum 
level of mixed water at 40°C" + }, + "v40_level_max": { + "name": "Maximum level of mixed water at 40°C" + }, + "heater_mode": { + "name": "Heater mode", + "state": { + "auto": "Auto", + "extraenergy": "Extra energy", + "ffr": "Fast frequency reserve", + "legionella": "Legionella", + "manual": "Manual", + "off": "Off", + "powersave": "Power save", + "voltage": "Voltage" + } + }, + "optimization_mode": { + "name": "Optimization mode", + "state": { + "advanced": "Advanced", + "gridcompany": "Grid company", + "off": "Off", + "oso": "OSO", + "smartcompany": "Smart company" + } + }, + "profile": { + "name": "Profile local" + } + } } } diff --git a/homeassistant/components/osoenergy/water_heater.py b/homeassistant/components/osoenergy/water_heater.py index eaf54a9f9a4..b7fb2ba16e6 100644 --- a/homeassistant/components/osoenergy/water_heater.py +++ b/homeassistant/components/osoenergy/water_heater.py @@ -2,6 +2,7 @@ from typing import Any +from apyosoenergyapi import OSOEnergy from apyosoenergyapi.helper.const import OSOEnergyWaterHeaterData from homeassistant.components.water_heater import ( @@ -15,7 +16,6 @@ from homeassistant.components.water_heater import ( from homeassistant.config_entries import ConfigEntry from homeassistant.const import UnitOfTemperature from homeassistant.core import HomeAssistant -from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback from . 
import OSOEnergyEntity @@ -34,9 +34,6 @@ CURRENT_OPERATION_MAP: dict[str, Any] = { "extraenergy": STATE_HIGH_DEMAND, }, } -HEATER_MIN_TEMP = 10 -HEATER_MAX_TEMP = 80 -MANUFACTURER = "OSO Energy" async def async_setup_entry( @@ -59,30 +56,29 @@ class OSOEnergyWaterHeater( _attr_supported_features = WaterHeaterEntityFeature.TARGET_TEMPERATURE _attr_temperature_unit = UnitOfTemperature.CELSIUS - @property - def device_info(self) -> DeviceInfo: - """Return device information.""" - return DeviceInfo( - identifiers={(DOMAIN, self.device.device_id)}, - manufacturer=MANUFACTURER, - model=self.device.device_type, - name=self.device.device_name, - ) + def __init__( + self, + instance: OSOEnergy, + entity_data: OSOEnergyWaterHeaterData, + ) -> None: + """Initialize the OSO Energy water heater.""" + super().__init__(instance, entity_data) + self._attr_unique_id = entity_data.device_id @property def available(self) -> bool: """Return if the device is available.""" - return self.device.available + return self.entity_data.available @property def current_operation(self) -> str: """Return current operation.""" - status = self.device.current_operation + status = self.entity_data.current_operation if status == "off": return STATE_OFF - optimization_mode = self.device.optimization_mode.lower() - heater_mode = self.device.heater_mode.lower() + optimization_mode = self.entity_data.optimization_mode.lower() + heater_mode = self.entity_data.heater_mode.lower() if optimization_mode in CURRENT_OPERATION_MAP: return CURRENT_OPERATION_MAP[optimization_mode].get( heater_mode, STATE_ELECTRIC @@ -93,49 +89,51 @@ class OSOEnergyWaterHeater( @property def current_temperature(self) -> float: """Return the current temperature of the heater.""" - return self.device.current_temperature + return self.entity_data.current_temperature @property def target_temperature(self) -> float: """Return the temperature we try to reach.""" - return self.device.target_temperature + return 
self.entity_data.target_temperature @property def target_temperature_high(self) -> float: """Return the temperature we try to reach.""" - return self.device.target_temperature_high + return self.entity_data.target_temperature_high @property def target_temperature_low(self) -> float: """Return the temperature we try to reach.""" - return self.device.target_temperature_low + return self.entity_data.target_temperature_low @property def min_temp(self) -> float: """Return the minimum temperature.""" - return self.device.min_temperature + return self.entity_data.min_temperature @property def max_temp(self) -> float: """Return the maximum temperature.""" - return self.device.max_temperature + return self.entity_data.max_temperature async def async_turn_on(self, **kwargs) -> None: """Turn on hotwater.""" - await self.osoenergy.hotwater.turn_on(self.device, True) + await self.osoenergy.hotwater.turn_on(self.entity_data, True) async def async_turn_off(self, **kwargs) -> None: """Turn off hotwater.""" - await self.osoenergy.hotwater.turn_off(self.device, True) + await self.osoenergy.hotwater.turn_off(self.entity_data, True) async def async_set_temperature(self, **kwargs: Any) -> None: """Set new target temperature.""" target_temperature = int(kwargs.get("temperature", self.target_temperature)) profile = [target_temperature] * 24 - await self.osoenergy.hotwater.set_profile(self.device, profile) + await self.osoenergy.hotwater.set_profile(self.entity_data, profile) async def async_update(self) -> None: """Update all Node data from Hive.""" await self.osoenergy.session.update_data() - self.device = await self.osoenergy.hotwater.get_water_heater(self.device) + self.entity_data = await self.osoenergy.hotwater.get_water_heater( + self.entity_data + ) From 474a1a3d94d8d8da9edc62ec9209a7c00e7e0cf5 Mon Sep 17 00:00:00 2001 From: Richard Kroegel <42204099+rikroe@users.noreply.github.com> Date: Wed, 24 Apr 2024 09:46:55 +0200 Subject: [PATCH 346/426] Use display_precision if 
suggested_display_precision is None (#110270) Co-authored-by: Richard Co-authored-by: Erik Montnemery --- homeassistant/components/sensor/__init__.py | 16 ++++++++------ tests/components/sensor/test_init.py | 24 +++++++++++++++++++++ 2 files changed, 33 insertions(+), 7 deletions(-) diff --git a/homeassistant/components/sensor/__init__.py b/homeassistant/components/sensor/__init__.py index 1d06e1a24c4..ad6b3454ea9 100644 --- a/homeassistant/components/sensor/__init__.py +++ b/homeassistant/components/sensor/__init__.py @@ -747,13 +747,15 @@ class SensorEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_): return value - def _suggested_precision_or_none(self) -> int | None: - """Return suggested display precision, or None if not set.""" + def _display_precision_or_none(self) -> int | None: + """Return display precision, or None if not set.""" assert self.registry_entry - if (sensor_options := self.registry_entry.options.get(DOMAIN)) and ( - precision := sensor_options.get("suggested_display_precision") - ) is not None: - return cast(int, precision) + if not (sensor_options := self.registry_entry.options.get(DOMAIN)): + return None + + for option in ("display_precision", "suggested_display_precision"): + if (precision := sensor_options.get(option)) is not None: + return cast(int, precision) return None def _update_suggested_precision(self) -> None: @@ -835,7 +837,7 @@ class SensorEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_): Called when the entity registry entry has been updated and before the sensor is added to the state machine. 
""" - self._sensor_option_display_precision = self._suggested_precision_or_none() + self._sensor_option_display_precision = self._display_precision_or_none() assert self.registry_entry if ( sensor_options := self.registry_entry.options.get(f"{DOMAIN}.private") diff --git a/tests/components/sensor/test_init.py b/tests/components/sensor/test_init.py index 9e8e401ea46..74fd81188cd 100644 --- a/tests/components/sensor/test_init.py +++ b/tests/components/sensor/test_init.py @@ -1146,6 +1146,14 @@ async def test_unit_conversion_priority_precision( suggested_display_precision=suggested_precision, suggested_unit_of_measurement=suggested_unit, ) + entity4 = MockSensor( + name="Test", + device_class=device_class, + native_unit_of_measurement=native_unit, + native_value=str(native_value), + suggested_display_precision=None, + unique_id="very_unique_4", + ) setup_test_component_platform( hass, sensor.DOMAIN, @@ -1154,6 +1162,7 @@ async def test_unit_conversion_priority_precision( entity1, entity2, entity3, + entity4, ], ) @@ -1230,6 +1239,21 @@ async def test_unit_conversion_priority_precision( round(custom_state, 4) ) + # Set a display_precision without having suggested_display_precision + entity_registry.async_update_entity_options( + entity4.entity_id, + "sensor", + {"display_precision": 4}, + ) + entry4 = entity_registry.async_get(entity4.entity_id) + assert "suggested_display_precision" not in entry4.options["sensor"] + assert entry4.options["sensor"]["display_precision"] == 4 + await hass.async_block_till_done() + state = hass.states.get(entity4.entity_id) + assert float(async_rounded_state(hass, entity4.entity_id, state)) == pytest.approx( + round(automatic_state, 4) + ) + @pytest.mark.parametrize( ( From ec377ce6657f01fcb9fb9b2b285a7f3459879272 Mon Sep 17 00:00:00 2001 From: Robert Resch Date: Wed, 24 Apr 2024 09:49:10 +0200 Subject: [PATCH 347/426] Bump deebot-client to 7.1.0 (#116082) --- homeassistant/components/ecovacs/manifest.json | 2 +- requirements_all.txt | 2 
+- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/ecovacs/manifest.json b/homeassistant/components/ecovacs/manifest.json index 2e088024215..aad04d9ec87 100644 --- a/homeassistant/components/ecovacs/manifest.json +++ b/homeassistant/components/ecovacs/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/ecovacs", "iot_class": "cloud_push", "loggers": ["sleekxmppfs", "sucks", "deebot_client"], - "requirements": ["py-sucks==0.9.9", "deebot-client==7.0.0"] + "requirements": ["py-sucks==0.9.9", "deebot-client==7.1.0"] } diff --git a/requirements_all.txt b/requirements_all.txt index df688e6e00f..42f716f58e3 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -697,7 +697,7 @@ debugpy==1.8.1 # decora==0.6 # homeassistant.components.ecovacs -deebot-client==7.0.0 +deebot-client==7.1.0 # homeassistant.components.ihc # homeassistant.components.namecheapdns diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 60e54a81780..11533275029 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -575,7 +575,7 @@ dbus-fast==2.21.1 debugpy==1.8.1 # homeassistant.components.ecovacs -deebot-client==7.0.0 +deebot-client==7.1.0 # homeassistant.components.ihc # homeassistant.components.namecheapdns From b520efb87ae422faba8d2fcddecd201865fe1df0 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Wed, 24 Apr 2024 09:56:59 +0200 Subject: [PATCH 348/426] Small speed up to async_track_event (#116083) --- homeassistant/helpers/event.py | 36 +++++++++++++--------------------- 1 file changed, 14 insertions(+), 22 deletions(-) diff --git a/homeassistant/helpers/event.py b/homeassistant/helpers/event.py index 7fae0976686..5cffe992c0d 100644 --- a/homeassistant/helpers/event.py +++ b/homeassistant/helpers/event.py @@ -3,11 +3,12 @@ from __future__ import annotations import asyncio +from collections import defaultdict from collections.abc import Callable, Coroutine, Iterable, Mapping, Sequence import copy from dataclasses import dataclass from datetime import datetime, timedelta -import functools as ft +from functools import partial, wraps import logging from random import randint import time @@ -161,7 +162,7 @@ def threaded_listener_factory( ) -> Callable[Concatenate[HomeAssistant, _P], CALLBACK_TYPE]: """Convert an async event helper to a threaded one.""" - @ft.wraps(async_factory) + @wraps(async_factory) def factory( hass: HomeAssistant, *args: _P.args, **kwargs: _P.kwargs ) -> CALLBACK_TYPE: @@ -170,7 +171,7 @@ def threaded_listener_factory( raise TypeError("First parameter needs to be a hass instance") async_remove = run_callback_threadsafe( - hass.loop, ft.partial(async_factory, hass, *args, **kwargs) + hass.loop, partial(async_factory, hass, *args, **kwargs) ).result() def remove() -> None: @@ -409,19 +410,16 @@ def _async_track_event( return _remove_empty_listener hass_data = hass.data - callbacks_key = tracker.callbacks_key - - callbacks: dict[str, list[HassJob[[Event[_TypedDictT]], Any]]] | None - if not (callbacks := hass_data.get(callbacks_key)): - callbacks = hass_data[callbacks_key] = {} + callbacks: defaultdict[str, list[HassJob[[Event[_TypedDictT]], Any]]] | None + if not (callbacks := hass_data.get(tracker.callbacks_key)): + callbacks = hass_data[tracker.callbacks_key] = defaultdict(list) listeners_key = tracker.listeners_key - - 
if listeners_key not in hass_data: - hass_data[listeners_key] = hass.bus.async_listen( + if tracker.listeners_key not in hass_data: + hass_data[tracker.listeners_key] = hass.bus.async_listen( tracker.event_type, - ft.partial(tracker.dispatcher_callable, hass, callbacks), - event_filter=ft.partial(tracker.filter_callable, hass, callbacks), + partial(tracker.dispatcher_callable, hass, callbacks), + event_filter=partial(tracker.filter_callable, hass, callbacks), ) job = HassJob(action, f"track {tracker.event_type} event {keys}", job_type=job_type) @@ -432,19 +430,13 @@ def _async_track_event( # here because this function gets called ~20000 times # during startup, and we want to avoid the overhead of # creating empty lists and throwing them away. - if callback_list := callbacks.get(keys): - callback_list.append(job) - else: - callbacks[keys] = [job] + callbacks[keys].append(job) keys = [keys] else: for key in keys: - if callback_list := callbacks.get(key): - callback_list.append(job) - else: - callbacks[key] = [job] + callbacks[key].append(job) - return ft.partial(_remove_listener, hass, listeners_key, keys, job, callbacks) + return partial(_remove_listener, hass, listeners_key, keys, job, callbacks) @callback From a4829330f6cdb3159d2ec905120ea284ae1c86c8 Mon Sep 17 00:00:00 2001 From: Robert Resch Date: Wed, 24 Apr 2024 09:57:38 +0200 Subject: [PATCH 349/426] Add strict connection for cloud (#115814) Co-authored-by: Martin Hjelmare --- homeassistant/components/cloud/__init__.py | 100 +++++- homeassistant/components/cloud/client.py | 1 + homeassistant/components/cloud/const.py | 1 + homeassistant/components/cloud/http_api.py | 6 +- homeassistant/components/cloud/icons.json | 1 + homeassistant/components/cloud/manifest.json | 2 +- homeassistant/components/cloud/prefs.py | 18 +- homeassistant/components/cloud/strings.json | 12 + homeassistant/components/cloud/util.py | 15 + homeassistant/components/http/__init__.py | 9 +- homeassistant/components/http/auth.py | 100 
++++-- homeassistant/components/http/const.py | 2 + homeassistant/helpers/network.py | 12 +- script/hassfest/dependencies.py | 1 + tests/components/cloud/test_client.py | 2 + tests/components/cloud/test_http_api.py | 5 + tests/components/cloud/test_init.py | 84 ++++- tests/components/cloud/test_prefs.py | 25 +- .../cloud/test_strict_connection.py | 294 ++++++++++++++++++ tests/helpers/test_network.py | 12 + 20 files changed, 644 insertions(+), 58 deletions(-) create mode 100644 homeassistant/components/cloud/util.py create mode 100644 tests/components/cloud/test_strict_connection.py diff --git a/homeassistant/components/cloud/__init__.py b/homeassistant/components/cloud/__init__.py index 80f9d9f9368..2552fe4bf5c 100644 --- a/homeassistant/components/cloud/__init__.py +++ b/homeassistant/components/cloud/__init__.py @@ -7,11 +7,14 @@ from collections.abc import Awaitable, Callable from datetime import datetime, timedelta from enum import Enum from typing import cast +from urllib.parse import quote_plus, urljoin from hass_nabucasa import Cloud import voluptuous as vol -from homeassistant.components import alexa, google_assistant +from homeassistant.components import alexa, google_assistant, http +from homeassistant.components.auth import STRICT_CONNECTION_URL +from homeassistant.components.http.auth import async_sign_path from homeassistant.config_entries import SOURCE_SYSTEM, ConfigEntry from homeassistant.const import ( CONF_DESCRIPTION, @@ -21,8 +24,21 @@ from homeassistant.const import ( EVENT_HOMEASSISTANT_STOP, Platform, ) -from homeassistant.core import Event, HassJob, HomeAssistant, ServiceCall, callback -from homeassistant.exceptions import HomeAssistantError +from homeassistant.core import ( + Event, + HassJob, + HomeAssistant, + ServiceCall, + ServiceResponse, + SupportsResponse, + callback, +) +from homeassistant.exceptions import ( + HomeAssistantError, + ServiceValidationError, + Unauthorized, + UnknownUser, +) from homeassistant.helpers import 
config_validation as cv, entityfilter from homeassistant.helpers.aiohttp_client import async_get_clientsession from homeassistant.helpers.discovery import async_load_platform @@ -31,6 +47,7 @@ from homeassistant.helpers.dispatcher import ( async_dispatcher_send, ) from homeassistant.helpers.event import async_call_later +from homeassistant.helpers.network import NoURLAvailableError, get_url from homeassistant.helpers.service import async_register_admin_service from homeassistant.helpers.typing import ConfigType from homeassistant.loader import bind_hass @@ -265,18 +282,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, _shutdown) _remote_handle_prefs_updated(cloud) - - async def _service_handler(service: ServiceCall) -> None: - """Handle service for cloud.""" - if service.service == SERVICE_REMOTE_CONNECT: - await prefs.async_update(remote_enabled=True) - elif service.service == SERVICE_REMOTE_DISCONNECT: - await prefs.async_update(remote_enabled=False) - - async_register_admin_service(hass, DOMAIN, SERVICE_REMOTE_CONNECT, _service_handler) - async_register_admin_service( - hass, DOMAIN, SERVICE_REMOTE_DISCONNECT, _service_handler - ) + _setup_services(hass, prefs) async def async_startup_repairs(_: datetime) -> None: """Create repair issues after startup.""" @@ -395,3 +401,67 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Unload a config entry.""" return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) + + +@callback +def _setup_services(hass: HomeAssistant, prefs: CloudPreferences) -> None: + """Set up services for cloud component.""" + + async def _service_handler(service: ServiceCall) -> None: + """Handle service for cloud.""" + if service.service == SERVICE_REMOTE_CONNECT: + await prefs.async_update(remote_enabled=True) + elif service.service == 
SERVICE_REMOTE_DISCONNECT: + await prefs.async_update(remote_enabled=False) + + async_register_admin_service(hass, DOMAIN, SERVICE_REMOTE_CONNECT, _service_handler) + async_register_admin_service( + hass, DOMAIN, SERVICE_REMOTE_DISCONNECT, _service_handler + ) + + async def create_temporary_strict_connection_url( + call: ServiceCall, + ) -> ServiceResponse: + """Create a strict connection url and return it.""" + # Copied form homeassistant/helpers/service.py#_async_admin_handler + # as the helper supports no responses yet + if call.context.user_id: + user = await hass.auth.async_get_user(call.context.user_id) + if user is None: + raise UnknownUser(context=call.context) + if not user.is_admin: + raise Unauthorized(context=call.context) + + if prefs.strict_connection is http.const.StrictConnectionMode.DISABLED: + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="strict_connection_not_enabled", + ) + + try: + url = get_url(hass, require_cloud=True) + except NoURLAvailableError as ex: + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="no_url_available", + ) from ex + + path = async_sign_path( + hass, + STRICT_CONNECTION_URL, + timedelta(hours=1), + use_content_user=True, + ) + url = urljoin(url, path) + + return { + "url": f"https://login.home-assistant.io?u={quote_plus(url)}", + "direct_url": url, + } + + hass.services.async_register( + DOMAIN, + "create_temporary_strict_connection_url", + create_temporary_strict_connection_url, + supports_response=SupportsResponse.ONLY, + ) diff --git a/homeassistant/components/cloud/client.py b/homeassistant/components/cloud/client.py index 01c8de77156..c4d1c1dec60 100644 --- a/homeassistant/components/cloud/client.py +++ b/homeassistant/components/cloud/client.py @@ -250,6 +250,7 @@ class CloudClient(Interface): "enabled": self._prefs.remote_enabled, "instance_domain": self.cloud.remote.instance_domain, "alias": self.cloud.remote.alias, + "strict_connection": 
self._prefs.strict_connection, }, "version": HA_VERSION, "instance_id": self.prefs.instance_id, diff --git a/homeassistant/components/cloud/const.py b/homeassistant/components/cloud/const.py index 2c58dd57340..8b68eefc443 100644 --- a/homeassistant/components/cloud/const.py +++ b/homeassistant/components/cloud/const.py @@ -33,6 +33,7 @@ PREF_GOOGLE_SETTINGS_VERSION = "google_settings_version" PREF_TTS_DEFAULT_VOICE = "tts_default_voice" PREF_GOOGLE_CONNECTED = "google_connected" PREF_REMOTE_ALLOW_REMOTE_ENABLE = "remote_allow_remote_enable" +PREF_STRICT_CONNECTION = "strict_connection" DEFAULT_TTS_DEFAULT_VOICE = ("en-US", "JennyNeural") DEFAULT_DISABLE_2FA = False DEFAULT_ALEXA_REPORT_STATE = True diff --git a/homeassistant/components/cloud/http_api.py b/homeassistant/components/cloud/http_api.py index b577e9de0d4..29185191a20 100644 --- a/homeassistant/components/cloud/http_api.py +++ b/homeassistant/components/cloud/http_api.py @@ -19,7 +19,7 @@ from hass_nabucasa.const import STATE_DISCONNECTED from hass_nabucasa.voice import TTS_VOICES import voluptuous as vol -from homeassistant.components import websocket_api +from homeassistant.components import http, websocket_api from homeassistant.components.alexa import ( entities as alexa_entities, errors as alexa_errors, @@ -46,6 +46,7 @@ from .const import ( PREF_GOOGLE_REPORT_STATE, PREF_GOOGLE_SECURE_DEVICES_PIN, PREF_REMOTE_ALLOW_REMOTE_ENABLE, + PREF_STRICT_CONNECTION, PREF_TTS_DEFAULT_VOICE, REQUEST_TIMEOUT, ) @@ -452,6 +453,9 @@ def validate_language_voice(value: tuple[str, str]) -> tuple[str, str]: vol.Coerce(tuple), validate_language_voice ), vol.Optional(PREF_REMOTE_ALLOW_REMOTE_ENABLE): bool, + vol.Optional(PREF_STRICT_CONNECTION): vol.Coerce( + http.const.StrictConnectionMode + ), } ) @websocket_api.async_response diff --git a/homeassistant/components/cloud/icons.json b/homeassistant/components/cloud/icons.json index 06ee7eb2f19..1a8593388b4 100644 --- a/homeassistant/components/cloud/icons.json +++ 
b/homeassistant/components/cloud/icons.json @@ -1,5 +1,6 @@ { "services": { + "create_temporary_strict_connection_url": "mdi:login-variant", "remote_connect": "mdi:cloud", "remote_disconnect": "mdi:cloud-off" } diff --git a/homeassistant/components/cloud/manifest.json b/homeassistant/components/cloud/manifest.json index 49a3fc0bf5c..0d2ee546ad8 100644 --- a/homeassistant/components/cloud/manifest.json +++ b/homeassistant/components/cloud/manifest.json @@ -3,7 +3,7 @@ "name": "Home Assistant Cloud", "after_dependencies": ["assist_pipeline", "google_assistant", "alexa"], "codeowners": ["@home-assistant/cloud"], - "dependencies": ["http", "repairs", "webhook"], + "dependencies": ["auth", "http", "repairs", "webhook"], "documentation": "https://www.home-assistant.io/integrations/cloud", "integration_type": "system", "iot_class": "cloud_push", diff --git a/homeassistant/components/cloud/prefs.py b/homeassistant/components/cloud/prefs.py index af4e68194d6..9fce615128b 100644 --- a/homeassistant/components/cloud/prefs.py +++ b/homeassistant/components/cloud/prefs.py @@ -10,7 +10,7 @@ from hass_nabucasa.voice import MAP_VOICE from homeassistant.auth.const import GROUP_ID_ADMIN from homeassistant.auth.models import User -from homeassistant.components import webhook +from homeassistant.components import http, webhook from homeassistant.components.google_assistant.http import ( async_get_users as async_get_google_assistant_users, ) @@ -44,6 +44,7 @@ from .const import ( PREF_INSTANCE_ID, PREF_REMOTE_ALLOW_REMOTE_ENABLE, PREF_REMOTE_DOMAIN, + PREF_STRICT_CONNECTION, PREF_TTS_DEFAULT_VOICE, PREF_USERNAME, ) @@ -176,6 +177,7 @@ class CloudPreferences: google_settings_version: int | UndefinedType = UNDEFINED, google_connected: bool | UndefinedType = UNDEFINED, remote_allow_remote_enable: bool | UndefinedType = UNDEFINED, + strict_connection: http.const.StrictConnectionMode | UndefinedType = UNDEFINED, ) -> None: """Update user preferences.""" prefs = {**self._prefs} @@ -195,6 
+197,7 @@ class CloudPreferences: (PREF_REMOTE_DOMAIN, remote_domain), (PREF_GOOGLE_CONNECTED, google_connected), (PREF_REMOTE_ALLOW_REMOTE_ENABLE, remote_allow_remote_enable), + (PREF_STRICT_CONNECTION, strict_connection), ): if value is not UNDEFINED: prefs[key] = value @@ -242,6 +245,7 @@ class CloudPreferences: PREF_GOOGLE_SECURE_DEVICES_PIN: self.google_secure_devices_pin, PREF_REMOTE_ALLOW_REMOTE_ENABLE: self.remote_allow_remote_enable, PREF_TTS_DEFAULT_VOICE: self.tts_default_voice, + PREF_STRICT_CONNECTION: self.strict_connection, } @property @@ -358,6 +362,17 @@ class CloudPreferences: """ return self._prefs.get(PREF_TTS_DEFAULT_VOICE, DEFAULT_TTS_DEFAULT_VOICE) # type: ignore[no-any-return] + @property + def strict_connection(self) -> http.const.StrictConnectionMode: + """Return the strict connection mode.""" + mode = self._prefs.get( + PREF_STRICT_CONNECTION, http.const.StrictConnectionMode.DISABLED + ) + + if not isinstance(mode, http.const.StrictConnectionMode): + mode = http.const.StrictConnectionMode(mode) + return mode # type: ignore[no-any-return] + async def get_cloud_user(self) -> str: """Return ID of Home Assistant Cloud system user.""" user = await self._load_cloud_user() @@ -415,4 +430,5 @@ class CloudPreferences: PREF_REMOTE_DOMAIN: None, PREF_REMOTE_ALLOW_REMOTE_ENABLE: True, PREF_USERNAME: username, + PREF_STRICT_CONNECTION: http.const.StrictConnectionMode.DISABLED, } diff --git a/homeassistant/components/cloud/strings.json b/homeassistant/components/cloud/strings.json index 16a82a27c1a..1fec87235da 100644 --- a/homeassistant/components/cloud/strings.json +++ b/homeassistant/components/cloud/strings.json @@ -5,6 +5,14 @@ "single_instance_allowed": "[%key:common::config_flow::abort::single_instance_allowed%]" } }, + "exceptions": { + "strict_connection_not_enabled": { + "message": "Strict connection is not enabled for cloud requests" + }, + "no_url_available": { + "message": "No cloud URL available.\nPlease mark sure you have a working 
Remote UI." + } + }, "system_health": { "info": { "can_reach_cert_server": "Reach Certificate Server", @@ -73,6 +81,10 @@ } }, "services": { + "create_temporary_strict_connection_url": { + "name": "Create a temporary strict connection URL", + "description": "Create a temporary strict connection URL, which can be used to login on another device." + }, "remote_connect": { "name": "Remote connect", "description": "Makes the instance UI accessible from outside of the local network by using Home Assistant Cloud." diff --git a/homeassistant/components/cloud/util.py b/homeassistant/components/cloud/util.py new file mode 100644 index 00000000000..3e055851fff --- /dev/null +++ b/homeassistant/components/cloud/util.py @@ -0,0 +1,15 @@ +"""Cloud util functions.""" + +from hass_nabucasa import Cloud + +from homeassistant.components import http +from homeassistant.core import HomeAssistant + +from .client import CloudClient +from .const import DOMAIN + + +def get_strict_connection_mode(hass: HomeAssistant) -> http.const.StrictConnectionMode: + """Get the strict connection mode.""" + cloud: Cloud[CloudClient] = hass.data[DOMAIN] + return cloud.client.prefs.strict_connection diff --git a/homeassistant/components/http/__init__.py b/homeassistant/components/http/__init__.py index f9532b90ce6..83601599d88 100644 --- a/homeassistant/components/http/__init__.py +++ b/homeassistant/components/http/__init__.py @@ -69,6 +69,7 @@ from homeassistant.util.json import json_loads from .auth import async_setup_auth, async_sign_path from .ban import setup_bans from .const import ( # noqa: F401 + DOMAIN, KEY_HASS_REFRESH_TOKEN_ID, KEY_HASS_USER, StrictConnectionMode, @@ -82,8 +83,6 @@ from .security_filter import setup_security_filter from .static import CACHE_HEADERS, CachingStaticResource from .web_runner import HomeAssistantTCPSite -DOMAIN: Final = "http" - CONF_SERVER_HOST: Final = "server_host" CONF_SERVER_PORT: Final = "server_port" CONF_BASE_URL: Final = "base_url" @@ -149,7 +148,7 @@ 
HTTP_SCHEMA: Final = vol.All( vol.Optional(CONF_USE_X_FRAME_OPTIONS, default=True): cv.boolean, vol.Optional( CONF_STRICT_CONNECTION, default=StrictConnectionMode.DISABLED - ): vol.In([e.value for e in StrictConnectionMode]), + ): vol.Coerce(StrictConnectionMode), } ), ) @@ -628,7 +627,9 @@ def _setup_services(hass: HomeAssistant, conf: ConfData) -> None: ) try: - url = get_url(hass, prefer_external=True, allow_internal=False) + url = get_url( + hass, prefer_external=True, allow_internal=False, allow_cloud=False + ) except NoURLAvailableError as ex: raise ServiceValidationError( translation_domain=DOMAIN, diff --git a/homeassistant/components/http/auth.py b/homeassistant/components/http/auth.py index 1eb74289089..889c9e76367 100644 --- a/homeassistant/components/http/auth.py +++ b/homeassistant/components/http/auth.py @@ -25,6 +25,7 @@ from homeassistant.auth.const import GROUP_ID_READ_ONLY from homeassistant.auth.models import User from homeassistant.components import websocket_api from homeassistant.core import HomeAssistant, callback +from homeassistant.helpers import singleton from homeassistant.helpers.http import current_request from homeassistant.helpers.json import json_bytes from homeassistant.helpers.network import is_cloud_connection @@ -32,6 +33,7 @@ from homeassistant.helpers.storage import Store from homeassistant.util.network import is_local from .const import ( + DOMAIN, KEY_AUTHENTICATED, KEY_HASS_REFRESH_TOKEN_ID, KEY_HASS_USER, @@ -50,8 +52,9 @@ STORAGE_VERSION = 1 STORAGE_KEY = "http.auth" CONTENT_USER_NAME = "Home Assistant Content" STRICT_CONNECTION_EXCLUDED_PATH = "/api/webhook/" +STRICT_CONNECTION_STATIC_PAGE_NAME = "strict_connection_static_page.html" STRICT_CONNECTION_STATIC_PAGE = os.path.join( - os.path.dirname(__file__), "strict_connection_static_page.html" + os.path.dirname(__file__), STRICT_CONNECTION_STATIC_PAGE_NAME ) @@ -156,16 +159,10 @@ async def async_setup_auth( await store.async_save(data) hass.data[STORAGE_KEY] = 
refresh_token.id - strict_connection_static_file_content = None + if strict_connection_mode_non_cloud is StrictConnectionMode.STATIC_PAGE: - - def read_static_page() -> str: - with open(STRICT_CONNECTION_STATIC_PAGE, encoding="utf-8") as file: - return file.read() - - strict_connection_static_file_content = await hass.async_add_executor_job( - read_static_page - ) + # Load the static page content on setup + await _read_strict_connection_static_page(hass) @callback def async_validate_auth_header(request: Request) -> bool: @@ -255,21 +252,36 @@ async def async_setup_auth( authenticated = True auth_type = "signed request" - if ( - not authenticated - and strict_connection_mode_non_cloud is not StrictConnectionMode.DISABLED - and not request.path.startswith(STRICT_CONNECTION_EXCLUDED_PATH) - and not await hass.auth.session.async_validate_request_for_strict_connection_session( - request - ) - and ( - resp := _async_perform_action_on_non_local( - request, strict_connection_static_file_content - ) - ) - is not None + if not authenticated and not request.path.startswith( + STRICT_CONNECTION_EXCLUDED_PATH ): - return resp + strict_connection_mode = strict_connection_mode_non_cloud + strict_connection_func = ( + _async_perform_strict_connection_action_on_non_local + ) + if is_cloud_connection(hass): + from homeassistant.components.cloud.util import ( # pylint: disable=import-outside-toplevel + get_strict_connection_mode, + ) + + strict_connection_mode = get_strict_connection_mode(hass) + strict_connection_func = _async_perform_strict_connection_action + + if ( + strict_connection_mode is not StrictConnectionMode.DISABLED + and not await hass.auth.session.async_validate_request_for_strict_connection_session( + request + ) + and ( + resp := await strict_connection_func( + hass, + request, + strict_connection_mode is StrictConnectionMode.STATIC_PAGE, + ) + ) + is not None + ): + return resp if authenticated and _LOGGER.isEnabledFor(logging.DEBUG): _LOGGER.debug( @@ -286,17 
+298,17 @@ async def async_setup_auth( app.middlewares.append(auth_middleware) -@callback -def _async_perform_action_on_non_local( +async def _async_perform_strict_connection_action_on_non_local( + hass: HomeAssistant, request: Request, - strict_connection_static_file_content: str | None, + static_page: bool, ) -> StreamResponse | None: """Perform strict connection mode action if the request is not local. The function does the following: - Try to get the IP address of the request. If it fails, assume it's not local - If the request is local, return None (allow the request to continue) - - If strict_connection_static_file_content is set, return a response with the content + - If static_page is True, return a response with the content - Otherwise close the connection and raise an exception """ try: @@ -308,10 +320,25 @@ def _async_perform_action_on_non_local( if ip_address_ and is_local(ip_address_): return None - _LOGGER.debug("Perform strict connection action for %s", ip_address_) - if strict_connection_static_file_content: + return await _async_perform_strict_connection_action(hass, request, static_page) + + +async def _async_perform_strict_connection_action( + hass: HomeAssistant, + request: Request, + static_page: bool, +) -> StreamResponse | None: + """Perform strict connection mode action. 
+ + The function does the following: + - If static_page is True, return a response with the content + - Otherwise close the connection and raise an exception + """ + + _LOGGER.debug("Perform strict connection action for %s", request.remote) + if static_page: return Response( - text=strict_connection_static_file_content, + text=await _read_strict_connection_static_page(hass), content_type="text/html", status=HTTPStatus.IM_A_TEAPOT, ) @@ -322,3 +349,14 @@ def _async_perform_action_on_non_local( # We need to raise an exception to stop processing the request raise HTTPBadRequest + + +@singleton.singleton(f"{DOMAIN}_{STRICT_CONNECTION_STATIC_PAGE_NAME}") +async def _read_strict_connection_static_page(hass: HomeAssistant) -> str: + """Read the strict connection static page from disk via executor.""" + + def read_static_page() -> str: + with open(STRICT_CONNECTION_STATIC_PAGE, encoding="utf-8") as file: + return file.read() + + return await hass.async_add_executor_job(read_static_page) diff --git a/homeassistant/components/http/const.py b/homeassistant/components/http/const.py index d02416c531b..662596288c7 100644 --- a/homeassistant/components/http/const.py +++ b/homeassistant/components/http/const.py @@ -5,6 +5,8 @@ from typing import Final from homeassistant.helpers.http import KEY_AUTHENTICATED, KEY_HASS # noqa: F401 +DOMAIN: Final = "http" + KEY_HASS_USER: Final = "hass_user" KEY_HASS_REFRESH_TOKEN_ID: Final = "hass_refresh_token_id" diff --git a/homeassistant/helpers/network.py b/homeassistant/helpers/network.py index 6e8fa8dc3a3..d5891973e40 100644 --- a/homeassistant/helpers/network.py +++ b/homeassistant/helpers/network.py @@ -122,6 +122,7 @@ def get_url( require_current_request: bool = False, require_ssl: bool = False, require_standard_port: bool = False, + require_cloud: bool = False, allow_internal: bool = True, allow_external: bool = True, allow_cloud: bool = True, @@ -145,7 +146,7 @@ def get_url( # Try finding an URL in the order specified for url_type in 
order: - if allow_internal and url_type == TYPE_URL_INTERNAL: + if allow_internal and url_type == TYPE_URL_INTERNAL and not require_cloud: with suppress(NoURLAvailableError): return _get_internal_url( hass, @@ -155,7 +156,7 @@ def get_url( require_standard_port=require_standard_port, ) - if allow_external and url_type == TYPE_URL_EXTERNAL: + if require_cloud or (allow_external and url_type == TYPE_URL_EXTERNAL): with suppress(NoURLAvailableError): return _get_external_url( hass, @@ -165,7 +166,10 @@ def get_url( require_current_request=require_current_request, require_ssl=require_ssl, require_standard_port=require_standard_port, + require_cloud=require_cloud, ) + if require_cloud: + raise NoURLAvailableError # For current request, we accept loopback interfaces (e.g., 127.0.0.1), # the Supervisor hostname and localhost transparently @@ -263,8 +267,12 @@ def _get_external_url( require_current_request: bool = False, require_ssl: bool = False, require_standard_port: bool = False, + require_cloud: bool = False, ) -> str: """Get external URL of this instance.""" + if require_cloud: + return _get_cloud_url(hass, require_current_request=require_current_request) + if prefer_cloud and allow_cloud: with suppress(NoURLAvailableError): return _get_cloud_url(hass) diff --git a/script/hassfest/dependencies.py b/script/hassfest/dependencies.py index 1547bc1e829..d4eb135a265 100644 --- a/script/hassfest/dependencies.py +++ b/script/hassfest/dependencies.py @@ -152,6 +152,7 @@ IGNORE_VIOLATIONS = { ("demo", "manual"), # This would be a circular dep ("http", "network"), + ("http", "cloud"), # This would be a circular dep ("zha", "homeassistant_hardware"), ("zha", "homeassistant_sky_connect"), diff --git a/tests/components/cloud/test_client.py b/tests/components/cloud/test_client.py index 5e15aa32b6f..bcddc32f107 100644 --- a/tests/components/cloud/test_client.py +++ b/tests/components/cloud/test_client.py @@ -24,6 +24,7 @@ from homeassistant.components.homeassistant.exposed_entities 
import ( ExposedEntities, async_expose_entity, ) +from homeassistant.components.http.const import StrictConnectionMode from homeassistant.const import CONTENT_TYPE_JSON, __version__ as HA_VERSION from homeassistant.core import HomeAssistant, State from homeassistant.helpers import entity_registry as er @@ -387,6 +388,7 @@ async def test_cloud_connection_info(hass: HomeAssistant) -> None: "connected": False, "enabled": False, "instance_domain": None, + "strict_connection": StrictConnectionMode.DISABLED, }, "version": HA_VERSION, } diff --git a/tests/components/cloud/test_http_api.py b/tests/components/cloud/test_http_api.py index 5ee9af88681..d9d2b5c6742 100644 --- a/tests/components/cloud/test_http_api.py +++ b/tests/components/cloud/test_http_api.py @@ -19,6 +19,7 @@ from homeassistant.components.assist_pipeline.pipeline import STORAGE_KEY from homeassistant.components.cloud.const import DEFAULT_EXPOSED_DOMAINS, DOMAIN from homeassistant.components.google_assistant.helpers import GoogleEntity from homeassistant.components.homeassistant import exposed_entities +from homeassistant.components.http.const import StrictConnectionMode from homeassistant.components.websocket_api import ERR_INVALID_FORMAT from homeassistant.core import HomeAssistant, State from homeassistant.helpers import entity_registry as er @@ -782,6 +783,7 @@ async def test_websocket_status( "google_report_state": True, "remote_allow_remote_enable": True, "remote_enabled": False, + "strict_connection": "disabled", "tts_default_voice": ["en-US", "JennyNeural"], }, "alexa_entities": { @@ -901,6 +903,7 @@ async def test_websocket_update_preferences( assert cloud.client.prefs.alexa_enabled assert cloud.client.prefs.google_secure_devices_pin is None assert cloud.client.prefs.remote_allow_remote_enable is True + assert cloud.client.prefs.strict_connection is StrictConnectionMode.DISABLED client = await hass_ws_client(hass) @@ -912,6 +915,7 @@ async def test_websocket_update_preferences( 
"google_secure_devices_pin": "1234", "tts_default_voice": ["en-GB", "RyanNeural"], "remote_allow_remote_enable": False, + "strict_connection": StrictConnectionMode.DROP_CONNECTION, } ) response = await client.receive_json() @@ -922,6 +926,7 @@ async def test_websocket_update_preferences( assert cloud.client.prefs.google_secure_devices_pin == "1234" assert cloud.client.prefs.remote_allow_remote_enable is False assert cloud.client.prefs.tts_default_voice == ("en-GB", "RyanNeural") + assert cloud.client.prefs.strict_connection is StrictConnectionMode.DROP_CONNECTION @pytest.mark.parametrize( diff --git a/tests/components/cloud/test_init.py b/tests/components/cloud/test_init.py index 9cc1324ebc1..98f9a54c04b 100644 --- a/tests/components/cloud/test_init.py +++ b/tests/components/cloud/test_init.py @@ -3,6 +3,7 @@ from collections.abc import Callable, Coroutine from typing import Any from unittest.mock import MagicMock, patch +from urllib.parse import quote_plus from hass_nabucasa import Cloud import pytest @@ -13,11 +14,16 @@ from homeassistant.components.cloud import ( CloudNotConnected, async_get_or_create_cloudhook, ) -from homeassistant.components.cloud.const import DOMAIN, PREF_CLOUDHOOKS +from homeassistant.components.cloud.const import ( + DOMAIN, + PREF_CLOUDHOOKS, + PREF_STRICT_CONNECTION, +) from homeassistant.components.cloud.prefs import STORAGE_KEY +from homeassistant.components.http.const import StrictConnectionMode from homeassistant.const import EVENT_HOMEASSISTANT_STOP from homeassistant.core import Context, HomeAssistant -from homeassistant.exceptions import Unauthorized +from homeassistant.exceptions import ServiceValidationError, Unauthorized from homeassistant.setup import async_setup_component from tests.common import MockConfigEntry, MockUser @@ -295,3 +301,77 @@ async def test_cloud_logout( await hass.async_block_till_done() assert cloud.is_logged_in is False + + +async def 
test_service_create_temporary_strict_connection_url_strict_connection_disabled( + hass: HomeAssistant, +) -> None: + """Test service create_temporary_strict_connection_url with strict_connection not enabled.""" + mock_config_entry = MockConfigEntry(domain=DOMAIN) + mock_config_entry.add_to_hass(hass) + assert await async_setup_component(hass, DOMAIN, {"cloud": {}}) + await hass.async_block_till_done() + with pytest.raises( + ServiceValidationError, + match="Strict connection is not enabled for cloud requests", + ): + await hass.services.async_call( + cloud.DOMAIN, + "create_temporary_strict_connection_url", + blocking=True, + return_response=True, + ) + + +@pytest.mark.parametrize( + ("mode"), + [ + StrictConnectionMode.DROP_CONNECTION, + StrictConnectionMode.STATIC_PAGE, + ], +) +async def test_service_create_temporary_strict_connection( + hass: HomeAssistant, + set_cloud_prefs: Callable[[dict[str, Any]], Coroutine[Any, Any, None]], + mode: StrictConnectionMode, +) -> None: + """Test service create_temporary_strict_connection_url.""" + mock_config_entry = MockConfigEntry(domain=DOMAIN) + mock_config_entry.add_to_hass(hass) + assert await async_setup_component(hass, DOMAIN, {"cloud": {}}) + await hass.async_block_till_done() + + await set_cloud_prefs( + { + PREF_STRICT_CONNECTION: mode, + } + ) + + # No cloud url set + with pytest.raises(ServiceValidationError, match="No cloud URL available"): + await hass.services.async_call( + cloud.DOMAIN, + "create_temporary_strict_connection_url", + blocking=True, + return_response=True, + ) + + # Patch cloud url + url = "https://example.com" + with patch( + "homeassistant.helpers.network._get_cloud_url", + return_value=url, + ): + response = await hass.services.async_call( + cloud.DOMAIN, + "create_temporary_strict_connection_url", + blocking=True, + return_response=True, + ) + assert isinstance(response, dict) + direct_url_prefix = f"{url}/auth/strict_connection/temp_token?authSig=" + assert 
response.pop("direct_url").startswith(direct_url_prefix) + assert response.pop("url").startswith( + f"https://login.home-assistant.io?u={quote_plus(direct_url_prefix)}" + ) + assert response == {} # No more keys in response diff --git a/tests/components/cloud/test_prefs.py b/tests/components/cloud/test_prefs.py index 9b0fa4c01d7..1ed2e1d524f 100644 --- a/tests/components/cloud/test_prefs.py +++ b/tests/components/cloud/test_prefs.py @@ -6,8 +6,13 @@ from unittest.mock import ANY, MagicMock, patch import pytest from homeassistant.auth.const import GROUP_ID_ADMIN -from homeassistant.components.cloud.const import DOMAIN, PREF_TTS_DEFAULT_VOICE +from homeassistant.components.cloud.const import ( + DOMAIN, + PREF_STRICT_CONNECTION, + PREF_TTS_DEFAULT_VOICE, +) from homeassistant.components.cloud.prefs import STORAGE_KEY, CloudPreferences +from homeassistant.components.http.const import StrictConnectionMode from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component @@ -174,3 +179,21 @@ async def test_tts_default_voice_legacy_gender( await hass.async_block_till_done() assert cloud.client.prefs.tts_default_voice == (expected_language, voice) + + +@pytest.mark.parametrize("mode", list(StrictConnectionMode)) +async def test_strict_connection_convertion( + hass: HomeAssistant, + cloud: MagicMock, + hass_storage: dict[str, Any], + mode: StrictConnectionMode, +) -> None: + """Test strict connection string value will be converted to the enum.""" + hass_storage[STORAGE_KEY] = { + "version": 1, + "data": {PREF_STRICT_CONNECTION: mode.value}, + } + assert await async_setup_component(hass, DOMAIN, {DOMAIN: {}}) + await hass.async_block_till_done() + + assert cloud.client.prefs.strict_connection is mode diff --git a/tests/components/cloud/test_strict_connection.py b/tests/components/cloud/test_strict_connection.py new file mode 100644 index 00000000000..844096ab0eb --- /dev/null +++ b/tests/components/cloud/test_strict_connection.py @@ -0,0 
+1,294 @@ +"""Test strict connection mode for cloud.""" + +from collections.abc import Awaitable, Callable, Coroutine, Generator +from contextlib import contextmanager +from datetime import timedelta +from http import HTTPStatus +from typing import Any +from unittest.mock import MagicMock, Mock, patch + +from aiohttp import ServerDisconnectedError, web +from aiohttp.test_utils import TestClient +from aiohttp_session import get_session +import pytest +from yarl import URL + +from homeassistant.auth.models import RefreshToken +from homeassistant.auth.session import SESSION_ID, TEMP_TIMEOUT +from homeassistant.components.cloud.const import PREF_STRICT_CONNECTION +from homeassistant.components.http import KEY_HASS +from homeassistant.components.http.auth import ( + STRICT_CONNECTION_STATIC_PAGE, + async_setup_auth, + async_sign_path, +) +from homeassistant.components.http.const import KEY_AUTHENTICATED, StrictConnectionMode +from homeassistant.components.http.session import COOKIE_NAME, PREFIXED_COOKIE_NAME +from homeassistant.core import HomeAssistant +from homeassistant.helpers.network import is_cloud_connection +from homeassistant.setup import async_setup_component +from homeassistant.util.dt import utcnow + +from tests.common import async_fire_time_changed +from tests.typing import ClientSessionGenerator + + +@pytest.fixture +async def refresh_token(hass: HomeAssistant, hass_access_token: str) -> RefreshToken: + """Return a refresh token.""" + refresh_token = hass.auth.async_validate_access_token(hass_access_token) + assert refresh_token + session = hass.auth.session + assert session._strict_connection_sessions == {} + assert session._temp_sessions == {} + return refresh_token + + +@contextmanager +def simulate_cloud_request() -> Generator[None, None, None]: + """Simulate a cloud request.""" + with patch( + "hass_nabucasa.remote.is_cloud_request", Mock(get=Mock(return_value=True)) + ): + yield + + +@pytest.fixture +def app_strict_connection( + hass: HomeAssistant, 
refresh_token: RefreshToken +) -> web.Application: + """Fixture to set up a web.Application.""" + + async def handler(request): + """Return if request was authenticated.""" + return web.json_response(data={"authenticated": request[KEY_AUTHENTICATED]}) + + app = web.Application() + app[KEY_HASS] = hass + app.router.add_get("/", handler) + + async def set_cookie(request: web.Request) -> web.Response: + hass = request.app[KEY_HASS] + # Clear all sessions + hass.auth.session._temp_sessions.clear() + hass.auth.session._strict_connection_sessions.clear() + + if request.query["token"] == "refresh": + await hass.auth.session.async_create_session(request, refresh_token) + else: + await hass.auth.session.async_create_temp_unauthorized_session(request) + session = await get_session(request) + return web.Response(text=session[SESSION_ID]) + + app.router.add_get("/test/cookie", set_cookie) + return app + + +@pytest.fixture(name="client") +async def set_up_fixture( + hass: HomeAssistant, + aiohttp_client: ClientSessionGenerator, + app_strict_connection: web.Application, + cloud: MagicMock, + socket_enabled: None, +) -> TestClient: + """Set up the fixture.""" + + await async_setup_auth(hass, app_strict_connection, StrictConnectionMode.DISABLED) + assert await async_setup_component(hass, "cloud", {"cloud": {}}) + await hass.async_block_till_done() + return await aiohttp_client(app_strict_connection) + + +@pytest.mark.parametrize( + "strict_connection_mode", [e.value for e in StrictConnectionMode] +) +async def test_strict_connection_cloud_authenticated_requests( + hass: HomeAssistant, + client: TestClient, + hass_access_token: str, + set_cloud_prefs: Callable[[dict[str, Any]], Coroutine[Any, Any, None]], + refresh_token: RefreshToken, + strict_connection_mode: StrictConnectionMode, +) -> None: + """Test authenticated requests with strict connection.""" + assert hass.auth.session._strict_connection_sessions == {} + + signed_path = async_sign_path( + hass, "/", timedelta(seconds=5), 
refresh_token_id=refresh_token.id + ) + + await set_cloud_prefs( + { + PREF_STRICT_CONNECTION: strict_connection_mode, + } + ) + + with simulate_cloud_request(): + assert is_cloud_connection(hass) + req = await client.get( + "/", headers={"Authorization": f"Bearer {hass_access_token}"} + ) + assert req.status == HTTPStatus.OK + assert await req.json() == {"authenticated": True} + req = await client.get(signed_path) + assert req.status == HTTPStatus.OK + assert await req.json() == {"authenticated": True} + + +async def _test_strict_connection_cloud_enabled_external_unauthenticated_requests( + hass: HomeAssistant, + client: TestClient, + perform_unauthenticated_request: Callable[ + [HomeAssistant, TestClient], Awaitable[None] + ], + _: RefreshToken, +) -> None: + """Test external unauthenticated requests with strict connection cloud enabled.""" + with simulate_cloud_request(): + assert is_cloud_connection(hass) + await perform_unauthenticated_request(hass, client) + + +async def _test_strict_connection_cloud_enabled_external_unauthenticated_requests_refresh_token( + hass: HomeAssistant, + client: TestClient, + perform_unauthenticated_request: Callable[ + [HomeAssistant, TestClient], Awaitable[None] + ], + refresh_token: RefreshToken, +) -> None: + """Test external unauthenticated requests with strict connection cloud enabled and refresh token cookie.""" + session = hass.auth.session + + # set strict connection cookie with refresh token + session_id = await _modify_cookie_for_cloud(client, "refresh") + assert session._strict_connection_sessions == {session_id: refresh_token.id} + with simulate_cloud_request(): + assert is_cloud_connection(hass) + req = await client.get("/") + assert req.status == HTTPStatus.OK + assert await req.json() == {"authenticated": False} + + # Invalidate refresh token, which should also invalidate session + hass.auth.async_remove_refresh_token(refresh_token) + assert session._strict_connection_sessions == {} + + await 
perform_unauthenticated_request(hass, client) + + +async def _test_strict_connection_cloud_enabled_external_unauthenticated_requests_temp_session( + hass: HomeAssistant, + client: TestClient, + perform_unauthenticated_request: Callable[ + [HomeAssistant, TestClient], Awaitable[None] + ], + _: RefreshToken, +) -> None: + """Test external unauthenticated requests with strict connection cloud enabled and temp cookie.""" + session = hass.auth.session + + # set strict connection cookie with temp session + assert session._temp_sessions == {} + session_id = await _modify_cookie_for_cloud(client, "temp") + assert session_id in session._temp_sessions + with simulate_cloud_request(): + assert is_cloud_connection(hass) + resp = await client.get("/") + assert resp.status == HTTPStatus.OK + assert await resp.json() == {"authenticated": False} + + async_fire_time_changed(hass, utcnow() + TEMP_TIMEOUT + timedelta(minutes=1)) + await hass.async_block_till_done(wait_background_tasks=True) + assert session._temp_sessions == {} + + await perform_unauthenticated_request(hass, client) + + +async def _drop_connection_unauthorized_request( + _: HomeAssistant, client: TestClient +) -> None: + with pytest.raises(ServerDisconnectedError): + # unauthorized requests should raise ServerDisconnectedError + await client.get("/") + + +async def _static_page_unauthorized_request( + hass: HomeAssistant, client: TestClient +) -> None: + req = await client.get("/") + assert req.status == HTTPStatus.IM_A_TEAPOT + + def read_static_page() -> str: + with open(STRICT_CONNECTION_STATIC_PAGE, encoding="utf-8") as file: + return file.read() + + assert await req.text() == await hass.async_add_executor_job(read_static_page) + + +@pytest.mark.parametrize( + "test_func", + [ + _test_strict_connection_cloud_enabled_external_unauthenticated_requests, + _test_strict_connection_cloud_enabled_external_unauthenticated_requests_refresh_token, + 
_test_strict_connection_cloud_enabled_external_unauthenticated_requests_temp_session, + ], + ids=[ + "no cookie", + "refresh token cookie", + "temp session cookie", + ], +) +@pytest.mark.parametrize( + ("strict_connection_mode", "request_func"), + [ + (StrictConnectionMode.DROP_CONNECTION, _drop_connection_unauthorized_request), + (StrictConnectionMode.STATIC_PAGE, _static_page_unauthorized_request), + ], + ids=["drop connection", "static page"], +) +async def test_strict_connection_cloud_external_unauthenticated_requests( + hass: HomeAssistant, + client: TestClient, + refresh_token: RefreshToken, + set_cloud_prefs: Callable[[dict[str, Any]], Coroutine[Any, Any, None]], + test_func: Callable[ + [ + HomeAssistant, + TestClient, + Callable[[HomeAssistant, TestClient], Awaitable[None]], + RefreshToken, + ], + Awaitable[None], + ], + strict_connection_mode: StrictConnectionMode, + request_func: Callable[[HomeAssistant, TestClient], Awaitable[None]], +) -> None: + """Test external unauthenticated requests with strict connection cloud.""" + await set_cloud_prefs( + { + PREF_STRICT_CONNECTION: strict_connection_mode, + } + ) + + await test_func( + hass, + client, + request_func, + refresh_token, + ) + + +async def _modify_cookie_for_cloud(client: TestClient, token_type: str) -> str: + """Modify cookie for cloud.""" + # Cloud cookie has set secure=true and will not set on unsecure connection + # As we test with unsecure connection, we need to set it manually + # We get the session via http and modify the cookie name to the secure one + session_id = await (await client.get(f"/test/cookie?token={token_type}")).text() + cookie_jar = client.session.cookie_jar + localhost = URL("http://127.0.0.1") + cookie = cookie_jar.filter_cookies(localhost)[COOKIE_NAME].value + assert cookie + cookie_jar.clear() + cookie_jar.update_cookies({PREFIXED_COOKIE_NAME: cookie}, localhost) + return session_id diff --git a/tests/helpers/test_network.py b/tests/helpers/test_network.py index 
caffebf094e..3c9594bca38 100644 --- a/tests/helpers/test_network.py +++ b/tests/helpers/test_network.py @@ -362,6 +362,18 @@ async def test_get_url_external(hass: HomeAssistant) -> None: with pytest.raises(NoURLAvailableError): _get_external_url(hass, require_current_request=True, require_ssl=True) + with pytest.raises(NoURLAvailableError): + _get_external_url(hass, require_cloud=True) + + with patch( + "homeassistant.components.cloud.async_remote_ui_url", + return_value="https://example.nabu.casa", + ): + hass.config.components.add("cloud") + assert ( + _get_external_url(hass, require_cloud=True) == "https://example.nabu.casa" + ) + async def test_get_cloud_url(hass: HomeAssistant) -> None: """Test getting an instance URL when the user has set an external URL.""" From 6f2a2ba46e5220fb98df88b64a8ca7d445327b5e Mon Sep 17 00:00:00 2001 From: Bouwe Westerdijk <11290930+bouwew@users.noreply.github.com> Date: Wed, 24 Apr 2024 10:00:18 +0200 Subject: [PATCH 350/426] Bump plugwise to v0.37.3 (#116081) --- homeassistant/components/plugwise/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/plugwise/manifest.json b/homeassistant/components/plugwise/manifest.json index 1eb1cf6e8b6..ada7d2d2533 100644 --- a/homeassistant/components/plugwise/manifest.json +++ b/homeassistant/components/plugwise/manifest.json @@ -7,6 +7,6 @@ "integration_type": "hub", "iot_class": "local_polling", "loggers": ["plugwise"], - "requirements": ["plugwise==0.37.2"], + "requirements": ["plugwise==0.37.3"], "zeroconf": ["_plugwise._tcp.local."] } diff --git a/requirements_all.txt b/requirements_all.txt index 42f716f58e3..9469fa4f8f1 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1548,7 +1548,7 @@ plexauth==0.0.6 plexwebsocket==0.0.14 # homeassistant.components.plugwise -plugwise==0.37.2 +plugwise==0.37.3 # homeassistant.components.plum_lightpad plumlightpad==0.0.11 
diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 11533275029..7159b90ed09 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1225,7 +1225,7 @@ plexauth==0.0.6 plexwebsocket==0.0.14 # homeassistant.components.plugwise -plugwise==0.37.2 +plugwise==0.37.3 # homeassistant.components.plum_lightpad plumlightpad==0.0.11 From 79b488981239b5f673ba12bc5d808c9c4c2973b9 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Wed, 24 Apr 2024 10:05:52 +0200 Subject: [PATCH 351/426] Always do thread safety checks when writing state for custom components (#116044) --- homeassistant/helpers/entity.py | 25 +++++++++++++++++++++---- tests/helpers/test_entity.py | 26 ++++++++++++++++++++++++++ 2 files changed, 47 insertions(+), 4 deletions(-) diff --git a/homeassistant/helpers/entity.py b/homeassistant/helpers/entity.py index 40b145727a1..cf21882eec8 100644 --- a/homeassistant/helpers/entity.py +++ b/homeassistant/helpers/entity.py @@ -521,6 +521,7 @@ class Entity( # While not purely typed, it makes typehinting more useful for us # and removes the need for constant None checks or asserts. 
_state_info: StateInfo = None # type: ignore[assignment] + _is_custom_component: bool = False __capabilities_updated_at: deque[float] __capabilities_updated_at_reported: bool = False @@ -967,8 +968,8 @@ class Entity( self._async_write_ha_state() @callback - def async_write_ha_state(self) -> None: - """Write the state to the state machine.""" + def _async_verify_state_writable(self) -> None: + """Verify the entity is in a writable state.""" if self.hass is None: raise RuntimeError(f"Attribute hass is None for {self}") if self.hass.config.debug: @@ -995,6 +996,18 @@ class Entity( f"No entity id specified for entity {self.name}" ) + @callback + def _async_write_ha_state_from_call_soon_threadsafe(self) -> None: + """Write the state to the state machine from the event loop thread.""" + self._async_verify_state_writable() + self._async_write_ha_state() + + @callback + def async_write_ha_state(self) -> None: + """Write the state to the state machine.""" + self._async_verify_state_writable() + if self._is_custom_component or self.hass.config.debug: + self.hass.verify_event_loop_thread("async_write_ha_state") self._async_write_ha_state() def _stringify_state(self, available: bool) -> str: @@ -1221,7 +1234,9 @@ class Entity( f"Entity {self.entity_id} schedule update ha state", ) else: - self.hass.loop.call_soon_threadsafe(self.async_write_ha_state) + self.hass.loop.call_soon_threadsafe( + self._async_write_ha_state_from_call_soon_threadsafe + ) @callback def async_schedule_update_ha_state(self, force_refresh: bool = False) -> None: @@ -1426,10 +1441,12 @@ class Entity( Not to be extended by integrations. 
""" + is_custom_component = "custom_components" in type(self).__module__ entity_info: EntityInfo = { "domain": self.platform.platform_name, - "custom_component": "custom_components" in type(self).__module__, + "custom_component": is_custom_component, } + self._is_custom_component = is_custom_component if self.platform.config_entry: entity_info["config_entry"] = self.platform.config_entry.entry_id diff --git a/tests/helpers/test_entity.py b/tests/helpers/test_entity.py index 349c065f9b5..a80674e0f76 100644 --- a/tests/helpers/test_entity.py +++ b/tests/helpers/test_entity.py @@ -2615,3 +2615,29 @@ async def test_async_write_ha_state_thread_safety(hass: HomeAssistant) -> None: ): await hass.async_add_executor_job(ent2.async_write_ha_state) assert not hass.states.get(ent2.entity_id) + + +async def test_async_write_ha_state_thread_safety_custom_component( + hass: HomeAssistant, +) -> None: + """Test async_write_ha_state thread safe for custom components.""" + + ent = entity.Entity() + ent._is_custom_component = True + ent.entity_id = "test.any" + ent.hass = hass + ent.platform = MockEntityPlatform(hass, domain="test") + ent.async_write_ha_state() + assert hass.states.get(ent.entity_id) + + ent2 = entity.Entity() + ent2._is_custom_component = True + ent2.entity_id = "test.any2" + ent2.hass = hass + ent2.platform = MockEntityPlatform(hass, domain="test") + with pytest.raises( + RuntimeError, + match="Detected code that calls async_write_ha_state from a thread.", + ): + await hass.async_add_executor_job(ent2.async_write_ha_state) + assert not hass.states.get(ent2.entity_id) From c4340f6f5f76021752db41537ad52752feef4fba Mon Sep 17 00:00:00 2001 From: Gage Benne Date: Wed, 24 Apr 2024 04:16:35 -0400 Subject: [PATCH 352/426] Ecobee preset mode icon translations (#116072) --- homeassistant/components/ecobee/climate.py | 72 ++++++++++++---------- tests/components/ecobee/test_climate.py | 2 +- 2 files changed, 42 insertions(+), 32 deletions(-) diff --git 
a/homeassistant/components/ecobee/climate.py b/homeassistant/components/ecobee/climate.py index e341f4176ad..11675c0bf61 100644 --- a/homeassistant/components/ecobee/climate.py +++ b/homeassistant/components/ecobee/climate.py @@ -12,7 +12,10 @@ from homeassistant.components.climate import ( ATTR_TARGET_TEMP_LOW, FAN_AUTO, FAN_ON, + PRESET_AWAY, + PRESET_HOME, PRESET_NONE, + PRESET_SLEEP, ClimateEntity, ClimateEntityFeature, HVACAction, @@ -60,9 +63,6 @@ PRESET_TEMPERATURE = "temp" PRESET_VACATION = "vacation" PRESET_HOLD_NEXT_TRANSITION = "next_transition" PRESET_HOLD_INDEFINITE = "indefinite" -AWAY_MODE = "awayMode" -PRESET_HOME = "home" -PRESET_SLEEP = "sleep" HAS_HEAT_PUMP = "hasHeatPump" DEFAULT_MIN_HUMIDITY = 15 @@ -103,6 +103,13 @@ ECOBEE_HVAC_ACTION_TO_HASS = { "compWaterHeater": None, } +ECOBEE_TO_HASS_PRESET = { + "Away": PRESET_AWAY, + "Home": PRESET_HOME, + "Sleep": PRESET_SLEEP, +} +HASS_TO_ECOBEE_PRESET = {v: k for k, v in ECOBEE_TO_HASS_PRESET.items()} + PRESET_TO_ECOBEE_HOLD = { PRESET_HOLD_NEXT_TRANSITION: "nextTransition", PRESET_HOLD_INDEFINITE: "indefinite", @@ -348,10 +355,6 @@ class Thermostat(ClimateEntity): self._attr_hvac_modes.insert(0, HVACMode.HEAT_COOL) self._attr_hvac_modes.append(HVACMode.OFF) - self._preset_modes = { - comfort["climateRef"]: comfort["name"] - for comfort in self.thermostat["program"]["climates"] - } self.update_without_throttle = False async def async_update(self) -> None: @@ -474,7 +477,7 @@ class Thermostat(ClimateEntity): return self.thermostat["runtime"]["desiredFanMode"] @property - def preset_mode(self): + def preset_mode(self) -> str | None: """Return current preset mode.""" events = self.thermostat["events"] for event in events: @@ -487,8 +490,8 @@ class Thermostat(ClimateEntity): ): return PRESET_AWAY_INDEFINITELY - if event["holdClimateRef"] in self._preset_modes: - return self._preset_modes[event["holdClimateRef"]] + if name := self.comfort_settings.get(event["holdClimateRef"]): + return 
ECOBEE_TO_HASS_PRESET.get(name, name) # Any hold not based on a climate is a temp hold return PRESET_TEMPERATURE @@ -499,7 +502,12 @@ class Thermostat(ClimateEntity): self.vacation = event["name"] return PRESET_VACATION - return self._preset_modes[self.thermostat["program"]["currentClimateRef"]] + if name := self.comfort_settings.get( + self.thermostat["program"]["currentClimateRef"] + ): + return ECOBEE_TO_HASS_PRESET.get(name, name) + + return None @property def hvac_mode(self): @@ -545,14 +553,14 @@ class Thermostat(ClimateEntity): return HVACAction.IDLE @property - def extra_state_attributes(self): + def extra_state_attributes(self) -> dict[str, Any] | None: """Return device specific state attributes.""" status = self.thermostat["equipmentStatus"] return { "fan": self.fan, - "climate_mode": self._preset_modes[ + "climate_mode": self.comfort_settings.get( self.thermostat["program"]["currentClimateRef"] - ], + ), "equipment_running": status, "fan_min_on_time": self.settings["fanMinOnTime"], } @@ -577,6 +585,8 @@ class Thermostat(ClimateEntity): def set_preset_mode(self, preset_mode: str) -> None: """Activate a preset.""" + preset_mode = HASS_TO_ECOBEE_PRESET.get(preset_mode, preset_mode) + if preset_mode == self.preset_mode: return @@ -605,25 +615,14 @@ class Thermostat(ClimateEntity): elif preset_mode == PRESET_NONE: self.data.ecobee.resume_program(self.thermostat_index) - elif preset_mode in self.preset_modes: - climate_ref = None - - for comfort in self.thermostat["program"]["climates"]: - if comfort["name"] == preset_mode: - climate_ref = comfort["climateRef"] + else: + for climate_ref, name in self.comfort_settings.items(): + if name == preset_mode: + preset_mode = climate_ref break - - if climate_ref is not None: - self.data.ecobee.set_climate_hold( - self.thermostat_index, - climate_ref, - self.hold_preference(), - self.hold_hours(), - ) else: _LOGGER.warning("Received unknown preset mode: %s", preset_mode) - else: self.data.ecobee.set_climate_hold( 
self.thermostat_index, preset_mode, @@ -632,11 +631,22 @@ class Thermostat(ClimateEntity): ) @property - def preset_modes(self): + def preset_modes(self) -> list[str] | None: """Return available preset modes.""" # Return presets provided by the ecobee API, and an indefinite away # preset which we handle separately in set_preset_mode(). - return [*self._preset_modes.values(), PRESET_AWAY_INDEFINITELY] + return [ + ECOBEE_TO_HASS_PRESET.get(name, name) + for name in self.comfort_settings.values() + ] + [PRESET_AWAY_INDEFINITELY] + + @property + def comfort_settings(self) -> dict[str, str]: + """Return ecobee API comfort settings.""" + return { + comfort["climateRef"]: comfort["name"] + for comfort in self.thermostat["program"]["climates"] + } def set_auto_temp_hold(self, heat_temp, cool_temp): """Set temperature hold in auto mode.""" diff --git a/tests/components/ecobee/test_climate.py b/tests/components/ecobee/test_climate.py index 7ea9950e2d4..46ca77025cc 100644 --- a/tests/components/ecobee/test_climate.py +++ b/tests/components/ecobee/test_climate.py @@ -441,7 +441,7 @@ async def test_preset_indefinite_away(ecobee_fixture, thermostat) -> None: """Test indefinite away showing correctly, and not as temporary away.""" ecobee_fixture["program"]["currentClimateRef"] = "away" ecobee_fixture["events"][0]["holdClimateRef"] = "away" - assert thermostat.preset_mode == "Away" + assert thermostat.preset_mode == "away" ecobee_fixture["events"][0]["endDate"] = "2999-01-01" assert thermostat.preset_mode == PRESET_AWAY_INDEFINITELY From 102b34123c2cf523126e4b92bc9d24536a831f76 Mon Sep 17 00:00:00 2001 From: TheJulianJES Date: Wed, 24 Apr 2024 10:17:01 +0200 Subject: [PATCH 353/426] Bump zha-quirks to 0.0.115 (#116071) --- homeassistant/components/zha/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/zha/manifest.json b/homeassistant/components/zha/manifest.json 
index 7741673557d..9b7788ff129 100644 --- a/homeassistant/components/zha/manifest.json +++ b/homeassistant/components/zha/manifest.json @@ -24,7 +24,7 @@ "bellows==0.38.1", "pyserial==3.5", "pyserial-asyncio==0.6", - "zha-quirks==0.0.114", + "zha-quirks==0.0.115", "zigpy-deconz==0.23.1", "zigpy==0.63.5", "zigpy-xbee==0.20.1", diff --git a/requirements_all.txt b/requirements_all.txt index 9469fa4f8f1..b8b11172a91 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2938,7 +2938,7 @@ zeroconf==0.132.2 zeversolar==0.3.1 # homeassistant.components.zha -zha-quirks==0.0.114 +zha-quirks==0.0.115 # homeassistant.components.zhong_hong zhong-hong-hvac==1.0.12 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 7159b90ed09..f5df78f7bcf 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2285,7 +2285,7 @@ zeroconf==0.132.2 zeversolar==0.3.1 # homeassistant.components.zha -zha-quirks==0.0.114 +zha-quirks==0.0.115 # homeassistant.components.zha zigpy-deconz==0.23.1 From 07d68eacfa77053c5be1211cec939792701cd0b5 Mon Sep 17 00:00:00 2001 From: Simon Roberts Date: Wed, 24 Apr 2024 18:24:49 +1000 Subject: [PATCH 354/426] Fix iotawatt warnings about "Detected new cycle for sensor.{sensorname}_wh_last" (#115909) * Bump ha-iotawattpy to 0.1.2 * Remove energy energy-over-period sensors: they cause issue for HA --------- Co-authored-by: Stefan Agner --- homeassistant/components/iotawatt/coordinator.py | 1 + homeassistant/components/iotawatt/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 4 files changed, 4 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/iotawatt/coordinator.py b/homeassistant/components/iotawatt/coordinator.py index e741c7a5a27..4f9ac1f94b7 100644 --- a/homeassistant/components/iotawatt/coordinator.py +++ b/homeassistant/components/iotawatt/coordinator.py @@ -63,6 +63,7 @@ class IotawattUpdater(DataUpdateCoordinator): self.entry.data.get(CONF_USERNAME), 
self.entry.data.get(CONF_PASSWORD), integratedInterval="d", + includeNonTotalSensors=False, ) try: is_authenticated = await api.connect() diff --git a/homeassistant/components/iotawatt/manifest.json b/homeassistant/components/iotawatt/manifest.json index 5beaa1e318c..5fd178389d9 100644 --- a/homeassistant/components/iotawatt/manifest.json +++ b/homeassistant/components/iotawatt/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/iotawatt", "iot_class": "local_polling", "loggers": ["iotawattpy"], - "requirements": ["ha-iotawattpy==0.1.1"] + "requirements": ["ha-iotawattpy==0.1.2"] } diff --git a/requirements_all.txt b/requirements_all.txt index b8b11172a91..5a7ce85328a 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1026,7 +1026,7 @@ ha-av==10.1.1 ha-ffmpeg==3.2.0 # homeassistant.components.iotawatt -ha-iotawattpy==0.1.1 +ha-iotawattpy==0.1.2 # homeassistant.components.philips_js ha-philipsjs==3.1.1 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index f5df78f7bcf..4831f441286 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -840,7 +840,7 @@ ha-av==10.1.1 ha-ffmpeg==3.2.0 # homeassistant.components.iotawatt -ha-iotawattpy==0.1.1 +ha-iotawattpy==0.1.2 # homeassistant.components.philips_js ha-philipsjs==3.1.1 From bcc2dd99b28b8094b3e123dd5bc56d949ee6fb2a Mon Sep 17 00:00:00 2001 From: Robert Resch Date: Wed, 24 Apr 2024 10:29:59 +0200 Subject: [PATCH 355/426] Rename strict connection static page to guard page (#116085) --- homeassistant/components/http/auth.py | 40 +++++++++---------- homeassistant/components/http/const.py | 2 +- ...html => strict_connection_guard_page.html} | 0 tests/components/cloud/test_init.py | 2 +- .../cloud/test_strict_connection.py | 12 +++--- tests/components/http/test_auth.py | 12 +++--- tests/components/http/test_init.py | 2 +- 7 files changed, 35 insertions(+), 35 deletions(-) rename 
homeassistant/components/http/{strict_connection_static_page.html => strict_connection_guard_page.html} (100%) diff --git a/homeassistant/components/http/auth.py b/homeassistant/components/http/auth.py index 889c9e76367..58dae21d2a6 100644 --- a/homeassistant/components/http/auth.py +++ b/homeassistant/components/http/auth.py @@ -52,9 +52,9 @@ STORAGE_VERSION = 1 STORAGE_KEY = "http.auth" CONTENT_USER_NAME = "Home Assistant Content" STRICT_CONNECTION_EXCLUDED_PATH = "/api/webhook/" -STRICT_CONNECTION_STATIC_PAGE_NAME = "strict_connection_static_page.html" -STRICT_CONNECTION_STATIC_PAGE = os.path.join( - os.path.dirname(__file__), STRICT_CONNECTION_STATIC_PAGE_NAME +STRICT_CONNECTION_GUARD_PAGE_NAME = "strict_connection_guard_page.html" +STRICT_CONNECTION_GUARD_PAGE = os.path.join( + os.path.dirname(__file__), STRICT_CONNECTION_GUARD_PAGE_NAME ) @@ -160,9 +160,9 @@ async def async_setup_auth( hass.data[STORAGE_KEY] = refresh_token.id - if strict_connection_mode_non_cloud is StrictConnectionMode.STATIC_PAGE: - # Load the static page content on setup - await _read_strict_connection_static_page(hass) + if strict_connection_mode_non_cloud is StrictConnectionMode.GUARD_PAGE: + # Load the guard page content on setup + await _read_strict_connection_guard_page(hass) @callback def async_validate_auth_header(request: Request) -> bool: @@ -276,7 +276,7 @@ async def async_setup_auth( resp := await strict_connection_func( hass, request, - strict_connection_mode is StrictConnectionMode.STATIC_PAGE, + strict_connection_mode is StrictConnectionMode.GUARD_PAGE, ) ) is not None @@ -301,14 +301,14 @@ async def async_setup_auth( async def _async_perform_strict_connection_action_on_non_local( hass: HomeAssistant, request: Request, - static_page: bool, + guard_page: bool, ) -> StreamResponse | None: """Perform strict connection mode action if the request is not local. The function does the following: - Try to get the IP address of the request. 
If it fails, assume it's not local - If the request is local, return None (allow the request to continue) - - If static_page is True, return a response with the content + - If guard_page is True, return a response with the content - Otherwise close the connection and raise an exception """ try: @@ -320,25 +320,25 @@ async def _async_perform_strict_connection_action_on_non_local( if ip_address_ and is_local(ip_address_): return None - return await _async_perform_strict_connection_action(hass, request, static_page) + return await _async_perform_strict_connection_action(hass, request, guard_page) async def _async_perform_strict_connection_action( hass: HomeAssistant, request: Request, - static_page: bool, + guard_page: bool, ) -> StreamResponse | None: """Perform strict connection mode action. The function does the following: - - If static_page is True, return a response with the content + - If guard_page is True, return a response with the content - Otherwise close the connection and raise an exception """ _LOGGER.debug("Perform strict connection action for %s", request.remote) - if static_page: + if guard_page: return Response( - text=await _read_strict_connection_static_page(hass), + text=await _read_strict_connection_guard_page(hass), content_type="text/html", status=HTTPStatus.IM_A_TEAPOT, ) @@ -351,12 +351,12 @@ async def _async_perform_strict_connection_action( raise HTTPBadRequest -@singleton.singleton(f"{DOMAIN}_{STRICT_CONNECTION_STATIC_PAGE_NAME}") -async def _read_strict_connection_static_page(hass: HomeAssistant) -> str: - """Read the strict connection static page from disk via executor.""" +@singleton.singleton(f"{DOMAIN}_{STRICT_CONNECTION_GUARD_PAGE_NAME}") +async def _read_strict_connection_guard_page(hass: HomeAssistant) -> str: + """Read the strict connection guard page from disk via executor.""" - def read_static_page() -> str: - with open(STRICT_CONNECTION_STATIC_PAGE, encoding="utf-8") as file: + def read_guard_page() -> str: + with 
open(STRICT_CONNECTION_GUARD_PAGE, encoding="utf-8") as file: return file.read() - return await hass.async_add_executor_job(read_static_page) + return await hass.async_add_executor_job(read_guard_page) diff --git a/homeassistant/components/http/const.py b/homeassistant/components/http/const.py index 662596288c7..4a15e310b11 100644 --- a/homeassistant/components/http/const.py +++ b/homeassistant/components/http/const.py @@ -15,5 +15,5 @@ class StrictConnectionMode(StrEnum): """Enum for strict connection mode.""" DISABLED = "disabled" - STATIC_PAGE = "static_page" + GUARD_PAGE = "guard_page" DROP_CONNECTION = "drop_connection" diff --git a/homeassistant/components/http/strict_connection_static_page.html b/homeassistant/components/http/strict_connection_guard_page.html similarity index 100% rename from homeassistant/components/http/strict_connection_static_page.html rename to homeassistant/components/http/strict_connection_guard_page.html diff --git a/tests/components/cloud/test_init.py b/tests/components/cloud/test_init.py index 98f9a54c04b..bc4526975da 100644 --- a/tests/components/cloud/test_init.py +++ b/tests/components/cloud/test_init.py @@ -327,7 +327,7 @@ async def test_service_create_temporary_strict_connection_url_strict_connection_ ("mode"), [ StrictConnectionMode.DROP_CONNECTION, - StrictConnectionMode.STATIC_PAGE, + StrictConnectionMode.GUARD_PAGE, ], ) async def test_service_create_temporary_strict_connection( diff --git a/tests/components/cloud/test_strict_connection.py b/tests/components/cloud/test_strict_connection.py index 844096ab0eb..f275bc4d2dd 100644 --- a/tests/components/cloud/test_strict_connection.py +++ b/tests/components/cloud/test_strict_connection.py @@ -18,7 +18,7 @@ from homeassistant.auth.session import SESSION_ID, TEMP_TIMEOUT from homeassistant.components.cloud.const import PREF_STRICT_CONNECTION from homeassistant.components.http import KEY_HASS from homeassistant.components.http.auth import ( - STRICT_CONNECTION_STATIC_PAGE, + 
STRICT_CONNECTION_GUARD_PAGE, async_setup_auth, async_sign_path, ) @@ -213,17 +213,17 @@ async def _drop_connection_unauthorized_request( await client.get("/") -async def _static_page_unauthorized_request( +async def _guard_page_unauthorized_request( hass: HomeAssistant, client: TestClient ) -> None: req = await client.get("/") assert req.status == HTTPStatus.IM_A_TEAPOT - def read_static_page() -> str: - with open(STRICT_CONNECTION_STATIC_PAGE, encoding="utf-8") as file: + def read_guard_page() -> str: + with open(STRICT_CONNECTION_GUARD_PAGE, encoding="utf-8") as file: return file.read() - assert await req.text() == await hass.async_add_executor_job(read_static_page) + assert await req.text() == await hass.async_add_executor_job(read_guard_page) @pytest.mark.parametrize( @@ -243,7 +243,7 @@ async def _static_page_unauthorized_request( ("strict_connection_mode", "request_func"), [ (StrictConnectionMode.DROP_CONNECTION, _drop_connection_unauthorized_request), - (StrictConnectionMode.STATIC_PAGE, _static_page_unauthorized_request), + (StrictConnectionMode.GUARD_PAGE, _guard_page_unauthorized_request), ], ids=["drop connection", "static page"], ) diff --git a/tests/components/http/test_auth.py b/tests/components/http/test_auth.py index f0f87e58173..afff8294f0c 100644 --- a/tests/components/http/test_auth.py +++ b/tests/components/http/test_auth.py @@ -30,7 +30,7 @@ from homeassistant.components.http.auth import ( DATA_SIGN_SECRET, SIGN_QUERY_PARAM, STORAGE_KEY, - STRICT_CONNECTION_STATIC_PAGE, + STRICT_CONNECTION_GUARD_PAGE, async_setup_auth, async_sign_path, async_user_not_allowed_do_auth, @@ -879,17 +879,17 @@ async def _drop_connection_unauthorized_request( await client.get("/") -async def _static_page_unauthorized_request( +async def _guard_page_unauthorized_request( hass: HomeAssistant, client: TestClient ) -> None: req = await client.get("/") assert req.status == HTTPStatus.IM_A_TEAPOT - def read_static_page() -> str: - with open(STRICT_CONNECTION_STATIC_PAGE, 
encoding="utf-8") as file: + def read_guard_page() -> str: + with open(STRICT_CONNECTION_GUARD_PAGE, encoding="utf-8") as file: return file.read() - assert await req.text() == await hass.async_add_executor_job(read_static_page) + assert await req.text() == await hass.async_add_executor_job(read_guard_page) @pytest.mark.parametrize( @@ -909,7 +909,7 @@ async def _static_page_unauthorized_request( ("strict_connection_mode", "request_func"), [ (StrictConnectionMode.DROP_CONNECTION, _drop_connection_unauthorized_request), - (StrictConnectionMode.STATIC_PAGE, _static_page_unauthorized_request), + (StrictConnectionMode.GUARD_PAGE, _guard_page_unauthorized_request), ], ids=["drop connection", "static page"], ) diff --git a/tests/components/http/test_init.py b/tests/components/http/test_init.py index b84da595ab1..b554737e7b3 100644 --- a/tests/components/http/test_init.py +++ b/tests/components/http/test_init.py @@ -548,7 +548,7 @@ async def test_service_create_temporary_strict_connection_url_strict_connection_ ("mode"), [ StrictConnectionMode.DROP_CONNECTION, - StrictConnectionMode.STATIC_PAGE, + StrictConnectionMode.GUARD_PAGE, ], ) async def test_service_create_temporary_strict_connection( From 5bded2a52dd0b816759248ecbac8a0e5535351d8 Mon Sep 17 00:00:00 2001 From: Marc Mueller <30130371+cdce8p@users.noreply.github.com> Date: Wed, 24 Apr 2024 10:30:57 +0200 Subject: [PATCH 356/426] Fix accuweather system_health after data change (#116063) --- .../components/accuweather/system_health.py | 2 +- tests/components/accuweather/test_system_health.py | 13 +++++++++---- 2 files changed, 10 insertions(+), 5 deletions(-) diff --git a/homeassistant/components/accuweather/system_health.py b/homeassistant/components/accuweather/system_health.py index 607a557f333..f47828cb5a3 100644 --- a/homeassistant/components/accuweather/system_health.py +++ b/homeassistant/components/accuweather/system_health.py @@ -24,7 +24,7 @@ async def system_health_info(hass: HomeAssistant) -> dict[str, 
Any]: """Get info for the info page.""" remaining_requests = list(hass.data[DOMAIN].values())[ 0 - ].accuweather.requests_remaining + ].coordinator_observation.accuweather.requests_remaining return { "can_reach_server": system_health.async_check_can_reach_url(hass, ENDPOINT), diff --git a/tests/components/accuweather/test_system_health.py b/tests/components/accuweather/test_system_health.py index 6321071eaa5..562c572c830 100644 --- a/tests/components/accuweather/test_system_health.py +++ b/tests/components/accuweather/test_system_health.py @@ -5,6 +5,7 @@ from unittest.mock import Mock from aiohttp import ClientError +from homeassistant.components.accuweather import AccuWeatherData from homeassistant.components.accuweather.const import DOMAIN from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component @@ -23,8 +24,10 @@ async def test_accuweather_system_health( await hass.async_block_till_done() hass.data[DOMAIN] = {} - hass.data[DOMAIN]["0123xyz"] = {} - hass.data[DOMAIN]["0123xyz"] = Mock(accuweather=Mock(requests_remaining="42")) + hass.data[DOMAIN]["0123xyz"] = AccuWeatherData( + coordinator_observation=Mock(accuweather=Mock(requests_remaining="42")), + coordinator_daily_forecast=Mock(), + ) info = await get_system_health_info(hass, DOMAIN) @@ -48,8 +51,10 @@ async def test_accuweather_system_health_fail( await hass.async_block_till_done() hass.data[DOMAIN] = {} - hass.data[DOMAIN]["0123xyz"] = {} - hass.data[DOMAIN]["0123xyz"] = Mock(accuweather=Mock(requests_remaining="0")) + hass.data[DOMAIN]["0123xyz"] = AccuWeatherData( + coordinator_observation=Mock(accuweather=Mock(requests_remaining="0")), + coordinator_daily_forecast=Mock(), + ) info = await get_system_health_info(hass, DOMAIN) From e0b58c3f450d774f2678748bbcdeb2982f77aa0c Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Wed, 24 Apr 2024 10:41:11 +0200 Subject: [PATCH 357/426] Move thread safety check in async_register/async_remove (#116077) --- homeassistant/core.py | 44 +++++++++++++++++++++++++++++++++++++++---- tests/test_core.py | 23 ++++++++++++++++++++++ 2 files changed, 63 insertions(+), 4 deletions(-) diff --git a/homeassistant/core.py b/homeassistant/core.py index 189dc2f9d8a..a3150adc221 100644 --- a/homeassistant/core.py +++ b/homeassistant/core.py @@ -2456,7 +2456,7 @@ class ServiceRegistry: """ run_callback_threadsafe( self._hass.loop, - self.async_register, + self._async_register, domain, service, service_func, @@ -2484,6 +2484,33 @@ class ServiceRegistry: Schema is called to coerce and validate the service data. + This method must be run in the event loop. + """ + self._hass.verify_event_loop_thread("async_register") + self._async_register( + domain, service, service_func, schema, supports_response, job_type + ) + + @callback + def _async_register( + self, + domain: str, + service: str, + service_func: Callable[ + [ServiceCall], + Coroutine[Any, Any, ServiceResponse | EntityServiceResponse] + | ServiceResponse + | EntityServiceResponse + | None, + ], + schema: vol.Schema | None = None, + supports_response: SupportsResponse = SupportsResponse.NONE, + job_type: HassJobType | None = None, + ) -> None: + """Register a service. + + Schema is called to coerce and validate the service data. + This method must be run in the event loop. 
""" domain = domain.lower() @@ -2502,20 +2529,29 @@ class ServiceRegistry: else: self._services[domain] = {service: service_obj} - self._hass.bus.async_fire( + self._hass.bus.async_fire_internal( EVENT_SERVICE_REGISTERED, {ATTR_DOMAIN: domain, ATTR_SERVICE: service} ) def remove(self, domain: str, service: str) -> None: """Remove a registered service from service handler.""" run_callback_threadsafe( - self._hass.loop, self.async_remove, domain, service + self._hass.loop, self._async_remove, domain, service ).result() @callback def async_remove(self, domain: str, service: str) -> None: """Remove a registered service from service handler. + This method must be run in the event loop. + """ + self._hass.verify_event_loop_thread("async_remove") + self._async_remove(domain, service) + + @callback + def _async_remove(self, domain: str, service: str) -> None: + """Remove a registered service from service handler. + This method must be run in the event loop. """ domain = domain.lower() @@ -2530,7 +2566,7 @@ class ServiceRegistry: if not self._services[domain]: self._services.pop(domain) - self._hass.bus.async_fire( + self._hass.bus.async_fire_internal( EVENT_SERVICE_REMOVED, {ATTR_DOMAIN: domain, ATTR_SERVICE: service} ) diff --git a/tests/test_core.py b/tests/test_core.py index 6bab89bca85..a553d5bbbed 100644 --- a/tests/test_core.py +++ b/tests/test_core.py @@ -3457,3 +3457,26 @@ async def test_async_fire_thread_safety(hass: HomeAssistant) -> None: await hass.async_add_executor_job(hass.bus.async_fire, "test_event") assert len(events) == 1 + + +async def test_async_register_thread_safety(hass: HomeAssistant) -> None: + """Test async_register thread safety.""" + with pytest.raises( + RuntimeError, match="Detected code that calls async_register from a thread." 
+ ): + await hass.async_add_executor_job( + hass.services.async_register, + "test_domain", + "test_service", + lambda call: None, + ) + + +async def test_async_remove_thread_safety(hass: HomeAssistant) -> None: + """Test async_remove thread safety.""" + with pytest.raises( + RuntimeError, match="Detected code that calls async_remove from a thread." + ): + await hass.async_add_executor_job( + hass.services.async_remove, "test_domain", "test_service" + ) From 1120246194affc07e7564fee03d83b18d2d4140d Mon Sep 17 00:00:00 2001 From: rlippmann <70883373+rlippmann@users.noreply.github.com> Date: Wed, 24 Apr 2024 05:13:07 -0400 Subject: [PATCH 358/426] Deprecate relative_time() in favor of time_since() and time_until() (#111177) * add time_since/time_until. add deprecation of relative_time * fix merge conflicts * Apply suggestions from code review * Update homeassistant/helpers/template.py * Update homeassistant/helpers/template.py * Update homeassistant/helpers/template.py --------- Co-authored-by: Erik Montnemery --- .../components/homeassistant/strings.json | 4 + homeassistant/helpers/template.py | 74 ++++ homeassistant/util/dt.py | 82 +++-- tests/helpers/test_template.py | 334 +++++++++++++++++- tests/util/test_dt.py | 67 ++++ 5 files changed, 539 insertions(+), 22 deletions(-) diff --git a/homeassistant/components/homeassistant/strings.json b/homeassistant/components/homeassistant/strings.json index 09b2f17c947..5cdd47d8be4 100644 --- a/homeassistant/components/homeassistant/strings.json +++ b/homeassistant/components/homeassistant/strings.json @@ -56,6 +56,10 @@ "config_entry_reauth": { "title": "[%key:common::config_flow::title::reauth%]", "description": "Reauthentication is needed" + }, + "template_function_relative_time_deprecated": { + "title": "The {relative_time} template function is deprecated", + "description": "The {relative_time} template function is deprecated in Home Assistant. Please use the {time_since} or {time_until} template functions instead." 
} }, "system_health": { diff --git a/homeassistant/helpers/template.py b/homeassistant/helpers/template.py index 24baab96a4e..335d6842548 100644 --- a/homeassistant/helpers/template.py +++ b/homeassistant/helpers/template.py @@ -59,6 +59,7 @@ from homeassistant.const import ( UnitOfLength, ) from homeassistant.core import ( + DOMAIN as HA_DOMAIN, Context, HomeAssistant, State, @@ -2480,6 +2481,29 @@ def relative_time(hass: HomeAssistant, value: Any) -> Any: If the input are not a datetime object the input will be returned unmodified. """ + + def warn_relative_time_deprecated() -> None: + ir = issue_registry.async_get(hass) + issue_id = "template_function_relative_time_deprecated" + if ir.async_get_issue(HA_DOMAIN, issue_id): + return + issue_registry.async_create_issue( + hass, + HA_DOMAIN, + issue_id, + breaks_in_ha_version="2024.11.0", + is_fixable=False, + severity=issue_registry.IssueSeverity.WARNING, + translation_key=issue_id, + translation_placeholders={ + "relative_time": "relative_time()", + "time_since": "time_since()", + "time_until": "time_until()", + }, + ) + _LOGGER.warning("Template function 'relative_time' is deprecated") + + warn_relative_time_deprecated() if (render_info := _render_info.get()) is not None: render_info.has_time = True @@ -2492,6 +2516,50 @@ def relative_time(hass: HomeAssistant, value: Any) -> Any: return dt_util.get_age(value) +def time_since(hass: HomeAssistant, value: Any | datetime, precision: int = 1) -> Any: + """Take a datetime and return its "age" as a string. + + The age can be in seconds, minutes, hours, days, months and year. + + precision is the number of units to return, with the last unit rounded. + + If the value not a datetime object the input will be returned unmodified. 
+ """ + if (render_info := _render_info.get()) is not None: + render_info.has_time = True + + if not isinstance(value, datetime): + return value + if not value.tzinfo: + value = dt_util.as_local(value) + if dt_util.now() < value: + return value + + return dt_util.get_age(value, precision) + + +def time_until(hass: HomeAssistant, value: Any | datetime, precision: int = 1) -> Any: + """Take a datetime and return the amount of time until that time as a string. + + The time until can be in seconds, minutes, hours, days, months and years. + + precision is the number of units to return, with the last unit rounded. + + If the value not a datetime object the input will be returned unmodified. + """ + if (render_info := _render_info.get()) is not None: + render_info.has_time = True + + if not isinstance(value, datetime): + return value + if not value.tzinfo: + value = dt_util.as_local(value) + if dt_util.now() > value: + return value + + return dt_util.get_time_remaining(value, precision) + + def urlencode(value): """Urlencode dictionary and return as UTF-8 string.""" return urllib_urlencode(value).encode("utf-8") @@ -2890,6 +2958,8 @@ class TemplateEnvironment(ImmutableSandboxedEnvironment): "floor_id", "floor_name", "relative_time", + "time_since", + "time_until", "today_at", "label_id", "label_name", @@ -2946,6 +3016,10 @@ class TemplateEnvironment(ImmutableSandboxedEnvironment): self.globals["now"] = hassfunction(now) self.globals["relative_time"] = hassfunction(relative_time) self.filters["relative_time"] = self.globals["relative_time"] + self.globals["time_since"] = hassfunction(time_since) + self.filters["time_since"] = self.globals["time_since"] + self.globals["time_until"] = hassfunction(time_until) + self.filters["time_until"] = self.globals["time_until"] self.globals["today_at"] = hassfunction(today_at) self.filters["today_at"] = self.globals["today_at"] diff --git a/homeassistant/util/dt.py b/homeassistant/util/dt.py index 2f2b415144f..923838a48a5 100644 --- 
a/homeassistant/util/dt.py +++ b/homeassistant/util/dt.py @@ -286,36 +286,78 @@ def parse_time(time_str: str) -> dt.time | None: return None -def get_age(date: dt.datetime) -> str: - """Take a datetime and return its "age" as a string. - - The age can be in second, minute, hour, day, month or year. Only the - biggest unit is considered, e.g. if it's 2 days and 3 hours, "2 days" will - be returned. - Make sure date is not in the future, or else it won't work. - """ +def _get_timestring(timediff: float, precision: int = 1) -> str: + """Return a string representation of a time diff.""" def formatn(number: int, unit: str) -> str: """Add "unit" if it's plural.""" if number == 1: - return f"1 {unit}" - return f"{number:d} {unit}s" + return f"1 {unit} " + return f"{number:d} {unit}s " + + if timediff == 0.0: + return "0 seconds" + + units = ("year", "month", "day", "hour", "minute", "second") + + factors = (365 * 24 * 60 * 60, 30 * 24 * 60 * 60, 24 * 60 * 60, 60 * 60, 60, 1) + + result_string: str = "" + current_precision = 0 + + for i, current_factor in enumerate(factors): + selected_unit = units[i] + if timediff < current_factor: + continue + current_precision = current_precision + 1 + if current_precision == precision: + return ( + result_string + formatn(round(timediff / current_factor), selected_unit) + ).rstrip() + curr_diff = int(timediff // current_factor) + result_string += formatn(curr_diff, selected_unit) + timediff -= (curr_diff) * current_factor + + return result_string.rstrip() + + +def get_age(date: dt.datetime, precision: int = 1) -> str: + """Take a datetime and return its "age" as a string. + + The age can be in second, minute, hour, day, month and year. + + depth number of units will be returned, with the last unit rounded + + The date must be in the past or a ValueException will be raised. 
+ """ delta = (now() - date).total_seconds() + rounded_delta = round(delta) - units = ["second", "minute", "hour", "day", "month"] - factors = [60, 60, 24, 30, 12] - selected_unit = "year" + if rounded_delta < 0: + raise ValueError("Time value is in the future") + return _get_timestring(rounded_delta, precision) - for i, next_factor in enumerate(factors): - if rounded_delta < next_factor: - selected_unit = units[i] - break - delta /= next_factor - rounded_delta = round(delta) - return formatn(rounded_delta, selected_unit) +def get_time_remaining(date: dt.datetime, precision: int = 1) -> str: + """Take a datetime and return its "age" as a string. + + The age can be in second, minute, hour, day, month and year. + + depth number of units will be returned, with the last unit rounded + + The date must be in the future or a ValueException will be raised. + """ + + delta = (date - now()).total_seconds() + + rounded_delta = round(delta) + + if rounded_delta < 0: + raise ValueError("Time value is in the past") + + return _get_timestring(rounded_delta, precision) def parse_time_expression(parameter: Any, min_value: int, max_value: int) -> list[int]: diff --git a/tests/helpers/test_template.py b/tests/helpers/test_template.py index f55a94d7283..d134570d119 100644 --- a/tests/helpers/test_template.py +++ b/tests/helpers/test_template.py @@ -31,7 +31,7 @@ from homeassistant.const import ( UnitOfTemperature, UnitOfVolume, ) -from homeassistant.core import HomeAssistant +from homeassistant.core import DOMAIN as HA_DOMAIN, HomeAssistant from homeassistant.exceptions import TemplateError from homeassistant.helpers import ( area_registry as ar, @@ -2240,6 +2240,7 @@ def test_relative_time(mock_is_safe, hass: HomeAssistant) -> None: """Test relative_time method.""" hass.config.set_time_zone("UTC") now = datetime.strptime("2000-01-01 10:00:00 +00:00", "%Y-%m-%d %H:%M:%S %z") + issue_registry = ir.async_get(hass) relative_time_template = ( '{{relative_time(strptime("2000-01-01 
09:00:00", "%Y-%m-%d %H:%M:%S"))}}' ) @@ -2249,7 +2250,9 @@ def test_relative_time(mock_is_safe, hass: HomeAssistant) -> None: hass, ).async_render() assert result == "1 hour" - + assert issue_registry.async_get_issue( + HA_DOMAIN, "template_function_relative_time_deprecated" + ) result = template.Template( ( "{{" @@ -2308,6 +2311,333 @@ def test_relative_time(mock_is_safe, hass: HomeAssistant) -> None: assert info.has_time is True +@patch( + "homeassistant.helpers.template.TemplateEnvironment.is_safe_callable", + return_value=True, +) +def test_time_since(mock_is_safe, hass: HomeAssistant) -> None: + """Test time_since method.""" + hass.config.set_time_zone("UTC") + now = datetime.strptime("2000-01-01 10:00:00 +00:00", "%Y-%m-%d %H:%M:%S %z") + time_since_template = ( + '{{time_since(strptime("2000-01-01 09:00:00", "%Y-%m-%d %H:%M:%S"))}}' + ) + with freeze_time(now): + result = template.Template( + time_since_template, + hass, + ).async_render() + assert result == "1 hour" + + result = template.Template( + ( + "{{" + " time_since(" + " strptime(" + ' "2000-01-01 09:00:00 +01:00",' + ' "%Y-%m-%d %H:%M:%S %z"' + " )" + " )" + "}}" + ), + hass, + ).async_render() + assert result == "2 hours" + + result = template.Template( + ( + "{{" + " time_since(" + " strptime(" + ' "2000-01-01 03:00:00 -06:00",' + ' "%Y-%m-%d %H:%M:%S %z"' + " )" + " )" + "}}" + ), + hass, + ).async_render() + assert result == "1 hour" + + result1 = str( + template.strptime("2000-01-01 11:00:00 +00:00", "%Y-%m-%d %H:%M:%S %z") + ) + result2 = template.Template( + ( + "{{" + " time_since(" + " strptime(" + ' "2000-01-01 11:00:00 +00:00",' + ' "%Y-%m-%d %H:%M:%S %z"),' + " precision = 2" + " )" + "}}" + ), + hass, + ).async_render() + assert result1 == result2 + + result = template.Template( + ( + "{{" + " time_since(" + " strptime(" + ' "2000-01-01 09:05:00 +01:00",' + ' "%Y-%m-%d %H:%M:%S %z"),' + " precision=2" + " )" + "}}" + ), + hass, + ).async_render() + assert result == "1 hour 55 minutes" 
+ + result = template.Template( + ( + "{{" + " time_since(" + " strptime(" + ' "2000-01-01 02:05:27 -06:00",' + ' "%Y-%m-%d %H:%M:%S %z"),' + " precision = 3" + " )" + "}}" + ), + hass, + ).async_render() + assert result == "1 hour 54 minutes 33 seconds" + result = template.Template( + ( + "{{" + " time_since(" + " strptime(" + ' "2000-01-01 02:05:27 -06:00",' + ' "%Y-%m-%d %H:%M:%S %z")' + " )" + "}}" + ), + hass, + ).async_render() + assert result == "2 hours" + result = template.Template( + ( + "{{" + " time_since(" + " strptime(" + ' "1999-02-01 02:05:27 -06:00",' + ' "%Y-%m-%d %H:%M:%S %z"),' + " precision = 0" + " )" + "}}" + ), + hass, + ).async_render() + assert result == "11 months 4 days 1 hour 54 minutes 33 seconds" + result = template.Template( + ( + "{{" + " time_since(" + " strptime(" + ' "1999-02-01 02:05:27 -06:00",' + ' "%Y-%m-%d %H:%M:%S %z")' + " )" + "}}" + ), + hass, + ).async_render() + assert result == "11 months" + result1 = str( + template.strptime("2000-01-01 11:00:00 +00:00", "%Y-%m-%d %H:%M:%S %z") + ) + result2 = template.Template( + ( + "{{" + " time_since(" + " strptime(" + ' "2000-01-01 11:00:00 +00:00",' + ' "%Y-%m-%d %H:%M:%S %z"),' + " precision=3" + " )" + "}}" + ), + hass, + ).async_render() + assert result1 == result2 + + result = template.Template( + '{{time_since("string")}}', + hass, + ).async_render() + assert result == "string" + + info = template.Template(time_since_template, hass).async_render_to_info() + assert info.has_time is True + + +@patch( + "homeassistant.helpers.template.TemplateEnvironment.is_safe_callable", + return_value=True, +) +def test_time_until(mock_is_safe, hass: HomeAssistant) -> None: + """Test time_until method.""" + hass.config.set_time_zone("UTC") + now = datetime.strptime("2000-01-01 10:00:00 +00:00", "%Y-%m-%d %H:%M:%S %z") + time_until_template = ( + '{{time_until(strptime("2000-01-01 11:00:00", "%Y-%m-%d %H:%M:%S"))}}' + ) + with freeze_time(now): + result = template.Template( + 
time_until_template, + hass, + ).async_render() + assert result == "1 hour" + + result = template.Template( + ( + "{{" + " time_until(" + " strptime(" + ' "2000-01-01 13:00:00 +01:00",' + ' "%Y-%m-%d %H:%M:%S %z"' + " )" + " )" + "}}" + ), + hass, + ).async_render() + assert result == "2 hours" + + result = template.Template( + ( + "{{" + " time_until(" + " strptime(" + ' "2000-01-01 05:00:00 -06:00",' + ' "%Y-%m-%d %H:%M:%S %z"' + " )" + " )" + "}}" + ), + hass, + ).async_render() + assert result == "1 hour" + + result1 = str( + template.strptime("2000-01-01 09:00:00 +00:00", "%Y-%m-%d %H:%M:%S %z") + ) + result2 = template.Template( + ( + "{{" + " time_until(" + " strptime(" + ' "2000-01-01 09:00:00 +00:00",' + ' "%Y-%m-%d %H:%M:%S %z"),' + " precision = 2" + " )" + "}}" + ), + hass, + ).async_render() + assert result1 == result2 + + result = template.Template( + ( + "{{" + " time_until(" + " strptime(" + ' "2000-01-01 12:05:00 +01:00",' + ' "%Y-%m-%d %H:%M:%S %z"),' + " precision=2" + " )" + "}}" + ), + hass, + ).async_render() + assert result == "1 hour 5 minutes" + + result = template.Template( + ( + "{{" + " time_until(" + " strptime(" + ' "2000-01-01 05:54:33 -06:00",' + ' "%Y-%m-%d %H:%M:%S %z"),' + " precision = 3" + " )" + "}}" + ), + hass, + ).async_render() + assert result == "1 hour 54 minutes 33 seconds" + result = template.Template( + ( + "{{" + " time_until(" + " strptime(" + ' "2000-01-01 05:54:33 -06:00",' + ' "%Y-%m-%d %H:%M:%S %z")' + " )" + "}}" + ), + hass, + ).async_render() + assert result == "2 hours" + result = template.Template( + ( + "{{" + " time_until(" + " strptime(" + ' "2001-02-01 05:54:33 -06:00",' + ' "%Y-%m-%d %H:%M:%S %z"),' + " precision = 0" + " )" + "}}" + ), + hass, + ).async_render() + assert result == "1 year 1 month 2 days 1 hour 54 minutes 33 seconds" + result = template.Template( + ( + "{{" + " time_until(" + " strptime(" + ' "2001-02-01 05:54:33 -06:00",' + ' "%Y-%m-%d %H:%M:%S %z"),' + " precision = 4" + " )" + "}}" + 
), + hass, + ).async_render() + assert result == "1 year 1 month 2 days 2 hours" + result1 = str( + template.strptime("2000-01-01 09:00:00 +00:00", "%Y-%m-%d %H:%M:%S %z") + ) + result2 = template.Template( + ( + "{{" + " time_until(" + " strptime(" + ' "2000-01-01 09:00:00 +00:00",' + ' "%Y-%m-%d %H:%M:%S %z"),' + " precision=3" + " )" + "}}" + ), + hass, + ).async_render() + assert result1 == result2 + + result = template.Template( + '{{time_until("string")}}', + hass, + ).async_render() + assert result == "string" + + info = template.Template(time_until_template, hass).async_render_to_info() + assert info.has_time is True + + @patch( "homeassistant.helpers.template.TemplateEnvironment.is_safe_callable", return_value=True, diff --git a/tests/util/test_dt.py b/tests/util/test_dt.py index 7ed8154f033..215524c426b 100644 --- a/tests/util/test_dt.py +++ b/tests/util/test_dt.py @@ -178,12 +178,18 @@ def test_get_age() -> None: """Test get_age.""" diff = dt_util.now() - timedelta(seconds=0) assert dt_util.get_age(diff) == "0 seconds" + assert dt_util.get_age(diff, precision=2) == "0 seconds" diff = dt_util.now() - timedelta(seconds=1) assert dt_util.get_age(diff) == "1 second" + assert dt_util.get_age(diff, precision=2) == "1 second" + + diff = dt_util.now() + timedelta(seconds=1) + pytest.raises(ValueError, dt_util.get_age, diff) diff = dt_util.now() - timedelta(seconds=30) assert dt_util.get_age(diff) == "30 seconds" + diff = dt_util.now() + timedelta(seconds=30) diff = dt_util.now() - timedelta(minutes=5) assert dt_util.get_age(diff) == "5 minutes" @@ -196,20 +202,81 @@ def test_get_age() -> None: diff = dt_util.now() - timedelta(minutes=320) assert dt_util.get_age(diff) == "5 hours" + assert dt_util.get_age(diff, precision=2) == "5 hours 20 minutes" + assert dt_util.get_age(diff, precision=3) == "5 hours 20 minutes" diff = dt_util.now() - timedelta(minutes=1.6 * 60 * 24) assert dt_util.get_age(diff) == "2 days" + assert dt_util.get_age(diff, precision=2) == "1 day 
14 hours" + assert dt_util.get_age(diff, precision=3) == "1 day 14 hours 24 minutes" + diff = dt_util.now() + timedelta(minutes=1.6 * 60 * 24) + pytest.raises(ValueError, dt_util.get_age, diff) diff = dt_util.now() - timedelta(minutes=2 * 60 * 24) assert dt_util.get_age(diff) == "2 days" diff = dt_util.now() - timedelta(minutes=32 * 60 * 24) assert dt_util.get_age(diff) == "1 month" + assert dt_util.get_age(diff, precision=10) == "1 month 2 days" + + diff = dt_util.now() - timedelta(minutes=32 * 60 * 24 + 1) + assert dt_util.get_age(diff, precision=3) == "1 month 2 days 1 minute" diff = dt_util.now() - timedelta(minutes=365 * 60 * 24) assert dt_util.get_age(diff) == "1 year" +def test_time_remaining() -> None: + """Test get_age.""" + diff = dt_util.now() + timedelta(seconds=0) + assert dt_util.get_time_remaining(diff) == "0 seconds" + assert dt_util.get_time_remaining(diff) == "0 seconds" + assert dt_util.get_time_remaining(diff, precision=2) == "0 seconds" + + diff = dt_util.now() + timedelta(seconds=1) + assert dt_util.get_time_remaining(diff) == "1 second" + + diff = dt_util.now() - timedelta(seconds=1) + pytest.raises(ValueError, dt_util.get_time_remaining, diff) + + diff = dt_util.now() + timedelta(seconds=30) + assert dt_util.get_time_remaining(diff) == "30 seconds" + + diff = dt_util.now() + timedelta(minutes=5) + assert dt_util.get_time_remaining(diff) == "5 minutes" + + diff = dt_util.now() + timedelta(minutes=1) + assert dt_util.get_time_remaining(diff) == "1 minute" + + diff = dt_util.now() + timedelta(minutes=300) + assert dt_util.get_time_remaining(diff) == "5 hours" + + diff = dt_util.now() + timedelta(minutes=320) + assert dt_util.get_time_remaining(diff) == "5 hours" + assert dt_util.get_time_remaining(diff, precision=2) == "5 hours 20 minutes" + assert dt_util.get_time_remaining(diff, precision=3) == "5 hours 20 minutes" + + diff = dt_util.now() + timedelta(minutes=1.6 * 60 * 24) + assert dt_util.get_time_remaining(diff) == "2 days" + assert 
dt_util.get_time_remaining(diff, precision=2) == "1 day 14 hours" + assert dt_util.get_time_remaining(diff, precision=3) == "1 day 14 hours 24 minutes" + diff = dt_util.now() - timedelta(minutes=1.6 * 60 * 24) + pytest.raises(ValueError, dt_util.get_time_remaining, diff) + + diff = dt_util.now() + timedelta(minutes=2 * 60 * 24) + assert dt_util.get_time_remaining(diff) == "2 days" + + diff = dt_util.now() + timedelta(minutes=32 * 60 * 24) + assert dt_util.get_time_remaining(diff) == "1 month" + assert dt_util.get_time_remaining(diff, precision=10) == "1 month 2 days" + + diff = dt_util.now() + timedelta(minutes=32 * 60 * 24 + 1) + assert dt_util.get_time_remaining(diff, precision=3) == "1 month 2 days 1 minute" + + diff = dt_util.now() + timedelta(minutes=365 * 60 * 24) + assert dt_util.get_time_remaining(diff) == "1 year" + + def test_parse_time_expression() -> None: """Test parse_time_expression.""" assert list(range(60)) == dt_util.parse_time_expression("*", 0, 59) From e9e401ae2929591ba4fceeec8b17f9f5302c9b5d Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Wed, 24 Apr 2024 11:26:48 +0200 Subject: [PATCH 359/426] Migrate discovery debouncer callback to async_fire_internal (#116078) --- homeassistant/config_entries.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/homeassistant/config_entries.py b/homeassistant/config_entries.py index bf576b517d3..0637e5f7c87 100644 --- a/homeassistant/config_entries.py +++ b/homeassistant/config_entries.py @@ -1405,7 +1405,9 @@ class ConfigEntriesFlowManager(data_entry_flow.FlowManager[ConfigFlowResult]): @callback def _async_discovery(self) -> None: """Handle discovery.""" - self.hass.bus.async_fire(EVENT_FLOW_DISCOVERED) + # async_fire_internal is used here because this is only + # called from the Debouncer so we know the usage is safe + self.hass.bus.async_fire_internal(EVENT_FLOW_DISCOVERED) persistent_notification.async_create( self.hass, title="New devices discovered", From e3984cd50ae4d13f5f8c51567cfa65a5b49b19f9 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Wed, 24 Apr 2024 12:06:52 +0200 Subject: [PATCH 360/426] Temporary CI workaround for broken microsoft ubuntu repo (#116091) --- .github/workflows/ci.yaml | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 5d38b0480b7..320c2e8d280 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -481,7 +481,7 @@ jobs: - name: Install additional OS dependencies if: steps.cache-venv.outputs.cache-hit != 'true' run: | - sudo apt-get update + sudo apt-get update || true sudo apt-get -y install \ bluez \ ffmpeg \ @@ -694,7 +694,7 @@ jobs: steps: - name: Install additional OS dependencies run: | - sudo apt-get update + sudo apt-get update || true sudo apt-get -y install \ bluez \ ffmpeg \ @@ -754,7 +754,7 @@ jobs: steps: - name: Install additional OS dependencies run: | - sudo apt-get update + sudo apt-get update || true sudo apt-get -y install \ bluez \ ffmpeg \ @@ -869,7 +869,7 @@ jobs: 
steps: - name: Install additional OS dependencies run: | - sudo apt-get update + sudo apt-get update || true sudo apt-get -y install \ bluez \ ffmpeg \ @@ -991,7 +991,7 @@ jobs: steps: - name: Install additional OS dependencies run: | - sudo apt-get update + sudo apt-get update || true sudo apt-get -y install \ bluez \ ffmpeg \ @@ -1132,7 +1132,7 @@ jobs: steps: - name: Install additional OS dependencies run: | - sudo apt-get update + sudo apt-get update || true sudo apt-get -y install \ bluez \ ffmpeg \ From df12789e0802adea8830f58d0c2586cfe88a3a8c Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Wed, 24 Apr 2024 12:46:16 +0200 Subject: [PATCH 361/426] Remove duplicate async_write_ha_state thread safety check (#116086) --- homeassistant/helpers/entity.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/homeassistant/helpers/entity.py b/homeassistant/helpers/entity.py index cf21882eec8..a91b4c32d21 100644 --- a/homeassistant/helpers/entity.py +++ b/homeassistant/helpers/entity.py @@ -972,8 +972,6 @@ class Entity( """Verify the entity is in a writable state.""" if self.hass is None: raise RuntimeError(f"Attribute hass is None for {self}") - if self.hass.config.debug: - self.hass.verify_event_loop_thread("async_write_ha_state") # The check for self.platform guards against integrations not using an # EntityComponent and can be removed in HA Core 2024.1 From d17e9bfc99756803bb4b816a65fad3ae07aed686 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Wed, 24 Apr 2024 12:55:09 +0200 Subject: [PATCH 362/426] Enable debug mode if asyncio debug is on at startup (#116084) --- homeassistant/bootstrap.py | 4 +++- tests/test_bootstrap.py | 32 ++++++++++++++++++++++++++++++++ 2 files changed, 35 insertions(+), 1 deletion(-) diff --git a/homeassistant/bootstrap.py b/homeassistant/bootstrap.py index 10ba0392f15..cbc808eb0fa 100644 --- a/homeassistant/bootstrap.py +++ b/homeassistant/bootstrap.py @@ -253,8 +253,9 @@ async def async_setup_hass( runtime_config.log_no_color, ) - if runtime_config.debug: + if runtime_config.debug or hass.loop.get_debug(): hass.config.debug = True + hass.config.safe_mode = runtime_config.safe_mode hass.config.skip_pip = runtime_config.skip_pip hass.config.skip_pip_packages = runtime_config.skip_pip_packages @@ -318,6 +319,7 @@ async def async_setup_hass( hass = core.HomeAssistant(old_config.config_dir) if old_logging: hass.data[DATA_LOGGING] = old_logging + hass.config.debug = old_config.debug hass.config.skip_pip = old_config.skip_pip hass.config.skip_pip_packages = old_config.skip_pip_packages hass.config.internal_url = old_config.internal_url diff --git a/tests/test_bootstrap.py b/tests/test_bootstrap.py index 6b96fb43d1f..2e35e4ffddb 100644 --- a/tests/test_bootstrap.py +++ b/tests/test_bootstrap.py @@ -122,6 +122,38 @@ async def test_config_does_not_turn_off_debug(hass: HomeAssistant) -> None: assert hass.config.debug is True +@pytest.mark.parametrize("hass_config", [{"frontend": {}}]) +async def test_asyncio_debug_on_turns_hass_debug_on( + mock_hass_config: None, + mock_enable_logging: Mock, + mock_is_virtual_env: Mock, + mock_mount_local_lib_path: AsyncMock, + mock_ensure_config_exists: AsyncMock, + mock_process_ha_config_upgrade: Mock, +) -> None: + """Test that asyncio debug turns on hass debug.""" + asyncio.get_running_loop().set_debug(True) + + verbose = Mock() + log_rotate_days = Mock() + log_file = Mock() + log_no_color = Mock() + + hass = await 
bootstrap.async_setup_hass( + runner.RuntimeConfig( + config_dir=get_test_config_dir(), + verbose=verbose, + log_rotate_days=log_rotate_days, + log_file=log_file, + log_no_color=log_no_color, + skip_pip=True, + recovery_mode=False, + ), + ) + + assert hass.config.debug is True + + @pytest.mark.parametrize("load_registries", [False]) async def test_preload_translations(hass: HomeAssistant) -> None: """Test translations are preloaded for all frontend deps and base platforms.""" From 9fcb774252db4a0c7cd18276924a4f0d24570cb2 Mon Sep 17 00:00:00 2001 From: Michael <35783820+mib1185@users.noreply.github.com> Date: Wed, 24 Apr 2024 13:06:14 +0200 Subject: [PATCH 363/426] Add reconfigure flow to AVM Fritz!SmartHome (#116047) --- .../components/fritzbox/config_flow.py | 41 ++++++++++ .../components/fritzbox/strings.json | 12 ++- tests/components/fritzbox/test_config_flow.py | 81 ++++++++++++++++++- 3 files changed, 132 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/fritzbox/config_flow.py b/homeassistant/components/fritzbox/config_flow.py index c89415fa7ee..62f189b542f 100644 --- a/homeassistant/components/fritzbox/config_flow.py +++ b/homeassistant/components/fritzbox/config_flow.py @@ -221,3 +221,44 @@ class FritzboxConfigFlow(ConfigFlow, domain=DOMAIN): description_placeholders={"name": self._name}, errors=errors, ) + + async def async_step_reconfigure( + self, _: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle a reconfiguration flow initialized by the user.""" + entry = self.hass.config_entries.async_get_entry(self.context["entry_id"]) + assert entry is not None + self._entry = entry + self._name = self._entry.data[CONF_HOST] + self._host = self._entry.data[CONF_HOST] + self._username = self._entry.data[CONF_USERNAME] + self._password = self._entry.data[CONF_PASSWORD] + + return await self.async_step_reconfigure_confirm() + + async def async_step_reconfigure_confirm( + self, user_input: dict[str, Any] | None = None + ) -> 
ConfigFlowResult: + """Handle a reconfiguration flow initialized by the user.""" + errors = {} + + if user_input is not None: + self._host = user_input[CONF_HOST] + + result = await self.hass.async_add_executor_job(self._try_connect) + + if result == RESULT_SUCCESS: + await self._update_entry() + return self.async_abort(reason="reconfigure_successful") + errors["base"] = result + + return self.async_show_form( + step_id="reconfigure_confirm", + data_schema=vol.Schema( + { + vol.Required(CONF_HOST, default=self._host): str, + } + ), + description_placeholders={"name": self._name}, + errors=errors, + ) diff --git a/homeassistant/components/fritzbox/strings.json b/homeassistant/components/fritzbox/strings.json index f4d2fe3670e..755cc97d7d8 100644 --- a/homeassistant/components/fritzbox/strings.json +++ b/homeassistant/components/fritzbox/strings.json @@ -26,6 +26,15 @@ "username": "[%key:common::config_flow::data::username%]", "password": "[%key:common::config_flow::data::password%]" } + }, + "reconfigure_confirm": { + "description": "Update your configuration information for {name}.", + "data": { + "host": "[%key:common::config_flow::data::host%]" + }, + "data_description": { + "host": "The hostname or IP address of your FRITZ!Box router." 
+ } } }, "abort": { @@ -34,7 +43,8 @@ "ignore_ip6_link_local": "IPv6 link local address is not supported.", "no_devices_found": "[%key:common::config_flow::abort::no_devices_found%]", "not_supported": "Connected to AVM FRITZ!Box but it's unable to control Smart Home devices.", - "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]" + "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]", + "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]" }, "error": { "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]" diff --git a/tests/components/fritzbox/test_config_flow.py b/tests/components/fritzbox/test_config_flow.py index 53a4f1c5205..72d36a8ab63 100644 --- a/tests/components/fritzbox/test_config_flow.py +++ b/tests/components/fritzbox/test_config_flow.py @@ -12,7 +12,12 @@ from requests.exceptions import HTTPError from homeassistant.components import ssdp from homeassistant.components.fritzbox.const import DOMAIN from homeassistant.components.ssdp import ATTR_UPNP_FRIENDLY_NAME, ATTR_UPNP_UDN -from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_SSDP, SOURCE_USER +from homeassistant.config_entries import ( + SOURCE_REAUTH, + SOURCE_RECONFIGURE, + SOURCE_SSDP, + SOURCE_USER, +) from homeassistant.const import CONF_DEVICES, CONF_HOST, CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -202,6 +207,80 @@ async def test_reauth_not_successful(hass: HomeAssistant, fritz: Mock) -> None: assert result["reason"] == "no_devices_found" +async def test_reconfigure_success(hass: HomeAssistant, fritz: Mock) -> None: + """Test starting a reconfigure flow.""" + mock_config = MockConfigEntry(domain=DOMAIN, data=MOCK_USER_DATA) + mock_config.add_to_hass(hass) + + assert mock_config.data[CONF_HOST] == "10.0.0.1" + assert mock_config.data[CONF_USERNAME] == "fake_user" + assert 
mock_config.data[CONF_PASSWORD] == "fake_pass" + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_RECONFIGURE, "entry_id": mock_config.entry_id}, + data=mock_config.data, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reconfigure_confirm" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_HOST: "new_host", + }, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reconfigure_successful" + assert mock_config.data[CONF_HOST] == "new_host" + assert mock_config.data[CONF_USERNAME] == "fake_user" + assert mock_config.data[CONF_PASSWORD] == "fake_pass" + + +async def test_reconfigure_failed(hass: HomeAssistant, fritz: Mock) -> None: + """Test starting a reconfigure flow with failure.""" + fritz().login.side_effect = [OSError("Boom"), None] + + mock_config = MockConfigEntry(domain=DOMAIN, data=MOCK_USER_DATA) + mock_config.add_to_hass(hass) + + assert mock_config.data[CONF_HOST] == "10.0.0.1" + assert mock_config.data[CONF_USERNAME] == "fake_user" + assert mock_config.data[CONF_PASSWORD] == "fake_pass" + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_RECONFIGURE, "entry_id": mock_config.entry_id}, + data=mock_config.data, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reconfigure_confirm" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_HOST: "new_host", + }, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reconfigure_confirm" + assert result["errors"]["base"] == "no_devices_found" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_HOST: "new_host", + }, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reconfigure_successful" + assert 
mock_config.data[CONF_HOST] == "new_host" + assert mock_config.data[CONF_USERNAME] == "fake_user" + assert mock_config.data[CONF_PASSWORD] == "fake_pass" + + @pytest.mark.parametrize( ("test_data", "expected_result"), [ From a752f8e7d7b369fc756e5ac3bb7cf35f6c591bd1 Mon Sep 17 00:00:00 2001 From: Marc Mueller <30130371+cdce8p@users.noreply.github.com> Date: Wed, 24 Apr 2024 13:17:01 +0200 Subject: [PATCH 364/426] Remove microsoft apt package list before update (#116097) --- .github/workflows/ci.yaml | 18 ++++++++++++------ 1 file changed, 12 insertions(+), 6 deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 320c2e8d280..115c1a932ea 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -481,7 +481,8 @@ jobs: - name: Install additional OS dependencies if: steps.cache-venv.outputs.cache-hit != 'true' run: | - sudo apt-get update || true + sudo rm /etc/apt/sources.list.d/microsoft-prod.list + sudo apt-get update sudo apt-get -y install \ bluez \ ffmpeg \ @@ -694,7 +695,8 @@ jobs: steps: - name: Install additional OS dependencies run: | - sudo apt-get update || true + sudo rm /etc/apt/sources.list.d/microsoft-prod.list + sudo apt-get update sudo apt-get -y install \ bluez \ ffmpeg \ @@ -754,7 +756,8 @@ jobs: steps: - name: Install additional OS dependencies run: | - sudo apt-get update || true + sudo rm /etc/apt/sources.list.d/microsoft-prod.list + sudo apt-get update sudo apt-get -y install \ bluez \ ffmpeg \ @@ -869,7 +872,8 @@ jobs: steps: - name: Install additional OS dependencies run: | - sudo apt-get update || true + sudo rm /etc/apt/sources.list.d/microsoft-prod.list + sudo apt-get update sudo apt-get -y install \ bluez \ ffmpeg \ @@ -991,7 +995,8 @@ jobs: steps: - name: Install additional OS dependencies run: | - sudo apt-get update || true + sudo rm /etc/apt/sources.list.d/microsoft-prod.list + sudo apt-get update sudo apt-get -y install \ bluez \ ffmpeg \ @@ -1132,7 +1137,8 @@ jobs: steps: - name: Install 
additional OS dependencies run: | - sudo apt-get update || true + sudo rm /etc/apt/sources.list.d/microsoft-prod.list + sudo apt-get update sudo apt-get -y install \ bluez \ ffmpeg \ From bfed682abe50161f1326fd3b4b85912319a81da2 Mon Sep 17 00:00:00 2001 From: Michael <35783820+mib1185@users.noreply.github.com> Date: Wed, 24 Apr 2024 13:18:09 +0200 Subject: [PATCH 365/426] =?UTF-8?q?Mark=20Tankerkoenig=20as=20Platinum=20?= =?UTF-8?q?=F0=9F=8F=86=20integration=20(#115917)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- homeassistant/components/tankerkoenig/manifest.json | 1 + 1 file changed, 1 insertion(+) diff --git a/homeassistant/components/tankerkoenig/manifest.json b/homeassistant/components/tankerkoenig/manifest.json index 4570d0e5649..c754094655d 100644 --- a/homeassistant/components/tankerkoenig/manifest.json +++ b/homeassistant/components/tankerkoenig/manifest.json @@ -6,5 +6,6 @@ "documentation": "https://www.home-assistant.io/integrations/tankerkoenig", "iot_class": "cloud_polling", "loggers": ["aiotankerkoenig"], + "quality_scale": "platinum", "requirements": ["aiotankerkoenig==0.4.1"] } From 5aa61cb6d593a4d4e4dfbe438ae2686726ad8acd Mon Sep 17 00:00:00 2001 From: Andy Date: Wed, 24 Apr 2024 13:19:50 +0200 Subject: [PATCH 366/426] Extend options for ecovacs lifespans (#116094) Co-authored-by: Robert Resch --- homeassistant/components/ecovacs/const.py | 2 + homeassistant/components/ecovacs/icons.json | 12 + homeassistant/components/ecovacs/strings.json | 12 + .../ecovacs/snapshots/test_button.ambr | 92 +++ .../ecovacs/snapshots/test_sensor.ambr | 630 +++++++++++++++++- tests/components/ecovacs/test_button.py | 22 +- tests/components/ecovacs/test_init.py | 2 +- tests/components/ecovacs/test_sensor.py | 28 + 8 files changed, 772 insertions(+), 28 deletions(-) diff --git a/homeassistant/components/ecovacs/const.py b/homeassistant/components/ecovacs/const.py index e5ef0760182..6b77404e935 100644 --- 
a/homeassistant/components/ecovacs/const.py +++ b/homeassistant/components/ecovacs/const.py @@ -12,8 +12,10 @@ CONF_OVERRIDE_MQTT_URL = "override_mqtt_url" CONF_VERIFY_MQTT_CERTIFICATE = "verify_mqtt_certificate" SUPPORTED_LIFESPANS = ( + LifeSpan.BLADE, LifeSpan.BRUSH, LifeSpan.FILTER, + LifeSpan.LENS_BRUSH, LifeSpan.SIDE_BRUSH, ) diff --git a/homeassistant/components/ecovacs/icons.json b/homeassistant/components/ecovacs/icons.json index 2e2d897c455..44c577104dd 100644 --- a/homeassistant/components/ecovacs/icons.json +++ b/homeassistant/components/ecovacs/icons.json @@ -12,12 +12,18 @@ "relocate": { "default": "mdi:map-marker-question" }, + "reset_lifespan_blade": { + "default": "mdi:saw-blade" + }, "reset_lifespan_brush": { "default": "mdi:broom" }, "reset_lifespan_filter": { "default": "mdi:air-filter" }, + "reset_lifespan_lens_brush": { + "default": "mdi:broom" + }, "reset_lifespan_side_brush": { "default": "mdi:broom" } @@ -42,12 +48,18 @@ "error": { "default": "mdi:alert-circle" }, + "lifespan_blade": { + "default": "mdi:saw-blade" + }, "lifespan_brush": { "default": "mdi:broom" }, "lifespan_filter": { "default": "mdi:air-filter" }, + "lifespan_lens_brush": { + "default": "mdi:broom" + }, "lifespan_side_brush": { "default": "mdi:broom" }, diff --git a/homeassistant/components/ecovacs/strings.json b/homeassistant/components/ecovacs/strings.json index 50afd21deb3..bb27bd6941d 100644 --- a/homeassistant/components/ecovacs/strings.json +++ b/homeassistant/components/ecovacs/strings.json @@ -46,12 +46,18 @@ "relocate": { "name": "Relocate" }, + "reset_lifespan_blade": { + "name": "Reset blade lifespan" + }, "reset_lifespan_brush": { "name": "Reset main brush lifespan" }, "reset_lifespan_filter": { "name": "Reset filter lifespan" }, + "reset_lifespan_lens_brush": { + "name": "Reset lens brush lifespan" + }, "reset_lifespan_side_brush": { "name": "Reset side brushes lifespan" } @@ -92,12 +98,18 @@ } } }, + "lifespan_blade": { + "name": "Blade lifespan" + }, 
"lifespan_brush": { "name": "Main brush lifespan" }, "lifespan_filter": { "name": "Filter lifespan" }, + "lifespan_lens_brush": { + "name": "Lens brush lifespan" + }, "lifespan_side_brush": { "name": "Side brushes lifespan" }, diff --git a/tests/components/ecovacs/snapshots/test_button.ambr b/tests/components/ecovacs/snapshots/test_button.ambr index 816551f7e6a..d250a60a35f 100644 --- a/tests/components/ecovacs/snapshots/test_button.ambr +++ b/tests/components/ecovacs/snapshots/test_button.ambr @@ -1,4 +1,96 @@ # serializer version: 1 +# name: test_buttons[5xu9h3][button.goat_g1_reset_blade_lifespan:entity-registry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.goat_g1_reset_blade_lifespan', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Reset blade lifespan', + 'platform': 'ecovacs', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'reset_lifespan_blade', + 'unique_id': '8516fbb1-17f1-4194-0000000_reset_lifespan_blade', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[5xu9h3][button.goat_g1_reset_blade_lifespan:state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Goat G1 Reset blade lifespan', + }), + 'context': , + 'entity_id': 'button.goat_g1_reset_blade_lifespan', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2024-01-01T00:00:00+00:00', + }) +# --- +# name: test_buttons[5xu9h3][button.goat_g1_reset_lens_brush_lifespan:entity-registry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , 
+ 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.goat_g1_reset_lens_brush_lifespan', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Reset lens brush lifespan', + 'platform': 'ecovacs', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'reset_lifespan_lens_brush', + 'unique_id': '8516fbb1-17f1-4194-0000000_reset_lifespan_lens_brush', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[5xu9h3][button.goat_g1_reset_lens_brush_lifespan:state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Goat G1 Reset lens brush lifespan', + }), + 'context': , + 'entity_id': 'button.goat_g1_reset_lens_brush_lifespan', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2024-01-01T00:00:00+00:00', + }) +# --- # name: test_buttons[yna5x1][button.ozmo_950_relocate:entity-registry] EntityRegistryEntrySnapshot({ 'aliases': set({ diff --git a/tests/components/ecovacs/snapshots/test_sensor.ambr b/tests/components/ecovacs/snapshots/test_sensor.ambr index b35310158f2..e2cee3d410f 100644 --- a/tests/components/ecovacs/snapshots/test_sensor.ambr +++ b/tests/components/ecovacs/snapshots/test_sensor.ambr @@ -1,5 +1,583 @@ # serializer version: 1 -# name: test_sensors[yna5x1-entity_ids0][sensor.ozmo_950_area_cleaned:entity-registry] +# name: test_sensors[5xu9h3][sensor.goat_g1_area_cleaned:entity-registry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.goat_g1_area_cleaned', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': 
dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Area cleaned', + 'platform': 'ecovacs', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'stats_area', + 'unique_id': '8516fbb1-17f1-4194-0000000_stats_area', + 'unit_of_measurement': 'm²', + }) +# --- +# name: test_sensors[5xu9h3][sensor.goat_g1_area_cleaned:state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Goat G1 Area cleaned', + 'unit_of_measurement': 'm²', + }), + 'context': , + 'entity_id': 'sensor.goat_g1_area_cleaned', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '10', + }) +# --- +# name: test_sensors[5xu9h3][sensor.goat_g1_battery:entity-registry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.goat_g1_battery', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Battery', + 'platform': 'ecovacs', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '8516fbb1-17f1-4194-0000000_battery_level', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensors[5xu9h3][sensor.goat_g1_battery:state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'battery', + 'friendly_name': 'Goat G1 Battery', + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.goat_g1_battery', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '100', + }) +# --- +# name: test_sensors[5xu9h3][sensor.goat_g1_blade_lifespan:entity-registry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 
'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.goat_g1_blade_lifespan', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Blade lifespan', + 'platform': 'ecovacs', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifespan_blade', + 'unique_id': '8516fbb1-17f1-4194-0000000_lifespan_blade', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensors[5xu9h3][sensor.goat_g1_blade_lifespan:state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Goat G1 Blade lifespan', + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.goat_g1_blade_lifespan', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_sensors[5xu9h3][sensor.goat_g1_cleaning_duration:entity-registry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.goat_g1_cleaning_duration', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Cleaning duration', + 'platform': 'ecovacs', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'stats_time', + 'unique_id': '8516fbb1-17f1-4194-0000000_stats_time', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[5xu9h3][sensor.goat_g1_cleaning_duration:state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 
'device_class': 'duration', + 'friendly_name': 'Goat G1 Cleaning duration', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.goat_g1_cleaning_duration', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '5.0', + }) +# --- +# name: test_sensors[5xu9h3][sensor.goat_g1_error:entity-registry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.goat_g1_error', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Error', + 'platform': 'ecovacs', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'error', + 'unique_id': '8516fbb1-17f1-4194-0000000_error', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensors[5xu9h3][sensor.goat_g1_error:state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'description': 'NoError: Robot is operational', + 'friendly_name': 'Goat G1 Error', + }), + 'context': , + 'entity_id': 'sensor.goat_g1_error', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_sensors[5xu9h3][sensor.goat_g1_ip_address:entity-registry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.goat_g1_ip_address', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'IP address', + 'platform': 'ecovacs', + 
'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'network_ip', + 'unique_id': '8516fbb1-17f1-4194-0000000_network_ip', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensors[5xu9h3][sensor.goat_g1_ip_address:state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Goat G1 IP address', + }), + 'context': , + 'entity_id': 'sensor.goat_g1_ip_address', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '192.168.0.10', + }) +# --- +# name: test_sensors[5xu9h3][sensor.goat_g1_lens_brush_lifespan:entity-registry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.goat_g1_lens_brush_lifespan', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Lens brush lifespan', + 'platform': 'ecovacs', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifespan_lens_brush', + 'unique_id': '8516fbb1-17f1-4194-0000000_lifespan_lens_brush', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensors[5xu9h3][sensor.goat_g1_lens_brush_lifespan:state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Goat G1 Lens brush lifespan', + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.goat_g1_lens_brush_lifespan', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_sensors[5xu9h3][sensor.goat_g1_total_area_cleaned:entity-registry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 
'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.goat_g1_total_area_cleaned', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Total area cleaned', + 'platform': 'ecovacs', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'total_stats_area', + 'unique_id': '8516fbb1-17f1-4194-0000000_total_stats_area', + 'unit_of_measurement': 'm²', + }) +# --- +# name: test_sensors[5xu9h3][sensor.goat_g1_total_area_cleaned:state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Goat G1 Total area cleaned', + 'state_class': , + 'unit_of_measurement': 'm²', + }), + 'context': , + 'entity_id': 'sensor.goat_g1_total_area_cleaned', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '60', + }) +# --- +# name: test_sensors[5xu9h3][sensor.goat_g1_total_cleaning_duration:entity-registry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.goat_g1_total_cleaning_duration', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Total cleaning duration', + 'platform': 'ecovacs', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'total_stats_time', + 'unique_id': '8516fbb1-17f1-4194-0000000_total_stats_time', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[5xu9h3][sensor.goat_g1_total_cleaning_duration:state] + 
StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'duration', + 'friendly_name': 'Goat G1 Total cleaning duration', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.goat_g1_total_cleaning_duration', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '40.000', + }) +# --- +# name: test_sensors[5xu9h3][sensor.goat_g1_total_cleanings:entity-registry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.goat_g1_total_cleanings', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Total cleanings', + 'platform': 'ecovacs', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'total_stats_cleanings', + 'unique_id': '8516fbb1-17f1-4194-0000000_total_stats_cleanings', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensors[5xu9h3][sensor.goat_g1_total_cleanings:state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Goat G1 Total cleanings', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.goat_g1_total_cleanings', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '123', + }) +# --- +# name: test_sensors[5xu9h3][sensor.goat_g1_wi_fi_rssi:entity-registry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.goat_g1_wi_fi_rssi', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + 
}), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Wi-Fi RSSI', + 'platform': 'ecovacs', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'network_rssi', + 'unique_id': '8516fbb1-17f1-4194-0000000_network_rssi', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensors[5xu9h3][sensor.goat_g1_wi_fi_rssi:state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Goat G1 Wi-Fi RSSI', + }), + 'context': , + 'entity_id': 'sensor.goat_g1_wi_fi_rssi', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '-62', + }) +# --- +# name: test_sensors[5xu9h3][sensor.goat_g1_wi_fi_ssid:entity-registry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.goat_g1_wi_fi_ssid', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Wi-Fi SSID', + 'platform': 'ecovacs', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'network_ssid', + 'unique_id': '8516fbb1-17f1-4194-0000000_network_ssid', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensors[5xu9h3][sensor.goat_g1_wi_fi_ssid:state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Goat G1 Wi-Fi SSID', + }), + 'context': , + 'entity_id': 'sensor.goat_g1_wi_fi_ssid', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'Testnetwork', + }) +# --- +# name: test_sensors[yna5x1][sensor.ozmo_950_area_cleaned:entity-registry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -32,7 +610,7 @@ 'unit_of_measurement': 'm²', }) # --- -# name: 
test_sensors[yna5x1-entity_ids0][sensor.ozmo_950_area_cleaned:state] +# name: test_sensors[yna5x1][sensor.ozmo_950_area_cleaned:state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'friendly_name': 'Ozmo 950 Area cleaned', @@ -46,7 +624,7 @@ 'state': '10', }) # --- -# name: test_sensors[yna5x1-entity_ids0][sensor.ozmo_950_battery:entity-registry] +# name: test_sensors[yna5x1][sensor.ozmo_950_battery:entity-registry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -79,7 +657,7 @@ 'unit_of_measurement': '%', }) # --- -# name: test_sensors[yna5x1-entity_ids0][sensor.ozmo_950_battery:state] +# name: test_sensors[yna5x1][sensor.ozmo_950_battery:state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'battery', @@ -94,7 +672,7 @@ 'state': '100', }) # --- -# name: test_sensors[yna5x1-entity_ids0][sensor.ozmo_950_cleaning_duration:entity-registry] +# name: test_sensors[yna5x1][sensor.ozmo_950_cleaning_duration:entity-registry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -130,7 +708,7 @@ 'unit_of_measurement': , }) # --- -# name: test_sensors[yna5x1-entity_ids0][sensor.ozmo_950_cleaning_duration:state] +# name: test_sensors[yna5x1][sensor.ozmo_950_cleaning_duration:state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'duration', @@ -145,7 +723,7 @@ 'state': '5.0', }) # --- -# name: test_sensors[yna5x1-entity_ids0][sensor.ozmo_950_error:entity-registry] +# name: test_sensors[yna5x1][sensor.ozmo_950_error:entity-registry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -178,7 +756,7 @@ 'unit_of_measurement': None, }) # --- -# name: test_sensors[yna5x1-entity_ids0][sensor.ozmo_950_error:state] +# name: test_sensors[yna5x1][sensor.ozmo_950_error:state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'description': 'NoError: Robot is operational', @@ -192,7 +770,7 @@ 'state': '0', }) # --- -# name: test_sensors[yna5x1-entity_ids0][sensor.ozmo_950_filter_lifespan:entity-registry] +# name: 
test_sensors[yna5x1][sensor.ozmo_950_filter_lifespan:entity-registry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -225,7 +803,7 @@ 'unit_of_measurement': '%', }) # --- -# name: test_sensors[yna5x1-entity_ids0][sensor.ozmo_950_filter_lifespan:state] +# name: test_sensors[yna5x1][sensor.ozmo_950_filter_lifespan:state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'friendly_name': 'Ozmo 950 Filter lifespan', @@ -239,7 +817,7 @@ 'state': '56', }) # --- -# name: test_sensors[yna5x1-entity_ids0][sensor.ozmo_950_ip_address:entity-registry] +# name: test_sensors[yna5x1][sensor.ozmo_950_ip_address:entity-registry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -272,7 +850,7 @@ 'unit_of_measurement': None, }) # --- -# name: test_sensors[yna5x1-entity_ids0][sensor.ozmo_950_ip_address:state] +# name: test_sensors[yna5x1][sensor.ozmo_950_ip_address:state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'friendly_name': 'Ozmo 950 IP address', @@ -285,7 +863,7 @@ 'state': '192.168.0.10', }) # --- -# name: test_sensors[yna5x1-entity_ids0][sensor.ozmo_950_main_brush_lifespan:entity-registry] +# name: test_sensors[yna5x1][sensor.ozmo_950_main_brush_lifespan:entity-registry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -318,7 +896,7 @@ 'unit_of_measurement': '%', }) # --- -# name: test_sensors[yna5x1-entity_ids0][sensor.ozmo_950_main_brush_lifespan:state] +# name: test_sensors[yna5x1][sensor.ozmo_950_main_brush_lifespan:state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'friendly_name': 'Ozmo 950 Main brush lifespan', @@ -332,7 +910,7 @@ 'state': '80', }) # --- -# name: test_sensors[yna5x1-entity_ids0][sensor.ozmo_950_side_brushes_lifespan:entity-registry] +# name: test_sensors[yna5x1][sensor.ozmo_950_side_brushes_lifespan:entity-registry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -365,7 +943,7 @@ 'unit_of_measurement': '%', }) # --- -# name: test_sensors[yna5x1-entity_ids0][sensor.ozmo_950_side_brushes_lifespan:state] +# name: 
test_sensors[yna5x1][sensor.ozmo_950_side_brushes_lifespan:state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'friendly_name': 'Ozmo 950 Side brushes lifespan', @@ -379,7 +957,7 @@ 'state': '40', }) # --- -# name: test_sensors[yna5x1-entity_ids0][sensor.ozmo_950_total_area_cleaned:entity-registry] +# name: test_sensors[yna5x1][sensor.ozmo_950_total_area_cleaned:entity-registry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -414,7 +992,7 @@ 'unit_of_measurement': 'm²', }) # --- -# name: test_sensors[yna5x1-entity_ids0][sensor.ozmo_950_total_area_cleaned:state] +# name: test_sensors[yna5x1][sensor.ozmo_950_total_area_cleaned:state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'friendly_name': 'Ozmo 950 Total area cleaned', @@ -429,7 +1007,7 @@ 'state': '60', }) # --- -# name: test_sensors[yna5x1-entity_ids0][sensor.ozmo_950_total_cleaning_duration:entity-registry] +# name: test_sensors[yna5x1][sensor.ozmo_950_total_cleaning_duration:entity-registry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -467,7 +1045,7 @@ 'unit_of_measurement': , }) # --- -# name: test_sensors[yna5x1-entity_ids0][sensor.ozmo_950_total_cleaning_duration:state] +# name: test_sensors[yna5x1][sensor.ozmo_950_total_cleaning_duration:state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'duration', @@ -483,7 +1061,7 @@ 'state': '40.000', }) # --- -# name: test_sensors[yna5x1-entity_ids0][sensor.ozmo_950_total_cleanings:entity-registry] +# name: test_sensors[yna5x1][sensor.ozmo_950_total_cleanings:entity-registry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -518,7 +1096,7 @@ 'unit_of_measurement': None, }) # --- -# name: test_sensors[yna5x1-entity_ids0][sensor.ozmo_950_total_cleanings:state] +# name: test_sensors[yna5x1][sensor.ozmo_950_total_cleanings:state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'friendly_name': 'Ozmo 950 Total cleanings', @@ -532,7 +1110,7 @@ 'state': '123', }) # --- -# name: 
test_sensors[yna5x1-entity_ids0][sensor.ozmo_950_wi_fi_rssi:entity-registry] +# name: test_sensors[yna5x1][sensor.ozmo_950_wi_fi_rssi:entity-registry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -565,7 +1143,7 @@ 'unit_of_measurement': None, }) # --- -# name: test_sensors[yna5x1-entity_ids0][sensor.ozmo_950_wi_fi_rssi:state] +# name: test_sensors[yna5x1][sensor.ozmo_950_wi_fi_rssi:state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'friendly_name': 'Ozmo 950 Wi-Fi RSSI', @@ -578,7 +1156,7 @@ 'state': '-62', }) # --- -# name: test_sensors[yna5x1-entity_ids0][sensor.ozmo_950_wi_fi_ssid:entity-registry] +# name: test_sensors[yna5x1][sensor.ozmo_950_wi_fi_ssid:entity-registry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -611,7 +1189,7 @@ 'unit_of_measurement': None, }) # --- -# name: test_sensors[yna5x1-entity_ids0][sensor.ozmo_950_wi_fi_ssid:state] +# name: test_sensors[yna5x1][sensor.ozmo_950_wi_fi_ssid:state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'friendly_name': 'Ozmo 950 Wi-Fi SSID', diff --git a/tests/components/ecovacs/test_button.py b/tests/components/ecovacs/test_button.py index 8e583e6342b..277983eb0c5 100644 --- a/tests/components/ecovacs/test_button.py +++ b/tests/components/ecovacs/test_button.py @@ -48,8 +48,21 @@ def platforms() -> Platform | list[Platform]: ), ], ), + ( + "5xu9h3", + [ + ( + "button.goat_g1_reset_blade_lifespan", + ResetLifeSpan(LifeSpan.BLADE), + ), + ( + "button.goat_g1_reset_lens_brush_lifespan", + ResetLifeSpan(LifeSpan.LENS_BRUSH), + ), + ], + ), ], - ids=["yna5x1"], + ids=["yna5x1", "5xu9h3"], ) async def test_buttons( hass: HomeAssistant, @@ -98,6 +111,13 @@ async def test_buttons( "button.ozmo_950_reset_side_brushes_lifespan", ], ), + ( + "5xu9h3", + [ + "button.goat_g1_reset_blade_lifespan", + "button.goat_g1_reset_lens_brush_lifespan", + ], + ), ], ) async def test_disabled_by_default_buttons( diff --git a/tests/components/ecovacs/test_init.py b/tests/components/ecovacs/test_init.py index 
7780b86d714..c27da2196b1 100644 --- a/tests/components/ecovacs/test_init.py +++ b/tests/components/ecovacs/test_init.py @@ -122,7 +122,7 @@ async def test_devices_in_dr( ("device_fixture", "entities"), [ ("yna5x1", 26), - ("5xu9h3", 20), + ("5xu9h3", 24), ], ) async def test_all_entities_loaded( diff --git a/tests/components/ecovacs/test_sensor.py b/tests/components/ecovacs/test_sensor.py index 7ff4ab3f009..5b8bf18e1d8 100644 --- a/tests/components/ecovacs/test_sensor.py +++ b/tests/components/ecovacs/test_sensor.py @@ -69,7 +69,25 @@ async def notify_events(hass: HomeAssistant, event_bus: EventBus): "sensor.ozmo_950_error", ], ), + ( + "5xu9h3", + [ + "sensor.goat_g1_area_cleaned", + "sensor.goat_g1_cleaning_duration", + "sensor.goat_g1_total_area_cleaned", + "sensor.goat_g1_total_cleaning_duration", + "sensor.goat_g1_total_cleanings", + "sensor.goat_g1_battery", + "sensor.goat_g1_ip_address", + "sensor.goat_g1_wi_fi_rssi", + "sensor.goat_g1_wi_fi_ssid", + "sensor.goat_g1_blade_lifespan", + "sensor.goat_g1_lens_brush_lifespan", + "sensor.goat_g1_error", + ], + ), ], + ids=["yna5x1", "5xu9h3"], ) async def test_sensors( hass: HomeAssistant, @@ -111,7 +129,17 @@ async def test_sensors( "sensor.ozmo_950_wi_fi_ssid", ], ), + ( + "5xu9h3", + [ + "sensor.goat_g1_error", + "sensor.goat_g1_ip_address", + "sensor.goat_g1_wi_fi_rssi", + "sensor.goat_g1_wi_fi_ssid", + ], + ), ], + ids=["yna5x1", "5xu9h3"], ) async def test_disabled_by_default_sensors( hass: HomeAssistant, entity_registry: er.EntityRegistry, entity_ids: list[str] From 18132916fad71d1c317dc37be68b4c4641b0905e Mon Sep 17 00:00:00 2001 From: Jan Bouwhuis Date: Wed, 24 Apr 2024 13:29:42 +0200 Subject: [PATCH 367/426] Mask current password in MQTT option flow (#116098) * Mask current password in MQTT option flow * Update docstr * Typo --- homeassistant/components/mqtt/config_flow.py | 49 ++++++++++++++------ tests/components/mqtt/test_config_flow.py | 6 +-- 2 files changed, 39 insertions(+), 16 deletions(-) diff 
--git a/homeassistant/components/mqtt/config_flow.py b/homeassistant/components/mqtt/config_flow.py index 8168b997fa6..1a7dfbbc507 100644 --- a/homeassistant/components/mqtt/config_flow.py +++ b/homeassistant/components/mqtt/config_flow.py @@ -167,6 +167,29 @@ REAUTH_SCHEMA = vol.Schema( PWD_NOT_CHANGED = "__**password_not_changed**__" +@callback +def update_password_from_user_input( + entry_password: str | None, user_input: dict[str, Any] +) -> dict[str, Any]: + """Update the password if the entry has been updated. + + As we want to avoid reflecting the stored password in the UI, + we replace the suggested value in the UI with a sentitel, + and we change it back here if it was changed. + """ + substituted_used_data = dict(user_input) + # Take out the password submitted + user_password: str | None = substituted_used_data.pop(CONF_PASSWORD, None) + # Only add the password if it has changed. + # If the sentinel password is submitted, we replace that with our current + # password from the config entry data. 
+ password_changed = user_password is not None and user_password != PWD_NOT_CHANGED + password = user_password if password_changed else entry_password + if password is not None: + substituted_used_data[CONF_PASSWORD] = password + return substituted_used_data + + class FlowHandler(ConfigFlow, domain=DOMAIN): """Handle a config flow.""" @@ -209,16 +232,10 @@ class FlowHandler(ConfigFlow, domain=DOMAIN): assert self.entry is not None if user_input: - password_changed = ( - user_password := user_input[CONF_PASSWORD] - ) != PWD_NOT_CHANGED - entry_password = self.entry.data.get(CONF_PASSWORD) - password = user_password if password_changed else entry_password - new_entry_data = { - **self.entry.data, - CONF_USERNAME: user_input.get(CONF_USERNAME), - CONF_PASSWORD: password, - } + substituted_used_data = update_password_from_user_input( + self.entry.data.get(CONF_PASSWORD), user_input + ) + new_entry_data = {**self.entry.data, **substituted_used_data} if await self.hass.async_add_executor_job( try_connection, new_entry_data, @@ -350,13 +367,17 @@ class MQTTOptionsFlowHandler(OptionsFlow): validated_user_input, errors, ): + self.broker_config.update( + update_password_from_user_input( + self.config_entry.data.get(CONF_PASSWORD), validated_user_input + ), + ) can_connect = await self.hass.async_add_executor_job( try_connection, - validated_user_input, + self.broker_config, ) if can_connect: - self.broker_config.update(validated_user_input) return await self.async_step_options() errors["base"] = "cannot_connect" @@ -657,7 +678,9 @@ async def async_get_broker_settings( current_broker = current_config.get(CONF_BROKER) current_port = current_config.get(CONF_PORT, DEFAULT_PORT) current_user = current_config.get(CONF_USERNAME) - current_pass = current_config.get(CONF_PASSWORD) + # Return the sentinel password to avoid exposure + current_entry_pass = current_config.get(CONF_PASSWORD) + current_pass = PWD_NOT_CHANGED if current_entry_pass else None # Treat the previous post as an 
update of the current settings # (if there was a basic broker setup step) diff --git a/tests/components/mqtt/test_config_flow.py b/tests/components/mqtt/test_config_flow.py index 56d19506a66..422ec84c091 100644 --- a/tests/components/mqtt/test_config_flow.py +++ b/tests/components/mqtt/test_config_flow.py @@ -902,7 +902,7 @@ async def test_option_flow_default_suggested_values( } suggested = { mqtt.CONF_USERNAME: "user", - mqtt.CONF_PASSWORD: "pass", + mqtt.CONF_PASSWORD: PWD_NOT_CHANGED, } for key, value in defaults.items(): assert get_default(result["data_schema"].schema, key) == value @@ -964,7 +964,7 @@ async def test_option_flow_default_suggested_values( } suggested = { mqtt.CONF_USERNAME: "us3r", - mqtt.CONF_PASSWORD: "p4ss", + mqtt.CONF_PASSWORD: PWD_NOT_CHANGED, } for key, value in defaults.items(): assert get_default(result["data_schema"].schema, key) == value @@ -1329,7 +1329,7 @@ async def test_try_connection_with_advanced_parameters( } suggested = { mqtt.CONF_USERNAME: "user", - mqtt.CONF_PASSWORD: "pass", + mqtt.CONF_PASSWORD: PWD_NOT_CHANGED, mqtt.CONF_TLS_INSECURE: True, mqtt.CONF_PROTOCOL: "3.1.1", mqtt.CONF_TRANSPORT: "websockets", From 0e0b543dec2c5be7ae3f2dcfbec4e5485c1c3875 Mon Sep 17 00:00:00 2001 From: Brett Adams Date: Wed, 24 Apr 2024 21:30:22 +1000 Subject: [PATCH 368/426] Deprecate speed limit lock in Tessie (#113848) --- homeassistant/components/tessie/lock.py | 65 +++++++++++++++++++- homeassistant/components/tessie/strings.json | 35 +++++++++++ tests/components/tessie/test_lock.py | 55 ++++++++++++++--- 3 files changed, 145 insertions(+), 10 deletions(-) diff --git a/homeassistant/components/tessie/lock.py b/homeassistant/components/tessie/lock.py index 09402055ee8..1e5653744fb 100644 --- a/homeassistant/components/tessie/lock.py +++ b/homeassistant/components/tessie/lock.py @@ -12,10 +12,14 @@ from tessie_api import ( unlock, ) +from homeassistant.components.automation import automations_with_entity from homeassistant.components.lock 
import ATTR_CODE, LockEntity +from homeassistant.components.script import scripts_with_entity from homeassistant.config_entries import ConfigEntry +from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import ServiceValidationError +from homeassistant.helpers import entity_registry as er, issue_registry as ir from homeassistant.helpers.entity_platform import AddEntitiesCallback from .const import DOMAIN, TessieChargeCableLockStates @@ -29,11 +33,46 @@ async def async_setup_entry( """Set up the Tessie sensor platform from a config entry.""" data = hass.data[DOMAIN][entry.entry_id] - async_add_entities( + entities = [ klass(vehicle.state_coordinator) - for klass in (TessieLockEntity, TessieCableLockEntity, TessieSpeedLimitEntity) + for klass in (TessieLockEntity, TessieCableLockEntity) for vehicle in data - ) + ] + + ent_reg = er.async_get(hass) + + for vehicle in data: + entity_id = ent_reg.async_get_entity_id( + Platform.LOCK, + DOMAIN, + f"{vehicle.state_coordinator.vin}-vehicle_state_speed_limit_mode_active", + ) + if entity_id: + entity_entry = ent_reg.async_get(entity_id) + assert entity_entry + if entity_entry.disabled: + ent_reg.async_remove(entity_id) + else: + entities.append(TessieSpeedLimitEntity(vehicle.state_coordinator)) + + entity_automations = automations_with_entity(hass, entity_id) + entity_scripts = scripts_with_entity(hass, entity_id) + for item in entity_automations + entity_scripts: + ir.async_create_issue( + hass, + DOMAIN, + f"deprecated_speed_limit_{entity_id}_{item}", + breaks_in_ha_version="2024.11.0", + is_fixable=True, + is_persistent=False, + severity=ir.IssueSeverity.WARNING, + translation_key="deprecated_speed_limit_entity", + translation_placeholders={ + "entity": entity_id, + "info": item, + }, + ) + async_add_entities(entities) class TessieLockEntity(TessieEntity, LockEntity): @@ -81,6 +120,16 @@ class TessieSpeedLimitEntity(TessieEntity, LockEntity): async def 
async_lock(self, **kwargs: Any) -> None: """Enable speed limit with pin.""" + ir.async_create_issue( + self.coordinator.hass, + DOMAIN, + "deprecated_speed_limit_locked", + breaks_in_ha_version="2024.11.0", + is_fixable=True, + is_persistent=False, + severity=ir.IssueSeverity.WARNING, + translation_key="deprecated_speed_limit_locked", + ) code: str | None = kwargs.get(ATTR_CODE) if code: await self.run(enable_speed_limit, pin=code) @@ -88,6 +137,16 @@ class TessieSpeedLimitEntity(TessieEntity, LockEntity): async def async_unlock(self, **kwargs: Any) -> None: """Disable speed limit with pin.""" + ir.async_create_issue( + self.coordinator.hass, + DOMAIN, + "deprecated_speed_limit_unlocked", + breaks_in_ha_version="2024.11.0", + is_fixable=True, + is_persistent=False, + severity=ir.IssueSeverity.WARNING, + translation_key="deprecated_speed_limit_unlocked", + ) code: str | None = kwargs.get(ATTR_CODE) if code: await self.run(disable_speed_limit, pin=code) diff --git a/homeassistant/components/tessie/strings.json b/homeassistant/components/tessie/strings.json index 8e1e47f934f..ea75660ddb7 100644 --- a/homeassistant/components/tessie/strings.json +++ b/homeassistant/components/tessie/strings.json @@ -410,5 +410,40 @@ "no_cable": { "message": "Insert cable to lock" } + }, + "issues": { + "deprecated_speed_limit_entity": { + "title": "Detected Tessie speed limit lock entity usage", + "fix_flow": { + "step": { + "confirm": { + "title": "[%key:component::tessie::issues::deprecated_speed_limit_entity::title%]", + "description": "The Tessie integration's speed limit lock entity has been deprecated and will be remove in 2024.11.0.\nHome Assistant detected that entity `{entity}` is being used in `{info}`\n\nYou should remove the speed limit lock entity from `{info}` then click submit to fix this issue." 
+ } + } + } + }, + "deprecated_speed_limit_locked": { + "title": "Detected Tessie speed limit lock entity locked", + "fix_flow": { + "step": { + "confirm": { + "title": "[%key:component::tessie::issues::deprecated_speed_limit_locked::title%]", + "description": "The Tessie integration's speed limit lock entity has been deprecated and will be remove in 2024.11.0.\n\nPlease remove this entity from any automation or script, disable the entity then click submit to fix this issue." + } + } + } + }, + "deprecated_speed_limit_unlocked": { + "title": "Detected Tessie speed limit lock entity unlocked", + "fix_flow": { + "step": { + "confirm": { + "title": "[%key:component::tessie::issues::deprecated_speed_limit_unlocked::title%]", + "description": "The Tessie integration's speed limit lock entity has been deprecated and will be remove in 2024.11.0.\n\nPlease remove this entity from any automation or script, disable the entity then click submit to fix this issue." + } + } + } + } } } diff --git a/tests/components/tessie/test_lock.py b/tests/components/tessie/test_lock.py index ca921583d97..0371b592f07 100644 --- a/tests/components/tessie/test_lock.py +++ b/tests/components/tessie/test_lock.py @@ -15,8 +15,9 @@ from homeassistant.const import ATTR_ENTITY_ID, STATE_LOCKED, STATE_UNLOCKED, Pl from homeassistant.core import HomeAssistant from homeassistant.exceptions import ServiceValidationError from homeassistant.helpers import entity_registry as er +from homeassistant.helpers.issue_registry import async_get as async_get_issue_registry -from .common import assert_entities, setup_platform +from .common import DOMAIN, assert_entities, setup_platform async def test_locks( @@ -24,6 +25,17 @@ async def test_locks( ) -> None: """Tests that the lock entity is correct.""" + # Create the deprecated speed limit lock entity + entity_registry.async_get_or_create( + LOCK_DOMAIN, + DOMAIN, + "VINVINVIN-vehicle_state_speed_limit_mode_active", + original_name="Charge cable lock", + 
has_entity_name=True, + translation_key="vehicle_state_speed_limit_mode_active", + disabled_by=er.RegistryEntryDisabler.INTEGRATION, + ) + entry = await setup_platform(hass, [Platform.LOCK]) assert_entities(hass, entry.entry_id, entity_registry, snapshot) @@ -72,19 +84,47 @@ async def test_locks( assert hass.states.get(entity_id).state == STATE_UNLOCKED mock_run.assert_called_once() + +async def test_speed_limit_lock( + hass: HomeAssistant, entity_registry: er.EntityRegistry +) -> None: + """Tests that the deprecated speed limit lock entity is correct.""" + + issue_registry = async_get_issue_registry(hass) + + # Create the deprecated speed limit lock entity + entity = entity_registry.async_get_or_create( + LOCK_DOMAIN, + DOMAIN, + "VINVINVIN-vehicle_state_speed_limit_mode_active", + original_name="Charge cable lock", + has_entity_name=True, + translation_key="vehicle_state_speed_limit_mode_active", + ) + + with patch( + "homeassistant.components.tessie.lock.automations_with_entity", + return_value=["item"], + ): + await setup_platform(hass, [Platform.LOCK]) + assert issue_registry.async_get_issue( + DOMAIN, f"deprecated_speed_limit_{entity.entity_id}_item" + ) + # Test lock set value functions - entity_id = "lock.test_speed_limit" with patch( "homeassistant.components.tessie.lock.enable_speed_limit" ) as mock_enable_speed_limit: await hass.services.async_call( LOCK_DOMAIN, SERVICE_LOCK, - {ATTR_ENTITY_ID: [entity_id], ATTR_CODE: "1234"}, + {ATTR_ENTITY_ID: [entity.entity_id], ATTR_CODE: "1234"}, blocking=True, ) - assert hass.states.get(entity_id).state == STATE_LOCKED + assert hass.states.get(entity.entity_id).state == STATE_LOCKED mock_enable_speed_limit.assert_called_once() + # Assert issue has been raised in the issue register + assert issue_registry.async_get_issue(DOMAIN, "deprecated_speed_limit_locked") with patch( "homeassistant.components.tessie.lock.disable_speed_limit" @@ -92,16 +132,17 @@ async def test_locks( await hass.services.async_call( 
LOCK_DOMAIN, SERVICE_UNLOCK, - {ATTR_ENTITY_ID: [entity_id], ATTR_CODE: "1234"}, + {ATTR_ENTITY_ID: [entity.entity_id], ATTR_CODE: "1234"}, blocking=True, ) - assert hass.states.get(entity_id).state == STATE_UNLOCKED + assert hass.states.get(entity.entity_id).state == STATE_UNLOCKED mock_disable_speed_limit.assert_called_once() + assert issue_registry.async_get_issue(DOMAIN, "deprecated_speed_limit_unlocked") with pytest.raises(ServiceValidationError): await hass.services.async_call( LOCK_DOMAIN, SERVICE_UNLOCK, - {ATTR_ENTITY_ID: [entity_id], ATTR_CODE: "abc"}, + {ATTR_ENTITY_ID: [entity.entity_id], ATTR_CODE: "abc"}, blocking=True, ) From 24a1f0712fdc93150f39104c8b4b286495286f2b Mon Sep 17 00:00:00 2001 From: Pete Sage <76050312+PeteRager@users.noreply.github.com> Date: Wed, 24 Apr 2024 08:03:40 -0400 Subject: [PATCH 369/426] Fix Sonos music library play problems (#113429) --- .../components/sonos/media_browser.py | 50 +++++- .../components/sonos/media_player.py | 31 +++- tests/components/sonos/conftest.py | 106 +++++++++++++ tests/components/sonos/test_media_player.py | 146 +++++++++++++++--- 4 files changed, 302 insertions(+), 31 deletions(-) diff --git a/homeassistant/components/sonos/media_browser.py b/homeassistant/components/sonos/media_browser.py index b6fc250ab23..eeadd7db232 100644 --- a/homeassistant/components/sonos/media_browser.py +++ b/homeassistant/components/sonos/media_browser.py @@ -199,9 +199,15 @@ def build_item_response( payload["search_type"] == MediaType.ALBUM and media[0].item_class == "object.item.audioItem.musicTrack" ): - item = get_media(media_library, payload["idstring"], SONOS_ALBUM_ARTIST) + idstring = payload["idstring"] + if idstring.startswith("A:ALBUMARTIST/"): + search_type = SONOS_ALBUM_ARTIST + elif idstring.startswith("A:ALBUM/"): + search_type = SONOS_ALBUM + item = get_media(media_library, idstring, search_type) + title = getattr(item, "title", None) - thumbnail = get_thumbnail_url(SONOS_ALBUM_ARTIST, payload["idstring"]) 
+ thumbnail = get_thumbnail_url(search_type, payload["idstring"]) if not title: try: @@ -493,8 +499,9 @@ def get_content_id(item: DidlObject) -> str: def get_media( media_library: MusicLibrary, item_id: str, search_type: str -) -> MusicServiceItem: - """Fetch media/album.""" +) -> MusicServiceItem | None: + """Fetch a single media/album.""" + _LOGGER.debug("get_media item_id [%s], search_type [%s]", item_id, search_type) search_type = MEDIA_TYPES_TO_SONOS.get(search_type, search_type) if search_type == "playlists": @@ -513,9 +520,38 @@ def get_media( if not item_id.startswith("A:ALBUM") and search_type == SONOS_ALBUM: item_id = "A:ALBUMARTIST/" + "/".join(item_id.split("/")[2:]) - search_term = urllib.parse.unquote(item_id.split("/")[-1]) - matches = media_library.get_music_library_information( - search_type, search_term=search_term, full_album_art_uri=True + if item_id.startswith("A:ALBUM/") or search_type == "tracks": + search_term = urllib.parse.unquote(item_id.split("/")[-1]) + matches = media_library.get_music_library_information( + search_type, search_term=search_term, full_album_art_uri=True + ) + else: + # When requesting media by album_artist, composer, genre use the browse interface + # to navigate the hierarchy. 
This occurs when invoked from media browser or service + # calls + # Example: A:ALBUMARTIST/Neil Young/Greatest Hits - get specific album + # Example: A:ALBUMARTIST/Neil Young - get all albums + # Others: composer, genre + # A:// + splits = item_id.split("/") + title = urllib.parse.unquote(splits[2]) if len(splits) > 2 else None + browse_id_string = splits[0] + "/" + splits[1] + matches = media_library.browse_by_idstring( + search_type, browse_id_string, full_album_art_uri=True + ) + if title: + result = next( + (item for item in matches if (title == item.title)), + None, + ) + matches = [result] + + _LOGGER.debug( + "get_media search_type [%s] item_id [%s] matches [%d]", + search_type, + item_id, + len(matches), ) if len(matches) > 0: return matches[0] + return None diff --git a/homeassistant/components/sonos/media_player.py b/homeassistant/components/sonos/media_player.py index 581bdaad37d..35c6be3fa6b 100644 --- a/homeassistant/components/sonos/media_player.py +++ b/homeassistant/components/sonos/media_player.py @@ -7,7 +7,7 @@ from functools import partial import logging from typing import Any -from soco import alarms +from soco import SoCo, alarms from soco.core import ( MUSIC_SRC_LINE_IN, MUSIC_SRC_RADIO, @@ -15,6 +15,7 @@ from soco.core import ( PLAY_MODES, ) from soco.data_structures import DidlFavorite +from soco.ms_data_structures import MusicServiceItem from sonos_websocket.exception import SonosWebsocketError import voluptuous as vol @@ -549,6 +550,7 @@ class SonosMediaPlayerEntity(SonosEntity, MediaPlayerEntity): self, media_type: MediaType | str, media_id: str, is_radio: bool, **kwargs: Any ) -> None: """Wrap sync calls to async_play_media.""" + _LOGGER.debug("_play_media media_type %s media_id %s", media_type, media_id) enqueue = kwargs.get(ATTR_MEDIA_ENQUEUE, MediaPlayerEnqueue.REPLACE) if media_type == "favorite_item_id": @@ -645,10 +647,35 @@ class SonosMediaPlayerEntity(SonosEntity, MediaPlayerEntity): _LOGGER.error('Could not find "%s" in the 
library', media_id) return - soco.play_uri(item.get_uri()) + self._play_media_queue(soco, item, enqueue) else: _LOGGER.error('Sonos does not support a media type of "%s"', media_type) + def _play_media_queue( + self, soco: SoCo, item: MusicServiceItem, enqueue: MediaPlayerEnqueue + ): + """Manage adding, replacing, playing items onto the sonos queue.""" + _LOGGER.debug( + "_play_media_queue item_id [%s] title [%s] enqueue [%s]", + item.item_id, + item.title, + enqueue, + ) + if enqueue == MediaPlayerEnqueue.REPLACE: + soco.clear_queue() + + if enqueue in (MediaPlayerEnqueue.ADD, MediaPlayerEnqueue.REPLACE): + soco.add_to_queue(item, timeout=LONG_SERVICE_TIMEOUT) + if enqueue == MediaPlayerEnqueue.REPLACE: + soco.play_from_queue(0) + else: + pos = (self.media.queue_position or 0) + 1 + new_pos = soco.add_to_queue( + item, position=pos, timeout=LONG_SERVICE_TIMEOUT + ) + if enqueue == MediaPlayerEnqueue.PLAY: + soco.play_from_queue(new_pos - 1) + @soco_error() def set_sleep_timer(self, sleep_time: int) -> None: """Set the timer on the player.""" diff --git a/tests/components/sonos/conftest.py b/tests/components/sonos/conftest.py index 218ca90a26b..0eb9b497fbd 100644 --- a/tests/components/sonos/conftest.py +++ b/tests/components/sonos/conftest.py @@ -203,6 +203,7 @@ class SoCoMockFactory: my_speaker_info["zone_name"] = name my_speaker_info["uid"] = mock_soco.uid mock_soco.get_speaker_info = Mock(return_value=my_speaker_info) + mock_soco.add_to_queue = Mock(return_value=10) mock_soco.avTransport = SonosMockService("AVTransport", ip_address) mock_soco.renderingControl = SonosMockService("RenderingControl", ip_address) @@ -303,11 +304,116 @@ def config_fixture(): return {DOMAIN: {MP_DOMAIN: {CONF_HOSTS: ["192.168.42.2"]}}} +class MockMusicServiceItem: + """Mocks a Soco MusicServiceItem.""" + + def __init__(self, title: str, item_id: str, parent_id: str, item_class: str): + """Initialize the mock item.""" + self.title = title + self.item_id = item_id + self.item_class = 
item_class + self.parent_id = parent_id + + +def mock_browse_by_idstring( + search_type: str, idstring: str, start=0, max_items=100, full_album_art_uri=False +) -> list[MockMusicServiceItem]: + """Mock the call to browse_by_id_string.""" + if search_type == "album_artists" and idstring == "A:ALBUMARTIST/Beatles": + return [ + MockMusicServiceItem( + "All", + idstring + "/", + idstring, + "object.container.playlistContainer.sameArtist", + ), + MockMusicServiceItem( + "A Hard Day's Night", + "A:ALBUMARTIST/Beatles/A%20Hard%20Day's%20Night", + idstring, + "object.container.album.musicAlbum", + ), + MockMusicServiceItem( + "Abbey Road", + "A:ALBUMARTIST/Beatles/Abbey%20Road", + idstring, + "object.container.album.musicAlbum", + ), + ] + # browse_by_id_string works with URL encoded or decoded strings + if search_type == "genres" and idstring in ( + "A:GENRE/Classic%20Rock", + "A:GENRE/Classic Rock", + ): + return [ + MockMusicServiceItem( + "All", + "A:GENRE/Classic%20Rock/", + "A:GENRE/Classic%20Rock", + "object.container.albumlist", + ), + MockMusicServiceItem( + "Bruce Springsteen", + "A:GENRE/Classic%20Rock/Bruce%20Springsteen", + "A:GENRE/Classic%20Rock", + "object.container.person.musicArtist", + ), + MockMusicServiceItem( + "Cream", + "A:GENRE/Classic%20Rock/Cream", + "A:GENRE/Classic%20Rock", + "object.container.person.musicArtist", + ), + ] + if search_type == "composers" and idstring in ( + "A:COMPOSER/Carlos%20Santana", + "A:COMPOSER/Carlos Santana", + ): + return [ + MockMusicServiceItem( + "All", + "A:COMPOSER/Carlos%20Santana/", + "A:COMPOSER/Carlos%20Santana", + "object.container.playlistContainer.sameArtist", + ), + MockMusicServiceItem( + "Between Good And Evil", + "A:COMPOSER/Carlos%20Santana/Between%20Good%20And%20Evil", + "A:COMPOSER/Carlos%20Santana", + "object.container.album.musicAlbum", + ), + MockMusicServiceItem( + "Sacred Fire", + "A:COMPOSER/Carlos%20Santana/Sacred%20Fire", + "A:COMPOSER/Carlos%20Santana", + 
"object.container.album.musicAlbum", + ), + ] + return [] + + +def mock_get_music_library_information( + search_type: str, search_term: str, full_album_art_uri: bool = True +) -> list[MockMusicServiceItem]: + """Mock the call to get music library information.""" + if search_type == "albums" and search_term == "Abbey Road": + return [ + MockMusicServiceItem( + "Abbey Road", + "A:ALBUM/Abbey%20Road", + "A:ALBUM", + "object.container.album.musicAlbum", + ) + ] + + @pytest.fixture(name="music_library") def music_library_fixture(): """Create music_library fixture.""" music_library = MagicMock() music_library.get_sonos_favorites.return_value.update_id = 1 + music_library.browse_by_idstring = mock_browse_by_idstring + music_library.get_music_library_information = mock_get_music_library_information return music_library diff --git a/tests/components/sonos/test_media_player.py b/tests/components/sonos/test_media_player.py index c181520b85d..976d3480429 100644 --- a/tests/components/sonos/test_media_player.py +++ b/tests/components/sonos/test_media_player.py @@ -7,7 +7,10 @@ import pytest from homeassistant.components.media_player import ( DOMAIN as MP_DOMAIN, SERVICE_PLAY_MEDIA, + MediaPlayerEnqueue, ) +from homeassistant.components.media_player.const import ATTR_MEDIA_ENQUEUE +from homeassistant.components.sonos.media_player import LONG_SERVICE_TIMEOUT from homeassistant.const import STATE_IDLE from homeassistant.core import HomeAssistant from homeassistant.helpers.device_registry import ( @@ -16,7 +19,7 @@ from homeassistant.helpers.device_registry import ( DeviceRegistry, ) -from .conftest import SoCoMockFactory +from .conftest import MockMusicServiceItem, SoCoMockFactory async def test_device_registry( @@ -65,35 +68,134 @@ async def test_entity_basic( assert attributes["volume_level"] == 0.19 -class _MockMusicServiceItem: - """Mocks a Soco MusicServiceItem.""" - - def __init__( - self, - title: str, - item_id: str, - parent_id: str, - item_class: str, - ) -> None: - 
"""Initialize the mock item.""" - self.title = title - self.item_id = item_id - self.item_class = item_class - self.parent_id = parent_id - - def get_uri(self) -> str: - """Return URI.""" - return self.item_id.replace("S://", "x-file-cifs://") +@pytest.mark.parametrize( + ("media_content_type", "media_content_id", "enqueue", "test_result"), + [ + ( + "artist", + "A:ALBUMARTIST/Beatles", + MediaPlayerEnqueue.REPLACE, + { + "title": "All", + "item_id": "A:ALBUMARTIST/Beatles/", + "clear_queue": 1, + "position": None, + "play": 1, + "play_pos": 0, + }, + ), + ( + "genre", + "A:GENRE/Classic%20Rock", + MediaPlayerEnqueue.ADD, + { + "title": "All", + "item_id": "A:GENRE/Classic%20Rock/", + "clear_queue": 0, + "position": None, + "play": 0, + "play_pos": 0, + }, + ), + ( + "album", + "A:ALBUM/Abbey%20Road", + MediaPlayerEnqueue.NEXT, + { + "title": "Abbey Road", + "item_id": "A:ALBUM/Abbey%20Road", + "clear_queue": 0, + "position": 1, + "play": 0, + "play_pos": 0, + }, + ), + ( + "composer", + "A:COMPOSER/Carlos%20Santana", + MediaPlayerEnqueue.PLAY, + { + "title": "All", + "item_id": "A:COMPOSER/Carlos%20Santana/", + "clear_queue": 0, + "position": 1, + "play": 1, + "play_pos": 9, + }, + ), + ( + "artist", + "A:ALBUMARTIST/Beatles/Abbey%20Road", + MediaPlayerEnqueue.REPLACE, + { + "title": "Abbey Road", + "item_id": "A:ALBUMARTIST/Beatles/Abbey%20Road", + "clear_queue": 1, + "position": None, + "play": 1, + "play_pos": 0, + }, + ), + ], +) +async def test_play_media_library( + hass: HomeAssistant, + soco_factory: SoCoMockFactory, + async_autosetup_sonos, + media_content_type, + media_content_id, + enqueue, + test_result, +) -> None: + """Test playing local library with a variety of options.""" + sock_mock = soco_factory.mock_list.get("192.168.42.2") + await hass.services.async_call( + MP_DOMAIN, + SERVICE_PLAY_MEDIA, + { + "entity_id": "media_player.zone_a", + "media_content_type": media_content_type, + "media_content_id": media_content_id, + ATTR_MEDIA_ENQUEUE: 
enqueue, + }, + blocking=True, + ) + assert sock_mock.clear_queue.call_count == test_result["clear_queue"] + assert sock_mock.add_to_queue.call_count == 1 + assert ( + sock_mock.add_to_queue.call_args_list[0].args[0].title == test_result["title"] + ) + assert ( + sock_mock.add_to_queue.call_args_list[0].args[0].item_id + == test_result["item_id"] + ) + if test_result["position"] is not None: + assert ( + sock_mock.add_to_queue.call_args_list[0].kwargs["position"] + == test_result["position"] + ) + else: + assert "position" not in sock_mock.add_to_queue.call_args_list[0].kwargs + assert ( + sock_mock.add_to_queue.call_args_list[0].kwargs["timeout"] + == LONG_SERVICE_TIMEOUT + ) + assert sock_mock.play_from_queue.call_count == test_result["play"] + if test_result["play"] != 0: + assert ( + sock_mock.play_from_queue.call_args_list[0].args[0] + == test_result["play_pos"] + ) _mock_playlists = [ - _MockMusicServiceItem( + MockMusicServiceItem( "playlist1", "S://192.168.1.68/music/iTunes/iTunes%20Music%20Library.xml#GUID_1", "A:PLAYLISTS", "object.container.playlistContainer", ), - _MockMusicServiceItem( + MockMusicServiceItem( "playlist2", "S://192.168.1.68/music/iTunes/iTunes%20Music%20Library.xml#GUID_2", "A:PLAYLISTS", From 1f4585cc9ea484553e8d02865c1d622817a7a129 Mon Sep 17 00:00:00 2001 From: Shai Ungar Date: Wed, 24 Apr 2024 15:29:13 +0300 Subject: [PATCH 370/426] Add service to 17track to get packages (#116067) * Add service to 17track * Add service to 17track change to select selector add snapshot test * Add service to 17track use strings for the selector * Add service to 17track fix test --- .../components/seventeentrack/__init__.py | 77 +++++++++++++++++-- .../components/seventeentrack/const.py | 5 ++ .../components/seventeentrack/coordinator.py | 10 +-- .../components/seventeentrack/icons.json | 3 + .../components/seventeentrack/services.yaml | 20 +++++ .../components/seventeentrack/strings.json | 29 +++++++ .../snapshots/test_services.ambr | 53 +++++++++++++ 
.../seventeentrack/test_services.py | 76 ++++++++++++++++++ 8 files changed, 262 insertions(+), 11 deletions(-) create mode 100644 homeassistant/components/seventeentrack/services.yaml create mode 100644 tests/components/seventeentrack/snapshots/test_services.ambr create mode 100644 tests/components/seventeentrack/test_services.py diff --git a/homeassistant/components/seventeentrack/__init__.py b/homeassistant/components/seventeentrack/__init__.py index 1f9879cdcbc..40c9c8d58d1 100644 --- a/homeassistant/components/seventeentrack/__init__.py +++ b/homeassistant/components/seventeentrack/__init__.py @@ -4,16 +4,81 @@ from py17track import Client as SeventeenTrackClient from py17track.errors import SeventeenTrackError from homeassistant.config_entries import ConfigEntry -from homeassistant.const import CONF_PASSWORD, CONF_USERNAME, Platform -from homeassistant.core import HomeAssistant +from homeassistant.const import ( + ATTR_FRIENDLY_NAME, + ATTR_LOCATION, + CONF_PASSWORD, + CONF_USERNAME, + Platform, +) +from homeassistant.core import ( + HomeAssistant, + ServiceCall, + ServiceResponse, + SupportsResponse, +) from homeassistant.exceptions import ConfigEntryNotReady +from homeassistant.helpers import config_validation as cv from homeassistant.helpers.aiohttp_client import async_get_clientsession +from homeassistant.helpers.typing import ConfigType +from homeassistant.util import slugify -from .const import DOMAIN +from .const import ( + ATTR_CONFIG_ENTRY_ID, + ATTR_INFO_TEXT, + ATTR_PACKAGE_STATE, + ATTR_STATUS, + ATTR_TIMESTAMP, + ATTR_TRACKING_NUMBER, + DOMAIN, + SERVICE_GET_PACKAGES, +) from .coordinator import SeventeenTrackCoordinator PLATFORMS: list[Platform] = [Platform.SENSOR] +CONFIG_SCHEMA = cv.empty_config_schema(DOMAIN) + + +async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: + """Set up the 17Track component.""" + + async def get_packages(call: ServiceCall) -> ServiceResponse: + """Get packages from 17Track.""" + config_entry_id = 
call.data[ATTR_CONFIG_ENTRY_ID] + package_states = call.data.get(ATTR_PACKAGE_STATE, []) + seventeen_coordinator: SeventeenTrackCoordinator = hass.data[DOMAIN][ + config_entry_id + ] + live_packages = sorted( + await seventeen_coordinator.client.profile.packages( + show_archived=seventeen_coordinator.show_archived + ) + ) + + return { + "packages": [ + { + ATTR_TRACKING_NUMBER: package.tracking_number, + ATTR_LOCATION: package.location, + ATTR_STATUS: package.status, + ATTR_TIMESTAMP: package.timestamp, + ATTR_INFO_TEXT: package.info_text, + ATTR_FRIENDLY_NAME: package.friendly_name, + } + for package in live_packages + if slugify(package.status) in package_states or package_states == [] + ] + } + + hass.services.async_register( + DOMAIN, + SERVICE_GET_PACKAGES, + get_packages, + supports_response=SupportsResponse.ONLY, + ) + return True + async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Set up 17Track from a config entry.""" @@ -26,10 +91,10 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: except SeventeenTrackError as err: raise ConfigEntryNotReady from err - coordinator = SeventeenTrackCoordinator(hass, client) + seventeen_coordinator = SeventeenTrackCoordinator(hass, client) - await coordinator.async_config_entry_first_refresh() + await seventeen_coordinator.async_config_entry_first_refresh() - hass.data.setdefault(DOMAIN, {})[entry.entry_id] = coordinator + hass.data.setdefault(DOMAIN, {})[entry.entry_id] = seventeen_coordinator await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) return True diff --git a/homeassistant/components/seventeentrack/const.py b/homeassistant/components/seventeentrack/const.py index fc7ca7b2e7f..39932d31935 100644 --- a/homeassistant/components/seventeentrack/const.py +++ b/homeassistant/components/seventeentrack/const.py @@ -40,3 +40,8 @@ NOTIFICATION_DELIVERED_MESSAGE = ( ) VALUE_DELIVERED = "Delivered" + +SERVICE_GET_PACKAGES = "get_packages" + 
+ATTR_PACKAGE_STATE = "package_state" +ATTR_CONFIG_ENTRY_ID = "config_entry_id" diff --git a/homeassistant/components/seventeentrack/coordinator.py b/homeassistant/components/seventeentrack/coordinator.py index 84bdf1e1359..4da4969ed92 100644 --- a/homeassistant/components/seventeentrack/coordinator.py +++ b/homeassistant/components/seventeentrack/coordinator.py @@ -45,19 +45,19 @@ class SeventeenTrackCoordinator(DataUpdateCoordinator[SeventeenTrackData]): self.show_delivered = self.config_entry.options[CONF_SHOW_DELIVERED] self.account_id = client.profile.account_id - self._show_archived = self.config_entry.options[CONF_SHOW_ARCHIVED] - self._client = client + self.show_archived = self.config_entry.options[CONF_SHOW_ARCHIVED] + self.client = client async def _async_update_data(self) -> SeventeenTrackData: """Fetch data from 17Track API.""" try: - summary = await self._client.profile.summary( - show_archived=self._show_archived + summary = await self.client.profile.summary( + show_archived=self.show_archived ) live_packages = set( - await self._client.profile.packages(show_archived=self._show_archived) + await self.client.profile.packages(show_archived=self.show_archived) ) except SeventeenTrackError as err: diff --git a/homeassistant/components/seventeentrack/icons.json b/homeassistant/components/seventeentrack/icons.json index 05323a69743..78ca65edc4d 100644 --- a/homeassistant/components/seventeentrack/icons.json +++ b/homeassistant/components/seventeentrack/icons.json @@ -26,5 +26,8 @@ "default": "mdi:package" } } + }, + "services": { + "get_packages": "mdi:package" } } diff --git a/homeassistant/components/seventeentrack/services.yaml b/homeassistant/components/seventeentrack/services.yaml new file mode 100644 index 00000000000..41cb66ada5f --- /dev/null +++ b/homeassistant/components/seventeentrack/services.yaml @@ -0,0 +1,20 @@ +get_packages: + fields: + package_state: + selector: + select: + multiple: true + options: + - "not_found" + - "in_transit" + - 
"expired" + - "ready_to_be_picked_up" + - "undelivered" + - "delivered" + - "returned" + translation_key: package_state + config_entry_id: + required: true + selector: + config_entry: + integration: seventeentrack diff --git a/homeassistant/components/seventeentrack/strings.json b/homeassistant/components/seventeentrack/strings.json index 8d91f926d50..626af29e856 100644 --- a/homeassistant/components/seventeentrack/strings.json +++ b/homeassistant/components/seventeentrack/strings.json @@ -66,5 +66,34 @@ "name": "Package {name}" } } + }, + "services": { + "get_packages": { + "name": "Get packages", + "description": "Get packages from 17Track", + "fields": { + "package_state": { + "name": "Package states", + "description": "Only return packages with the specified states. Returns all packages if not specified." + }, + "config_entry_id": { + "name": "17Track service", + "description": "The packages will be retrieved for the selected service." + } + } + } + }, + "selector": { + "package_state": { + "options": { + "not_found": "[%key:component::seventeentrack::entity::sensor::not_found::name%]", + "in_transit": "[%key:component::seventeentrack::entity::sensor::in_transit::name%]", + "expired": "[%key:component::seventeentrack::entity::sensor::expired::name%]", + "ready_to_be_picked_up": "[%key:component::seventeentrack::entity::sensor::ready_to_be_picked_up::name%]", + "undelivered": "[%key:component::seventeentrack::entity::sensor::undelivered::name%]", + "delivered": "[%key:component::seventeentrack::entity::sensor::delivered::name%]", + "returned": "[%key:component::seventeentrack::entity::sensor::returned::name%]" + } + } } } diff --git a/tests/components/seventeentrack/snapshots/test_services.ambr b/tests/components/seventeentrack/snapshots/test_services.ambr new file mode 100644 index 00000000000..185a1d44fe0 --- /dev/null +++ b/tests/components/seventeentrack/snapshots/test_services.ambr @@ -0,0 +1,53 @@ +# serializer version: 1 +# name: test_get_all_packages + 
dict({ + 'packages': list([ + dict({ + 'friendly_name': 'friendly name 3', + 'info_text': 'info text 1', + 'location': 'location 1', + 'status': 'Expired', + 'timestamp': datetime.datetime(2020, 8, 10, 10, 32, tzinfo=), + 'tracking_number': '123', + }), + dict({ + 'friendly_name': 'friendly name 1', + 'info_text': 'info text 1', + 'location': 'location 1', + 'status': 'In Transit', + 'timestamp': datetime.datetime(2020, 8, 10, 10, 32, tzinfo=), + 'tracking_number': '456', + }), + dict({ + 'friendly_name': 'friendly name 2', + 'info_text': 'info text 1', + 'location': 'location 1', + 'status': 'Delivered', + 'timestamp': datetime.datetime(2020, 8, 10, 10, 32, tzinfo=), + 'tracking_number': '789', + }), + ]), + }) +# --- +# name: test_get_packages_from_list + dict({ + 'packages': list([ + dict({ + 'friendly_name': 'friendly name 1', + 'info_text': 'info text 1', + 'location': 'location 1', + 'status': 'In Transit', + 'timestamp': datetime.datetime(2020, 8, 10, 10, 32, tzinfo=), + 'tracking_number': '456', + }), + dict({ + 'friendly_name': 'friendly name 2', + 'info_text': 'info text 1', + 'location': 'location 1', + 'status': 'Delivered', + 'timestamp': datetime.datetime(2020, 8, 10, 10, 32, tzinfo=), + 'tracking_number': '789', + }), + ]), + }) +# --- diff --git a/tests/components/seventeentrack/test_services.py b/tests/components/seventeentrack/test_services.py new file mode 100644 index 00000000000..cbd7132bf67 --- /dev/null +++ b/tests/components/seventeentrack/test_services.py @@ -0,0 +1,76 @@ +"""Tests for the seventeentrack service.""" + +from unittest.mock import AsyncMock + +from syrupy import SnapshotAssertion + +from homeassistant.components.seventeentrack import DOMAIN, SERVICE_GET_PACKAGES +from homeassistant.core import HomeAssistant, SupportsResponse + +from tests.common import MockConfigEntry +from tests.components.seventeentrack import init_integration +from tests.components.seventeentrack.conftest import get_package + + +async def 
test_get_packages_from_list( + hass: HomeAssistant, + mock_seventeentrack: AsyncMock, + mock_config_entry: MockConfigEntry, + snapshot: SnapshotAssertion, +) -> None: + """Ensure service returns only the packages in the list.""" + await _mock_packages(mock_seventeentrack) + await init_integration(hass, mock_config_entry) + service_response = await hass.services.async_call( + DOMAIN, + SERVICE_GET_PACKAGES, + { + "config_entry_id": mock_config_entry.entry_id, + "package_state": ["in_transit", "delivered"], + }, + blocking=True, + return_response=SupportsResponse.ONLY, + ) + + assert service_response == snapshot + + +async def test_get_all_packages( + hass: HomeAssistant, + mock_seventeentrack: AsyncMock, + mock_config_entry: MockConfigEntry, + snapshot: SnapshotAssertion, +) -> None: + """Ensure service returns all packages when non provided.""" + await _mock_packages(mock_seventeentrack) + await init_integration(hass, mock_config_entry) + service_response = await hass.services.async_call( + DOMAIN, + SERVICE_GET_PACKAGES, + { + "config_entry_id": mock_config_entry.entry_id, + }, + blocking=True, + return_response=SupportsResponse.ONLY, + ) + + assert service_response == snapshot + + +async def _mock_packages(mock_seventeentrack): + package1 = get_package(status=10) + package2 = get_package( + tracking_number="789", + friendly_name="friendly name 2", + status=40, + ) + package3 = get_package( + tracking_number="123", + friendly_name="friendly name 3", + status=20, + ) + mock_seventeentrack.return_value.profile.packages.return_value = [ + package1, + package2, + package3, + ] From 350ca48d4c10b2105e1e3513da7137498dd6ad83 Mon Sep 17 00:00:00 2001 From: Jan Bouwhuis Date: Wed, 24 Apr 2024 15:12:29 +0200 Subject: [PATCH 371/426] Return specific group state if there is one (#115866) * Return specific group state if there is one * Refactor * Additional test cases * Refactor * Break out if more than one on state * tweaks * Remove log, add comment * add comment * Apply 
suggestions from code review Co-authored-by: J. Nick Koston * Refactor and improve comments * Refactor to class method * More filtering * Apply suggestions from code review * Only active if not excluded * Do not use a set * Apply suggestions from code review Co-authored-by: Erik Montnemery --------- Co-authored-by: J. Nick Koston Co-authored-by: Erik Montnemery --- homeassistant/components/group/entity.py | 95 ++++++++++++++++++---- homeassistant/components/group/registry.py | 14 +++- tests/components/group/test_init.py | 24 +++++- 3 files changed, 109 insertions(+), 24 deletions(-) diff --git a/homeassistant/components/group/entity.py b/homeassistant/components/group/entity.py index a8fd9027984..5ac913dde8d 100644 --- a/homeassistant/components/group/entity.py +++ b/homeassistant/components/group/entity.py @@ -8,7 +8,7 @@ from collections.abc import Callable, Collection, Mapping import logging from typing import Any -from homeassistant.const import ATTR_ASSUMED_STATE, ATTR_ENTITY_ID, STATE_ON +from homeassistant.const import ATTR_ASSUMED_STATE, ATTR_ENTITY_ID, STATE_OFF, STATE_ON from homeassistant.core import ( CALLBACK_TYPE, Event, @@ -131,6 +131,9 @@ class Group(Entity): _unrecorded_attributes = frozenset({ATTR_ENTITY_ID, ATTR_ORDER, ATTR_AUTO}) _attr_should_poll = False + # In case there is only one active domain we use specific ON or OFF + # values, if all ON or OFF states are equal + single_active_domain: str | None tracking: tuple[str, ...] trackable: tuple[str, ...] 
@@ -287,6 +290,7 @@ class Group(Entity): if not entity_ids: self.tracking = () self.trackable = () + self.single_active_domain = None return registry: GroupIntegrationRegistry = self.hass.data[REG_KEY] @@ -294,12 +298,22 @@ class Group(Entity): tracking: list[str] = [] trackable: list[str] = [] + self.single_active_domain = None + multiple_domains: bool = False for ent_id in entity_ids: ent_id_lower = ent_id.lower() domain = split_entity_id(ent_id_lower)[0] tracking.append(ent_id_lower) - if domain not in excluded_domains: - trackable.append(ent_id_lower) + if domain in excluded_domains: + continue + + trackable.append(ent_id_lower) + + if not multiple_domains and self.single_active_domain is None: + self.single_active_domain = domain + if self.single_active_domain != domain: + multiple_domains = True + self.single_active_domain = None self.trackable = tuple(trackable) self.tracking = tuple(tracking) @@ -395,10 +409,36 @@ class Group(Entity): self._on_off[entity_id] = state in registry.on_off_mapping else: entity_on_state = registry.on_states_by_domain[domain] - if domain in registry.on_states_by_domain: - self._on_states.update(entity_on_state) + self._on_states.update(entity_on_state) self._on_off[entity_id] = state in entity_on_state + def _detect_specific_on_off_state(self, group_is_on: bool) -> set[str]: + """Check if a specific ON or OFF state is possible.""" + # In case the group contains entities of the same domain with the same ON + # or an OFF state (one or more domains), we want to use that specific state. + # If we have more then one ON or OFF state we default to STATE_ON or STATE_OFF. 
+ registry: GroupIntegrationRegistry = self.hass.data[REG_KEY] + active_on_states: set[str] = set() + active_off_states: set[str] = set() + for entity_id in self.trackable: + if (state := self.hass.states.get(entity_id)) is None: + continue + current_state = state.state + if ( + group_is_on + and (domain_on_states := registry.on_states_by_domain.get(state.domain)) + and current_state in domain_on_states + ): + active_on_states.add(current_state) + # If we have more than one on state, the group state + # will result in STATE_ON and we can stop checking + if len(active_on_states) > 1: + break + elif current_state in registry.off_on_mapping: + active_off_states.add(current_state) + + return active_on_states if group_is_on else active_off_states + @callback def _async_update_group_state(self, tr_state: State | None = None) -> None: """Update group state. @@ -425,27 +465,48 @@ class Group(Entity): elif tr_state.attributes.get(ATTR_ASSUMED_STATE): self._assumed_state = True - num_on_states = len(self._on_states) + # If we do not have an on state for any domains + # we use None (which will be STATE_UNKNOWN) + if (num_on_states := len(self._on_states)) == 0: + self._state = None + return + + group_is_on = self.mode(self._on_off.values()) + # If all the entity domains we are tracking # have the same on state we use this state # and its hass.data[REG_KEY].on_off_mapping to off if num_on_states == 1: - on_state = list(self._on_states)[0] - # If we do not have an on state for any domains - # we use None (which will be STATE_UNKNOWN) - elif num_on_states == 0: - self._state = None - return + on_state = next(iter(self._on_states)) # If the entity domains have more than one - # on state, we use STATE_ON/STATE_OFF - else: + # on state, we use STATE_ON/STATE_OFF, unless there is + # only one specific `on` state in use for one specific domain + elif self.single_active_domain and num_on_states: + active_on_states = self._detect_specific_on_off_state(True) + on_state = ( + 
list(active_on_states)[0] if len(active_on_states) == 1 else STATE_ON + ) + elif group_is_on: on_state = STATE_ON - group_is_on = self.mode(self._on_off.values()) if group_is_on: self._state = on_state + return + + registry: GroupIntegrationRegistry = self.hass.data[REG_KEY] + if ( + active_domain := self.single_active_domain + ) and active_domain in registry.off_state_by_domain: + # If there is only one domain used, + # then we return the off state for that domain.s + self._state = registry.off_state_by_domain[active_domain] else: - registry: GroupIntegrationRegistry = self.hass.data[REG_KEY] - self._state = registry.on_off_mapping[on_state] + active_off_states = self._detect_specific_on_off_state(False) + # If there is one off state in use then we return that specific state, + # also if there a multiple domains involved, e.g. + # person and device_tracker, with a shared state. + self._state = ( + list(active_off_states)[0] if len(active_off_states) == 1 else STATE_OFF + ) def async_get_component(hass: HomeAssistant) -> EntityComponent[Group]: diff --git a/homeassistant/components/group/registry.py b/homeassistant/components/group/registry.py index 6cdb929d60c..474448db68a 100644 --- a/homeassistant/components/group/registry.py +++ b/homeassistant/components/group/registry.py @@ -49,9 +49,12 @@ class GroupIntegrationRegistry: def __init__(self) -> None: """Imitialize registry.""" - self.on_off_mapping: dict[str, str] = {STATE_ON: STATE_OFF} + self.on_off_mapping: dict[str, dict[str | None, str]] = { + STATE_ON: {None: STATE_OFF} + } self.off_on_mapping: dict[str, str] = {STATE_OFF: STATE_ON} self.on_states_by_domain: dict[str, set[str]] = {} + self.off_state_by_domain: dict[str, str] = {} self.exclude_domains: set[str] = set() def exclude_domain(self) -> None: @@ -60,11 +63,14 @@ class GroupIntegrationRegistry: def on_off_states(self, on_states: set, off_state: str) -> None: """Register on and off states for the current domain.""" + domain = current_domain.get() 
for on_state in on_states: if on_state not in self.on_off_mapping: - self.on_off_mapping[on_state] = off_state - + self.on_off_mapping[on_state] = {domain: off_state} + else: + self.on_off_mapping[on_state][domain] = off_state if len(on_states) == 1 and off_state not in self.off_on_mapping: self.off_on_mapping[off_state] = list(on_states)[0] - self.on_states_by_domain[current_domain.get()] = set(on_states) + self.on_states_by_domain[domain] = set(on_states) + self.off_state_by_domain[domain] = off_state diff --git a/tests/components/group/test_init.py b/tests/components/group/test_init.py index d3f2747933e..b9cdfcb1590 100644 --- a/tests/components/group/test_init.py +++ b/tests/components/group/test_init.py @@ -9,7 +9,7 @@ from unittest.mock import patch import pytest -from homeassistant.components import group +from homeassistant.components import group, vacuum from homeassistant.const import ( ATTR_ASSUMED_STATE, ATTR_FRIENDLY_NAME, @@ -659,6 +659,24 @@ async def test_is_on(hass: HomeAssistant) -> None: (STATE_ON, True), (STATE_OFF, False), ), + ( + ("vacuum", "vacuum"), + # Cleaning is the only on state + (vacuum.STATE_DOCKED, vacuum.STATE_CLEANING), + # Returning is the only on state + (vacuum.STATE_RETURNING, vacuum.STATE_PAUSED), + (vacuum.STATE_CLEANING, True), + (vacuum.STATE_RETURNING, True), + ), + ( + ("vacuum", "vacuum"), + # Multiple on states, so group state will be STATE_ON + (vacuum.STATE_RETURNING, vacuum.STATE_CLEANING), + # Only off states, so group state will be off + (vacuum.STATE_PAUSED, vacuum.STATE_IDLE), + (STATE_ON, True), + (STATE_OFF, False), + ), ], ) async def test_is_on_and_state_mixed_domains( @@ -1220,7 +1238,7 @@ async def test_group_climate_all_cool(hass: HomeAssistant) -> None: ) await hass.async_block_till_done() - assert hass.states.get("group.group_zero").state == STATE_ON + assert hass.states.get("group.group_zero").state == "cool" async def test_group_climate_all_off(hass: HomeAssistant) -> None: @@ -1334,7 +1352,7 @@ async 
def test_group_vacuum_on(hass: HomeAssistant) -> None: ) await hass.async_block_till_done() - assert hass.states.get("group.group_zero").state == STATE_ON + assert hass.states.get("group.group_zero").state == "cleaning" async def test_device_tracker_not_home(hass: HomeAssistant) -> None: From 70b358bca1adda27fa2e234f477e375d11b6cc66 Mon Sep 17 00:00:00 2001 From: Jan Bouwhuis Date: Wed, 24 Apr 2024 15:13:33 +0200 Subject: [PATCH 372/426] Always reload after a successful reauth flow (#116026) * Always reload after a succesfull reauth-flow * Add test, fix CI failures * Add kwarg to prevent reloading and tests * Do not reload entry for bond if it exists * Remove mocks on internals * Rename kwarg to always_reload * Update tests/components/weatherflow_cloud/test_config_flow.py * Update tests/components/homeworks/test_config_flow.py * Update tests/components/homeworks/test_config_flow.py * Rename to option to reload_even_if_entry_is_unchanged --- homeassistant/components/bond/config_flow.py | 5 +- .../components/homeworks/config_flow.py | 5 +- .../weatherflow_cloud/config_flow.py | 1 + homeassistant/config_entries.py | 3 +- tests/test_config_entries.py | 96 ++++++++++++++++--- 5 files changed, 92 insertions(+), 18 deletions(-) diff --git a/homeassistant/components/bond/config_flow.py b/homeassistant/components/bond/config_flow.py index 45170a0404f..a12d3057258 100644 --- a/homeassistant/components/bond/config_flow.py +++ b/homeassistant/components/bond/config_flow.py @@ -113,7 +113,10 @@ class BondConfigFlow(ConfigFlow, domain=DOMAIN): ): updates[CONF_ACCESS_TOKEN] = token return self.async_update_reload_and_abort( - entry, data={**entry.data, **updates}, reason="already_configured" + entry, + data={**entry.data, **updates}, + reason="already_configured", + reload_even_if_entry_is_unchanged=False, ) self._discovered = {CONF_HOST: host, CONF_NAME: bond_id} diff --git a/homeassistant/components/homeworks/config_flow.py b/homeassistant/components/homeworks/config_flow.py 
index b9515c306d6..f447860c53f 100644 --- a/homeassistant/components/homeworks/config_flow.py +++ b/homeassistant/components/homeworks/config_flow.py @@ -690,7 +690,10 @@ class HomeworksConfigFlowHandler(ConfigFlow, domain=DOMAIN): CONF_PORT: user_input[CONF_PORT], } return self.async_update_reload_and_abort( - entry, options=new_options, reason="reconfigure_successful" + entry, + options=new_options, + reason="reconfigure_successful", + reload_even_if_entry_is_unchanged=False, ) return self.async_show_form( diff --git a/homeassistant/components/weatherflow_cloud/config_flow.py b/homeassistant/components/weatherflow_cloud/config_flow.py index 4c905a8451e..e8972c320ed 100644 --- a/homeassistant/components/weatherflow_cloud/config_flow.py +++ b/homeassistant/components/weatherflow_cloud/config_flow.py @@ -50,6 +50,7 @@ class WeatherFlowCloudConfigFlow(ConfigFlow, domain=DOMAIN): existing_entry, data={CONF_API_TOKEN: api_token}, reason="reauth_successful", + reload_even_if_entry_is_unchanged=False, ) return self.async_show_form( diff --git a/homeassistant/config_entries.py b/homeassistant/config_entries.py index 0637e5f7c87..056814bbc4d 100644 --- a/homeassistant/config_entries.py +++ b/homeassistant/config_entries.py @@ -2399,6 +2399,7 @@ class ConfigFlow(ConfigEntryBaseFlow): data: Mapping[str, Any] | UndefinedType = UNDEFINED, options: Mapping[str, Any] | UndefinedType = UNDEFINED, reason: str = "reauth_successful", + reload_even_if_entry_is_unchanged: bool = True, ) -> ConfigFlowResult: """Update config entry, reload config entry and finish config flow.""" result = self.hass.config_entries.async_update_entry( @@ -2408,7 +2409,7 @@ class ConfigFlow(ConfigEntryBaseFlow): data=data, options=options, ) - if result: + if reload_even_if_entry_is_unchanged or result: self.hass.config_entries.async_schedule_reload(entry.entry_id) return self.async_abort(reason=reason) diff --git a/tests/test_config_entries.py b/tests/test_config_entries.py index 63dea5ea735..68f770631ed 
100644 --- a/tests/test_config_entries.py +++ b/tests/test_config_entries.py @@ -4504,24 +4504,86 @@ def test_raise_trying_to_add_same_config_entry_twice( assert f"An entry with the id {entry.entry_id} already exists" in caplog.text +@pytest.mark.parametrize( + ( + "title", + "unique_id", + "data_vendor", + "options_vendor", + "kwargs", + "calls_entry_load_unload", + ), + [ + ( + ("Test", "Updated title"), + ("1234", "5678"), + ("data", "data2"), + ("options", "options2"), + {}, + (2, 1), + ), + ( + ("Test", "Test"), + ("1234", "1234"), + ("data", "data"), + ("options", "options"), + {}, + (2, 1), + ), + ( + ("Test", "Updated title"), + ("1234", "5678"), + ("data", "data2"), + ("options", "options2"), + {"reload_even_if_entry_is_unchanged": True}, + (2, 1), + ), + ( + ("Test", "Test"), + ("1234", "1234"), + ("data", "data"), + ("options", "options"), + {"reload_even_if_entry_is_unchanged": False}, + (1, 0), + ), + ], + ids=[ + "changed_entry_default", + "unchanged_entry_default", + "changed_entry_explicit_reload", + "changed_entry_no_reload", + ], +) async def test_update_entry_and_reload( - hass: HomeAssistant, manager: config_entries.ConfigEntries + hass: HomeAssistant, + manager: config_entries.ConfigEntries, + title: tuple[str, str], + unique_id: tuple[str, str], + data_vendor: tuple[str, str], + options_vendor: tuple[str, str], + kwargs: dict[str, Any], + calls_entry_load_unload: tuple[int, int], ) -> None: """Test updating an entry and reloading.""" entry = MockConfigEntry( domain="comp", - unique_id="1234", - title="Test", - data={"vendor": "data"}, - options={"vendor": "options"}, + unique_id=unique_id[0], + title=title[0], + data={"vendor": data_vendor[0]}, + options={"vendor": options_vendor[0]}, ) entry.add_to_hass(hass) - mock_integration( - hass, MockModule("comp", async_setup_entry=AsyncMock(return_value=True)) + comp = MockModule( + "comp", + async_setup_entry=AsyncMock(return_value=True), + async_unload_entry=AsyncMock(return_value=True), ) + 
mock_integration(hass, comp) mock_platform(hass, "comp.config_flow", None) + await hass.config_entries.async_setup(entry.entry_id) + class MockFlowHandler(config_entries.ConfigFlow): """Define a mock flow handler.""" @@ -4531,23 +4593,27 @@ async def test_update_entry_and_reload( """Mock Reauth.""" return self.async_update_reload_and_abort( entry=entry, - unique_id="5678", - title="Updated Title", - data={"vendor": "data2"}, - options={"vendor": "options2"}, + unique_id=unique_id[1], + title=title[1], + data={"vendor": data_vendor[1]}, + options={"vendor": options_vendor[1]}, + **kwargs, ) with patch.dict(config_entries.HANDLERS, {"comp": MockFlowHandler}): task = await manager.flow.async_init("comp", context={"source": "reauth"}) await hass.async_block_till_done() - assert entry.title == "Updated Title" - assert entry.unique_id == "5678" - assert entry.data == {"vendor": "data2"} - assert entry.options == {"vendor": "options2"} + assert entry.title == title[1] + assert entry.unique_id == unique_id[1] + assert entry.data == {"vendor": data_vendor[1]} + assert entry.options == {"vendor": options_vendor[1]} assert entry.state == config_entries.ConfigEntryState.LOADED assert task["type"] == FlowResultType.ABORT assert task["reason"] == "reauth_successful" + # Assert entry was reloaded + assert len(comp.async_setup_entry.mock_calls) == calls_entry_load_unload[0] + assert len(comp.async_unload_entry.mock_calls) == calls_entry_load_unload[1] @pytest.mark.parametrize("unique_id", [["blah", "bleh"], {"key": "value"}]) From ea96ac37b7d63791e834c7882d58fd9dddb9a2b3 Mon Sep 17 00:00:00 2001 From: Bram Kragten Date: Wed, 24 Apr 2024 15:29:51 +0200 Subject: [PATCH 373/426] Update frontend to 20240424.1 (#116103) --- homeassistant/components/frontend/manifest.json | 2 +- homeassistant/package_constraints.txt | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git 
a/homeassistant/components/frontend/manifest.json b/homeassistant/components/frontend/manifest.json index d711314cabb..ad63bdbed84 100644 --- a/homeassistant/components/frontend/manifest.json +++ b/homeassistant/components/frontend/manifest.json @@ -20,5 +20,5 @@ "documentation": "https://www.home-assistant.io/integrations/frontend", "integration_type": "system", "quality_scale": "internal", - "requirements": ["home-assistant-frontend==20240404.2"] + "requirements": ["home-assistant-frontend==20240424.1"] } diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index 50c17024b01..74c4d185847 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -32,7 +32,7 @@ habluetooth==2.8.0 hass-nabucasa==0.78.0 hassil==1.6.1 home-assistant-bluetooth==1.12.0 -home-assistant-frontend==20240404.2 +home-assistant-frontend==20240424.1 home-assistant-intents==2024.4.3 httpx==0.27.0 ifaddr==0.2.0 diff --git a/requirements_all.txt b/requirements_all.txt index 5a7ce85328a..76b37b60e62 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1078,7 +1078,7 @@ hole==0.8.0 holidays==0.47 # homeassistant.components.frontend -home-assistant-frontend==20240404.2 +home-assistant-frontend==20240424.1 # homeassistant.components.conversation home-assistant-intents==2024.4.3 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 4831f441286..82465f7a5c6 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -880,7 +880,7 @@ hole==0.8.0 holidays==0.47 # homeassistant.components.frontend -home-assistant-frontend==20240404.2 +home-assistant-frontend==20240424.1 # homeassistant.components.conversation home-assistant-intents==2024.4.3 From 74cea2ecaedc5ddf5b61f2188e16936d1186c2d2 Mon Sep 17 00:00:00 2001 From: mletenay Date: Wed, 24 Apr 2024 15:31:29 +0200 Subject: [PATCH 374/426] Update goodwe library to 0.3.2 (#115309) --- homeassistant/components/goodwe/manifest.json | 2 +- 
requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/goodwe/manifest.json b/homeassistant/components/goodwe/manifest.json index 03575f9f4e2..6f1bdd2b449 100644 --- a/homeassistant/components/goodwe/manifest.json +++ b/homeassistant/components/goodwe/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/goodwe", "iot_class": "local_polling", "loggers": ["goodwe"], - "requirements": ["goodwe==0.2.32"] + "requirements": ["goodwe==0.3.2"] } diff --git a/requirements_all.txt b/requirements_all.txt index 76b37b60e62..256c5c3500e 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -952,7 +952,7 @@ glances-api==0.6.0 goalzero==0.2.2 # homeassistant.components.goodwe -goodwe==0.2.32 +goodwe==0.3.2 # homeassistant.components.google_mail # homeassistant.components.google_tasks diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 82465f7a5c6..63a3563ebaf 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -781,7 +781,7 @@ glances-api==0.6.0 goalzero==0.2.2 # homeassistant.components.goodwe -goodwe==0.2.32 +goodwe==0.3.2 # homeassistant.components.google_mail # homeassistant.components.google_tasks From 220dc1f125a1f3c91f66e6172341a316f0488856 Mon Sep 17 00:00:00 2001 From: Manuel Dipolt Date: Wed, 24 Apr 2024 15:59:09 +0200 Subject: [PATCH 375/426] Add binary sensor platform to romy integration (#112998) * wip * poc working, reworked to a binary sensor list * Update homeassistant/components/romy/binary_sensor.py Co-authored-by: Joost Lekkerkerker * Update homeassistant/components/romy/binary_sensor.py Co-authored-by: Joost Lekkerkerker * Update homeassistant/components/romy/binary_sensor.py Co-authored-by: Joost Lekkerkerker * Update homeassistant/components/romy/binary_sensor.py Co-authored-by: Joost Lekkerkerker * Update homeassistant/components/romy/binary_sensor.py Co-authored-by: Joost 
Lekkerkerker * code review changes, adjust translation key names * code review clean up: removed unecessary RomyBinarySensorEntityDescription * code review changes: translation names * code review changes, put DeviceInfo into RomyEntity * code review change: change docked icon to type plug * code review change: move CoordinatorEntity to the base class * code review changes: sensors disabled per default * code review: icons.json added * code review changes: sensors enabled per default again * checkout main entity.py * type hinting changes * Update homeassistant/components/romy/binary_sensor.py --------- Co-authored-by: Joost Lekkerkerker --- .coveragerc | 1 + .../components/romy/binary_sensor.py | 73 +++++++++++++++++++ homeassistant/components/romy/const.py | 2 +- homeassistant/components/romy/icons.json | 20 +++++ homeassistant/components/romy/strings.json | 14 ++++ 5 files changed, 109 insertions(+), 1 deletion(-) create mode 100644 homeassistant/components/romy/binary_sensor.py create mode 100644 homeassistant/components/romy/icons.json diff --git a/.coveragerc b/.coveragerc index 6f382bcb780..ca2cce2719f 100644 --- a/.coveragerc +++ b/.coveragerc @@ -1158,6 +1158,7 @@ omit = homeassistant/components/roborock/coordinator.py homeassistant/components/rocketchat/notify.py homeassistant/components/romy/__init__.py + homeassistant/components/romy/binary_sensor.py homeassistant/components/romy/coordinator.py homeassistant/components/romy/entity.py homeassistant/components/romy/vacuum.py diff --git a/homeassistant/components/romy/binary_sensor.py b/homeassistant/components/romy/binary_sensor.py new file mode 100644 index 00000000000..263c5840e5f --- /dev/null +++ b/homeassistant/components/romy/binary_sensor.py @@ -0,0 +1,73 @@ +"""Checking binary status values from your ROMY.""" + +from homeassistant.components.binary_sensor import ( + BinarySensorDeviceClass, + BinarySensorEntity, + BinarySensorEntityDescription, +) +from homeassistant.config_entries import 
ConfigEntry +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from .const import DOMAIN +from .coordinator import RomyVacuumCoordinator +from .entity import RomyEntity + +BINARY_SENSORS: list[BinarySensorEntityDescription] = [ + BinarySensorEntityDescription( + key="dustbin", + translation_key="dustbin_present", + ), + BinarySensorEntityDescription( + key="dock", + translation_key="docked", + device_class=BinarySensorDeviceClass.PLUG, + ), + BinarySensorEntityDescription( + key="water_tank", + translation_key="water_tank_present", + device_class=BinarySensorDeviceClass.MOISTURE, + ), + BinarySensorEntityDescription( + key="water_tank_empty", + translation_key="water_tank_empty", + device_class=BinarySensorDeviceClass.PROBLEM, + ), +] + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up ROMY vacuum cleaner.""" + + coordinator: RomyVacuumCoordinator = hass.data[DOMAIN][config_entry.entry_id] + + async_add_entities( + RomyBinarySensor(coordinator, entity_description) + for entity_description in BINARY_SENSORS + if entity_description.key in coordinator.romy.binary_sensors + ) + + +class RomyBinarySensor(RomyEntity, BinarySensorEntity): + """RomyBinarySensor Class.""" + + entity_description: BinarySensorEntityDescription + + def __init__( + self, + coordinator: RomyVacuumCoordinator, + entity_description: BinarySensorEntityDescription, + ) -> None: + """Initialize ROMYs StatusSensor.""" + super().__init__(coordinator) + self._attr_unique_id = f"{entity_description.key}_{self.romy.unique_id}" + self.entity_description = entity_description + + @property + def is_on(self) -> bool: + """Return the value of the sensor.""" + return bool(self.romy.binary_sensors[self.entity_description.key]) diff --git a/homeassistant/components/romy/const.py b/homeassistant/components/romy/const.py index 
5d42380902b..0fa039e8d1b 100644 --- a/homeassistant/components/romy/const.py +++ b/homeassistant/components/romy/const.py @@ -6,6 +6,6 @@ import logging from homeassistant.const import Platform DOMAIN = "romy" -PLATFORMS = [Platform.VACUUM] +PLATFORMS = [Platform.BINARY_SENSOR, Platform.VACUUM] UPDATE_INTERVAL = timedelta(seconds=5) LOGGER = logging.getLogger(__package__) diff --git a/homeassistant/components/romy/icons.json b/homeassistant/components/romy/icons.json new file mode 100644 index 00000000000..c27b36af64c --- /dev/null +++ b/homeassistant/components/romy/icons.json @@ -0,0 +1,20 @@ +{ + "entity": { + "binary_sensor": { + "water_tank_empty": { + "default": "mdi:cup-outline", + "state": { + "off": "mdi:cup-water", + "on": "mdi:cup-outline" + } + }, + "dustbin_present": { + "default": "mdi:basket-check", + "state": { + "off": "mdi:basket-remove", + "on": "mdi:basket-check" + } + } + } + } +} diff --git a/homeassistant/components/romy/strings.json b/homeassistant/components/romy/strings.json index 26dc60a2e84..f4bc4d191ff 100644 --- a/homeassistant/components/romy/strings.json +++ b/homeassistant/components/romy/strings.json @@ -46,6 +46,20 @@ } } } + }, + "binary_sensor": { + "dustbin_present": { + "name": "Dustbin present" + }, + "docked": { + "name": "Robot docked" + }, + "water_tank_present": { + "name": "Watertank present" + }, + "water_tank_empty": { + "name": "Watertank empty" + } } } } From d0f5e40b197c41e66a0d9b457bb5714d11c02ced Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Wed, 24 Apr 2024 16:14:44 +0200 Subject: [PATCH 376/426] Refactor ESPHome manager to avoid sending signals in tests (#116033) --- .../components/esphome/entry_data.py | 30 +++++++++---- homeassistant/components/esphome/update.py | 8 +--- tests/components/esphome/conftest.py | 11 +++-- tests/components/esphome/test_update.py | 45 ++++++++----------- 4 files changed, 48 insertions(+), 46 deletions(-) diff --git a/homeassistant/components/esphome/entry_data.py b/homeassistant/components/esphome/entry_data.py index 7316c09cc5e..41b18c9b88c 100644 --- a/homeassistant/components/esphome/entry_data.py +++ b/homeassistant/components/esphome/entry_data.py @@ -49,9 +49,7 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.const import Platform from homeassistant.core import CALLBACK_TYPE, HomeAssistant, callback from homeassistant.helpers import entity_registry as er -from homeassistant.helpers.dispatcher import async_dispatcher_send from homeassistant.helpers.storage import Store -from homeassistant.util.signal_type import SignalType from .const import DOMAIN from .dashboard import async_get_dashboard @@ -126,6 +124,9 @@ class RuntimeEntryData: default_factory=dict ) device_update_subscriptions: set[CALLBACK_TYPE] = field(default_factory=set) + static_info_update_subscriptions: set[Callable[[list[EntityInfo]], None]] = field( + default_factory=set + ) loaded_platforms: set[Platform] = field(default_factory=set) platform_load_lock: asyncio.Lock = field(default_factory=asyncio.Lock) _storage_contents: StoreData | None = None @@ -154,11 +155,6 @@ class RuntimeEntryData: "_", " " ) - @property - def signal_static_info_updated(self) -> SignalType[list[EntityInfo]]: - """Return the signal to listen to for updates on static info.""" - return SignalType(f"esphome_{self.entry_id}_on_list") - @callback def async_register_static_info_callback( self, @@ -303,8 +299,9 @@ class RuntimeEntryData: for callback_ in callbacks_: 
callback_(entity_infos) - # Then send dispatcher event - async_dispatcher_send(hass, self.signal_static_info_updated, infos) + # Finally update static info subscriptions + for callback_ in self.static_info_update_subscriptions: + callback_(infos) @callback def async_subscribe_device_updated(self, callback_: CALLBACK_TYPE) -> CALLBACK_TYPE: @@ -317,6 +314,21 @@ class RuntimeEntryData: """Unsubscribe to device updates.""" self.device_update_subscriptions.remove(callback_) + @callback + def async_subscribe_static_info_updated( + self, callback_: Callable[[list[EntityInfo]], None] + ) -> CALLBACK_TYPE: + """Subscribe to static info updates.""" + self.static_info_update_subscriptions.add(callback_) + return partial(self._async_unsubscribe_static_info_updated, callback_) + + @callback + def _async_unsubscribe_static_info_updated( + self, callback_: Callable[[list[EntityInfo]], None] + ) -> None: + """Unsubscribe to static info updates.""" + self.static_info_update_subscriptions.remove(callback_) + @callback def async_subscribe_state_update( self, diff --git a/homeassistant/components/esphome/update.py b/homeassistant/components/esphome/update.py index 3e5a82bbd0b..b16a6e798b7 100644 --- a/homeassistant/components/esphome/update.py +++ b/homeassistant/components/esphome/update.py @@ -17,7 +17,6 @@ from homeassistant.core import CALLBACK_TYPE, HomeAssistant, callback from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import device_registry as dr from homeassistant.helpers.device_registry import DeviceInfo -from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.update_coordinator import CoordinatorEntity @@ -149,14 +148,9 @@ class ESPHomeUpdateEntity(CoordinatorEntity[ESPHomeDashboard], UpdateEntity): async def async_added_to_hass(self) -> None: """Handle entity added to Home Assistant.""" await super().async_added_to_hass() - hass 
= self.hass entry_data = self._entry_data self.async_on_remove( - async_dispatcher_connect( - hass, - entry_data.signal_static_info_updated, - self._handle_device_update, - ) + entry_data.async_subscribe_static_info_updated(self._handle_device_update) ) self.async_on_remove( entry_data.async_subscribe_device_updated(self._handle_device_update) diff --git a/tests/components/esphome/conftest.py b/tests/components/esphome/conftest.py index e23f020991d..f71b4196be6 100644 --- a/tests/components/esphome/conftest.py +++ b/tests/components/esphome/conftest.py @@ -181,7 +181,9 @@ async def mock_dashboard(hass): class MockESPHomeDevice: """Mock an esphome device.""" - def __init__(self, entry: MockConfigEntry, client: APIClient) -> None: + def __init__( + self, entry: MockConfigEntry, client: APIClient, device_info: DeviceInfo + ) -> None: """Init the mock.""" self.entry = entry self.client = client @@ -193,6 +195,7 @@ class MockESPHomeDevice: self.home_assistant_state_subscription_callback: Callable[ [str, str | None], None ] + self.device_info = device_info def set_state_callback(self, state_callback: Callable[[EntityState], None]) -> None: """Set the state callback.""" @@ -274,8 +277,6 @@ async def _mock_generic_device_entry( ) entry.add_to_hass(hass) - mock_device = MockESPHomeDevice(entry, mock_client) - default_device_info = { "name": "test", "friendly_name": "Test", @@ -284,6 +285,8 @@ async def _mock_generic_device_entry( } device_info = DeviceInfo(**(default_device_info | mock_device_info)) + mock_device = MockESPHomeDevice(entry, mock_client, device_info) + def _subscribe_states(callback: Callable[[EntityState], None]) -> None: """Subscribe to state.""" mock_device.set_state_callback(callback) @@ -302,7 +305,7 @@ async def _mock_generic_device_entry( """Subscribe to home assistant states.""" mock_device.set_home_assistant_state_subscription_callback(on_state_sub) - mock_client.device_info = AsyncMock(return_value=device_info) + mock_client.device_info = 
AsyncMock(return_value=mock_device.device_info) mock_client.subscribe_voice_assistant = Mock() mock_client.list_entities_services = AsyncMock( return_value=mock_list_entities_services diff --git a/tests/components/esphome/test_update.py b/tests/components/esphome/test_update.py index 959ad12876d..b3deb2f33ee 100644 --- a/tests/components/esphome/test_update.py +++ b/tests/components/esphome/test_update.py @@ -1,7 +1,6 @@ """Test ESPHome update entities.""" from collections.abc import Awaitable, Callable -import dataclasses from unittest.mock import Mock, patch from aioesphomeapi import APIClient, EntityInfo, EntityState, UserService @@ -18,7 +17,6 @@ from homeassistant.const import ( ) from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError -from homeassistant.helpers.dispatcher import async_dispatcher_send from .conftest import MockESPHomeDevice @@ -176,9 +174,11 @@ async def test_update_entity( async def test_update_static_info( hass: HomeAssistant, - stub_reconnect, - mock_config_entry, - mock_device_info, + mock_client: APIClient, + mock_esphome_device: Callable[ + [APIClient, list[EntityInfo], list[UserService], list[EntityState]], + Awaitable[MockESPHomeDevice], + ], mock_dashboard, ) -> None: """Test ESPHome update entity.""" @@ -190,32 +190,25 @@ async def test_update_static_info( ] await async_get_dashboard(hass).async_refresh() - signal_static_info_updated = f"esphome_{mock_config_entry.entry_id}_on_list" - runtime_data = Mock( - available=True, - device_info=mock_device_info, - signal_static_info_updated=signal_static_info_updated, + mock_device: MockESPHomeDevice = await mock_esphome_device( + mock_client=mock_client, + entity_info=[], + user_service=[], + states=[], ) - with patch( - "homeassistant.components.esphome.update.DomainData.get_entry_data", - return_value=runtime_data, - ): - assert await hass.config_entries.async_forward_entry_setup( - mock_config_entry, "update" - ) - - state = 
hass.states.get("update.none_firmware") + state = hass.states.get("update.test_firmware") assert state is not None - assert state.state == "on" + assert state.state == STATE_ON - runtime_data.device_info = dataclasses.replace( - runtime_data.device_info, esphome_version="1.2.3" - ) - async_dispatcher_send(hass, signal_static_info_updated, []) + object.__setattr__(mock_device.device_info, "esphome_version", "1.2.3") + await mock_device.mock_disconnect(True) + await mock_device.mock_connect() - state = hass.states.get("update.none_firmware") - assert state.state == "off" + await hass.async_block_till_done(wait_background_tasks=True) + + state = hass.states.get("update.test_firmware") + assert state.state == STATE_OFF @pytest.mark.parametrize( From c9ff618ef0f747ff9923c25e5dea1c5594dafd02 Mon Sep 17 00:00:00 2001 From: nyangogo <7449028+miawgogo@users.noreply.github.com> Date: Wed, 24 Apr 2024 15:19:44 +0100 Subject: [PATCH 377/426] Add nfandroidtv type checking and allow for strings to be passed to the image and icon data (#108652) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * nfandroidtv - add type checking and allow for strings to be passed to the image and icon data * nfandroidtv - wrong argument name * nfandroidtv - put the icon in the wrong varible 🙃 * nfandroidtv - raise ServiceValidationError instead of logging --------- Co-authored-by: nyangogo <7449028+ioangogo@users.noreply.github.com> --- .../components/nfandroidtv/notify.py | 58 ++++++++++++++----- .../components/nfandroidtv/strings.json | 8 +++ 2 files changed, 52 insertions(+), 14 deletions(-) diff --git a/homeassistant/components/nfandroidtv/notify.py b/homeassistant/components/nfandroidtv/notify.py index dd42a0ab10b..dd6b15400d9 100644 --- a/homeassistant/components/nfandroidtv/notify.py +++ b/homeassistant/components/nfandroidtv/notify.py @@ -19,6 +19,7 @@ from homeassistant.components.notify import ( ) from homeassistant.const import CONF_HOST from 
homeassistant.core import HomeAssistant +from homeassistant.exceptions import ServiceValidationError import homeassistant.helpers.config_validation as cv from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType @@ -44,6 +45,7 @@ from .const import ( ATTR_POSITION, ATTR_TRANSPARENCY, DEFAULT_TIMEOUT, + DOMAIN, ) _LOGGER = logging.getLogger(__name__) @@ -133,21 +135,49 @@ class NFAndroidTVNotificationService(BaseNotificationService): "Invalid interrupt-value: %s", data.get(ATTR_INTERRUPT) ) if imagedata := data.get(ATTR_IMAGE): - image_file = self.load_file( - url=imagedata.get(ATTR_IMAGE_URL), - local_path=imagedata.get(ATTR_IMAGE_PATH), - username=imagedata.get(ATTR_IMAGE_USERNAME), - password=imagedata.get(ATTR_IMAGE_PASSWORD), - auth=imagedata.get(ATTR_IMAGE_AUTH), - ) + if isinstance(imagedata, str): + image_file = ( + self.load_file(url=imagedata) + if imagedata.startswith("http") + else self.load_file(local_path=imagedata) + ) + elif isinstance(imagedata, dict): + image_file = self.load_file( + url=imagedata.get(ATTR_IMAGE_URL), + local_path=imagedata.get(ATTR_IMAGE_PATH), + username=imagedata.get(ATTR_IMAGE_USERNAME), + password=imagedata.get(ATTR_IMAGE_PASSWORD), + auth=imagedata.get(ATTR_IMAGE_AUTH), + ) + else: + raise ServiceValidationError( + "Invalid image provided", + translation_domain=DOMAIN, + translation_key="invalid_notification_image", + translation_placeholders={"type": type(imagedata).__name__}, + ) if icondata := data.get(ATTR_ICON): - icon = self.load_file( - url=icondata.get(ATTR_ICON_URL), - local_path=icondata.get(ATTR_ICON_PATH), - username=icondata.get(ATTR_ICON_USERNAME), - password=icondata.get(ATTR_ICON_PASSWORD), - auth=icondata.get(ATTR_ICON_AUTH), - ) + if isinstance(icondata, str): + icondata = ( + self.load_file(url=icondata) + if icondata.startswith("http") + else self.load_file(local_path=icondata) + ) + elif isinstance(icondata, dict): + icon = self.load_file( + url=icondata.get(ATTR_ICON_URL), + 
local_path=icondata.get(ATTR_ICON_PATH), + username=icondata.get(ATTR_ICON_USERNAME), + password=icondata.get(ATTR_ICON_PASSWORD), + auth=icondata.get(ATTR_ICON_AUTH), + ) + else: + raise ServiceValidationError( + "Invalid Icon provided", + translation_domain=DOMAIN, + translation_key="invalid_notification_icon", + translation_placeholders={"type": type(icondata).__name__}, + ) self.notify.send( message, title=title, diff --git a/homeassistant/components/nfandroidtv/strings.json b/homeassistant/components/nfandroidtv/strings.json index cde02327712..e73fc68d66a 100644 --- a/homeassistant/components/nfandroidtv/strings.json +++ b/homeassistant/components/nfandroidtv/strings.json @@ -1,4 +1,12 @@ { + "exceptions": { + "invalid_notification_icon": { + "message": "Invalid icon data provided. Got {type}" + }, + "invalid_notification_image": { + "message": "Invalid image data provided. Got {type}" + } + }, "config": { "step": { "user": { From bbaa0c16cc0547b485396d793229a6daf6d97b80 Mon Sep 17 00:00:00 2001 From: Arie Catsman <120491684+catsmanac@users.noreply.github.com> Date: Wed, 24 Apr 2024 16:33:14 +0200 Subject: [PATCH 378/426] Cancel timer on enphase_envoy config entry unload (#111406) * lingeringtimer * Add async_cleanup to enphase_envoy_coordinator and call from unload_entry --- homeassistant/components/enphase_envoy/__init__.py | 2 ++ homeassistant/components/enphase_envoy/coordinator.py | 6 ++++++ tests/components/enphase_envoy/conftest.py | 2 +- 3 files changed, 9 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/enphase_envoy/__init__.py b/homeassistant/components/enphase_envoy/__init__.py index 2407f807eb7..2cdba43453e 100644 --- a/homeassistant/components/enphase_envoy/__init__.py +++ b/homeassistant/components/enphase_envoy/__init__.py @@ -46,6 +46,8 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Unload a config entry.""" + 
coordinator = hass.data[DOMAIN][entry.entry_id] + await coordinator.async_cleanup() unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS) if unload_ok: hass.data[DOMAIN].pop(entry.entry_id) diff --git a/homeassistant/components/enphase_envoy/coordinator.py b/homeassistant/components/enphase_envoy/coordinator.py index a508d5127d6..c0852fca807 100644 --- a/homeassistant/components/enphase_envoy/coordinator.py +++ b/homeassistant/components/enphase_envoy/coordinator.py @@ -159,3 +159,9 @@ class EnphaseUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]): return envoy_data.raw raise RuntimeError("Unreachable code in _async_update_data") # pragma: no cover + + async def async_cleanup(self) -> None: + """Cleanup coordinator.""" + if self._cancel_token_refresh: + self._cancel_token_refresh() + self._cancel_token_refresh = None diff --git a/tests/components/enphase_envoy/conftest.py b/tests/components/enphase_envoy/conftest.py index 4d50f026c55..965af3b40fc 100644 --- a/tests/components/enphase_envoy/conftest.py +++ b/tests/components/enphase_envoy/conftest.py @@ -343,7 +343,7 @@ def mock_envoy_fixture( @pytest.fixture(name="setup_enphase_envoy") -async def setup_enphase_envoy_fixture(hass, config, mock_envoy): +async def setup_enphase_envoy_fixture(hass: HomeAssistant, config, mock_envoy): """Define a fixture to set up Enphase Envoy.""" with ( patch( From 169b9b0bfe65aaec56d3d3c111e5ddcc2cf04218 Mon Sep 17 00:00:00 2001 From: Phil Bruckner Date: Wed, 24 Apr 2024 09:47:03 -0500 Subject: [PATCH 379/426] Fix removing suggested_display_precision from entity registry (#110671) * Fix removing suggested_display_precision from entity registry * Fix tests * Update homeassistant/components/sensor/__init__.py --------- Co-authored-by: Erik --- homeassistant/components/sensor/__init__.py | 5 --- tests/components/sensor/test_init.py | 35 +++++++++++++++++++++ 2 files changed, 35 insertions(+), 5 deletions(-) diff --git 
a/homeassistant/components/sensor/__init__.py b/homeassistant/components/sensor/__init__.py index ad6b3454ea9..a955e861c20 100644 --- a/homeassistant/components/sensor/__init__.py +++ b/homeassistant/components/sensor/__init__.py @@ -786,11 +786,6 @@ class SensorEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_): ratio_log = floor(ratio_log) if ratio_log > 0 else ceil(ratio_log) display_precision = max(0, display_precision + ratio_log) - if display_precision is None and ( - DOMAIN not in self.registry_entry.options - or "suggested_display_precision" not in self.registry_entry.options - ): - return sensor_options: Mapping[str, Any] = self.registry_entry.options.get(DOMAIN, {}) if ( "suggested_display_precision" in sensor_options diff --git a/tests/components/sensor/test_init.py b/tests/components/sensor/test_init.py index 74fd81188cd..079984476b0 100644 --- a/tests/components/sensor/test_init.py +++ b/tests/components/sensor/test_init.py @@ -1618,6 +1618,41 @@ async def test_suggested_precision_option_update( } +async def test_suggested_precision_option_removal( + hass: HomeAssistant, +) -> None: + """Test suggested precision stored in the registry is removed.""" + + entity_registry = er.async_get(hass) + + # Pre-register entities + entry = entity_registry.async_get_or_create("sensor", "test", "very_unique") + entity_registry.async_update_entity_options( + entry.entity_id, + "sensor", + { + "suggested_display_precision": 1, + }, + ) + + entity0 = MockSensor( + name="Test", + device_class=SensorDeviceClass.DURATION, + native_unit_of_measurement=UnitOfTime.HOURS, + native_value="1.5", + suggested_display_precision=None, + unique_id="very_unique", + ) + setup_test_component_platform(hass, sensor.DOMAIN, [entity0]) + + assert await async_setup_component(hass, "sensor", {"sensor": {"platform": "test"}}) + await hass.async_block_till_done() + + # Assert the suggested precision is no longer stored in the registry + entry = 
entity_registry.async_get(entity0.entity_id) + assert entry.options.get("sensor", {}).get("suggested_display_precision") is None + + @pytest.mark.parametrize( ( "unit_system", From e47e62cbbf45e717f82e2240be48288114a0f6fd Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Wed, 24 Apr 2024 16:58:46 +0200 Subject: [PATCH 380/426] Reduce duplicate code in enphase_envoy (#116107) Also converts a coro to a callback function since nothing was being awaited --- homeassistant/components/enphase_envoy/__init__.py | 4 ++-- homeassistant/components/enphase_envoy/coordinator.py | 9 ++++----- 2 files changed, 6 insertions(+), 7 deletions(-) diff --git a/homeassistant/components/enphase_envoy/__init__.py b/homeassistant/components/enphase_envoy/__init__.py index 2cdba43453e..322f909437a 100644 --- a/homeassistant/components/enphase_envoy/__init__.py +++ b/homeassistant/components/enphase_envoy/__init__.py @@ -46,8 +46,8 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Unload a config entry.""" - coordinator = hass.data[DOMAIN][entry.entry_id] - await coordinator.async_cleanup() + coordinator: EnphaseUpdateCoordinator = hass.data[DOMAIN][entry.entry_id] + coordinator.async_cancel_token_refresh() unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS) if unload_ok: hass.data[DOMAIN].pop(entry.entry_id) diff --git a/homeassistant/components/enphase_envoy/coordinator.py b/homeassistant/components/enphase_envoy/coordinator.py index c0852fca807..04f93098ad9 100644 --- a/homeassistant/components/enphase_envoy/coordinator.py +++ b/homeassistant/components/enphase_envoy/coordinator.py @@ -83,9 +83,7 @@ class EnphaseUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]): def _async_mark_setup_complete(self) -> None: """Mark setup as complete and setup token refresh if needed.""" self._setup_complete = True - if self._cancel_token_refresh: - 
self._cancel_token_refresh() - self._cancel_token_refresh = None + self.async_cancel_token_refresh() if not isinstance(self.envoy.auth, EnvoyTokenAuth): return self._cancel_token_refresh = async_track_time_interval( @@ -160,8 +158,9 @@ class EnphaseUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]): raise RuntimeError("Unreachable code in _async_update_data") # pragma: no cover - async def async_cleanup(self) -> None: - """Cleanup coordinator.""" + @callback + def async_cancel_token_refresh(self) -> None: + """Cancel token refresh.""" if self._cancel_token_refresh: self._cancel_token_refresh() self._cancel_token_refresh = None From 380f192c93d87287782d68e1f7b7e8a6565eea14 Mon Sep 17 00:00:00 2001 From: puddly <32534428+puddly@users.noreply.github.com> Date: Wed, 24 Apr 2024 11:06:24 -0400 Subject: [PATCH 381/426] Expose the SkyConnect integration with a firmware config/options flow (#115363) Co-authored-by: Stefan Agner Co-authored-by: Martin Hjelmare Co-authored-by: Erik --- .../homeassistant_sky_connect/__init__.py | 117 +- .../homeassistant_sky_connect/config_flow.py | 630 +++++++++- .../homeassistant_sky_connect/const.py | 11 + .../homeassistant_sky_connect/hardware.py | 2 +- .../homeassistant_sky_connect/manifest.json | 2 +- .../homeassistant_sky_connect/strings.json | 128 +- .../homeassistant_sky_connect/util.py | 140 ++- .../zha/repairs/wrong_silabs_firmware.py | 9 +- homeassistant/generated/integrations.json | 5 + script/hassfest/dependencies.py | 1 + .../test_config_flow.py | 1096 ++++++++++++----- .../test_config_flow_failures.py | 920 ++++++++++++++ .../test_hardware.py | 22 +- .../homeassistant_sky_connect/test_init.py | 393 +----- .../homeassistant_sky_connect/test_util.py | 203 +++ tests/components/usb/__init__.py | 16 + tests/components/zha/test_repairs.py | 10 +- 17 files changed, 2943 insertions(+), 762 deletions(-) create mode 100644 tests/components/homeassistant_sky_connect/test_config_flow_failures.py create mode 100644 
tests/components/homeassistant_sky_connect/test_util.py diff --git a/homeassistant/components/homeassistant_sky_connect/__init__.py b/homeassistant/components/homeassistant_sky_connect/__init__.py index a85a1161792..fc02f31f263 100644 --- a/homeassistant/components/homeassistant_sky_connect/__init__.py +++ b/homeassistant/components/homeassistant_sky_connect/__init__.py @@ -2,87 +2,62 @@ from __future__ import annotations -from homeassistant.components import usb -from homeassistant.components.homeassistant_hardware.silabs_multiprotocol_addon import ( - check_multi_pan_addon, - get_zigbee_socket, - multi_pan_addon_using_device, -) -from homeassistant.config_entries import SOURCE_HARDWARE, ConfigEntry -from homeassistant.core import HomeAssistant, callback -from homeassistant.exceptions import ConfigEntryNotReady, HomeAssistantError -from homeassistant.helpers import discovery_flow +import logging -from .const import DOMAIN -from .util import get_hardware_variant, get_usb_service_info +from homeassistant.config_entries import ConfigEntry +from homeassistant.core import HomeAssistant +from .util import guess_firmware_type -async def _async_usb_scan_done(hass: HomeAssistant, entry: ConfigEntry) -> None: - """Finish Home Assistant SkyConnect config entry setup.""" - matcher = usb.USBCallbackMatcher( - domain=DOMAIN, - vid=entry.data["vid"].upper(), - pid=entry.data["pid"].upper(), - serial_number=entry.data["serial_number"].lower(), - manufacturer=entry.data["manufacturer"].lower(), - description=entry.data["description"].lower(), - ) - - if not usb.async_is_plugged_in(hass, matcher): - # The USB dongle is not plugged in, remove the config entry - hass.async_create_task( - hass.config_entries.async_remove(entry.entry_id), eager_start=True - ) - return - - usb_dev = entry.data["device"] - # The call to get_serial_by_id can be removed in HA Core 2024.1 - dev_path = await hass.async_add_executor_job(usb.get_serial_by_id, usb_dev) - - if not await 
multi_pan_addon_using_device(hass, dev_path): - usb_info = get_usb_service_info(entry) - await hass.config_entries.flow.async_init( - "zha", - context={"source": "usb"}, - data=usb_info, - ) - return - - hw_variant = get_hardware_variant(entry) - hw_discovery_data = { - "name": f"{hw_variant.short_name} Multiprotocol", - "port": { - "path": get_zigbee_socket(), - }, - "radio_type": "ezsp", - } - discovery_flow.async_create_flow( - hass, - "zha", - context={"source": SOURCE_HARDWARE}, - data=hw_discovery_data, - ) +_LOGGER = logging.getLogger(__name__) async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Set up a Home Assistant SkyConnect config entry.""" - - try: - await check_multi_pan_addon(hass) - except HomeAssistantError as err: - raise ConfigEntryNotReady from err - - @callback - def async_usb_scan_done() -> None: - """Handle usb discovery started.""" - hass.async_create_task(_async_usb_scan_done(hass, entry), eager_start=True) - - unsub_usb = usb.async_register_initial_scan_callback(hass, async_usb_scan_done) - entry.async_on_unload(unsub_usb) - return True async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Unload a config entry.""" return True + + +async def async_migrate_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool: + """Migrate old entry.""" + + _LOGGER.debug( + "Migrating from version %s:%s", config_entry.version, config_entry.minor_version + ) + + if config_entry.version == 1: + if config_entry.minor_version == 1: + # Add-on startup with type service get started before Core, always (e.g. the + # Multi-Protocol add-on). Probing the firmware would interfere with the add-on, + # so we can't safely probe here. Instead, we must make an educated guess! 
+ firmware_guess = await guess_firmware_type( + hass, config_entry.data["device"] + ) + + new_data = {**config_entry.data} + new_data["firmware"] = firmware_guess.firmware_type.value + + # Copy `description` to `product` + new_data["product"] = new_data["description"] + + hass.config_entries.async_update_entry( + config_entry, + data=new_data, + version=1, + minor_version=2, + ) + + _LOGGER.debug( + "Migration to version %s.%s successful", + config_entry.version, + config_entry.minor_version, + ) + + return True + + # This means the user has downgraded from a future version + return False diff --git a/homeassistant/components/homeassistant_sky_connect/config_flow.py b/homeassistant/components/homeassistant_sky_connect/config_flow.py index 3a3d32c2888..6ffb2783165 100644 --- a/homeassistant/components/homeassistant_sky_connect/config_flow.py +++ b/homeassistant/components/homeassistant_sky_connect/config_flow.py @@ -2,29 +2,498 @@ from __future__ import annotations +from abc import ABC, abstractmethod +import asyncio +import logging from typing import Any +from universal_silabs_flasher.const import ApplicationType + from homeassistant.components import usb +from homeassistant.components.hassio import ( + AddonError, + AddonInfo, + AddonManager, + AddonState, + is_hassio, +) from homeassistant.components.homeassistant_hardware import silabs_multiprotocol_addon -from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult +from homeassistant.components.zha.repairs.wrong_silabs_firmware import ( + probe_silabs_firmware_type, +) +from homeassistant.config_entries import ( + ConfigEntry, + ConfigEntryBaseFlow, + ConfigFlow, + ConfigFlowResult, + OptionsFlow, + OptionsFlowWithConfigEntry, +) from homeassistant.core import callback +from homeassistant.data_entry_flow import AbortFlow -from .const import DOMAIN, HardwareVariant -from .util import get_hardware_variant, get_usb_service_info +from .const import DOCS_WEB_FLASHER_URL, DOMAIN, ZHA_DOMAIN, 
HardwareVariant +from .util import ( + get_hardware_variant, + get_otbr_addon_manager, + get_usb_service_info, + get_zha_device_path, + get_zigbee_flasher_addon_manager, +) + +_LOGGER = logging.getLogger(__name__) + +STEP_PICK_FIRMWARE_THREAD = "pick_firmware_thread" +STEP_PICK_FIRMWARE_ZIGBEE = "pick_firmware_zigbee" -class HomeAssistantSkyConnectConfigFlow(ConfigFlow, domain=DOMAIN): +class BaseFirmwareInstallFlow(ConfigEntryBaseFlow, ABC): + """Base flow to install firmware.""" + + _failed_addon_name: str + _failed_addon_reason: str + + def __init__(self, *args: Any, **kwargs: Any) -> None: + """Instantiate base flow.""" + super().__init__(*args, **kwargs) + + self._usb_info: usb.UsbServiceInfo | None = None + self._hw_variant: HardwareVariant | None = None + self._probed_firmware_type: ApplicationType | None = None + + self.addon_install_task: asyncio.Task | None = None + self.addon_start_task: asyncio.Task | None = None + self.addon_uninstall_task: asyncio.Task | None = None + + def _get_translation_placeholders(self) -> dict[str, str]: + """Shared translation placeholders.""" + placeholders = { + "model": ( + self._hw_variant.full_name + if self._hw_variant is not None + else "unknown" + ), + "firmware_type": ( + self._probed_firmware_type.value + if self._probed_firmware_type is not None + else "unknown" + ), + "docs_web_flasher_url": DOCS_WEB_FLASHER_URL, + } + + self.context["title_placeholders"] = placeholders + + return placeholders + + async def _async_set_addon_config( + self, config: dict, addon_manager: AddonManager + ) -> None: + """Set add-on config.""" + try: + await addon_manager.async_set_addon_options(config) + except AddonError as err: + _LOGGER.error(err) + raise AbortFlow( + "addon_set_config_failed", + description_placeholders=self._get_translation_placeholders(), + ) from err + + async def _async_get_addon_info(self, addon_manager: AddonManager) -> AddonInfo: + """Return add-on info.""" + try: + addon_info = await 
addon_manager.async_get_addon_info() + except AddonError as err: + _LOGGER.error(err) + raise AbortFlow( + "addon_info_failed", + description_placeholders={ + **self._get_translation_placeholders(), + "addon_name": addon_manager.addon_name, + }, + ) from err + + return addon_info + + async def async_step_pick_firmware( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Pick Thread or Zigbee firmware.""" + assert self._usb_info is not None + + self._probed_firmware_type = await probe_silabs_firmware_type( + self._usb_info.device, + probe_methods=( + # We probe in order of frequency: Zigbee, Thread, then multi-PAN + ApplicationType.GECKO_BOOTLOADER, + ApplicationType.EZSP, + ApplicationType.SPINEL, + ApplicationType.CPC, + ), + ) + + if self._probed_firmware_type not in ( + ApplicationType.EZSP, + ApplicationType.SPINEL, + ApplicationType.CPC, + ): + return self.async_abort( + reason="unsupported_firmware", + description_placeholders=self._get_translation_placeholders(), + ) + + return self.async_show_menu( + step_id="pick_firmware", + menu_options=[ + STEP_PICK_FIRMWARE_THREAD, + STEP_PICK_FIRMWARE_ZIGBEE, + ], + description_placeholders=self._get_translation_placeholders(), + ) + + async def async_step_pick_firmware_zigbee( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Pick Zigbee firmware.""" + # Allow the stick to be used with ZHA without flashing + if self._probed_firmware_type == ApplicationType.EZSP: + return await self.async_step_confirm_zigbee() + + if not is_hassio(self.hass): + return self.async_abort( + reason="not_hassio", + description_placeholders=self._get_translation_placeholders(), + ) + + # Only flash new firmware if we need to + fw_flasher_manager = get_zigbee_flasher_addon_manager(self.hass) + addon_info = await self._async_get_addon_info(fw_flasher_manager) + + if addon_info.state == AddonState.NOT_INSTALLED: + return await self.async_step_install_zigbee_flasher_addon() + + if 
addon_info.state == AddonState.NOT_RUNNING: + return await self.async_step_run_zigbee_flasher_addon() + + # If the addon is already installed and running, fail + return self.async_abort( + reason="addon_already_running", + description_placeholders={ + **self._get_translation_placeholders(), + "addon_name": fw_flasher_manager.addon_name, + }, + ) + + async def async_step_install_zigbee_flasher_addon( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Show progress dialog for installing the Zigbee flasher addon.""" + return await self._install_addon( + get_zigbee_flasher_addon_manager(self.hass), + "install_zigbee_flasher_addon", + "run_zigbee_flasher_addon", + ) + + async def _install_addon( + self, + addon_manager: silabs_multiprotocol_addon.WaitingAddonManager, + step_id: str, + next_step_id: str, + ) -> ConfigFlowResult: + """Show progress dialog for installing an addon.""" + addon_info = await self._async_get_addon_info(addon_manager) + + _LOGGER.debug("Flasher addon state: %s", addon_info) + + if not self.addon_install_task: + self.addon_install_task = self.hass.async_create_task( + addon_manager.async_install_addon_waiting(), + "Addon install", + ) + + if not self.addon_install_task.done(): + return self.async_show_progress( + step_id=step_id, + progress_action="install_addon", + description_placeholders={ + **self._get_translation_placeholders(), + "addon_name": addon_manager.addon_name, + }, + progress_task=self.addon_install_task, + ) + + try: + await self.addon_install_task + except AddonError as err: + _LOGGER.error(err) + self._failed_addon_name = addon_manager.addon_name + self._failed_addon_reason = "addon_install_failed" + return self.async_show_progress_done(next_step_id="addon_operation_failed") + finally: + self.addon_install_task = None + + return self.async_show_progress_done(next_step_id=next_step_id) + + async def async_step_addon_operation_failed( + self, user_input: dict[str, Any] | None = None + ) -> 
ConfigFlowResult: + """Abort when add-on installation or start failed.""" + return self.async_abort( + reason=self._failed_addon_reason, + description_placeholders={ + **self._get_translation_placeholders(), + "addon_name": self._failed_addon_name, + }, + ) + + async def async_step_run_zigbee_flasher_addon( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Configure the flasher addon to point to the SkyConnect and run it.""" + fw_flasher_manager = get_zigbee_flasher_addon_manager(self.hass) + addon_info = await self._async_get_addon_info(fw_flasher_manager) + + assert self._usb_info is not None + new_addon_config = { + **addon_info.options, + "device": self._usb_info.device, + "baudrate": 115200, + "bootloader_baudrate": 115200, + "flow_control": True, + } + + _LOGGER.debug("Reconfiguring flasher addon with %s", new_addon_config) + await self._async_set_addon_config(new_addon_config, fw_flasher_manager) + + if not self.addon_start_task: + + async def start_and_wait_until_done() -> None: + await fw_flasher_manager.async_start_addon_waiting() + # Now that the addon is running, wait for it to finish + await fw_flasher_manager.async_wait_until_addon_state( + AddonState.NOT_RUNNING + ) + + self.addon_start_task = self.hass.async_create_task( + start_and_wait_until_done() + ) + + if not self.addon_start_task.done(): + return self.async_show_progress( + step_id="run_zigbee_flasher_addon", + progress_action="run_zigbee_flasher_addon", + description_placeholders={ + **self._get_translation_placeholders(), + "addon_name": fw_flasher_manager.addon_name, + }, + progress_task=self.addon_start_task, + ) + + try: + await self.addon_start_task + except (AddonError, AbortFlow) as err: + _LOGGER.error(err) + self._failed_addon_name = fw_flasher_manager.addon_name + self._failed_addon_reason = "addon_start_failed" + return self.async_show_progress_done(next_step_id="addon_operation_failed") + finally: + self.addon_start_task = None + + return 
self.async_show_progress_done( + next_step_id="uninstall_zigbee_flasher_addon" + ) + + async def async_step_uninstall_zigbee_flasher_addon( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Uninstall the flasher addon.""" + fw_flasher_manager = get_zigbee_flasher_addon_manager(self.hass) + + if not self.addon_uninstall_task: + _LOGGER.debug("Uninstalling flasher addon") + self.addon_uninstall_task = self.hass.async_create_task( + fw_flasher_manager.async_uninstall_addon_waiting() + ) + + if not self.addon_uninstall_task.done(): + return self.async_show_progress( + step_id="uninstall_zigbee_flasher_addon", + progress_action="uninstall_zigbee_flasher_addon", + description_placeholders={ + **self._get_translation_placeholders(), + "addon_name": fw_flasher_manager.addon_name, + }, + progress_task=self.addon_uninstall_task, + ) + + try: + await self.addon_uninstall_task + except (AddonError, AbortFlow) as err: + _LOGGER.error(err) + # The uninstall failing isn't critical so we can just continue + finally: + self.addon_uninstall_task = None + + return self.async_show_progress_done(next_step_id="confirm_zigbee") + + async def async_step_confirm_zigbee( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Confirm Zigbee setup.""" + assert self._usb_info is not None + assert self._hw_variant is not None + self._probed_firmware_type = ApplicationType.EZSP + + if user_input is not None: + await self.hass.config_entries.flow.async_init( + ZHA_DOMAIN, + context={"source": "hardware"}, + data={ + "name": self._hw_variant.full_name, + "port": { + "path": self._usb_info.device, + "baudrate": 115200, + "flow_control": "hardware", + }, + "radio_type": "ezsp", + }, + ) + + return self._async_flow_finished() + + return self.async_show_form( + step_id="confirm_zigbee", + description_placeholders=self._get_translation_placeholders(), + ) + + async def async_step_pick_firmware_thread( + self, user_input: dict[str, Any] | None = None 
+ ) -> ConfigFlowResult: + """Pick Thread firmware.""" + # We install the OTBR addon no matter what, since it is required to use Thread + if not is_hassio(self.hass): + return self.async_abort( + reason="not_hassio_thread", + description_placeholders=self._get_translation_placeholders(), + ) + + otbr_manager = get_otbr_addon_manager(self.hass) + addon_info = await self._async_get_addon_info(otbr_manager) + + if addon_info.state == AddonState.NOT_INSTALLED: + return await self.async_step_install_otbr_addon() + + if addon_info.state == AddonState.NOT_RUNNING: + return await self.async_step_start_otbr_addon() + + # If the addon is already installed and running, fail + return self.async_abort( + reason="otbr_addon_already_running", + description_placeholders={ + **self._get_translation_placeholders(), + "addon_name": otbr_manager.addon_name, + }, + ) + + async def async_step_install_otbr_addon( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Show progress dialog for installing the OTBR addon.""" + return await self._install_addon( + get_otbr_addon_manager(self.hass), "install_otbr_addon", "start_otbr_addon" + ) + + async def async_step_start_otbr_addon( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Configure OTBR to point to the SkyConnect and run the addon.""" + otbr_manager = get_otbr_addon_manager(self.hass) + addon_info = await self._async_get_addon_info(otbr_manager) + + assert self._usb_info is not None + new_addon_config = { + **addon_info.options, + "device": self._usb_info.device, + "baudrate": 460800, + "flow_control": True, + "autoflash_firmware": True, + } + + _LOGGER.debug("Reconfiguring OTBR addon with %s", new_addon_config) + await self._async_set_addon_config(new_addon_config, otbr_manager) + + if not self.addon_start_task: + self.addon_start_task = self.hass.async_create_task( + otbr_manager.async_start_addon_waiting() + ) + + if not self.addon_start_task.done(): + return 
self.async_show_progress( + step_id="start_otbr_addon", + progress_action="start_otbr_addon", + description_placeholders={ + **self._get_translation_placeholders(), + "addon_name": otbr_manager.addon_name, + }, + progress_task=self.addon_start_task, + ) + + try: + await self.addon_start_task + except (AddonError, AbortFlow) as err: + _LOGGER.error(err) + self._failed_addon_name = otbr_manager.addon_name + self._failed_addon_reason = "addon_start_failed" + return self.async_show_progress_done(next_step_id="addon_operation_failed") + finally: + self.addon_start_task = None + + return self.async_show_progress_done(next_step_id="confirm_otbr") + + async def async_step_confirm_otbr( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Confirm OTBR setup.""" + assert self._usb_info is not None + assert self._hw_variant is not None + + self._probed_firmware_type = ApplicationType.SPINEL + + if user_input is not None: + # OTBR discovery is done automatically via hassio + return self._async_flow_finished() + + return self.async_show_form( + step_id="confirm_otbr", + description_placeholders=self._get_translation_placeholders(), + ) + + @abstractmethod + def _async_flow_finished(self) -> ConfigFlowResult: + """Finish the flow.""" + # This should be implemented by a subclass + raise NotImplementedError + + +class HomeAssistantSkyConnectConfigFlow( + BaseFirmwareInstallFlow, ConfigFlow, domain=DOMAIN +): """Handle a config flow for Home Assistant SkyConnect.""" VERSION = 1 + MINOR_VERSION = 2 @staticmethod @callback def async_get_options_flow( config_entry: ConfigEntry, - ) -> HomeAssistantSkyConnectOptionsFlow: + ) -> OptionsFlow: """Return the options flow.""" - return HomeAssistantSkyConnectOptionsFlow(config_entry) + firmware_type = ApplicationType(config_entry.data["firmware"]) + + if firmware_type is ApplicationType.CPC: + return HomeAssistantSkyConnectMultiPanOptionsFlowHandler(config_entry) + + return 
HomeAssistantSkyConnectOptionsFlowHandler(config_entry) async def async_step_usb( self, discovery_info: usb.UsbServiceInfo @@ -37,27 +506,62 @@ class HomeAssistantSkyConnectConfigFlow(ConfigFlow, domain=DOMAIN): manufacturer = discovery_info.manufacturer description = discovery_info.description unique_id = f"{vid}:{pid}_{serial_number}_{manufacturer}_{description}" + if await self.async_set_unique_id(unique_id): self._abort_if_unique_id_configured(updates={"device": device}) + discovery_info.device = await self.hass.async_add_executor_job( + usb.get_serial_by_id, discovery_info.device + ) + + self._usb_info = discovery_info + assert description is not None - hw_variant = HardwareVariant.from_usb_product_name(description) + self._hw_variant = HardwareVariant.from_usb_product_name(description) + + return await self.async_step_confirm() + + async def async_step_confirm( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Confirm a discovery.""" + self._set_confirm_only() + + # Without confirmation, discovery can automatically progress into parts of the + # config flow logic that interacts with hardware. 
+ if user_input is not None: + return await self.async_step_pick_firmware() + + return self.async_show_form( + step_id="confirm", + description_placeholders=self._get_translation_placeholders(), + ) + + def _async_flow_finished(self) -> ConfigFlowResult: + """Create the config entry.""" + assert self._usb_info is not None + assert self._hw_variant is not None + assert self._probed_firmware_type is not None return self.async_create_entry( - title=hw_variant.full_name, + title=self._hw_variant.full_name, data={ - "device": device, - "vid": vid, - "pid": pid, - "serial_number": serial_number, - "manufacturer": manufacturer, - "description": description, + "vid": self._usb_info.vid, + "pid": self._usb_info.pid, + "serial_number": self._usb_info.serial_number, + "manufacturer": self._usb_info.manufacturer, + "description": self._usb_info.description, # For backwards compatibility + "product": self._usb_info.description, + "device": self._usb_info.device, + "firmware": self._probed_firmware_type.value, }, ) -class HomeAssistantSkyConnectOptionsFlow(silabs_multiprotocol_addon.OptionsFlowHandler): - """Handle an option flow for Home Assistant SkyConnect.""" +class HomeAssistantSkyConnectMultiPanOptionsFlowHandler( + silabs_multiprotocol_addon.OptionsFlowHandler +): + """Multi-PAN options flow for Home Assistant SkyConnect.""" async def _async_serial_port_settings( self, @@ -92,3 +596,97 @@ class HomeAssistantSkyConnectOptionsFlow(silabs_multiprotocol_addon.OptionsFlowH def _hardware_name(self) -> str: """Return the name of the hardware.""" return self._hw_variant.full_name + + +class HomeAssistantSkyConnectOptionsFlowHandler( + BaseFirmwareInstallFlow, OptionsFlowWithConfigEntry +): + """Zigbee and Thread options flow handlers.""" + + def __init__(self, *args: Any, **kwargs: Any) -> None: + """Instantiate options flow.""" + super().__init__(*args, **kwargs) + + self._usb_info = get_usb_service_info(self.config_entry) + self._probed_firmware_type = 
ApplicationType(self.config_entry.data["firmware"]) + self._hw_variant = HardwareVariant.from_usb_product_name( + self.config_entry.data["product"] + ) + + # Make `context` a regular dictionary + self.context = {} + + # Regenerate the translation placeholders + self._get_translation_placeholders() + + async def async_step_init( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Manage the options flow.""" + # Don't probe the running firmware, we load it from the config entry + return self.async_show_menu( + step_id="pick_firmware", + menu_options=[ + STEP_PICK_FIRMWARE_THREAD, + STEP_PICK_FIRMWARE_ZIGBEE, + ], + description_placeholders=self._get_translation_placeholders(), + ) + + async def async_step_pick_firmware_zigbee( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Pick Zigbee firmware.""" + assert self._usb_info is not None + + if is_hassio(self.hass): + otbr_manager = get_otbr_addon_manager(self.hass) + otbr_addon_info = await self._async_get_addon_info(otbr_manager) + + if ( + otbr_addon_info.state != AddonState.NOT_INSTALLED + and otbr_addon_info.options.get("device") == self._usb_info.device + ): + raise AbortFlow( + "otbr_still_using_stick", + description_placeholders=self._get_translation_placeholders(), + ) + + return await super().async_step_pick_firmware_zigbee(user_input) + + async def async_step_pick_firmware_thread( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Pick Thread firmware.""" + assert self._usb_info is not None + + zha_entries = self.hass.config_entries.async_entries( + ZHA_DOMAIN, + include_ignore=False, + include_disabled=True, + ) + + if zha_entries and get_zha_device_path(zha_entries[0]) == self._usb_info.device: + raise AbortFlow( + "zha_still_using_stick", + description_placeholders=self._get_translation_placeholders(), + ) + + return await super().async_step_pick_firmware_thread(user_input) + + def _async_flow_finished(self) -> 
ConfigFlowResult: + """Create the config entry.""" + assert self._usb_info is not None + assert self._hw_variant is not None + assert self._probed_firmware_type is not None + + self.hass.config_entries.async_update_entry( + entry=self.config_entry, + data={ + **self.config_entry.data, + "firmware": self._probed_firmware_type.value, + }, + options=self.config_entry.options, + ) + + return self.async_create_entry(title="", data={}) diff --git a/homeassistant/components/homeassistant_sky_connect/const.py b/homeassistant/components/homeassistant_sky_connect/const.py index 1dd1471c470..1d6c16dc528 100644 --- a/homeassistant/components/homeassistant_sky_connect/const.py +++ b/homeassistant/components/homeassistant_sky_connect/const.py @@ -5,6 +5,17 @@ import enum from typing import Self DOMAIN = "homeassistant_sky_connect" +ZHA_DOMAIN = "zha" + +DOCS_WEB_FLASHER_URL = "https://skyconnect.home-assistant.io/firmware-update/" + +OTBR_ADDON_NAME = "OpenThread Border Router" +OTBR_ADDON_MANAGER_DATA = "openthread_border_router" +OTBR_ADDON_SLUG = "core_openthread_border_router" + +ZIGBEE_FLASHER_ADDON_NAME = "Silicon Labs Flasher" +ZIGBEE_FLASHER_ADDON_MANAGER_DATA = "silabs_flasher" +ZIGBEE_FLASHER_ADDON_SLUG = "core_silabs_flasher" @dataclasses.dataclass(frozen=True) diff --git a/homeassistant/components/homeassistant_sky_connect/hardware.py b/homeassistant/components/homeassistant_sky_connect/hardware.py index a9abeb27737..2872077111a 100644 --- a/homeassistant/components/homeassistant_sky_connect/hardware.py +++ b/homeassistant/components/homeassistant_sky_connect/hardware.py @@ -25,7 +25,7 @@ def async_info(hass: HomeAssistant) -> list[HardwareInfo]: pid=entry.data["pid"], serial_number=entry.data["serial_number"], manufacturer=entry.data["manufacturer"], - description=entry.data["description"], + description=entry.data["product"], ), name=get_hardware_variant(entry).full_name, url=DOCUMENTATION_URL, diff --git 
a/homeassistant/components/homeassistant_sky_connect/manifest.json b/homeassistant/components/homeassistant_sky_connect/manifest.json index f56fd24de61..c90ea2c075f 100644 --- a/homeassistant/components/homeassistant_sky_connect/manifest.json +++ b/homeassistant/components/homeassistant_sky_connect/manifest.json @@ -5,7 +5,7 @@ "config_flow": true, "dependencies": ["hardware", "usb", "homeassistant_hardware"], "documentation": "https://www.home-assistant.io/integrations/homeassistant_sky_connect", - "integration_type": "hardware", + "integration_type": "device", "usb": [ { "vid": "10C4", diff --git a/homeassistant/components/homeassistant_sky_connect/strings.json b/homeassistant/components/homeassistant_sky_connect/strings.json index 825649ef0d3..792406dcb02 100644 --- a/homeassistant/components/homeassistant_sky_connect/strings.json +++ b/homeassistant/components/homeassistant_sky_connect/strings.json @@ -57,6 +57,50 @@ "start_flasher_addon": { "title": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::step::start_flasher_addon::title%]", "description": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::step::start_flasher_addon::description%]" + }, + "confirm": { + "title": "[%key:component::homeassistant_sky_connect::config::step::confirm::title%]", + "description": "[%key:component::homeassistant_sky_connect::config::step::confirm::description%]" + }, + "pick_firmware": { + "title": "[%key:component::homeassistant_sky_connect::config::step::pick_firmware::title%]", + "description": "[%key:component::homeassistant_sky_connect::config::step::pick_firmware::description%]", + "menu_options": { + "pick_firmware_thread": "[%key:component::homeassistant_sky_connect::config::step::pick_firmware::menu_options::pick_firmware_thread%]", + "pick_firmware_zigbee": "[%key:component::homeassistant_sky_connect::config::step::pick_firmware::menu_options::pick_firmware_zigbee%]" + } + }, + 
"install_zigbee_flasher_addon": { + "title": "[%key:component::homeassistant_sky_connect::config::step::install_zigbee_flasher_addon::title%]", + "description": "[%key:component::homeassistant_sky_connect::config::step::install_zigbee_flasher_addon::description%]" + }, + "run_zigbee_flasher_addon": { + "title": "[%key:component::homeassistant_sky_connect::config::step::run_zigbee_flasher_addon::title%]", + "description": "[%key:component::homeassistant_sky_connect::config::step::run_zigbee_flasher_addon::description%]" + }, + "zigbee_flasher_failed": { + "title": "[%key:component::homeassistant_sky_connect::config::step::zigbee_flasher_failed::title%]", + "description": "[%key:component::homeassistant_sky_connect::config::step::zigbee_flasher_failed::description%]" + }, + "confirm_zigbee": { + "title": "[%key:component::homeassistant_sky_connect::config::step::confirm_zigbee::title%]", + "description": "[%key:component::homeassistant_sky_connect::config::step::confirm_zigbee::description%]" + }, + "install_otbr_addon": { + "title": "[%key:component::homeassistant_sky_connect::config::step::install_otbr_addon::title%]", + "description": "[%key:component::homeassistant_sky_connect::config::step::install_otbr_addon::description%]" + }, + "start_otbr_addon": { + "title": "[%key:component::homeassistant_sky_connect::config::step::start_otbr_addon::title%]", + "description": "[%key:component::homeassistant_sky_connect::config::step::start_otbr_addon::description%]" + }, + "otbr_failed": { + "title": "[%key:component::homeassistant_sky_connect::config::step::otbr_failed::title%]", + "description": "[%key:component::homeassistant_sky_connect::config::step::otbr_failed::description%]" + }, + "confirm_otbr": { + "title": "[%key:component::homeassistant_sky_connect::config::step::confirm_otbr::title%]", + "description": "[%key:component::homeassistant_sky_connect::config::step::confirm_otbr::description%]" } }, "error": { @@ -68,12 +112,92 @@ "addon_already_running": 
"[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::abort::addon_already_running%]", "addon_set_config_failed": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::abort::addon_set_config_failed%]", "addon_start_failed": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::abort::addon_start_failed%]", + "zha_migration_failed": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::abort::zha_migration_failed%]", "not_hassio": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::abort::not_hassio%]", - "zha_migration_failed": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::abort::zha_migration_failed%]" + "not_hassio_thread": "[%key:component::homeassistant_sky_connect::config::abort::not_hassio_thread%]", + "otbr_addon_already_running": "[%key:component::homeassistant_sky_connect::config::abort::otbr_addon_already_running%]", + "zha_still_using_stick": "This {model} is in use by the Zigbee Home Automation integration. Please migrate your Zigbee network to another adapter or delete the integration and try again.", + "otbr_still_using_stick": "This {model} is in use by the OpenThread Border Router add-on. If you use the Thread network, make sure you have alternative border routers. Uninstall the add-on and try again." 
}, "progress": { "install_addon": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::progress::install_addon%]", - "start_addon": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::progress::start_addon%]" + "start_addon": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::progress::start_addon%]", + "start_otbr_addon": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::progress::start_addon%]", + "install_zigbee_flasher_addon": "[%key:component::homeassistant_sky_connect::config::progress::install_zigbee_flasher_addon%]", + "run_zigbee_flasher_addon": "[%key:component::homeassistant_sky_connect::config::progress::run_zigbee_flasher_addon%]", + "uninstall_zigbee_flasher_addon": "[%key:component::homeassistant_sky_connect::config::progress::uninstall_zigbee_flasher_addon%]" + } + }, + "config": { + "flow_title": "{model}", + "step": { + "confirm": { + "title": "Set up the {model}", + "description": "The {model} can be used as either a Thread border router or a Zigbee coordinator. In the next step, you will choose which firmware will be configured." + }, + "pick_firmware": { + "title": "Pick your firmware", + "description": "The {model} can be used as a Thread border router or a Zigbee coordinator.", + "menu_options": { + "pick_firmware_thread": "Use as a Thread border router", + "pick_firmware_zigbee": "Use as a Zigbee coordinator" + } + }, + "install_zigbee_flasher_addon": { + "title": "Installing flasher", + "description": "Installing the Silicon Labs Flasher add-on." + }, + "run_zigbee_flasher_addon": { + "title": "Installing Zigbee firmware", + "description": "Installing Zigbee firmware. This will take about a minute." + }, + "uninstall_zigbee_flasher_addon": { + "title": "Removing flasher", + "description": "Removing the Silicon Labs Flasher add-on." 
+ }, + "zigbee_flasher_failed": { + "title": "Zigbee installation failed", + "description": "The Zigbee firmware installation process was unsuccessful. Ensure no other software is trying to communicate with the {model} and try again." + }, + "confirm_zigbee": { + "title": "Zigbee setup complete", + "description": "Your {model} is now a Zigbee coordinator and will be shown as discovered by the Zigbee Home Automation integration once you exit." + }, + "install_otbr_addon": { + "title": "Installing OpenThread Border Router add-on", + "description": "The OpenThread Border Router (OTBR) add-on is being installed." + }, + "start_otbr_addon": { + "title": "Starting OpenThread Border Router add-on", + "description": "The OpenThread Border Router (OTBR) add-on is now starting." + }, + "otbr_failed": { + "title": "Failed to set up OpenThread Border Router", + "description": "The OpenThread Border Router add-on installation was unsuccessful. Ensure no other software is trying to communicate with the {model}, you have access to the internet and can install other add-ons, and try again. Check the Supervisor logs if the problem persists." + }, + "confirm_otbr": { + "title": "OpenThread Border Router setup complete", + "description": "Your {model} is now an OpenThread Border Router and will show up in the Thread integration once you exit." 
+ } + }, + "abort": { + "addon_info_failed": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::abort::addon_info_failed%]", + "addon_install_failed": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::abort::addon_install_failed%]", + "addon_already_running": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::abort::addon_already_running%]", + "addon_set_config_failed": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::abort::addon_set_config_failed%]", + "addon_start_failed": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::abort::addon_start_failed%]", + "zha_migration_failed": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::abort::zha_migration_failed%]", + "not_hassio": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::abort::not_hassio%]", + "not_hassio_thread": "The OpenThread Border Router addon can only be installed with Home Assistant OS. If you would like to use the {model} as a Thread border router, please flash the firmware manually using the [web flasher]({docs_web_flasher_url}) and set up OpenThread Border Router to communicate with it.", + "otbr_addon_already_running": "The OpenThread Border Router add-on is already running, it cannot be installed again." 
+ }, + "progress": { + "install_addon": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::progress::install_addon%]", + "start_addon": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::progress::start_addon%]", + "start_otbr_addon": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::progress::start_addon%]", + "install_zigbee_flasher_addon": "The Silicon Labs Flasher addon is installed, this may take a few minutes.", + "run_zigbee_flasher_addon": "Please wait while Zigbee firmware is installed to your {model}, this will take a few minutes. Do not make any changes to your hardware or software until this finishes.", + "uninstall_zigbee_flasher_addon": "The Silicon Labs Flasher addon is being removed." } } } diff --git a/homeassistant/components/homeassistant_sky_connect/util.py b/homeassistant/components/homeassistant_sky_connect/util.py index e1de1d3b442..f242416fa9a 100644 --- a/homeassistant/components/homeassistant_sky_connect/util.py +++ b/homeassistant/components/homeassistant_sky_connect/util.py @@ -2,10 +2,35 @@ from __future__ import annotations -from homeassistant.components import usb -from homeassistant.config_entries import ConfigEntry +from collections import defaultdict +from dataclasses import dataclass +import logging +from typing import cast -from .const import HardwareVariant +from universal_silabs_flasher.const import ApplicationType + +from homeassistant.components import usb +from homeassistant.components.hassio import AddonError, AddonState, is_hassio +from homeassistant.components.homeassistant_hardware.silabs_multiprotocol_addon import ( + WaitingAddonManager, + get_multiprotocol_addon_manager, +) +from homeassistant.config_entries import ConfigEntry, ConfigEntryState +from homeassistant.core import HomeAssistant, callback +from homeassistant.helpers.singleton import singleton + +from .const import ( + OTBR_ADDON_MANAGER_DATA, + OTBR_ADDON_NAME, + 
OTBR_ADDON_SLUG, + ZHA_DOMAIN, + ZIGBEE_FLASHER_ADDON_MANAGER_DATA, + ZIGBEE_FLASHER_ADDON_NAME, + ZIGBEE_FLASHER_ADDON_SLUG, + HardwareVariant, +) + +_LOGGER = logging.getLogger(__name__) def get_usb_service_info(config_entry: ConfigEntry) -> usb.UsbServiceInfo: @@ -16,10 +41,115 @@ def get_usb_service_info(config_entry: ConfigEntry) -> usb.UsbServiceInfo: pid=config_entry.data["pid"], serial_number=config_entry.data["serial_number"], manufacturer=config_entry.data["manufacturer"], - description=config_entry.data["description"], + description=config_entry.data["product"], ) def get_hardware_variant(config_entry: ConfigEntry) -> HardwareVariant: """Get the hardware variant from the config entry.""" - return HardwareVariant.from_usb_product_name(config_entry.data["description"]) + return HardwareVariant.from_usb_product_name(config_entry.data["product"]) + + +def get_zha_device_path(config_entry: ConfigEntry) -> str: + """Get the device path from a ZHA config entry.""" + return cast(str, config_entry.data["device"]["path"]) + + +@singleton(OTBR_ADDON_MANAGER_DATA) +@callback +def get_otbr_addon_manager(hass: HomeAssistant) -> WaitingAddonManager: + """Get the OTBR add-on manager.""" + return WaitingAddonManager( + hass, + _LOGGER, + OTBR_ADDON_NAME, + OTBR_ADDON_SLUG, + ) + + +@singleton(ZIGBEE_FLASHER_ADDON_MANAGER_DATA) +@callback +def get_zigbee_flasher_addon_manager(hass: HomeAssistant) -> WaitingAddonManager: + """Get the flasher add-on manager.""" + return WaitingAddonManager( + hass, + _LOGGER, + ZIGBEE_FLASHER_ADDON_NAME, + ZIGBEE_FLASHER_ADDON_SLUG, + ) + + +@dataclass(slots=True, kw_only=True) +class FirmwareGuess: + """Firmware guess.""" + + is_running: bool + firmware_type: ApplicationType + source: str + + +async def guess_firmware_type(hass: HomeAssistant, device_path: str) -> FirmwareGuess: + """Guess the firmware type based on installed addons and other integrations.""" + device_guesses: defaultdict[str | None, list[FirmwareGuess]] = 
defaultdict(list) + + for zha_config_entry in hass.config_entries.async_entries(ZHA_DOMAIN): + zha_path = get_zha_device_path(zha_config_entry) + device_guesses[zha_path].append( + FirmwareGuess( + is_running=(zha_config_entry.state == ConfigEntryState.LOADED), + firmware_type=ApplicationType.EZSP, + source="zha", + ) + ) + + if is_hassio(hass): + otbr_addon_manager = get_otbr_addon_manager(hass) + + try: + otbr_addon_info = await otbr_addon_manager.async_get_addon_info() + except AddonError: + pass + else: + if otbr_addon_info.state != AddonState.NOT_INSTALLED: + otbr_path = otbr_addon_info.options.get("device") + device_guesses[otbr_path].append( + FirmwareGuess( + is_running=(otbr_addon_info.state == AddonState.RUNNING), + firmware_type=ApplicationType.SPINEL, + source="otbr", + ) + ) + + multipan_addon_manager = await get_multiprotocol_addon_manager(hass) + + try: + multipan_addon_info = await multipan_addon_manager.async_get_addon_info() + except AddonError: + pass + else: + if multipan_addon_info.state != AddonState.NOT_INSTALLED: + multipan_path = multipan_addon_info.options.get("device") + device_guesses[multipan_path].append( + FirmwareGuess( + is_running=(multipan_addon_info.state == AddonState.RUNNING), + firmware_type=ApplicationType.CPC, + source="multiprotocol", + ) + ) + + # Fall back to EZSP if we can't guess the firmware type + if device_path not in device_guesses: + return FirmwareGuess( + is_running=False, firmware_type=ApplicationType.EZSP, source="unknown" + ) + + # Prioritizes guesses that were pulled from a running addon or integration but keep + # the sort order we defined above + guesses = sorted( + device_guesses[device_path], + key=lambda guess: guess.is_running, + ) + + assert guesses + + return guesses[-1] diff --git a/homeassistant/components/zha/repairs/wrong_silabs_firmware.py b/homeassistant/components/zha/repairs/wrong_silabs_firmware.py index 5b1f85e1a29..4ee10c7bb93 100644 --- 
a/homeassistant/components/zha/repairs/wrong_silabs_firmware.py +++ b/homeassistant/components/zha/repairs/wrong_silabs_firmware.py @@ -74,9 +74,14 @@ def _detect_radio_hardware(hass: HomeAssistant, device: str) -> HardwareType: return HardwareType.OTHER -async def probe_silabs_firmware_type(device: str) -> ApplicationType | None: +async def probe_silabs_firmware_type( + device: str, *, probe_methods: ApplicationType | None = None +) -> ApplicationType | None: """Probe the running firmware on a Silabs device.""" - flasher = Flasher(device=device) + flasher = Flasher( + device=device, + **({"probe_methods": probe_methods} if probe_methods else {}), + ) try: await flasher.probe_app_type() diff --git a/homeassistant/generated/integrations.json b/homeassistant/generated/integrations.json index e6a103989d1..cf5f352f22c 100644 --- a/homeassistant/generated/integrations.json +++ b/homeassistant/generated/integrations.json @@ -2565,6 +2565,11 @@ "integration_type": "virtual", "supported_by": "netatmo" }, + "homeassistant_sky_connect": { + "name": "Home Assistant SkyConnect", + "integration_type": "device", + "config_flow": true + }, "homematic": { "name": "Homematic", "integrations": { diff --git a/script/hassfest/dependencies.py b/script/hassfest/dependencies.py index d4eb135a265..66796d4dd0d 100644 --- a/script/hassfest/dependencies.py +++ b/script/hassfest/dependencies.py @@ -157,6 +157,7 @@ IGNORE_VIOLATIONS = { ("zha", "homeassistant_hardware"), ("zha", "homeassistant_sky_connect"), ("zha", "homeassistant_yellow"), + ("homeassistant_sky_connect", "zha"), # This should become a helper method that integrations can submit data to ("websocket_api", "lovelace"), ("websocket_api", "shopping_list"), diff --git a/tests/components/homeassistant_sky_connect/test_config_flow.py b/tests/components/homeassistant_sky_connect/test_config_flow.py index 9647cef4721..c34e3ebe186 100644 --- a/tests/components/homeassistant_sky_connect/test_config_flow.py +++ 
b/tests/components/homeassistant_sky_connect/test_config_flow.py @@ -1,23 +1,31 @@ """Test the Home Assistant SkyConnect config flow.""" -from collections.abc import Generator -import copy -from unittest.mock import Mock, patch +import asyncio +from collections.abc import Awaitable, Callable +from typing import Any +from unittest.mock import AsyncMock, Mock, call, patch import pytest +from universal_silabs_flasher.const import ApplicationType -from homeassistant.components import homeassistant_sky_connect, usb +from homeassistant.components import usb +from homeassistant.components.hassio.addon_manager import AddonInfo, AddonState +from homeassistant.components.homeassistant_hardware.silabs_multiprotocol_addon import ( + get_multiprotocol_addon_manager, +) +from homeassistant.components.homeassistant_sky_connect.config_flow import ( + STEP_PICK_FIRMWARE_THREAD, + STEP_PICK_FIRMWARE_ZIGBEE, +) from homeassistant.components.homeassistant_sky_connect.const import DOMAIN -from homeassistant.components.zha import ( - CONF_DEVICE_PATH, - DOMAIN as ZHA_DOMAIN, - RadioType, +from homeassistant.components.homeassistant_sky_connect.util import ( + get_otbr_addon_manager, + get_zigbee_flasher_addon_manager, ) from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType -from homeassistant.setup import async_setup_component -from tests.common import MockConfigEntry, MockModule, mock_integration +from tests.common import MockConfigEntry USB_DATA_SKY = usb.UsbServiceInfo( device="/dev/serial/by-id/usb-Nabu_Casa_SkyConnect_v1.0_9e2adbd75b8beb119fe564a0f320645d-if00-port0", @@ -38,340 +46,840 @@ USB_DATA_ZBT1 = usb.UsbServiceInfo( ) -@pytest.fixture(autouse=True) -def config_flow_handler(hass: HomeAssistant) -> Generator[None, None, None]: - """Fixture for a test config flow.""" - with patch( - "homeassistant.components.homeassistant_hardware.silabs_multiprotocol_addon.WaitingAddonManager.async_wait_until_addon_state" - ): - yield +def 
delayed_side_effect() -> Callable[..., Awaitable[None]]: + """Slows down eager tasks by delaying for an event loop tick.""" + + async def side_effect(*args: Any, **kwargs: Any) -> None: + await asyncio.sleep(0) + + return side_effect @pytest.mark.parametrize( - ("usb_data", "title"), + ("usb_data", "model"), [ (USB_DATA_SKY, "Home Assistant SkyConnect"), (USB_DATA_ZBT1, "Home Assistant Connect ZBT-1"), ], ) -async def test_config_flow( - usb_data: usb.UsbServiceInfo, title: str, hass: HomeAssistant +async def test_config_flow_zigbee( + usb_data: usb.UsbServiceInfo, model: str, hass: HomeAssistant ) -> None: """Test the config flow for SkyConnect.""" - with patch( - "homeassistant.components.homeassistant_sky_connect.async_setup_entry", - return_value=True, - ) as mock_setup_entry: - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": "usb"}, data=usb_data - ) - - expected_data = { - "device": usb_data.device, - "vid": usb_data.vid, - "pid": usb_data.pid, - "serial_number": usb_data.serial_number, - "manufacturer": usb_data.manufacturer, - "description": usb_data.description, - } - - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == title - assert result["data"] == expected_data - assert result["options"] == {} - assert len(mock_setup_entry.mock_calls) == 1 - - config_entry = hass.config_entries.async_entries(DOMAIN)[0] - assert config_entry.data == expected_data - assert config_entry.options == {} - assert config_entry.title == title - assert ( - config_entry.unique_id - == f"{usb_data.vid}:{usb_data.pid}_{usb_data.serial_number}_{usb_data.manufacturer}_{usb_data.description}" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": "usb"}, data=usb_data ) + # First step is confirmation, we haven't probed the firmware yet + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "confirm" + assert result["description_placeholders"]["firmware_type"] == "unknown" 
+ assert result["description_placeholders"]["model"] == model -@pytest.mark.parametrize( - ("usb_data", "title"), - [ - (USB_DATA_SKY, "Home Assistant SkyConnect"), - (USB_DATA_ZBT1, "Home Assistant Connect ZBT-1"), - ], -) -async def test_config_flow_multiple_entries( - usb_data: usb.UsbServiceInfo, title: str, hass: HomeAssistant -) -> None: - """Test multiple entries are allowed.""" - # Setup an existing config entry - config_entry = MockConfigEntry( - data={}, - domain=DOMAIN, - options={}, - title=title, - unique_id=f"{usb_data.vid}:{usb_data.pid}_{usb_data.serial_number}_{usb_data.manufacturer}_{usb_data.description}", - ) - config_entry.add_to_hass(hass) - - usb_data = copy.copy(usb_data) - usb_data.serial_number = "bla_serial_number_2" - + # Next, we probe the firmware with patch( - "homeassistant.components.homeassistant_sky_connect.async_setup_entry", - return_value=True, + "homeassistant.components.homeassistant_sky_connect.config_flow.probe_silabs_firmware_type", + return_value=ApplicationType.SPINEL, # Ensure we re-install it ): - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": "usb"}, data=usb_data + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={} ) - assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["type"] is FlowResultType.MENU + assert result["step_id"] == "pick_firmware" + assert result["description_placeholders"]["firmware_type"] == "spinel" - -@pytest.mark.parametrize( - ("usb_data", "title"), - [ - (USB_DATA_SKY, "Home Assistant SkyConnect"), - (USB_DATA_ZBT1, "Home Assistant Connect ZBT-1"), - ], -) -async def test_config_flow_update_device( - usb_data: usb.UsbServiceInfo, title: str, hass: HomeAssistant -) -> None: - """Test updating device path.""" - # Setup an existing config entry - config_entry = MockConfigEntry( - data={}, - domain=DOMAIN, - options={}, - title=title, - 
unique_id=f"{usb_data.vid}:{usb_data.pid}_{usb_data.serial_number}_{usb_data.manufacturer}_{usb_data.description}", + # Set up Zigbee firmware + mock_flasher_manager = Mock(spec_set=get_zigbee_flasher_addon_manager(hass)) + mock_flasher_manager.async_install_addon_waiting = AsyncMock( + side_effect=delayed_side_effect() + ) + mock_flasher_manager.async_start_addon_waiting = AsyncMock( + side_effect=delayed_side_effect() + ) + mock_flasher_manager.async_uninstall_addon_waiting = AsyncMock( + side_effect=delayed_side_effect() ) - config_entry.add_to_hass(hass) - - usb_data = copy.copy(usb_data) - usb_data.device = "bla_device_2" - - with patch( - "homeassistant.components.homeassistant_sky_connect.async_setup_entry", - return_value=True, - ) as mock_setup_entry: - assert await hass.config_entries.async_setup(config_entry.entry_id) - assert len(mock_setup_entry.mock_calls) == 1 with ( patch( - "homeassistant.components.homeassistant_sky_connect.async_setup_entry", - return_value=True, - ) as mock_setup_entry, + "homeassistant.components.homeassistant_sky_connect.config_flow.get_zigbee_flasher_addon_manager", + return_value=mock_flasher_manager, + ), patch( - "homeassistant.components.homeassistant_sky_connect.async_unload_entry", - wraps=homeassistant_sky_connect.async_unload_entry, - ) as mock_unload_entry, + "homeassistant.components.homeassistant_sky_connect.config_flow.is_hassio", + return_value=True, + ), ): - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": "usb"}, data=usb_data + mock_flasher_manager.addon_name = "Silicon Labs Flasher" + mock_flasher_manager.async_get_addon_info.return_value = AddonInfo( + available=True, + hostname=None, + options={}, + state=AddonState.NOT_INSTALLED, + update_available=False, + version=None, ) - await hass.async_block_till_done() - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "already_configured" - assert len(mock_setup_entry.mock_calls) == 1 - assert 
len(mock_unload_entry.mock_calls) == 1 + # Pick the menu option: we are now installing the addon + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={"next_step_id": STEP_PICK_FIRMWARE_ZIGBEE}, + ) + assert result["type"] is FlowResultType.SHOW_PROGRESS + assert result["progress_action"] == "install_addon" + assert result["step_id"] == "install_zigbee_flasher_addon" + await hass.async_block_till_done(wait_background_tasks=True) -@pytest.mark.parametrize( - ("usb_data", "title"), - [ - (USB_DATA_SKY, "Home Assistant SkyConnect"), - (USB_DATA_ZBT1, "Home Assistant ZBT-1"), - ], -) -async def test_option_flow_install_multi_pan_addon( - usb_data: usb.UsbServiceInfo, - title: str, - hass: HomeAssistant, - addon_store_info, - addon_info, - install_addon, - set_addon_options, - start_addon, -) -> None: - """Test installing the multi pan addon.""" - assert await async_setup_component(hass, "usb", {}) - mock_integration(hass, MockModule("hassio")) + # Progress the flow, we are now configuring the addon and running it + result = await hass.config_entries.flow.async_configure(result["flow_id"]) + assert result["type"] is FlowResultType.SHOW_PROGRESS + assert result["step_id"] == "run_zigbee_flasher_addon" + assert result["progress_action"] == "run_zigbee_flasher_addon" + assert mock_flasher_manager.async_set_addon_options.mock_calls == [ + call( + { + "device": usb_data.device, + "baudrate": 115200, + "bootloader_baudrate": 115200, + "flow_control": True, + } + ) + ] - # Setup the config entry - config_entry = MockConfigEntry( - data={ - "device": usb_data.device, - "vid": usb_data.vid, - "pid": usb_data.pid, - "serial_number": usb_data.serial_number, - "manufacturer": usb_data.manufacturer, - "description": usb_data.description, - }, - domain=DOMAIN, - options={}, - title=title, - unique_id=f"{usb_data.vid}:{usb_data.pid}_{usb_data.serial_number}_{usb_data.manufacturer}_{usb_data.description}", - ) - config_entry.add_to_hass(hass) + 
await hass.async_block_till_done(wait_background_tasks=True) - with patch( - "homeassistant.components.homeassistant_hardware.silabs_multiprotocol_addon.is_hassio", - side_effect=Mock(return_value=True), - ): - result = await hass.config_entries.options.async_init(config_entry.entry_id) + # Progress the flow, we are now uninstalling the addon + result = await hass.config_entries.flow.async_configure(result["flow_id"]) + assert result["type"] is FlowResultType.SHOW_PROGRESS + assert result["step_id"] == "uninstall_zigbee_flasher_addon" + assert result["progress_action"] == "uninstall_zigbee_flasher_addon" + + await hass.async_block_till_done(wait_background_tasks=True) + + # We are finally done with the addon + assert mock_flasher_manager.async_uninstall_addon_waiting.mock_calls == [call()] + + result = await hass.config_entries.flow.async_configure(result["flow_id"]) assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "addon_not_installed" + assert result["step_id"] == "confirm_zigbee" - result = await hass.config_entries.options.async_configure( - result["flow_id"], - user_input={ - "enable_multi_pan": True, - }, + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={} ) - assert result["type"] is FlowResultType.SHOW_PROGRESS - assert result["step_id"] == "install_addon" - assert result["progress_action"] == "install_addon" - - await hass.async_block_till_done() - install_addon.assert_called_once_with(hass, "core_silabs_multiprotocol") - - result = await hass.config_entries.options.async_configure(result["flow_id"]) - assert result["type"] is FlowResultType.SHOW_PROGRESS - assert result["step_id"] == "start_addon" - set_addon_options.assert_called_once_with( - hass, - "core_silabs_multiprotocol", - { - "options": { - "autoflash_firmware": True, - "device": usb_data.device, - "baudrate": "115200", - "flow_control": True, - } - }, - ) - - await hass.async_block_till_done() - 
start_addon.assert_called_once_with(hass, "core_silabs_multiprotocol") - - result = await hass.config_entries.options.async_configure(result["flow_id"]) assert result["type"] is FlowResultType.CREATE_ENTRY + config_entry = result["result"] + assert config_entry.data == { + "firmware": "ezsp", + "device": usb_data.device, + "manufacturer": usb_data.manufacturer, + "pid": usb_data.pid, + "description": usb_data.description, + "product": usb_data.description, + "serial_number": usb_data.serial_number, + "vid": usb_data.vid, + } -def mock_detect_radio_type(radio_type=RadioType.ezsp, ret=True): - """Mock `detect_radio_type` that just sets the appropriate attributes.""" - - async def detect(self): - self.radio_type = radio_type - self.device_settings = radio_type.controller.SCHEMA_DEVICE( - {CONF_DEVICE_PATH: self.device_path} - ) - - return ret - - return detect + # Ensure a ZHA discovery flow has been created + flows = hass.config_entries.flow.async_progress() + assert len(flows) == 1 + zha_flow = flows[0] + assert zha_flow["handler"] == "zha" + assert zha_flow["context"]["source"] == "hardware" + assert zha_flow["step_id"] == "confirm" @pytest.mark.parametrize( - ("usb_data", "title"), + ("usb_data", "model"), [ (USB_DATA_SKY, "Home Assistant SkyConnect"), (USB_DATA_ZBT1, "Home Assistant Connect ZBT-1"), ], ) -@patch( - "homeassistant.components.zha.radio_manager.ZhaRadioManager.detect_radio_type", - mock_detect_radio_type(), -) -async def test_option_flow_install_multi_pan_addon_zha( - usb_data: usb.UsbServiceInfo, - title: str, - hass: HomeAssistant, - addon_store_info, - addon_info, - install_addon, - set_addon_options, - start_addon, +async def test_config_flow_zigbee_skip_step_if_installed( + usb_data: usb.UsbServiceInfo, model: str, hass: HomeAssistant ) -> None: - """Test installing the multi pan addon when a zha config entry exists.""" - assert await async_setup_component(hass, "usb", {}) - mock_integration(hass, MockModule("hassio")) + """Test the config flow 
for SkyConnect, skip installing the addon if necessary.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": "usb"}, data=usb_data + ) - # Setup the config entry - config_entry = MockConfigEntry( - data={ - "device": usb_data.device, - "vid": usb_data.vid, - "pid": usb_data.pid, - "serial_number": usb_data.serial_number, - "manufacturer": usb_data.manufacturer, - "description": usb_data.description, - }, - domain=DOMAIN, + # First step is confirmation, we haven't probed the firmware yet + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "confirm" + assert result["description_placeholders"]["firmware_type"] == "unknown" + assert result["description_placeholders"]["model"] == model + + # Next, we probe the firmware + with patch( + "homeassistant.components.homeassistant_sky_connect.config_flow.probe_silabs_firmware_type", + return_value=ApplicationType.SPINEL, # Ensure we re-install it + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={} + ) + + assert result["type"] is FlowResultType.MENU + assert result["step_id"] == "pick_firmware" + assert result["description_placeholders"]["firmware_type"] == "spinel" + + # Set up Zigbee firmware + mock_flasher_manager = Mock(spec_set=get_zigbee_flasher_addon_manager(hass)) + mock_flasher_manager.async_start_addon_waiting = AsyncMock( + side_effect=delayed_side_effect() + ) + mock_flasher_manager.async_uninstall_addon_waiting = AsyncMock( + side_effect=delayed_side_effect() + ) + + with ( + patch( + "homeassistant.components.homeassistant_sky_connect.config_flow.get_zigbee_flasher_addon_manager", + return_value=mock_flasher_manager, + ), + patch( + "homeassistant.components.homeassistant_sky_connect.config_flow.is_hassio", + return_value=True, + ), + ): + mock_flasher_manager.addon_name = "Silicon Labs Flasher" + mock_flasher_manager.async_get_addon_info.return_value = AddonInfo( + available=True, + hostname=None, + options={ 
+ "device": "", + "baudrate": 115200, + "bootloader_baudrate": 115200, + "flow_control": True, + }, + state=AddonState.NOT_RUNNING, + update_available=False, + version="1.2.3", + ) + + # Pick the menu option: we skip installation, instead we directly run it + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={"next_step_id": STEP_PICK_FIRMWARE_ZIGBEE}, + ) + assert result["type"] is FlowResultType.SHOW_PROGRESS + assert result["step_id"] == "run_zigbee_flasher_addon" + assert result["progress_action"] == "run_zigbee_flasher_addon" + assert mock_flasher_manager.async_set_addon_options.mock_calls == [ + call( + { + "device": usb_data.device, + "baudrate": 115200, + "bootloader_baudrate": 115200, + "flow_control": True, + } + ) + ] + + # Uninstall the addon + await hass.async_block_till_done(wait_background_tasks=True) + result = await hass.config_entries.flow.async_configure(result["flow_id"]) + + # Done + await hass.async_block_till_done(wait_background_tasks=True) + result = await hass.config_entries.flow.async_configure(result["flow_id"]) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "confirm_zigbee" + + +@pytest.mark.parametrize( + ("usb_data", "model"), + [ + (USB_DATA_SKY, "Home Assistant SkyConnect"), + (USB_DATA_ZBT1, "Home Assistant Connect ZBT-1"), + ], +) +async def test_config_flow_thread( + usb_data: usb.UsbServiceInfo, model: str, hass: HomeAssistant +) -> None: + """Test the config flow for SkyConnect.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": "usb"}, data=usb_data + ) + + # First step is confirmation, we haven't probed the firmware yet + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "confirm" + assert result["description_placeholders"]["firmware_type"] == "unknown" + assert result["description_placeholders"]["model"] == model + + # Next, we probe the firmware + with patch( + 
"homeassistant.components.homeassistant_sky_connect.config_flow.probe_silabs_firmware_type", + return_value=ApplicationType.EZSP, + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={} + ) + + assert result["type"] is FlowResultType.MENU + assert result["step_id"] == "pick_firmware" + assert result["description_placeholders"]["firmware_type"] == "ezsp" + + # Set up Thread firmware + mock_otbr_manager = Mock(spec_set=get_otbr_addon_manager(hass)) + mock_otbr_manager.async_install_addon_waiting = AsyncMock( + side_effect=delayed_side_effect() + ) + mock_otbr_manager.async_start_addon_waiting = AsyncMock( + side_effect=delayed_side_effect() + ) + + with ( + patch( + "homeassistant.components.homeassistant_sky_connect.config_flow.get_otbr_addon_manager", + return_value=mock_otbr_manager, + ), + patch( + "homeassistant.components.homeassistant_sky_connect.config_flow.is_hassio", + return_value=True, + ), + ): + mock_otbr_manager.addon_name = "OpenThread Border Router" + mock_otbr_manager.async_get_addon_info.return_value = AddonInfo( + available=True, + hostname=None, + options={}, + state=AddonState.NOT_INSTALLED, + update_available=False, + version=None, + ) + + # Pick the menu option + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={"next_step_id": STEP_PICK_FIRMWARE_THREAD}, + ) + + assert result["type"] is FlowResultType.SHOW_PROGRESS + assert result["progress_action"] == "install_addon" + assert result["step_id"] == "install_otbr_addon" + + await hass.async_block_till_done(wait_background_tasks=True) + + mock_otbr_manager.async_get_addon_info.return_value = AddonInfo( + available=True, + hostname=None, + options={ + "device": "", + "baudrate": 460800, + "flow_control": True, + "autoflash_firmware": True, + }, + state=AddonState.NOT_RUNNING, + update_available=False, + version="1.2.3", + ) + + # Progress the flow, it is now configuring the addon and running it + result = await 
hass.config_entries.flow.async_configure(result["flow_id"]) + + assert result["type"] is FlowResultType.SHOW_PROGRESS + assert result["step_id"] == "start_otbr_addon" + assert result["progress_action"] == "start_otbr_addon" + + assert mock_otbr_manager.async_set_addon_options.mock_calls == [ + call( + { + "device": usb_data.device, + "baudrate": 460800, + "flow_control": True, + "autoflash_firmware": True, + } + ) + ] + + await hass.async_block_till_done(wait_background_tasks=True) + + # The addon is now running + result = await hass.config_entries.flow.async_configure(result["flow_id"]) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "confirm_otbr" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={} + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + + config_entry = result["result"] + assert config_entry.data == { + "firmware": "spinel", + "device": usb_data.device, + "manufacturer": usb_data.manufacturer, + "pid": usb_data.pid, + "description": usb_data.description, + "product": usb_data.description, + "serial_number": usb_data.serial_number, + "vid": usb_data.vid, + } + + +@pytest.mark.parametrize( + ("usb_data", "model"), + [ + (USB_DATA_SKY, "Home Assistant SkyConnect"), + (USB_DATA_ZBT1, "Home Assistant Connect ZBT-1"), + ], +) +async def test_config_flow_thread_addon_already_installed( + usb_data: usb.UsbServiceInfo, model: str, hass: HomeAssistant +) -> None: + """Test the Thread config flow for SkyConnect, addon is already installed.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": "usb"}, data=usb_data + ) + + with patch( + "homeassistant.components.homeassistant_sky_connect.config_flow.probe_silabs_firmware_type", + return_value=ApplicationType.EZSP, + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={} + ) + + mock_otbr_manager = Mock(spec_set=get_otbr_addon_manager(hass)) + 
mock_otbr_manager.addon_name = "OpenThread Border Router" + mock_otbr_manager.async_install_addon_waiting = AsyncMock( + side_effect=delayed_side_effect() + ) + mock_otbr_manager.async_start_addon_waiting = AsyncMock( + side_effect=delayed_side_effect() + ) + mock_otbr_manager.async_get_addon_info.return_value = AddonInfo( + available=True, + hostname=None, options={}, - title=title, - unique_id=f"{usb_data.vid}:{usb_data.pid}_{usb_data.serial_number}_{usb_data.manufacturer}_{usb_data.description}", + state=AddonState.NOT_RUNNING, + update_available=False, + version=None, + ) + + with ( + patch( + "homeassistant.components.homeassistant_sky_connect.config_flow.get_otbr_addon_manager", + return_value=mock_otbr_manager, + ), + patch( + "homeassistant.components.homeassistant_sky_connect.config_flow.is_hassio", + return_value=True, + ), + ): + # Pick the menu option + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={"next_step_id": STEP_PICK_FIRMWARE_THREAD}, + ) + assert result["type"] is FlowResultType.SHOW_PROGRESS + assert result["step_id"] == "start_otbr_addon" + assert result["progress_action"] == "start_otbr_addon" + + assert mock_otbr_manager.async_set_addon_options.mock_calls == [ + call( + { + "device": usb_data.device, + "baudrate": 460800, + "flow_control": True, + "autoflash_firmware": True, + } + ) + ] + + await hass.async_block_till_done(wait_background_tasks=True) + + # The addon is now running + result = await hass.config_entries.flow.async_configure(result["flow_id"]) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "confirm_otbr" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={} + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + + +@pytest.mark.parametrize( + ("usb_data", "model"), + [ + (USB_DATA_ZBT1, "Home Assistant Connect ZBT-1"), + ], +) +async def test_config_flow_zigbee_not_hassio( + usb_data: usb.UsbServiceInfo, 
model: str, hass: HomeAssistant +) -> None: + """Test when the stick is used with a non-hassio setup.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": "usb"}, data=usb_data + ) + + with patch( + "homeassistant.components.homeassistant_sky_connect.config_flow.probe_silabs_firmware_type", + return_value=ApplicationType.EZSP, + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={} + ) + + with ( + patch( + "homeassistant.components.homeassistant_sky_connect.config_flow.is_hassio", + return_value=False, + ), + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={"next_step_id": STEP_PICK_FIRMWARE_ZIGBEE}, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "confirm_zigbee" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={} + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + + config_entry = result["result"] + assert config_entry.data == { + "firmware": "ezsp", + "device": usb_data.device, + "manufacturer": usb_data.manufacturer, + "pid": usb_data.pid, + "description": usb_data.description, + "product": usb_data.description, + "serial_number": usb_data.serial_number, + "vid": usb_data.vid, + } + + # Ensure a ZHA discovery flow has been created + flows = hass.config_entries.flow.async_progress() + assert len(flows) == 1 + zha_flow = flows[0] + assert zha_flow["handler"] == "zha" + assert zha_flow["context"]["source"] == "hardware" + assert zha_flow["step_id"] == "confirm" + + +@pytest.mark.parametrize( + ("usb_data", "model"), + [ + (USB_DATA_SKY, "Home Assistant SkyConnect"), + (USB_DATA_ZBT1, "Home Assistant Connect ZBT-1"), + ], +) +async def test_options_flow_zigbee_to_thread( + usb_data: usb.UsbServiceInfo, model: str, hass: HomeAssistant +) -> None: + """Test the options flow for SkyConnect, migrating Zigbee to Thread.""" + config_entry = MockConfigEntry( + 
domain="homeassistant_sky_connect", + data={ + "firmware": "ezsp", + "device": usb_data.device, + "manufacturer": usb_data.manufacturer, + "pid": usb_data.pid, + "description": usb_data.description, + "product": usb_data.description, + "serial_number": usb_data.serial_number, + "vid": usb_data.vid, + }, + version=1, + minor_version=2, ) config_entry.add_to_hass(hass) - zha_config_entry = MockConfigEntry( - data={"device": {"path": usb_data.device}, "radio_type": "ezsp"}, - domain=ZHA_DOMAIN, - options={}, - title="Yellow", - ) - zha_config_entry.add_to_hass(hass) + assert await hass.config_entries.async_setup(config_entry.entry_id) - with patch( - "homeassistant.components.homeassistant_hardware.silabs_multiprotocol_addon.is_hassio", - side_effect=Mock(return_value=True), + # First step is confirmation + result = await hass.config_entries.options.async_init(config_entry.entry_id) + assert result["type"] is FlowResultType.MENU + assert result["step_id"] == "pick_firmware" + assert result["description_placeholders"]["firmware_type"] == "ezsp" + assert result["description_placeholders"]["model"] == model + + # Pick Thread + mock_otbr_manager = Mock(spec_set=get_otbr_addon_manager(hass)) + mock_otbr_manager.async_install_addon_waiting = AsyncMock( + side_effect=delayed_side_effect() + ) + mock_otbr_manager.async_start_addon_waiting = AsyncMock( + side_effect=delayed_side_effect() + ) + + with ( + patch( + "homeassistant.components.homeassistant_sky_connect.config_flow.get_otbr_addon_manager", + return_value=mock_otbr_manager, + ), + patch( + "homeassistant.components.homeassistant_sky_connect.config_flow.is_hassio", + return_value=True, + ), + ): + mock_otbr_manager.addon_name = "OpenThread Border Router" + mock_otbr_manager.async_get_addon_info.return_value = AddonInfo( + available=True, + hostname=None, + options={}, + state=AddonState.NOT_INSTALLED, + update_available=False, + version=None, + ) + + result = await hass.config_entries.options.async_configure( + 
result["flow_id"], + user_input={"next_step_id": STEP_PICK_FIRMWARE_THREAD}, + ) + + assert result["type"] is FlowResultType.SHOW_PROGRESS + assert result["progress_action"] == "install_addon" + assert result["step_id"] == "install_otbr_addon" + + await hass.async_block_till_done(wait_background_tasks=True) + + mock_otbr_manager.async_get_addon_info.return_value = AddonInfo( + available=True, + hostname=None, + options={ + "device": "", + "baudrate": 460800, + "flow_control": True, + "autoflash_firmware": True, + }, + state=AddonState.NOT_RUNNING, + update_available=False, + version="1.2.3", + ) + + # Progress the flow, it is now configuring the addon and running it + result = await hass.config_entries.options.async_configure(result["flow_id"]) + + assert result["type"] is FlowResultType.SHOW_PROGRESS + assert result["step_id"] == "start_otbr_addon" + assert result["progress_action"] == "start_otbr_addon" + + assert mock_otbr_manager.async_set_addon_options.mock_calls == [ + call( + { + "device": usb_data.device, + "baudrate": 460800, + "flow_control": True, + "autoflash_firmware": True, + } + ) + ] + + await hass.async_block_till_done(wait_background_tasks=True) + + # The addon is now running + result = await hass.config_entries.options.async_configure(result["flow_id"]) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "confirm_otbr" + + # We are now done + result = await hass.config_entries.options.async_configure( + result["flow_id"], user_input={} + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + + # The firmware type has been updated + assert config_entry.data["firmware"] == "spinel" + + +@pytest.mark.parametrize( + ("usb_data", "model"), + [ + (USB_DATA_SKY, "Home Assistant SkyConnect"), + (USB_DATA_ZBT1, "Home Assistant Connect ZBT-1"), + ], +) +async def test_options_flow_thread_to_zigbee( + usb_data: usb.UsbServiceInfo, model: str, hass: HomeAssistant +) -> None: + """Test the options flow for SkyConnect, migrating 
Thread to Zigbee.""" + config_entry = MockConfigEntry( + domain="homeassistant_sky_connect", + data={ + "firmware": "spinel", + "device": usb_data.device, + "manufacturer": usb_data.manufacturer, + "pid": usb_data.pid, + "description": usb_data.description, + "product": usb_data.description, + "serial_number": usb_data.serial_number, + "vid": usb_data.vid, + }, + version=1, + minor_version=2, + ) + config_entry.add_to_hass(hass) + + assert await hass.config_entries.async_setup(config_entry.entry_id) + + # First step is confirmation + result = await hass.config_entries.options.async_init(config_entry.entry_id) + assert result["type"] is FlowResultType.MENU + assert result["step_id"] == "pick_firmware" + assert result["description_placeholders"]["firmware_type"] == "spinel" + assert result["description_placeholders"]["model"] == model + + # Set up Zigbee firmware + mock_flasher_manager = Mock(spec_set=get_zigbee_flasher_addon_manager(hass)) + mock_flasher_manager.async_install_addon_waiting = AsyncMock( + side_effect=delayed_side_effect() + ) + mock_flasher_manager.async_start_addon_waiting = AsyncMock( + side_effect=delayed_side_effect() + ) + mock_flasher_manager.async_uninstall_addon_waiting = AsyncMock( + side_effect=delayed_side_effect() + ) + + # OTBR is not installed + mock_otbr_manager = Mock(spec_set=get_otbr_addon_manager(hass)) + mock_otbr_manager.async_get_addon_info.return_value = AddonInfo( + available=True, + hostname=None, + options={}, + state=AddonState.NOT_INSTALLED, + update_available=False, + version=None, + ) + + with ( + patch( + "homeassistant.components.homeassistant_sky_connect.config_flow.get_zigbee_flasher_addon_manager", + return_value=mock_flasher_manager, + ), + patch( + "homeassistant.components.homeassistant_sky_connect.config_flow.get_otbr_addon_manager", + return_value=mock_otbr_manager, + ), + patch( + "homeassistant.components.homeassistant_sky_connect.config_flow.is_hassio", + return_value=True, + ), + ): + 
mock_flasher_manager.addon_name = "Silicon Labs Flasher" + mock_flasher_manager.async_get_addon_info.return_value = AddonInfo( + available=True, + hostname=None, + options={}, + state=AddonState.NOT_INSTALLED, + update_available=False, + version=None, + ) + + # Pick the menu option: we are now installing the addon + result = await hass.config_entries.options.async_configure( + result["flow_id"], + user_input={"next_step_id": STEP_PICK_FIRMWARE_ZIGBEE}, + ) + assert result["type"] is FlowResultType.SHOW_PROGRESS + assert result["progress_action"] == "install_addon" + assert result["step_id"] == "install_zigbee_flasher_addon" + + await hass.async_block_till_done(wait_background_tasks=True) + + # Progress the flow, we are now configuring the addon and running it + result = await hass.config_entries.options.async_configure(result["flow_id"]) + assert result["type"] is FlowResultType.SHOW_PROGRESS + assert result["step_id"] == "run_zigbee_flasher_addon" + assert result["progress_action"] == "run_zigbee_flasher_addon" + assert mock_flasher_manager.async_set_addon_options.mock_calls == [ + call( + { + "device": usb_data.device, + "baudrate": 115200, + "bootloader_baudrate": 115200, + "flow_control": True, + } + ) + ] + + await hass.async_block_till_done(wait_background_tasks=True) + + # Progress the flow, we are now uninstalling the addon + result = await hass.config_entries.options.async_configure(result["flow_id"]) + assert result["type"] is FlowResultType.SHOW_PROGRESS + assert result["step_id"] == "uninstall_zigbee_flasher_addon" + assert result["progress_action"] == "uninstall_zigbee_flasher_addon" + + await hass.async_block_till_done(wait_background_tasks=True) + + # We are finally done with the addon + assert mock_flasher_manager.async_uninstall_addon_waiting.mock_calls == [call()] + + result = await hass.config_entries.options.async_configure(result["flow_id"]) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "confirm_zigbee" + + # We 
are now done + result = await hass.config_entries.options.async_configure( + result["flow_id"], user_input={} + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + + # The firmware type has been updated + assert config_entry.data["firmware"] == "ezsp" + + +@pytest.mark.parametrize( + ("usb_data", "model"), + [ + (USB_DATA_SKY, "Home Assistant SkyConnect"), + (USB_DATA_ZBT1, "Home Assistant Connect ZBT-1"), + ], +) +async def test_options_flow_multipan_uninstall( + usb_data: usb.UsbServiceInfo, model: str, hass: HomeAssistant +) -> None: + """Test options flow for when multi-PAN firmware is installed.""" + config_entry = MockConfigEntry( + domain="homeassistant_sky_connect", + data={ + "firmware": "cpc", + "device": usb_data.device, + "manufacturer": usb_data.manufacturer, + "pid": usb_data.pid, + "product": usb_data.description, + "serial_number": usb_data.serial_number, + "vid": usb_data.vid, + }, + version=1, + minor_version=2, + ) + config_entry.add_to_hass(hass) + + assert await hass.config_entries.async_setup(config_entry.entry_id) + + # Multi-PAN addon is running + mock_multipan_manager = Mock(spec_set=await get_multiprotocol_addon_manager(hass)) + mock_multipan_manager.async_get_addon_info.return_value = AddonInfo( + available=True, + hostname=None, + options={"device": usb_data.device}, + state=AddonState.RUNNING, + update_available=False, + version="1.0.0", + ) + + with ( + patch( + "homeassistant.components.homeassistant_hardware.silabs_multiprotocol_addon.get_multiprotocol_addon_manager", + return_value=mock_multipan_manager, + ), + patch( + "homeassistant.components.homeassistant_hardware.silabs_multiprotocol_addon.is_hassio", + return_value=True, + ), ): result = await hass.config_entries.options.async_init(config_entry.entry_id) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "addon_not_installed" - - result = await hass.config_entries.options.async_configure( - result["flow_id"], - user_input={ - 
"enable_multi_pan": True, - }, - ) - assert result["type"] is FlowResultType.SHOW_PROGRESS - assert result["step_id"] == "install_addon" - assert result["progress_action"] == "install_addon" - - await hass.async_block_till_done() - install_addon.assert_called_once_with(hass, "core_silabs_multiprotocol") - - result = await hass.config_entries.options.async_configure(result["flow_id"]) - assert result["type"] is FlowResultType.SHOW_PROGRESS - assert result["step_id"] == "start_addon" - set_addon_options.assert_called_once_with( - hass, - "core_silabs_multiprotocol", - { - "options": { - "autoflash_firmware": True, - "device": usb_data.device, - "baudrate": "115200", - "flow_control": True, - } - }, - ) - # Check the ZHA config entry data is updated - assert zha_config_entry.data == { - "device": { - "path": "socket://core-silabs-multiprotocol:9999", - "baudrate": 115200, - "flow_control": None, - }, - "radio_type": "ezsp", - } - - await hass.async_block_till_done() - start_addon.assert_called_once_with(hass, "core_silabs_multiprotocol") - - result = await hass.config_entries.options.async_configure(result["flow_id"]) - assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["type"] is FlowResultType.MENU + assert result["step_id"] == "addon_menu" + assert "uninstall_addon" in result["menu_options"] diff --git a/tests/components/homeassistant_sky_connect/test_config_flow_failures.py b/tests/components/homeassistant_sky_connect/test_config_flow_failures.py new file mode 100644 index 00000000000..128c812272f --- /dev/null +++ b/tests/components/homeassistant_sky_connect/test_config_flow_failures.py @@ -0,0 +1,920 @@ +"""Test the Home Assistant SkyConnect config flow failure cases.""" + +from unittest.mock import AsyncMock, Mock, patch + +import pytest +from universal_silabs_flasher.const import ApplicationType + +from homeassistant.components import usb +from homeassistant.components.hassio.addon_manager import ( + AddonError, + AddonInfo, + AddonState, +) 
+from homeassistant.components.homeassistant_sky_connect.config_flow import ( + STEP_PICK_FIRMWARE_THREAD, + STEP_PICK_FIRMWARE_ZIGBEE, +) +from homeassistant.components.homeassistant_sky_connect.const import DOMAIN +from homeassistant.components.homeassistant_sky_connect.util import ( + get_otbr_addon_manager, + get_zigbee_flasher_addon_manager, +) +from homeassistant.core import HomeAssistant +from homeassistant.data_entry_flow import FlowResultType + +from .test_config_flow import USB_DATA_ZBT1, delayed_side_effect + +from tests.common import MockConfigEntry + + +@pytest.mark.parametrize( + ("usb_data", "model"), + [ + (USB_DATA_ZBT1, "Home Assistant Connect ZBT-1"), + ], +) +async def test_config_flow_cannot_probe_firmware( + usb_data: usb.UsbServiceInfo, model: str, hass: HomeAssistant +) -> None: + """Test failure case when firmware cannot be probed.""" + + with patch( + "homeassistant.components.homeassistant_sky_connect.config_flow.probe_silabs_firmware_type", + return_value=None, + ): + # Start the flow + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": "usb"}, data=usb_data + ) + + # Probing fails + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={} + ) + + assert result["type"] == FlowResultType.ABORT + assert result["reason"] == "unsupported_firmware" + + +@pytest.mark.parametrize( + ("usb_data", "model"), + [ + (USB_DATA_ZBT1, "Home Assistant Connect ZBT-1"), + ], +) +async def test_config_flow_zigbee_not_hassio_wrong_firmware( + usb_data: usb.UsbServiceInfo, model: str, hass: HomeAssistant +) -> None: + """Test when the stick is used with a non-hassio setup but the firmware is bad.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": "usb"}, data=usb_data + ) + + with patch( + "homeassistant.components.homeassistant_sky_connect.config_flow.probe_silabs_firmware_type", + return_value=ApplicationType.SPINEL, + ): + result = await 
hass.config_entries.flow.async_configure( + result["flow_id"], user_input={} + ) + + with ( + patch( + "homeassistant.components.homeassistant_sky_connect.config_flow.is_hassio", + return_value=False, + ), + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={"next_step_id": STEP_PICK_FIRMWARE_ZIGBEE}, + ) + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "not_hassio" + + +@pytest.mark.parametrize( + ("usb_data", "model"), + [ + (USB_DATA_ZBT1, "Home Assistant Connect ZBT-1"), + ], +) +async def test_config_flow_zigbee_flasher_addon_already_running( + usb_data: usb.UsbServiceInfo, model: str, hass: HomeAssistant +) -> None: + """Test failure case when flasher addon is already running.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": "usb"}, data=usb_data + ) + + with patch( + "homeassistant.components.homeassistant_sky_connect.config_flow.probe_silabs_firmware_type", + return_value=ApplicationType.SPINEL, + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={} + ) + + mock_flasher_manager = Mock(spec_set=get_zigbee_flasher_addon_manager(hass)) + mock_flasher_manager.addon_name = "Silicon Labs Flasher" + mock_flasher_manager.async_get_addon_info.return_value = AddonInfo( + available=True, + hostname=None, + options={}, + state=AddonState.RUNNING, + update_available=False, + version="1.0.0", + ) + + with ( + patch( + "homeassistant.components.homeassistant_sky_connect.config_flow.get_zigbee_flasher_addon_manager", + return_value=mock_flasher_manager, + ), + patch( + "homeassistant.components.homeassistant_sky_connect.config_flow.is_hassio", + return_value=True, + ), + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={"next_step_id": STEP_PICK_FIRMWARE_ZIGBEE}, + ) + + # Cannot get addon info + assert result["type"] == FlowResultType.ABORT + assert result["reason"] == 
"addon_already_running" + + +@pytest.mark.parametrize( + ("usb_data", "model"), + [ + (USB_DATA_ZBT1, "Home Assistant Connect ZBT-1"), + ], +) +async def test_config_flow_zigbee_flasher_addon_info_fails( + usb_data: usb.UsbServiceInfo, model: str, hass: HomeAssistant +) -> None: + """Test failure case when flasher addon cannot be installed.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": "usb"}, data=usb_data + ) + + with patch( + "homeassistant.components.homeassistant_sky_connect.config_flow.probe_silabs_firmware_type", + return_value=ApplicationType.SPINEL, + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={} + ) + + mock_flasher_manager = Mock(spec_set=get_zigbee_flasher_addon_manager(hass)) + mock_flasher_manager.addon_name = "Silicon Labs Flasher" + mock_flasher_manager.async_get_addon_info.side_effect = AddonError() + + with ( + patch( + "homeassistant.components.homeassistant_sky_connect.config_flow.get_zigbee_flasher_addon_manager", + return_value=mock_flasher_manager, + ), + patch( + "homeassistant.components.homeassistant_sky_connect.config_flow.is_hassio", + return_value=True, + ), + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={"next_step_id": STEP_PICK_FIRMWARE_ZIGBEE}, + ) + + # Cannot get addon info + assert result["type"] == FlowResultType.ABORT + assert result["reason"] == "addon_info_failed" + + +@pytest.mark.parametrize( + ("usb_data", "model"), + [ + (USB_DATA_ZBT1, "Home Assistant Connect ZBT-1"), + ], +) +async def test_config_flow_zigbee_flasher_addon_install_fails( + usb_data: usb.UsbServiceInfo, model: str, hass: HomeAssistant +) -> None: + """Test failure case when flasher addon cannot be installed.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": "usb"}, data=usb_data + ) + + with patch( + 
"homeassistant.components.homeassistant_sky_connect.config_flow.probe_silabs_firmware_type", + return_value=ApplicationType.SPINEL, + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={} + ) + + mock_flasher_manager = Mock(spec_set=get_zigbee_flasher_addon_manager(hass)) + mock_flasher_manager.addon_name = "Silicon Labs Flasher" + mock_flasher_manager.async_get_addon_info.return_value = AddonInfo( + available=True, + hostname=None, + options={}, + state=AddonState.NOT_INSTALLED, + update_available=False, + version=None, + ) + mock_flasher_manager.async_install_addon_waiting = AsyncMock( + side_effect=AddonError() + ) + + with ( + patch( + "homeassistant.components.homeassistant_sky_connect.config_flow.get_zigbee_flasher_addon_manager", + return_value=mock_flasher_manager, + ), + patch( + "homeassistant.components.homeassistant_sky_connect.config_flow.is_hassio", + return_value=True, + ), + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={"next_step_id": STEP_PICK_FIRMWARE_ZIGBEE}, + ) + + # Cannot install addon + assert result["type"] == FlowResultType.ABORT + assert result["reason"] == "addon_install_failed" + + +@pytest.mark.parametrize( + ("usb_data", "model"), + [ + (USB_DATA_ZBT1, "Home Assistant Connect ZBT-1"), + ], +) +async def test_config_flow_zigbee_flasher_addon_set_config_fails( + usb_data: usb.UsbServiceInfo, model: str, hass: HomeAssistant +) -> None: + """Test failure case when flasher addon cannot be configured.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": "usb"}, data=usb_data + ) + + with patch( + "homeassistant.components.homeassistant_sky_connect.config_flow.probe_silabs_firmware_type", + return_value=ApplicationType.SPINEL, + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={} + ) + + mock_flasher_manager = Mock(spec_set=get_zigbee_flasher_addon_manager(hass)) + 
mock_flasher_manager.addon_name = "Silicon Labs Flasher" + mock_flasher_manager.async_get_addon_info.return_value = AddonInfo( + available=True, + hostname=None, + options={}, + state=AddonState.NOT_INSTALLED, + update_available=False, + version=None, + ) + mock_flasher_manager.async_install_addon_waiting = AsyncMock( + side_effect=delayed_side_effect() + ) + mock_flasher_manager.async_set_addon_options = AsyncMock(side_effect=AddonError()) + + with ( + patch( + "homeassistant.components.homeassistant_sky_connect.config_flow.get_zigbee_flasher_addon_manager", + return_value=mock_flasher_manager, + ), + patch( + "homeassistant.components.homeassistant_sky_connect.config_flow.is_hassio", + return_value=True, + ), + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={"next_step_id": STEP_PICK_FIRMWARE_ZIGBEE}, + ) + result = await hass.config_entries.flow.async_configure(result["flow_id"]) + await hass.async_block_till_done(wait_background_tasks=True) + + result = await hass.config_entries.flow.async_configure(result["flow_id"]) + assert result["type"] == FlowResultType.ABORT + assert result["reason"] == "addon_set_config_failed" + + +@pytest.mark.parametrize( + ("usb_data", "model"), + [ + (USB_DATA_ZBT1, "Home Assistant Connect ZBT-1"), + ], +) +async def test_config_flow_zigbee_flasher_run_fails( + usb_data: usb.UsbServiceInfo, model: str, hass: HomeAssistant +) -> None: + """Test failure case when flasher addon fails to run.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": "usb"}, data=usb_data + ) + + with patch( + "homeassistant.components.homeassistant_sky_connect.config_flow.probe_silabs_firmware_type", + return_value=ApplicationType.SPINEL, + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={} + ) + + mock_flasher_manager = Mock(spec_set=get_zigbee_flasher_addon_manager(hass)) + mock_flasher_manager.addon_name = "Silicon Labs Flasher" 
+ mock_flasher_manager.async_get_addon_info.return_value = AddonInfo( + available=True, + hostname=None, + options={}, + state=AddonState.NOT_INSTALLED, + update_available=False, + version=None, + ) + mock_flasher_manager.async_install_addon_waiting = AsyncMock( + side_effect=delayed_side_effect() + ) + mock_flasher_manager.async_start_addon_waiting = AsyncMock(side_effect=AddonError()) + + with ( + patch( + "homeassistant.components.homeassistant_sky_connect.config_flow.get_zigbee_flasher_addon_manager", + return_value=mock_flasher_manager, + ), + patch( + "homeassistant.components.homeassistant_sky_connect.config_flow.is_hassio", + return_value=True, + ), + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={"next_step_id": STEP_PICK_FIRMWARE_ZIGBEE}, + ) + result = await hass.config_entries.flow.async_configure(result["flow_id"]) + await hass.async_block_till_done(wait_background_tasks=True) + + result = await hass.config_entries.flow.async_configure(result["flow_id"]) + assert result["type"] == FlowResultType.ABORT + assert result["reason"] == "addon_start_failed" + + +@pytest.mark.parametrize( + ("usb_data", "model"), + [ + (USB_DATA_ZBT1, "Home Assistant Connect ZBT-1"), + ], +) +async def test_config_flow_zigbee_flasher_uninstall_fails( + usb_data: usb.UsbServiceInfo, model: str, hass: HomeAssistant +) -> None: + """Test failure case when flasher addon uninstall fails.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": "usb"}, data=usb_data + ) + + with patch( + "homeassistant.components.homeassistant_sky_connect.config_flow.probe_silabs_firmware_type", + return_value=ApplicationType.SPINEL, + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={} + ) + + mock_flasher_manager = Mock(spec_set=get_zigbee_flasher_addon_manager(hass)) + mock_flasher_manager.addon_name = "Silicon Labs Flasher" + 
mock_flasher_manager.async_get_addon_info.return_value = AddonInfo( + available=True, + hostname=None, + options={}, + state=AddonState.NOT_INSTALLED, + update_available=False, + version=None, + ) + mock_flasher_manager.async_install_addon_waiting = AsyncMock( + side_effect=delayed_side_effect() + ) + mock_flasher_manager.async_start_addon_waiting = AsyncMock( + side_effect=delayed_side_effect() + ) + mock_flasher_manager.async_uninstall_addon_waiting = AsyncMock( + side_effect=AddonError() + ) + + with ( + patch( + "homeassistant.components.homeassistant_sky_connect.config_flow.get_zigbee_flasher_addon_manager", + return_value=mock_flasher_manager, + ), + patch( + "homeassistant.components.homeassistant_sky_connect.config_flow.is_hassio", + return_value=True, + ), + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={"next_step_id": STEP_PICK_FIRMWARE_ZIGBEE}, + ) + result = await hass.config_entries.flow.async_configure(result["flow_id"]) + await hass.async_block_till_done(wait_background_tasks=True) + + result = await hass.config_entries.flow.async_configure(result["flow_id"]) + await hass.async_block_till_done(wait_background_tasks=True) + + # Uninstall failure isn't critical + result = await hass.config_entries.flow.async_configure(result["flow_id"]) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "confirm_zigbee" + + +@pytest.mark.parametrize( + ("usb_data", "model"), + [ + (USB_DATA_ZBT1, "Home Assistant Connect ZBT-1"), + ], +) +async def test_config_flow_thread_not_hassio( + usb_data: usb.UsbServiceInfo, model: str, hass: HomeAssistant +) -> None: + """Test when the stick is used with a non-hassio setup and Thread is selected.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": "usb"}, data=usb_data + ) + + with patch( + "homeassistant.components.homeassistant_sky_connect.config_flow.probe_silabs_firmware_type", + return_value=ApplicationType.EZSP, + 
): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={} + ) + + with ( + patch( + "homeassistant.components.homeassistant_sky_connect.config_flow.is_hassio", + return_value=False, + ), + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={"next_step_id": STEP_PICK_FIRMWARE_THREAD}, + ) + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "not_hassio_thread" + + +@pytest.mark.parametrize( + ("usb_data", "model"), + [ + (USB_DATA_ZBT1, "Home Assistant Connect ZBT-1"), + ], +) +async def test_config_flow_thread_addon_info_fails( + usb_data: usb.UsbServiceInfo, model: str, hass: HomeAssistant +) -> None: + """Test failure case when flasher addon cannot be installed.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": "usb"}, data=usb_data + ) + + with patch( + "homeassistant.components.homeassistant_sky_connect.config_flow.probe_silabs_firmware_type", + return_value=ApplicationType.EZSP, + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={} + ) + + mock_otbr_manager = Mock(spec_set=get_otbr_addon_manager(hass)) + mock_otbr_manager.addon_name = "OpenThread Border Router" + mock_otbr_manager.async_get_addon_info.side_effect = AddonError() + + with ( + patch( + "homeassistant.components.homeassistant_sky_connect.config_flow.get_otbr_addon_manager", + return_value=mock_otbr_manager, + ), + patch( + "homeassistant.components.homeassistant_sky_connect.config_flow.is_hassio", + return_value=True, + ), + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={"next_step_id": STEP_PICK_FIRMWARE_THREAD}, + ) + + # Cannot get addon info + assert result["type"] == FlowResultType.ABORT + assert result["reason"] == "addon_info_failed" + + +@pytest.mark.parametrize( + ("usb_data", "model"), + [ + (USB_DATA_ZBT1, "Home Assistant Connect ZBT-1"), + ], +) 
+async def test_config_flow_thread_addon_already_running( + usb_data: usb.UsbServiceInfo, model: str, hass: HomeAssistant +) -> None: + """Test failure case when the Thread addon is already running.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": "usb"}, data=usb_data + ) + + with patch( + "homeassistant.components.homeassistant_sky_connect.config_flow.probe_silabs_firmware_type", + return_value=ApplicationType.EZSP, + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={} + ) + + mock_otbr_manager = Mock(spec_set=get_otbr_addon_manager(hass)) + mock_otbr_manager.addon_name = "OpenThread Border Router" + mock_otbr_manager.async_get_addon_info.return_value = AddonInfo( + available=True, + hostname=None, + options={}, + state=AddonState.RUNNING, + update_available=False, + version="1.0.0", + ) + mock_otbr_manager.async_install_addon_waiting = AsyncMock(side_effect=AddonError()) + + with ( + patch( + "homeassistant.components.homeassistant_sky_connect.config_flow.get_otbr_addon_manager", + return_value=mock_otbr_manager, + ), + patch( + "homeassistant.components.homeassistant_sky_connect.config_flow.is_hassio", + return_value=True, + ), + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={"next_step_id": STEP_PICK_FIRMWARE_THREAD}, + ) + + # Cannot install addon + assert result["type"] == FlowResultType.ABORT + assert result["reason"] == "otbr_addon_already_running" + + +@pytest.mark.parametrize( + ("usb_data", "model"), + [ + (USB_DATA_ZBT1, "Home Assistant Connect ZBT-1"), + ], +) +async def test_config_flow_thread_addon_install_fails( + usb_data: usb.UsbServiceInfo, model: str, hass: HomeAssistant +) -> None: + """Test failure case when flasher addon cannot be installed.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": "usb"}, data=usb_data + ) + + with patch( + 
"homeassistant.components.homeassistant_sky_connect.config_flow.probe_silabs_firmware_type", + return_value=ApplicationType.EZSP, + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={} + ) + + mock_otbr_manager = Mock(spec_set=get_otbr_addon_manager(hass)) + mock_otbr_manager.addon_name = "OpenThread Border Router" + mock_otbr_manager.async_get_addon_info.return_value = AddonInfo( + available=True, + hostname=None, + options={}, + state=AddonState.NOT_INSTALLED, + update_available=False, + version=None, + ) + mock_otbr_manager.async_install_addon_waiting = AsyncMock(side_effect=AddonError()) + + with ( + patch( + "homeassistant.components.homeassistant_sky_connect.config_flow.get_otbr_addon_manager", + return_value=mock_otbr_manager, + ), + patch( + "homeassistant.components.homeassistant_sky_connect.config_flow.is_hassio", + return_value=True, + ), + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={"next_step_id": STEP_PICK_FIRMWARE_THREAD}, + ) + + # Cannot install addon + assert result["type"] == FlowResultType.ABORT + assert result["reason"] == "addon_install_failed" + + +@pytest.mark.parametrize( + ("usb_data", "model"), + [ + (USB_DATA_ZBT1, "Home Assistant Connect ZBT-1"), + ], +) +async def test_config_flow_thread_addon_set_config_fails( + usb_data: usb.UsbServiceInfo, model: str, hass: HomeAssistant +) -> None: + """Test failure case when flasher addon cannot be configured.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": "usb"}, data=usb_data + ) + + with patch( + "homeassistant.components.homeassistant_sky_connect.config_flow.probe_silabs_firmware_type", + return_value=ApplicationType.EZSP, + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={} + ) + + mock_otbr_manager = Mock(spec_set=get_otbr_addon_manager(hass)) + mock_otbr_manager.addon_name = "OpenThread Border Router" + 
mock_otbr_manager.async_get_addon_info.return_value = AddonInfo( + available=True, + hostname=None, + options={}, + state=AddonState.NOT_INSTALLED, + update_available=False, + version=None, + ) + mock_otbr_manager.async_install_addon_waiting = AsyncMock( + side_effect=delayed_side_effect() + ) + mock_otbr_manager.async_set_addon_options = AsyncMock(side_effect=AddonError()) + + with ( + patch( + "homeassistant.components.homeassistant_sky_connect.config_flow.get_otbr_addon_manager", + return_value=mock_otbr_manager, + ), + patch( + "homeassistant.components.homeassistant_sky_connect.config_flow.is_hassio", + return_value=True, + ), + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={"next_step_id": STEP_PICK_FIRMWARE_THREAD}, + ) + result = await hass.config_entries.flow.async_configure(result["flow_id"]) + await hass.async_block_till_done(wait_background_tasks=True) + + result = await hass.config_entries.flow.async_configure(result["flow_id"]) + assert result["type"] == FlowResultType.ABORT + assert result["reason"] == "addon_set_config_failed" + + +@pytest.mark.parametrize( + ("usb_data", "model"), + [ + (USB_DATA_ZBT1, "Home Assistant Connect ZBT-1"), + ], +) +async def test_config_flow_thread_flasher_run_fails( + usb_data: usb.UsbServiceInfo, model: str, hass: HomeAssistant +) -> None: + """Test failure case when flasher addon fails to run.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": "usb"}, data=usb_data + ) + + with patch( + "homeassistant.components.homeassistant_sky_connect.config_flow.probe_silabs_firmware_type", + return_value=ApplicationType.EZSP, + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={} + ) + + mock_otbr_manager = Mock(spec_set=get_otbr_addon_manager(hass)) + mock_otbr_manager.addon_name = "OpenThread Border Router" + mock_otbr_manager.async_get_addon_info.return_value = AddonInfo( + available=True, + 
hostname=None, + options={}, + state=AddonState.NOT_INSTALLED, + update_available=False, + version=None, + ) + mock_otbr_manager.async_install_addon_waiting = AsyncMock( + side_effect=delayed_side_effect() + ) + mock_otbr_manager.async_start_addon_waiting = AsyncMock(side_effect=AddonError()) + + with ( + patch( + "homeassistant.components.homeassistant_sky_connect.config_flow.get_otbr_addon_manager", + return_value=mock_otbr_manager, + ), + patch( + "homeassistant.components.homeassistant_sky_connect.config_flow.is_hassio", + return_value=True, + ), + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={"next_step_id": STEP_PICK_FIRMWARE_THREAD}, + ) + result = await hass.config_entries.flow.async_configure(result["flow_id"]) + await hass.async_block_till_done(wait_background_tasks=True) + + result = await hass.config_entries.flow.async_configure(result["flow_id"]) + assert result["type"] == FlowResultType.ABORT + assert result["reason"] == "addon_start_failed" + + +@pytest.mark.parametrize( + ("usb_data", "model"), + [ + (USB_DATA_ZBT1, "Home Assistant Connect ZBT-1"), + ], +) +async def test_config_flow_thread_flasher_uninstall_fails( + usb_data: usb.UsbServiceInfo, model: str, hass: HomeAssistant +) -> None: + """Test failure case when flasher addon uninstall fails.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": "usb"}, data=usb_data + ) + + with patch( + "homeassistant.components.homeassistant_sky_connect.config_flow.probe_silabs_firmware_type", + return_value=ApplicationType.EZSP, + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={} + ) + + mock_otbr_manager = Mock(spec_set=get_otbr_addon_manager(hass)) + mock_otbr_manager.addon_name = "OpenThread Border Router" + mock_otbr_manager.async_get_addon_info.return_value = AddonInfo( + available=True, + hostname=None, + options={}, + state=AddonState.NOT_INSTALLED, + 
update_available=False, + version=None, + ) + mock_otbr_manager.async_install_addon_waiting = AsyncMock( + side_effect=delayed_side_effect() + ) + mock_otbr_manager.async_start_addon_waiting = AsyncMock( + side_effect=delayed_side_effect() + ) + mock_otbr_manager.async_uninstall_addon_waiting = AsyncMock( + side_effect=AddonError() + ) + + with ( + patch( + "homeassistant.components.homeassistant_sky_connect.config_flow.get_otbr_addon_manager", + return_value=mock_otbr_manager, + ), + patch( + "homeassistant.components.homeassistant_sky_connect.config_flow.is_hassio", + return_value=True, + ), + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={"next_step_id": STEP_PICK_FIRMWARE_THREAD}, + ) + result = await hass.config_entries.flow.async_configure(result["flow_id"]) + await hass.async_block_till_done(wait_background_tasks=True) + + result = await hass.config_entries.flow.async_configure(result["flow_id"]) + await hass.async_block_till_done(wait_background_tasks=True) + + # Uninstall failure isn't critical + result = await hass.config_entries.flow.async_configure(result["flow_id"]) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "confirm_otbr" + + +@pytest.mark.parametrize( + ("usb_data", "model"), + [ + (USB_DATA_ZBT1, "Home Assistant Connect ZBT-1"), + ], +) +async def test_options_flow_zigbee_to_thread_zha_configured( + usb_data: usb.UsbServiceInfo, model: str, hass: HomeAssistant +) -> None: + """Test the options flow migration failure, ZHA using the stick.""" + config_entry = MockConfigEntry( + domain="homeassistant_sky_connect", + data={ + "firmware": "ezsp", + "device": usb_data.device, + "manufacturer": usb_data.manufacturer, + "pid": usb_data.pid, + "description": usb_data.description, + "product": usb_data.description, + "serial_number": usb_data.serial_number, + "vid": usb_data.vid, + }, + version=1, + minor_version=2, + ) + config_entry.add_to_hass(hass) + + assert await 
hass.config_entries.async_setup(config_entry.entry_id) + + # Set up ZHA as well + zha_config_entry = MockConfigEntry( + domain="zha", + data={"device": {"path": usb_data.device}}, + ) + zha_config_entry.add_to_hass(hass) + + # Confirm options flow + result = await hass.config_entries.options.async_init(config_entry.entry_id) + + # Pick Thread + result = await hass.config_entries.options.async_configure( + result["flow_id"], + user_input={"next_step_id": STEP_PICK_FIRMWARE_THREAD}, + ) + assert result["type"] == FlowResultType.ABORT + assert result["reason"] == "zha_still_using_stick" + + +@pytest.mark.parametrize( + ("usb_data", "model"), + [ + (USB_DATA_ZBT1, "Home Assistant Connect ZBT-1"), + ], +) +async def test_options_flow_thread_to_zigbee_otbr_configured( + usb_data: usb.UsbServiceInfo, model: str, hass: HomeAssistant +) -> None: + """Test the options flow migration failure, OTBR still using the stick.""" + config_entry = MockConfigEntry( + domain="homeassistant_sky_connect", + data={ + "firmware": "spinel", + "device": usb_data.device, + "manufacturer": usb_data.manufacturer, + "pid": usb_data.pid, + "description": usb_data.description, + "product": usb_data.description, + "serial_number": usb_data.serial_number, + "vid": usb_data.vid, + }, + version=1, + minor_version=2, + ) + config_entry.add_to_hass(hass) + + assert await hass.config_entries.async_setup(config_entry.entry_id) + + # Confirm options flow + result = await hass.config_entries.options.async_init(config_entry.entry_id) + + # Pick Zigbee + mock_otbr_manager = Mock(spec_set=get_otbr_addon_manager(hass)) + mock_otbr_manager.addon_name = "OpenThread Border Router" + mock_otbr_manager.async_get_addon_info.return_value = AddonInfo( + available=True, + hostname=None, + options={"device": usb_data.device}, + state=AddonState.RUNNING, + update_available=False, + version="1.0.0", + ) + + with ( + patch( + "homeassistant.components.homeassistant_sky_connect.config_flow.get_otbr_addon_manager", + 
return_value=mock_otbr_manager, + ), + patch( + "homeassistant.components.homeassistant_sky_connect.config_flow.is_hassio", + return_value=True, + ), + ): + result = await hass.config_entries.options.async_configure( + result["flow_id"], + user_input={"next_step_id": STEP_PICK_FIRMWARE_ZIGBEE}, + ) + assert result["type"] == FlowResultType.ABORT + assert result["reason"] == "otbr_still_using_stick" diff --git a/tests/components/homeassistant_sky_connect/test_hardware.py b/tests/components/homeassistant_sky_connect/test_hardware.py index 6b283378045..888ed27a3c0 100644 --- a/tests/components/homeassistant_sky_connect/test_hardware.py +++ b/tests/components/homeassistant_sky_connect/test_hardware.py @@ -1,7 +1,5 @@ """Test the Home Assistant SkyConnect hardware platform.""" -from unittest.mock import patch - from homeassistant.components.homeassistant_sky_connect.const import DOMAIN from homeassistant.core import EVENT_HOMEASSISTANT_STARTED, HomeAssistant from homeassistant.setup import async_setup_component @@ -15,7 +13,8 @@ CONFIG_ENTRY_DATA = { "pid": "EA60", "serial_number": "9e2adbd75b8beb119fe564a0f320645d", "manufacturer": "Nabu Casa", - "description": "SkyConnect v1.0", + "product": "SkyConnect v1.0", + "firmware": "ezsp", } CONFIG_ENTRY_DATA_2 = { @@ -24,7 +23,8 @@ CONFIG_ENTRY_DATA_2 = { "pid": "EA60", "serial_number": "9e2adbd75b8beb119fe564a0f320645d", "manufacturer": "Nabu Casa", - "description": "Home Assistant Connect ZBT-1", + "product": "Home Assistant Connect ZBT-1", + "firmware": "ezsp", } @@ -42,22 +42,24 @@ async def test_hardware_info( options={}, title="Home Assistant SkyConnect", unique_id="unique_1", + version=1, + minor_version=2, ) config_entry.add_to_hass(hass) + assert await hass.config_entries.async_setup(config_entry.entry_id) + config_entry_2 = MockConfigEntry( data=CONFIG_ENTRY_DATA_2, domain=DOMAIN, options={}, title="Home Assistant Connect ZBT-1", unique_id="unique_2", + version=1, + minor_version=2, ) 
config_entry_2.add_to_hass(hass) - with patch( - "homeassistant.components.homeassistant_sky_connect.usb.async_is_plugged_in", - return_value=True, - ): - assert await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() + + assert await hass.config_entries.async_setup(config_entry_2.entry_id) client = await hass_ws_client(hass) diff --git a/tests/components/homeassistant_sky_connect/test_init.py b/tests/components/homeassistant_sky_connect/test_init.py index a6dd5100d7e..88b57f2dd64 100644 --- a/tests/components/homeassistant_sky_connect/test_init.py +++ b/tests/components/homeassistant_sky_connect/test_init.py @@ -1,377 +1,56 @@ """Test the Home Assistant SkyConnect integration.""" -from collections.abc import Generator -from typing import Any -from unittest.mock import MagicMock, Mock, patch +from unittest.mock import patch -import pytest +from universal_silabs_flasher.const import ApplicationType -from homeassistant.components import zha -from homeassistant.components.hassio.handler import HassioAPIError from homeassistant.components.homeassistant_sky_connect.const import DOMAIN -from homeassistant.config_entries import ConfigEntryState -from homeassistant.core import EVENT_HOMEASSISTANT_STARTED, HomeAssistant -from homeassistant.setup import async_setup_component +from homeassistant.components.homeassistant_sky_connect.util import FirmwareGuess +from homeassistant.core import HomeAssistant from tests.common import MockConfigEntry -CONFIG_ENTRY_DATA = { - "device": "/dev/serial/by-id/usb-Nabu_Casa_SkyConnect_v1.0_9e2adbd75b8beb119fe564a0f320645d-if00-port0", - "vid": "10C4", - "pid": "EA60", - "serial_number": "3c0ed67c628beb11b1cd64a0f320645d", - "manufacturer": "Nabu Casa", - "description": "SkyConnect v1.0", -} +async def test_config_entry_migration_v2(hass: HomeAssistant) -> None: + """Test migrating config entries from v1 to v2 format.""" -@pytest.fixture(autouse=True) -def disable_usb_probing() -> Generator[None, 
None, None]: - """Disallow touching of system USB devices during unit tests.""" - with patch("homeassistant.components.usb.comports", return_value=[]): - yield - - -@pytest.fixture -def mock_zha_config_flow_setup() -> Generator[None, None, None]: - """Mock the radio connection and probing of the ZHA config flow.""" - - def mock_probe(config: dict[str, Any]) -> None: - # The radio probing will return the correct baudrate - return {**config, "baudrate": 115200} - - mock_connect_app = MagicMock() - mock_connect_app.__aenter__.return_value.backups.backups = [] - - with ( - patch( - "bellows.zigbee.application.ControllerApplication.probe", - side_effect=mock_probe, - ), - patch( - "homeassistant.components.zha.radio_manager.ZhaRadioManager.connect_zigpy_app", - return_value=mock_connect_app, - ), - ): - yield - - -@pytest.mark.parametrize( - ("onboarded", "num_entries", "num_flows"), [(False, 1, 0), (True, 0, 1)] -) -async def test_setup_entry( - mock_zha_config_flow_setup, - hass: HomeAssistant, - addon_store_info, - onboarded, - num_entries, - num_flows, -) -> None: - """Test setup of a config entry, including setup of zha.""" - assert await async_setup_component(hass, "usb", {}) - hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED) - - # Setup the config entry config_entry = MockConfigEntry( - data=CONFIG_ENTRY_DATA, domain=DOMAIN, - options={}, - title="Home Assistant SkyConnect", - ) - config_entry.add_to_hass(hass) - with ( - patch( - "homeassistant.components.homeassistant_sky_connect.usb.async_is_plugged_in", - return_value=True, - ) as mock_is_plugged_in, - patch( - "homeassistant.components.onboarding.async_is_onboarded", - return_value=onboarded, - ), - ): - assert await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - assert len(mock_is_plugged_in.mock_calls) == 1 - - matcher = mock_is_plugged_in.mock_calls[0].args[1] - assert matcher["vid"].isupper() - assert matcher["pid"].isupper() - assert 
matcher["serial_number"].islower() - assert matcher["manufacturer"].islower() - assert matcher["description"].islower() - - # Finish setting up ZHA - if num_entries > 0: - zha_flows = hass.config_entries.flow.async_progress_by_handler("zha") - assert len(zha_flows) == 1 - assert zha_flows[0]["step_id"] == "choose_formation_strategy" - - await hass.config_entries.flow.async_configure( - zha_flows[0]["flow_id"], - user_input={"next_step_id": zha.config_flow.FORMATION_REUSE_SETTINGS}, - ) - await hass.async_block_till_done() - - assert len(hass.config_entries.flow.async_progress_by_handler("zha")) == num_flows - assert len(hass.config_entries.async_entries("zha")) == num_entries - - -async def test_setup_zha( - mock_zha_config_flow_setup, hass: HomeAssistant, addon_store_info -) -> None: - """Test zha gets the right config.""" - assert await async_setup_component(hass, "usb", {}) - hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED) - - # Setup the config entry - config_entry = MockConfigEntry( - data=CONFIG_ENTRY_DATA, - domain=DOMAIN, - options={}, - title="Home Assistant SkyConnect", - ) - config_entry.add_to_hass(hass) - with ( - patch( - "homeassistant.components.homeassistant_sky_connect.usb.async_is_plugged_in", - return_value=True, - ) as mock_is_plugged_in, - patch( - "homeassistant.components.onboarding.async_is_onboarded", return_value=False - ), - ): - await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - assert len(mock_is_plugged_in.mock_calls) == 1 - - zha_flows = hass.config_entries.flow.async_progress_by_handler("zha") - assert len(zha_flows) == 1 - assert zha_flows[0]["step_id"] == "choose_formation_strategy" - - # Finish setting up ZHA - await hass.config_entries.flow.async_configure( - zha_flows[0]["flow_id"], - user_input={"next_step_id": zha.config_flow.FORMATION_REUSE_SETTINGS}, - ) - await hass.async_block_till_done() - - config_entry = hass.config_entries.async_entries("zha")[0] - assert 
config_entry.data == { - "device": { - "baudrate": 115200, - "flow_control": None, - "path": CONFIG_ENTRY_DATA["device"], + unique_id="some_unique_id", + data={ + "device": "/dev/serial/by-id/usb-Nabu_Casa_SkyConnect_v1.0_9e2adbd75b8beb119fe564a0f320645d-if00-port0", + "vid": "10C4", + "pid": "EA60", + "serial_number": "3c0ed67c628beb11b1cd64a0f320645d", + "manufacturer": "Nabu Casa", + "description": "SkyConnect v1.0", }, - "radio_type": "ezsp", - } - assert config_entry.options == {} - assert config_entry.title == CONFIG_ENTRY_DATA["description"] - - -async def test_setup_zha_multipan( - hass: HomeAssistant, addon_info, addon_running -) -> None: - """Test zha gets the right config.""" - assert await async_setup_component(hass, "usb", {}) - hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED) - - addon_info.return_value["options"]["device"] = CONFIG_ENTRY_DATA["device"] - - # Setup the config entry - config_entry = MockConfigEntry( - data=CONFIG_ENTRY_DATA, - domain=DOMAIN, - options={}, - title="Home Assistant SkyConnect", + version=1, ) + config_entry.add_to_hass(hass) - with ( - patch( - "homeassistant.components.homeassistant_sky_connect.usb.async_is_plugged_in", - return_value=True, - ) as mock_is_plugged_in, - patch( - "homeassistant.components.onboarding.async_is_onboarded", return_value=False - ), - patch( - "homeassistant.components.homeassistant_hardware.silabs_multiprotocol_addon.is_hassio", - side_effect=Mock(return_value=True), - ), - ): - assert await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - assert len(mock_is_plugged_in.mock_calls) == 1 - # Finish setting up ZHA - zha_flows = hass.config_entries.flow.async_progress_by_handler("zha") - assert len(zha_flows) == 1 - assert zha_flows[0]["step_id"] == "choose_formation_strategy" - - await hass.config_entries.flow.async_configure( - zha_flows[0]["flow_id"], - user_input={"next_step_id": zha.config_flow.FORMATION_REUSE_SETTINGS}, - ) - await 
hass.async_block_till_done() - - config_entry = hass.config_entries.async_entries("zha")[0] - assert config_entry.data == { - "device": { - "baudrate": 115200, - "flow_control": None, - "path": "socket://core-silabs-multiprotocol:9999", - }, - "radio_type": "ezsp", - } - assert config_entry.options == {} - assert config_entry.title == "SkyConnect Multiprotocol" - - -async def test_setup_zha_multipan_other_device( - mock_zha_config_flow_setup, hass: HomeAssistant, addon_info, addon_running -) -> None: - """Test zha gets the right config.""" - assert await async_setup_component(hass, "usb", {}) - hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED) - - addon_info.return_value["options"]["device"] = "/dev/not_our_sky_connect" - - # Setup the config entry - config_entry = MockConfigEntry( - data=CONFIG_ENTRY_DATA, - domain=DOMAIN, - options={}, - title="Home Assistant Yellow", - ) - config_entry.add_to_hass(hass) - with ( - patch( - "homeassistant.components.homeassistant_sky_connect.usb.async_is_plugged_in", - return_value=True, - ) as mock_is_plugged_in, - patch( - "homeassistant.components.onboarding.async_is_onboarded", return_value=False - ), - patch( - "homeassistant.components.homeassistant_hardware.silabs_multiprotocol_addon.is_hassio", - side_effect=Mock(return_value=True), - ), - ): - assert await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - assert len(mock_is_plugged_in.mock_calls) == 1 - - # Finish setting up ZHA - zha_flows = hass.config_entries.flow.async_progress_by_handler("zha") - assert len(zha_flows) == 1 - assert zha_flows[0]["step_id"] == "choose_formation_strategy" - - await hass.config_entries.flow.async_configure( - zha_flows[0]["flow_id"], - user_input={"next_step_id": zha.config_flow.FORMATION_REUSE_SETTINGS}, - ) - await hass.async_block_till_done() - - config_entry = hass.config_entries.async_entries("zha")[0] - assert config_entry.data == { - "device": { - "baudrate": 115200, - "flow_control": 
None, - "path": CONFIG_ENTRY_DATA["device"], - }, - "radio_type": "ezsp", - } - assert config_entry.options == {} - assert config_entry.title == CONFIG_ENTRY_DATA["description"] - - -async def test_setup_entry_wait_usb(hass: HomeAssistant) -> None: - """Test setup of a config entry when the dongle is not plugged in.""" - # Setup the config entry - config_entry = MockConfigEntry( - data=CONFIG_ENTRY_DATA, - domain=DOMAIN, - options={}, - title="Home Assistant SkyConnect", - ) - config_entry.add_to_hass(hass) with patch( - "homeassistant.components.homeassistant_sky_connect.usb.async_is_plugged_in", - return_value=False, - ) as mock_is_plugged_in: + "homeassistant.components.homeassistant_sky_connect.guess_firmware_type", + return_value=FirmwareGuess( + is_running=True, + firmware_type=ApplicationType.SPINEL, + source="otbr", + ), + ): await hass.config_entries.async_setup(config_entry.entry_id) - assert config_entry.state is ConfigEntryState.LOADED - assert len(hass.config_entries.async_entries(DOMAIN)) == 1 - # USB discovery starts, config entry should be removed - hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED) - await hass.async_block_till_done() - assert len(mock_is_plugged_in.mock_calls) == 1 - assert len(hass.config_entries.async_entries(DOMAIN)) == 0 + assert config_entry.version == 1 + assert config_entry.minor_version == 2 + assert config_entry.data == { + "description": "SkyConnect v1.0", + "device": "/dev/serial/by-id/usb-Nabu_Casa_SkyConnect_v1.0_9e2adbd75b8beb119fe564a0f320645d-if00-port0", + "vid": "10C4", + "pid": "EA60", + "serial_number": "3c0ed67c628beb11b1cd64a0f320645d", + "manufacturer": "Nabu Casa", + "product": "SkyConnect v1.0", # `description` has been copied to `product` + "firmware": "spinel", # new key + } -async def test_setup_entry_addon_info_fails( - hass: HomeAssistant, addon_store_info -) -> None: - """Test setup of a config entry when fetching addon info fails.""" - assert await async_setup_component(hass, "usb", {}) - 
hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED) - - addon_store_info.side_effect = HassioAPIError("Boom") - - # Setup the config entry - config_entry = MockConfigEntry( - data=CONFIG_ENTRY_DATA, - domain=DOMAIN, - options={}, - title="Home Assistant SkyConnect", - ) - config_entry.add_to_hass(hass) - with ( - patch( - "homeassistant.components.homeassistant_sky_connect.usb.async_is_plugged_in", - return_value=True, - ), - patch( - "homeassistant.components.onboarding.async_is_onboarded", return_value=False - ), - patch( - "homeassistant.components.homeassistant_hardware.silabs_multiprotocol_addon.is_hassio", - side_effect=Mock(return_value=True), - ), - ): - assert not await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - assert config_entry.state is ConfigEntryState.SETUP_RETRY - - -async def test_setup_entry_addon_not_running( - hass: HomeAssistant, addon_installed, start_addon -) -> None: - """Test the addon is started if it is not running.""" - assert await async_setup_component(hass, "usb", {}) - hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED) - - # Setup the config entry - config_entry = MockConfigEntry( - data=CONFIG_ENTRY_DATA, - domain=DOMAIN, - options={}, - title="Home Assistant SkyConnect", - ) - config_entry.add_to_hass(hass) - with ( - patch( - "homeassistant.components.homeassistant_sky_connect.usb.async_is_plugged_in", - return_value=True, - ), - patch( - "homeassistant.components.onboarding.async_is_onboarded", return_value=False - ), - patch( - "homeassistant.components.homeassistant_hardware.silabs_multiprotocol_addon.is_hassio", - side_effect=Mock(return_value=True), - ), - ): - assert not await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - assert config_entry.state is ConfigEntryState.SETUP_RETRY - start_addon.assert_called_once() + await hass.config_entries.async_unload(config_entry.entry_id) diff --git 
a/tests/components/homeassistant_sky_connect/test_util.py b/tests/components/homeassistant_sky_connect/test_util.py new file mode 100644 index 00000000000..12ba352eb16 --- /dev/null +++ b/tests/components/homeassistant_sky_connect/test_util.py @@ -0,0 +1,203 @@ +"""Test SkyConnect utilities.""" + +from unittest.mock import AsyncMock, patch + +from universal_silabs_flasher.const import ApplicationType + +from homeassistant.components.hassio import AddonError, AddonInfo, AddonState +from homeassistant.components.homeassistant_sky_connect.const import ( + DOMAIN, + HardwareVariant, +) +from homeassistant.components.homeassistant_sky_connect.util import ( + FirmwareGuess, + get_hardware_variant, + get_usb_service_info, + get_zha_device_path, + guess_firmware_type, +) +from homeassistant.components.usb import UsbServiceInfo +from homeassistant.config_entries import ConfigEntryState +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + +SKYCONNECT_CONFIG_ENTRY = MockConfigEntry( + domain=DOMAIN, + unique_id="some_unique_id", + data={ + "device": "/dev/serial/by-id/usb-Nabu_Casa_SkyConnect_v1.0_9e2adbd75b8beb119fe564a0f320645d-if00-port0", + "vid": "10C4", + "pid": "EA60", + "serial_number": "3c0ed67c628beb11b1cd64a0f320645d", + "manufacturer": "Nabu Casa", + "product": "SkyConnect v1.0", + "firmware": "ezsp", + }, + version=2, +) + +CONNECT_ZBT1_CONFIG_ENTRY = MockConfigEntry( + domain=DOMAIN, + unique_id="some_unique_id", + data={ + "device": "/dev/serial/by-id/usb-Nabu_Casa_Home_Assistant_Connect_ZBT-1_9e2adbd75b8beb119fe564a0f320645d-if00-port0", + "vid": "10C4", + "pid": "EA60", + "serial_number": "3c0ed67c628beb11b1cd64a0f320645d", + "manufacturer": "Nabu Casa", + "product": "Home Assistant Connect ZBT-1", + "firmware": "ezsp", + }, + version=2, +) + +ZHA_CONFIG_ENTRY = MockConfigEntry( + domain="zha", + unique_id="some_unique_id", + data={ + "device": { + "path": 
"/dev/serial/by-id/usb-Nabu_Casa_Home_Assistant_Connect_ZBT-1_3c0ed67c628beb11b1cd64a0f320645d-if00-port0", + "baudrate": 115200, + "flow_control": None, + }, + "radio_type": "ezsp", + }, + version=4, +) + + +def test_get_usb_service_info() -> None: + """Test `get_usb_service_info` conversion.""" + assert get_usb_service_info(SKYCONNECT_CONFIG_ENTRY) == UsbServiceInfo( + device=SKYCONNECT_CONFIG_ENTRY.data["device"], + vid=SKYCONNECT_CONFIG_ENTRY.data["vid"], + pid=SKYCONNECT_CONFIG_ENTRY.data["pid"], + serial_number=SKYCONNECT_CONFIG_ENTRY.data["serial_number"], + manufacturer=SKYCONNECT_CONFIG_ENTRY.data["manufacturer"], + description=SKYCONNECT_CONFIG_ENTRY.data["product"], + ) + + +def test_get_hardware_variant() -> None: + """Test `get_hardware_variant` extraction.""" + assert get_hardware_variant(SKYCONNECT_CONFIG_ENTRY) == HardwareVariant.SKYCONNECT + assert ( + get_hardware_variant(CONNECT_ZBT1_CONFIG_ENTRY) == HardwareVariant.CONNECT_ZBT1 + ) + + +def test_get_zha_device_path() -> None: + """Test extracting the ZHA device path from its config entry.""" + assert ( + get_zha_device_path(ZHA_CONFIG_ENTRY) == ZHA_CONFIG_ENTRY.data["device"]["path"] + ) + + +async def test_guess_firmware_type_unknown(hass: HomeAssistant) -> None: + """Test guessing the firmware type.""" + + assert (await guess_firmware_type(hass, "/dev/missing")) == FirmwareGuess( + is_running=False, firmware_type=ApplicationType.EZSP, source="unknown" + ) + + +async def test_guess_firmware_type(hass: HomeAssistant) -> None: + """Test guessing the firmware.""" + path = ZHA_CONFIG_ENTRY.data["device"]["path"] + + ZHA_CONFIG_ENTRY.add_to_hass(hass) + + ZHA_CONFIG_ENTRY.mock_state(hass, ConfigEntryState.NOT_LOADED) + assert (await guess_firmware_type(hass, path)) == FirmwareGuess( + is_running=False, firmware_type=ApplicationType.EZSP, source="zha" + ) + + # When ZHA is running, we indicate as such when guessing + ZHA_CONFIG_ENTRY.mock_state(hass, ConfigEntryState.LOADED) + assert (await 
guess_firmware_type(hass, path)) == FirmwareGuess( + is_running=True, firmware_type=ApplicationType.EZSP, source="zha" + ) + + mock_otbr_addon_manager = AsyncMock() + mock_multipan_addon_manager = AsyncMock() + + with ( + patch( + "homeassistant.components.homeassistant_sky_connect.util.is_hassio", + return_value=True, + ), + patch( + "homeassistant.components.homeassistant_sky_connect.util.get_otbr_addon_manager", + return_value=mock_otbr_addon_manager, + ), + patch( + "homeassistant.components.homeassistant_sky_connect.util.get_multiprotocol_addon_manager", + return_value=mock_multipan_addon_manager, + ), + ): + mock_otbr_addon_manager.async_get_addon_info.side_effect = AddonError() + mock_multipan_addon_manager.async_get_addon_info.side_effect = AddonError() + + # Hassio errors are ignored and we still go with ZHA + assert (await guess_firmware_type(hass, path)) == FirmwareGuess( + is_running=True, firmware_type=ApplicationType.EZSP, source="zha" + ) + + mock_otbr_addon_manager.async_get_addon_info.side_effect = None + mock_otbr_addon_manager.async_get_addon_info.return_value = AddonInfo( + available=True, + hostname=None, + options={"device": "/some/other/device"}, + state=AddonState.RUNNING, + update_available=False, + version="1.0.0", + ) + + # We will prefer ZHA, as it is running (and actually pointing to the device) + assert (await guess_firmware_type(hass, path)) == FirmwareGuess( + is_running=True, firmware_type=ApplicationType.EZSP, source="zha" + ) + + mock_otbr_addon_manager.async_get_addon_info.return_value = AddonInfo( + available=True, + hostname=None, + options={"device": path}, + state=AddonState.NOT_RUNNING, + update_available=False, + version="1.0.0", + ) + + # We will still prefer ZHA, as it is the one actually running + assert (await guess_firmware_type(hass, path)) == FirmwareGuess( + is_running=True, firmware_type=ApplicationType.EZSP, source="zha" + ) + + mock_otbr_addon_manager.async_get_addon_info.return_value = AddonInfo( + 
available=True, + hostname=None, + options={"device": path}, + state=AddonState.RUNNING, + update_available=False, + version="1.0.0", + ) + + # Finally, ZHA loses out to OTBR + assert (await guess_firmware_type(hass, path)) == FirmwareGuess( + is_running=True, firmware_type=ApplicationType.SPINEL, source="otbr" + ) + + mock_multipan_addon_manager.async_get_addon_info.side_effect = None + mock_multipan_addon_manager.async_get_addon_info.return_value = AddonInfo( + available=True, + hostname=None, + options={"device": path}, + state=AddonState.RUNNING, + update_available=False, + version="1.0.0", + ) + + # Which will lose out to multi-PAN + assert (await guess_firmware_type(hass, path)) == FirmwareGuess( + is_running=True, firmware_type=ApplicationType.CPC, source="multiprotocol" + ) diff --git a/tests/components/usb/__init__.py b/tests/components/usb/__init__.py index f5f32336931..96d671d0958 100644 --- a/tests/components/usb/__init__.py +++ b/tests/components/usb/__init__.py @@ -26,3 +26,19 @@ electro_lama_device = USBDevice( manufacturer=None, description="USB2.0-Serial", ) +skyconnect_macos_correct = USBDevice( + device="/dev/cu.SLAB_USBtoUART", + vid="10C4", + pid="EA60", + serial_number="9ab1da1ea4b3ed11956f4eaca7669f5d", + manufacturer="Nabu Casa", + description="SkyConnect v1.0", +) +skyconnect_macos_incorrect = USBDevice( + device="/dev/cu.usbserial-2110", + vid="10C4", + pid="EA60", + serial_number="9ab1da1ea4b3ed11956f4eaca7669f5d", + manufacturer="Nabu Casa", + description="SkyConnect v1.0", +) diff --git a/tests/components/zha/test_repairs.py b/tests/components/zha/test_repairs.py index 5e128cc464a..5b57ec7fcc2 100644 --- a/tests/components/zha/test_repairs.py +++ b/tests/components/zha/test_repairs.py @@ -12,7 +12,7 @@ from zigpy.application import ControllerApplication import zigpy.backups from zigpy.exceptions import NetworkSettingsInconsistent -from homeassistant.components.homeassistant_sky_connect import ( +from 
homeassistant.components.homeassistant_sky_connect.const import ( DOMAIN as SKYCONNECT_DOMAIN, ) from homeassistant.components.repairs import DOMAIN as REPAIRS_DOMAIN @@ -59,8 +59,10 @@ def test_detect_radio_hardware(hass: HomeAssistant) -> None: "pid": "EA60", "serial_number": "3c0ed67c628beb11b1cd64a0f320645d", "manufacturer": "Nabu Casa", - "description": "SkyConnect v1.0", + "product": "SkyConnect v1.0", + "firmware": "ezsp", }, + version=2, domain=SKYCONNECT_DOMAIN, options={}, title="Home Assistant SkyConnect", @@ -74,8 +76,10 @@ def test_detect_radio_hardware(hass: HomeAssistant) -> None: "pid": "EA60", "serial_number": "3c0ed67c628beb11b1cd64a0f320645d", "manufacturer": "Nabu Casa", - "description": "Home Assistant Connect ZBT-1", + "product": "Home Assistant Connect ZBT-1", + "firmware": "ezsp", }, + version=2, domain=SKYCONNECT_DOMAIN, options={}, title="Home Assistant Connect ZBT-1", From 2beab34de8fdc16f8239ed37034136067bb69805 Mon Sep 17 00:00:00 2001 From: Manuel Dipolt Date: Wed, 24 Apr 2024 17:06:46 +0200 Subject: [PATCH 382/426] Add sensor platform to romy integration (#112388) * poc romy status sensor working * poc romy adc sensor working * code review changes * code review changes base enitity.py see branch romy_binary_sensor * code review change: move CoordinatorEntity to the base class * code review changes: sensors disabled per default * code review: icons.json added * checkout main entity.py * code review changes: sensors enabled per default again * disable rssi sensor per default * Update homeassistant/components/romy/strings.json Co-authored-by: Joost Lekkerkerker * code review changes * code review changes * code review changes * pylint fix --------- Co-authored-by: Joost Lekkerkerker --- .coveragerc | 1 + .../components/romy/binary_sensor.py | 2 +- homeassistant/components/romy/const.py | 2 +- homeassistant/components/romy/icons.json | 17 +++ homeassistant/components/romy/sensor.py | 112 ++++++++++++++++++ 
homeassistant/components/romy/strings.json | 17 +++ 6 files changed, 149 insertions(+), 2 deletions(-) create mode 100644 homeassistant/components/romy/sensor.py diff --git a/.coveragerc b/.coveragerc index ca2cce2719f..1ccb9e461df 100644 --- a/.coveragerc +++ b/.coveragerc @@ -1161,6 +1161,7 @@ omit = homeassistant/components/romy/binary_sensor.py homeassistant/components/romy/coordinator.py homeassistant/components/romy/entity.py + homeassistant/components/romy/sensor.py homeassistant/components/romy/vacuum.py homeassistant/components/roomba/__init__.py homeassistant/components/roomba/binary_sensor.py diff --git a/homeassistant/components/romy/binary_sensor.py b/homeassistant/components/romy/binary_sensor.py index 263c5840e5f..d8f6216007f 100644 --- a/homeassistant/components/romy/binary_sensor.py +++ b/homeassistant/components/romy/binary_sensor.py @@ -62,7 +62,7 @@ class RomyBinarySensor(RomyEntity, BinarySensorEntity): coordinator: RomyVacuumCoordinator, entity_description: BinarySensorEntityDescription, ) -> None: - """Initialize ROMYs StatusSensor.""" + """Initialize the RomyBinarySensor.""" super().__init__(coordinator) self._attr_unique_id = f"{entity_description.key}_{self.romy.unique_id}" self.entity_description = entity_description diff --git a/homeassistant/components/romy/const.py b/homeassistant/components/romy/const.py index 0fa039e8d1b..a41482ffe59 100644 --- a/homeassistant/components/romy/const.py +++ b/homeassistant/components/romy/const.py @@ -6,6 +6,6 @@ import logging from homeassistant.const import Platform DOMAIN = "romy" -PLATFORMS = [Platform.BINARY_SENSOR, Platform.VACUUM] +PLATFORMS = [Platform.BINARY_SENSOR, Platform.SENSOR, Platform.VACUUM] UPDATE_INTERVAL = timedelta(seconds=5) LOGGER = logging.getLogger(__package__) diff --git a/homeassistant/components/romy/icons.json b/homeassistant/components/romy/icons.json index c27b36af64c..3425d5cfade 100644 --- a/homeassistant/components/romy/icons.json +++ 
b/homeassistant/components/romy/icons.json @@ -15,6 +15,23 @@ "on": "mdi:basket-check" } } + }, + "sensor": { + "dustbin_sensor": { + "default": "mdi:basket-fill" + }, + "total_cleaning_time": { + "default": "mdi:clock" + }, + "total_number_of_cleaning_runs": { + "default": "mdi:counter" + }, + "total_area_cleaned": { + "default": "mdi:texture-box" + }, + "total_distance_driven": { + "default": "mdi:run" + } } } } diff --git a/homeassistant/components/romy/sensor.py b/homeassistant/components/romy/sensor.py new file mode 100644 index 00000000000..bdd486c4f8f --- /dev/null +++ b/homeassistant/components/romy/sensor.py @@ -0,0 +1,112 @@ +"""Sensor checking adc and status values from your ROMY.""" + +from homeassistant.components.sensor import ( + SensorDeviceClass, + SensorEntity, + SensorEntityDescription, + SensorStateClass, +) +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import ( + AREA_SQUARE_METERS, + PERCENTAGE, + SIGNAL_STRENGTH_DECIBELS_MILLIWATT, + EntityCategory, + UnitOfLength, + UnitOfTime, +) +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from .const import DOMAIN +from .coordinator import RomyVacuumCoordinator +from .entity import RomyEntity + +SENSORS: list[SensorEntityDescription] = [ + SensorEntityDescription( + key="battery_level", + native_unit_of_measurement=PERCENTAGE, + device_class=SensorDeviceClass.BATTERY, + entity_category=EntityCategory.DIAGNOSTIC, + ), + SensorEntityDescription( + key="rssi", + entity_registry_enabled_default=False, + native_unit_of_measurement=SIGNAL_STRENGTH_DECIBELS_MILLIWATT, + device_class=SensorDeviceClass.SIGNAL_STRENGTH, + entity_category=EntityCategory.DIAGNOSTIC, + ), + SensorEntityDescription( + key="dustbin_sensor", + translation_key="dustbin_sensor", + entity_registry_enabled_default=False, + state_class=SensorStateClass.MEASUREMENT, + entity_category=EntityCategory.DIAGNOSTIC, + ), + 
SensorEntityDescription( + key="total_cleaning_time", + translation_key="total_cleaning_time", + state_class=SensorStateClass.TOTAL, + native_unit_of_measurement=UnitOfTime.HOURS, + entity_category=EntityCategory.DIAGNOSTIC, + ), + SensorEntityDescription( + key="total_number_of_cleaning_runs", + translation_key="total_number_of_cleaning_runs", + state_class=SensorStateClass.TOTAL, + native_unit_of_measurement="runs", + entity_category=EntityCategory.DIAGNOSTIC, + ), + SensorEntityDescription( + key="total_area_cleaned", + translation_key="total_area_cleaned", + state_class=SensorStateClass.TOTAL, + native_unit_of_measurement=AREA_SQUARE_METERS, + entity_category=EntityCategory.DIAGNOSTIC, + ), + SensorEntityDescription( + key="total_distance_driven", + translation_key="total_distance_driven", + state_class=SensorStateClass.TOTAL, + native_unit_of_measurement=UnitOfLength.METERS, + entity_category=EntityCategory.DIAGNOSTIC, + ), +] + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up ROMY vacuum cleaner.""" + + coordinator: RomyVacuumCoordinator = hass.data[DOMAIN][config_entry.entry_id] + + async_add_entities( + RomySensor(coordinator, entity_description) + for entity_description in SENSORS + if entity_description.key in coordinator.romy.sensors + ) + + +class RomySensor(RomyEntity, SensorEntity): + """RomySensor Class.""" + + entity_description: SensorEntityDescription + + def __init__( + self, + coordinator: RomyVacuumCoordinator, + entity_description: SensorEntityDescription, + ) -> None: + """Initialize ROMYs StatusSensor.""" + super().__init__(coordinator) + self._attr_unique_id = f"{entity_description.key}_{self.romy.unique_id}" + self.entity_description = entity_description + + @property + def native_value(self) -> int: + """Return the value of the sensor.""" + value: int = self.romy.sensors[self.entity_description.key] + return value diff --git 
a/homeassistant/components/romy/strings.json b/homeassistant/components/romy/strings.json index f4bc4d191ff..78721da17ba 100644 --- a/homeassistant/components/romy/strings.json +++ b/homeassistant/components/romy/strings.json @@ -60,6 +60,23 @@ "water_tank_empty": { "name": "Watertank empty" } + }, + "sensor": { + "dustbin_sensor": { + "name": "Dustbin dirt level" + }, + "total_cleaning_time": { + "name": "Total cleaning time" + }, + "total_number_of_cleaning_runs": { + "name": "Total cleaning runs" + }, + "total_area_cleaned": { + "name": "Total cleaned area" + }, + "total_distance_driven": { + "name": "Total distance driven" + } } } } From f83ee963bfc2efbb7cfdf70ef7e2da749068e6ea Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Joakim=20S=C3=B8rensen?= Date: Wed, 24 Apr 2024 17:08:56 +0200 Subject: [PATCH 383/426] Add binary sensor entities to Traccar Server (#114719) --- .../components/traccar_server/__init__.py | 6 +- .../traccar_server/binary_sensor.py | 99 +++++++++++++++++++ .../traccar_server/device_tracker.py | 11 +-- .../components/traccar_server/icons.json | 9 ++ .../components/traccar_server/strings.json | 16 +++ .../snapshots/test_diagnostics.ambr | 51 +++++++++- 6 files changed, 177 insertions(+), 15 deletions(-) create mode 100644 homeassistant/components/traccar_server/binary_sensor.py diff --git a/homeassistant/components/traccar_server/__init__.py b/homeassistant/components/traccar_server/__init__.py index 703df6cbfa4..c7a65d2d4a8 100644 --- a/homeassistant/components/traccar_server/__init__.py +++ b/homeassistant/components/traccar_server/__init__.py @@ -30,7 +30,11 @@ from .const import ( ) from .coordinator import TraccarServerCoordinator -PLATFORMS: list[Platform] = [Platform.DEVICE_TRACKER, Platform.SENSOR] +PLATFORMS: list[Platform] = [ + Platform.BINARY_SENSOR, + Platform.DEVICE_TRACKER, + Platform.SENSOR, +] async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: diff --git 
a/homeassistant/components/traccar_server/binary_sensor.py b/homeassistant/components/traccar_server/binary_sensor.py new file mode 100644 index 00000000000..6ee5757dcea --- /dev/null +++ b/homeassistant/components/traccar_server/binary_sensor.py @@ -0,0 +1,99 @@ +"""Support for Traccar server binary sensors.""" + +from __future__ import annotations + +from collections.abc import Callable +from dataclasses import dataclass +from typing import Generic, Literal, TypeVar, cast + +from pytraccar import DeviceModel + +from homeassistant.components.binary_sensor import ( + BinarySensorDeviceClass, + BinarySensorEntity, + BinarySensorEntityDescription, +) +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import EntityCategory +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from .const import DOMAIN +from .coordinator import TraccarServerCoordinator +from .entity import TraccarServerEntity + +_T = TypeVar("_T") + + +@dataclass(frozen=True, kw_only=True) +class TraccarServerBinarySensorEntityDescription( + Generic[_T], BinarySensorEntityDescription +): + """Describe Traccar Server sensor entity.""" + + data_key: Literal["position", "device", "geofence", "attributes"] + entity_registry_enabled_default = False + entity_category = EntityCategory.DIAGNOSTIC + value_fn: Callable[[_T], bool | None] + + +TRACCAR_SERVER_BINARY_SENSOR_ENTITY_DESCRIPTIONS = ( + TraccarServerBinarySensorEntityDescription[DeviceModel]( + key="attributes.motion", + data_key="position", + translation_key="motion", + device_class=BinarySensorDeviceClass.MOTION, + value_fn=lambda x: x["attributes"].get("motion", False), + ), + TraccarServerBinarySensorEntityDescription[DeviceModel]( + key="status", + data_key="device", + translation_key="status", + value_fn=lambda x: None if (s := x["status"]) == "unknown" else s == "online", + ), +) + + +async def async_setup_entry( + hass: HomeAssistant, + entry: 
ConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up binary sensor entities.""" + coordinator: TraccarServerCoordinator = hass.data[DOMAIN][entry.entry_id] + async_add_entities( + TraccarServerBinarySensor( + coordinator=coordinator, + device=entry["device"], + description=cast(TraccarServerBinarySensorEntityDescription, description), + ) + for entry in coordinator.data.values() + for description in TRACCAR_SERVER_BINARY_SENSOR_ENTITY_DESCRIPTIONS + ) + + +class TraccarServerBinarySensor(TraccarServerEntity, BinarySensorEntity): + """Represent a traccar server binary sensor.""" + + _attr_has_entity_name = True + entity_description: TraccarServerBinarySensorEntityDescription + + def __init__( + self, + coordinator: TraccarServerCoordinator, + device: DeviceModel, + description: TraccarServerBinarySensorEntityDescription[_T], + ) -> None: + """Initialize the Traccar Server sensor.""" + super().__init__(coordinator, device) + self.entity_description = description + self._attr_unique_id = ( + f"{device['uniqueId']}_{description.data_key}_{description.key}" + ) + + @property + def is_on(self) -> bool | None: + """Return if the binary sensor is on or not.""" + return self.entity_description.value_fn( + getattr(self, f"traccar_{self.entity_description.data_key}") + ) diff --git a/homeassistant/components/traccar_server/device_tracker.py b/homeassistant/components/traccar_server/device_tracker.py index d15ba084dad..e7dba3ad99d 100644 --- a/homeassistant/components/traccar_server/device_tracker.py +++ b/homeassistant/components/traccar_server/device_tracker.py @@ -9,14 +9,7 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .const import ( - ATTR_CATEGORY, - ATTR_MOTION, - ATTR_STATUS, - ATTR_TRACCAR_ID, - ATTR_TRACKER, - DOMAIN, -) +from .const import ATTR_CATEGORY, ATTR_TRACCAR_ID, ATTR_TRACKER, DOMAIN from .coordinator 
import TraccarServerCoordinator from .entity import TraccarServerEntity @@ -46,8 +39,6 @@ class TraccarServerDeviceTracker(TraccarServerEntity, TrackerEntity): return { **self.traccar_attributes, ATTR_CATEGORY: self.traccar_device["category"], - ATTR_MOTION: self.traccar_position["attributes"].get("motion", False), - ATTR_STATUS: self.traccar_device["status"], ATTR_TRACCAR_ID: self.traccar_device["id"], ATTR_TRACKER: DOMAIN, } diff --git a/homeassistant/components/traccar_server/icons.json b/homeassistant/components/traccar_server/icons.json index 59fc663e712..a10b154fbff 100644 --- a/homeassistant/components/traccar_server/icons.json +++ b/homeassistant/components/traccar_server/icons.json @@ -1,5 +1,14 @@ { "entity": { + "binary_sensor": { + "status": { + "default": "mdi:access-point-minus", + "state": { + "off": "mdi:access-point-off", + "on": "mdi:access-point" + } + } + }, "sensor": { "altitude": { "default": "mdi:altimeter" diff --git a/homeassistant/components/traccar_server/strings.json b/homeassistant/components/traccar_server/strings.json index 41adaace77e..8bec4b112ac 100644 --- a/homeassistant/components/traccar_server/strings.json +++ b/homeassistant/components/traccar_server/strings.json @@ -43,6 +43,22 @@ } }, "entity": { + "binary_sensor": { + "motion": { + "name": "Motion", + "state": { + "off": "Stopped", + "on": "Moving" + } + }, + "status": { + "name": "Status", + "state": { + "off": "Offline", + "on": "Online" + } + } + }, "sensor": { "address": { "name": "Address" diff --git a/tests/components/traccar_server/snapshots/test_diagnostics.ambr b/tests/components/traccar_server/snapshots/test_diagnostics.ambr index 300444f10f1..89a6416c303 100644 --- a/tests/components/traccar_server/snapshots/test_diagnostics.ambr +++ b/tests/components/traccar_server/snapshots/test_diagnostics.ambr @@ -82,9 +82,7 @@ 'gps_accuracy': 3.5, 'latitude': '**REDACTED**', 'longitude': '**REDACTED**', - 'motion': False, 'source_type': 'gps', - 'status': 'online', 
'traccar_id': 0, 'tracker': 'traccar_server', }), @@ -92,6 +90,29 @@ }), 'unit_of_measurement': None, }), + dict({ + 'disabled': False, + 'enity_id': 'binary_sensor.x_wing_motion', + 'state': dict({ + 'attributes': dict({ + 'device_class': 'motion', + 'friendly_name': 'X-Wing Motion', + }), + 'state': 'off', + }), + 'unit_of_measurement': None, + }), + dict({ + 'disabled': False, + 'enity_id': 'binary_sensor.x_wing_status', + 'state': dict({ + 'attributes': dict({ + 'friendly_name': 'X-Wing Status', + }), + 'state': 'on', + }), + 'unit_of_measurement': None, + }), dict({ 'disabled': False, 'enity_id': 'sensor.x_wing_battery', @@ -231,6 +252,18 @@ }), }), 'entities': list([ + dict({ + 'disabled': True, + 'enity_id': 'binary_sensor.x_wing_motion', + 'state': None, + 'unit_of_measurement': None, + }), + dict({ + 'disabled': True, + 'enity_id': 'binary_sensor.x_wing_status', + 'state': None, + 'unit_of_measurement': None, + }), dict({ 'disabled': True, 'enity_id': 'sensor.x_wing_battery', @@ -343,6 +376,18 @@ }), }), 'entities': list([ + dict({ + 'disabled': True, + 'enity_id': 'binary_sensor.x_wing_motion', + 'state': None, + 'unit_of_measurement': None, + }), + dict({ + 'disabled': True, + 'enity_id': 'binary_sensor.x_wing_status', + 'state': None, + 'unit_of_measurement': None, + }), dict({ 'disabled': True, 'enity_id': 'sensor.x_wing_battery', @@ -384,9 +429,7 @@ 'gps_accuracy': 3.5, 'latitude': '**REDACTED**', 'longitude': '**REDACTED**', - 'motion': False, 'source_type': 'gps', - 'status': 'online', 'traccar_id': 0, 'tracker': 'traccar_server', }), From 5c3ffb8f550a36894bb2980491f4fdfd5b7bc30c Mon Sep 17 00:00:00 2001 From: TheJulianJES Date: Wed, 24 Apr 2024 17:24:43 +0200 Subject: [PATCH 384/426] Bump ZHA dependencies (#116106) --- homeassistant/components/zha/manifest.json | 4 ++-- requirements_all.txt | 4 ++-- requirements_test_all.txt | 4 ++-- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/homeassistant/components/zha/manifest.json 
b/homeassistant/components/zha/manifest.json index 9b7788ff129..452f11db85b 100644 --- a/homeassistant/components/zha/manifest.json +++ b/homeassistant/components/zha/manifest.json @@ -21,12 +21,12 @@ "universal_silabs_flasher" ], "requirements": [ - "bellows==0.38.1", + "bellows==0.38.2", "pyserial==3.5", "pyserial-asyncio==0.6", "zha-quirks==0.0.115", "zigpy-deconz==0.23.1", - "zigpy==0.63.5", + "zigpy==0.64.0", "zigpy-xbee==0.20.1", "zigpy-zigate==0.12.0", "zigpy-znp==0.12.1", diff --git a/requirements_all.txt b/requirements_all.txt index 256c5c3500e..14e88a30354 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -541,7 +541,7 @@ beautifulsoup4==4.12.3 # beewi-smartclim==0.0.10 # homeassistant.components.zha -bellows==0.38.1 +bellows==0.38.2 # homeassistant.components.bmw_connected_drive bimmer-connected[china]==0.14.6 @@ -2959,7 +2959,7 @@ zigpy-zigate==0.12.0 zigpy-znp==0.12.1 # homeassistant.components.zha -zigpy==0.63.5 +zigpy==0.64.0 # homeassistant.components.zoneminder zm-py==0.5.4 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 63a3563ebaf..9c698476e11 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -466,7 +466,7 @@ base36==0.1.1 beautifulsoup4==4.12.3 # homeassistant.components.zha -bellows==0.38.1 +bellows==0.38.2 # homeassistant.components.bmw_connected_drive bimmer-connected[china]==0.14.6 @@ -2300,7 +2300,7 @@ zigpy-zigate==0.12.0 zigpy-znp==0.12.1 # homeassistant.components.zha -zigpy==0.63.5 +zigpy==0.64.0 # homeassistant.components.zwave_js zwave-js-server-python==0.55.3 From 7d5af09aecbc72b388259620e9a0bbe32e86c57c Mon Sep 17 00:00:00 2001 From: Simone Chemelli Date: Wed, 24 Apr 2024 17:32:12 +0200 Subject: [PATCH 385/426] Add quality scale to Comelit (#116041) add quality scale --- homeassistant/components/comelit/manifest.json | 2 ++ 1 file changed, 2 insertions(+) diff --git a/homeassistant/components/comelit/manifest.json b/homeassistant/components/comelit/manifest.json index 
d93ec349bba..b9264d16f69 100644 --- a/homeassistant/components/comelit/manifest.json +++ b/homeassistant/components/comelit/manifest.json @@ -4,7 +4,9 @@ "codeowners": ["@chemelli74"], "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/comelit", + "integration_type": "hub", "iot_class": "local_polling", "loggers": ["aiocomelit"], + "quality_scale": "silver", "requirements": ["aiocomelit==0.9.0"] } From 41a86d24044b9f4371cf70394525a2f1e546bed4 Mon Sep 17 00:00:00 2001 From: Simone Chemelli Date: Wed, 24 Apr 2024 17:36:31 +0200 Subject: [PATCH 386/426] Add quality scale to Vodafone Station (#116040) Add quality scale --- homeassistant/components/vodafone_station/manifest.json | 2 ++ 1 file changed, 2 insertions(+) diff --git a/homeassistant/components/vodafone_station/manifest.json b/homeassistant/components/vodafone_station/manifest.json index ced871b7616..7e2e974e709 100644 --- a/homeassistant/components/vodafone_station/manifest.json +++ b/homeassistant/components/vodafone_station/manifest.json @@ -4,7 +4,9 @@ "codeowners": ["@paoloantinori", "@chemelli74"], "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/vodafone_station", + "integration_type": "hub", "iot_class": "local_polling", "loggers": ["aiovodafone"], + "quality_scale": "silver", "requirements": ["aiovodafone==0.5.4"] } From d565c1a84bf0b3df764b19837b43e774a6493e44 Mon Sep 17 00:00:00 2001 From: Steve Easley Date: Wed, 24 Apr 2024 11:36:50 -0400 Subject: [PATCH 387/426] Add select platform to jvc_projector component (#111638) * Initial commit of jvc_projector select platform * Move icon to icons.json * Apply suggestions from code review * Update tests/components/jvc_projector/test_select.py --------- Co-authored-by: Erik Montnemery Co-authored-by: Joost Lekkerkerker --- .../components/jvc_projector/__init__.py | 2 +- .../components/jvc_projector/icons.json | 5 ++ .../components/jvc_projector/select.py | 77 +++++++++++++++++++ 
.../components/jvc_projector/strings.json | 9 +++ tests/components/jvc_projector/test_select.py | 44 +++++++++++ 5 files changed, 136 insertions(+), 1 deletion(-) create mode 100644 homeassistant/components/jvc_projector/select.py create mode 100644 tests/components/jvc_projector/test_select.py diff --git a/homeassistant/components/jvc_projector/__init__.py b/homeassistant/components/jvc_projector/__init__.py index 28e4cc995bb..8ce1fb46e3d 100644 --- a/homeassistant/components/jvc_projector/__init__.py +++ b/homeassistant/components/jvc_projector/__init__.py @@ -18,7 +18,7 @@ from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady from .const import DOMAIN from .coordinator import JvcProjectorDataUpdateCoordinator -PLATFORMS = [Platform.BINARY_SENSOR, Platform.REMOTE, Platform.SENSOR] +PLATFORMS = [Platform.BINARY_SENSOR, Platform.REMOTE, Platform.SELECT, Platform.SENSOR] async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: diff --git a/homeassistant/components/jvc_projector/icons.json b/homeassistant/components/jvc_projector/icons.json index c70ded78cb4..a0404b328e1 100644 --- a/homeassistant/components/jvc_projector/icons.json +++ b/homeassistant/components/jvc_projector/icons.json @@ -8,6 +8,11 @@ } } }, + "select": { + "input": { + "default": "mdi:hdmi-port" + } + }, "sensor": { "jvc_power_status": { "default": "mdi:power-plug-off", diff --git a/homeassistant/components/jvc_projector/select.py b/homeassistant/components/jvc_projector/select.py new file mode 100644 index 00000000000..1395637fad1 --- /dev/null +++ b/homeassistant/components/jvc_projector/select.py @@ -0,0 +1,77 @@ +"""Select platform for the jvc_projector integration.""" + +from __future__ import annotations + +from collections.abc import Awaitable, Callable +from dataclasses import dataclass +from typing import Final + +from jvcprojector import JvcProjector, const + +from homeassistant.components.select import SelectEntity, SelectEntityDescription 
+from homeassistant.config_entries import ConfigEntry +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from . import JvcProjectorDataUpdateCoordinator +from .const import DOMAIN +from .entity import JvcProjectorEntity + + +@dataclass(frozen=True, kw_only=True) +class JvcProjectorSelectDescription(SelectEntityDescription): + """Describes JVC Projector select entities.""" + + command: Callable[[JvcProjector, str], Awaitable[None]] + + +OPTIONS: Final[dict[str, dict[str, str]]] = { + "input": {const.HDMI1: const.REMOTE_HDMI_1, const.HDMI2: const.REMOTE_HDMI_2} +} + +SELECTS: Final[list[JvcProjectorSelectDescription]] = [ + JvcProjectorSelectDescription( + key="input", + translation_key="input", + options=list(OPTIONS["input"]), + command=lambda device, option: device.remote(OPTIONS["input"][option]), + ) +] + + +async def async_setup_entry( + hass: HomeAssistant, + entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up the JVC Projector platform from a config entry.""" + coordinator: JvcProjectorDataUpdateCoordinator = hass.data[DOMAIN][entry.entry_id] + + async_add_entities( + JvcProjectorSelectEntity(coordinator, description) for description in SELECTS + ) + + +class JvcProjectorSelectEntity(JvcProjectorEntity, SelectEntity): + """Representation of a JVC Projector select entity.""" + + entity_description: JvcProjectorSelectDescription + + def __init__( + self, + coordinator: JvcProjectorDataUpdateCoordinator, + description: JvcProjectorSelectDescription, + ) -> None: + """Initialize the entity.""" + super().__init__(coordinator) + self.entity_description = description + self._attr_unique_id = f"{coordinator.unique_id}_{description.key}" + + @property + def current_option(self) -> str | None: + """Return the selected entity option to represent the entity state.""" + return self.coordinator.data[self.entity_description.key] + + async def async_select_option(self, 
option: str) -> None: + """Change the selected option.""" + await self.entity_description.command(self.coordinator.device, option) diff --git a/homeassistant/components/jvc_projector/strings.json b/homeassistant/components/jvc_projector/strings.json index 9991fa1cf67..b89139cbab3 100644 --- a/homeassistant/components/jvc_projector/strings.json +++ b/homeassistant/components/jvc_projector/strings.json @@ -38,6 +38,15 @@ "name": "[%key:component::sensor::entity_component::power::name%]" } }, + "select": { + "input": { + "name": "Input", + "state": { + "hdmi1": "HDMI 1", + "hdmi2": "HDMI 2" + } + } + }, "sensor": { "jvc_power_status": { "name": "Power status", diff --git a/tests/components/jvc_projector/test_select.py b/tests/components/jvc_projector/test_select.py new file mode 100644 index 00000000000..a52133bd688 --- /dev/null +++ b/tests/components/jvc_projector/test_select.py @@ -0,0 +1,44 @@ +"""Tests for JVC Projector select platform.""" + +from unittest.mock import MagicMock + +from jvcprojector import const + +from homeassistant.components.select import ( + ATTR_OPTIONS, + DOMAIN as SELECT_DOMAIN, + SERVICE_SELECT_OPTION, +) +from homeassistant.const import ATTR_ENTITY_ID, ATTR_FRIENDLY_NAME, ATTR_OPTION +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from tests.common import MockConfigEntry + +INPUT_ENTITY_ID = "select.jvc_projector_input" + + +async def test_input_select( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + mock_device: MagicMock, + mock_integration: MockConfigEntry, +) -> None: + """Test input select.""" + entity = hass.states.get(INPUT_ENTITY_ID) + assert entity + assert entity.attributes.get(ATTR_FRIENDLY_NAME) == "JVC Projector Input" + assert entity.attributes.get(ATTR_OPTIONS) == [const.HDMI1, const.HDMI2] + assert entity.state == const.HDMI1 + + await hass.services.async_call( + SELECT_DOMAIN, + SERVICE_SELECT_OPTION, + { + ATTR_ENTITY_ID: INPUT_ENTITY_ID, + 
ATTR_OPTION: const.HDMI2, + }, + blocking=True, + ) + + mock_device.remote.assert_called_once_with(const.REMOTE_HDMI_2) From bc7fa8cf9e2b152f448a2ae9dd03e48345e0ea6f Mon Sep 17 00:00:00 2001 From: Michael Hansen Date: Wed, 24 Apr 2024 11:41:17 -0500 Subject: [PATCH 388/426] Bump intents to 2024.4.24 (#116111) --- homeassistant/components/conversation/manifest.json | 2 +- homeassistant/package_constraints.txt | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/conversation/manifest.json b/homeassistant/components/conversation/manifest.json index 8ee27986bb8..82e2adca680 100644 --- a/homeassistant/components/conversation/manifest.json +++ b/homeassistant/components/conversation/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/conversation", "integration_type": "system", "quality_scale": "internal", - "requirements": ["hassil==1.6.1", "home-assistant-intents==2024.4.3"] + "requirements": ["hassil==1.6.1", "home-assistant-intents==2024.4.24"] } diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index 74c4d185847..b88f2aefffa 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -33,7 +33,7 @@ hass-nabucasa==0.78.0 hassil==1.6.1 home-assistant-bluetooth==1.12.0 home-assistant-frontend==20240424.1 -home-assistant-intents==2024.4.3 +home-assistant-intents==2024.4.24 httpx==0.27.0 ifaddr==0.2.0 Jinja2==3.1.3 diff --git a/requirements_all.txt b/requirements_all.txt index 14e88a30354..75a7411c64b 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1081,7 +1081,7 @@ holidays==0.47 home-assistant-frontend==20240424.1 # homeassistant.components.conversation -home-assistant-intents==2024.4.3 +home-assistant-intents==2024.4.24 # homeassistant.components.home_connect homeconnect==0.7.2 diff --git a/requirements_test_all.txt 
b/requirements_test_all.txt index 9c698476e11..4e8f9ecb69f 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -883,7 +883,7 @@ holidays==0.47 home-assistant-frontend==20240424.1 # homeassistant.components.conversation -home-assistant-intents==2024.4.3 +home-assistant-intents==2024.4.24 # homeassistant.components.home_connect homeconnect==0.7.2 From 67021be27456afbfee6e44be180644febd4a36f7 Mon Sep 17 00:00:00 2001 From: "Mr. Bubbles" Date: Wed, 24 Apr 2024 19:41:46 +0200 Subject: [PATCH 389/426] Add notification service for Bring component (#109222) * Add notification service for Bring component * change to async * update to new library and raise for urgent message without item name * add icons.json and replace string with reference in strings.json * Incorporate proposed changes from https://github.com/home-assistant/core/pull/115510 * Remove unnecessary exception, rewrite translations strings * remove unused constants --- homeassistant/components/bring/const.py | 8 +++ homeassistant/components/bring/icons.json | 3 ++ homeassistant/components/bring/services.yaml | 23 +++++++++ homeassistant/components/bring/strings.json | 36 ++++++++++++++ homeassistant/components/bring/todo.py | 52 ++++++++++++++++++-- 5 files changed, 119 insertions(+), 3 deletions(-) create mode 100644 homeassistant/components/bring/services.yaml diff --git a/homeassistant/components/bring/const.py b/homeassistant/components/bring/const.py index 64a6ec67f85..911c08a835d 100644 --- a/homeassistant/components/bring/const.py +++ b/homeassistant/components/bring/const.py @@ -1,3 +1,11 @@ """Constants for the Bring! 
integration.""" +from typing import Final + DOMAIN = "bring" + +ATTR_SENDER: Final = "sender" +ATTR_ITEM_NAME: Final = "item" +ATTR_NOTIFICATION_TYPE: Final = "message" + +SERVICE_PUSH_NOTIFICATION = "send_message" diff --git a/homeassistant/components/bring/icons.json b/homeassistant/components/bring/icons.json index a757b20a4cc..1c6c3bdeca0 100644 --- a/homeassistant/components/bring/icons.json +++ b/homeassistant/components/bring/icons.json @@ -5,5 +5,8 @@ "default": "mdi:cart" } } + }, + "services": { + "send_message": "mdi:cellphone-message" } } diff --git a/homeassistant/components/bring/services.yaml b/homeassistant/components/bring/services.yaml new file mode 100644 index 00000000000..98d5c68de13 --- /dev/null +++ b/homeassistant/components/bring/services.yaml @@ -0,0 +1,23 @@ +send_message: + target: + entity: + domain: todo + integration: bring + fields: + message: + example: urgent_message + required: true + default: "going_shopping" + selector: + select: + translation_key: "notification_type_selector" + options: + - "going_shopping" + - "changed_list" + - "shopping_done" + - "urgent_message" + item: + example: Cilantro + required: false + selector: + text: diff --git a/homeassistant/components/bring/strings.json b/homeassistant/components/bring/strings.json index 6d61034bea8..e6df885cbbc 100644 --- a/homeassistant/components/bring/strings.json +++ b/homeassistant/components/bring/strings.json @@ -38,6 +38,42 @@ }, "setup_authentication_exception": { "message": "Authentication failed for {email}, check your email and password" + }, + "notify_missing_argument_item": { + "message": "Failed to call service {service}. 'URGENT_MESSAGE' requires a value @ data['item']. 
Got None" + }, + "notify_request_failed": { + "message": "Failed to send push notification for bring due to a connection error, try again later" + } + }, + "services": { + "send_message": { + "name": "[%key:component::notify::services::notify::name%]", + "description": "Send a mobile push notification to members of a shared Bring! list.", + "fields": { + "entity_id": { + "name": "List", + "description": "Bring! list whose members (except sender) will be notified." + }, + "message": { + "name": "Notification type", + "description": "Type of push notification to send to list members." + }, + "item": { + "name": "Item (Required if message type `Breaking news` selected)", + "description": "Item name to include in a breaking news message e.g. `Breaking news - Please get cilantro!`" + } + } + } + }, + "selector": { + "notification_type_selector": { + "options": { + "going_shopping": "I'm going shopping! - Last chance for adjustments", + "changed_list": "List changed - Check it out", + "shopping_done": "Shopping done - you can relax", + "urgent_message": "Breaking news - Please get `item`!" 
+ } } } } diff --git a/homeassistant/components/bring/todo.py b/homeassistant/components/bring/todo.py index e631dc32951..5eabcc01553 100644 --- a/homeassistant/components/bring/todo.py +++ b/homeassistant/components/bring/todo.py @@ -6,7 +6,8 @@ from typing import TYPE_CHECKING import uuid from bring_api.exceptions import BringRequestException -from bring_api.types import BringItem, BringItemOperation +from bring_api.types import BringItem, BringItemOperation, BringNotificationType +import voluptuous as vol from homeassistant.components.todo import ( TodoItem, @@ -16,11 +17,18 @@ from homeassistant.components.todo import ( ) from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant -from homeassistant.exceptions import HomeAssistantError +from homeassistant.exceptions import HomeAssistantError, ServiceValidationError +from homeassistant.helpers import config_validation as cv, entity_platform +from homeassistant.helpers.config_validation import make_entity_service_schema from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.update_coordinator import CoordinatorEntity -from .const import DOMAIN +from .const import ( + ATTR_ITEM_NAME, + ATTR_NOTIFICATION_TYPE, + DOMAIN, + SERVICE_PUSH_NOTIFICATION, +) from .coordinator import BringData, BringDataUpdateCoordinator @@ -46,6 +54,21 @@ async def async_setup_entry( for bring_list in coordinator.data.values() ) + platform = entity_platform.async_get_current_platform() + + platform.async_register_entity_service( + SERVICE_PUSH_NOTIFICATION, + make_entity_service_schema( + { + vol.Required(ATTR_NOTIFICATION_TYPE): vol.All( + vol.Upper, cv.enum(BringNotificationType) + ), + vol.Optional(ATTR_ITEM_NAME): cv.string, + } + ), + "async_send_message", + ) + class BringTodoListEntity( CoordinatorEntity[BringDataUpdateCoordinator], TodoListEntity @@ -231,3 +254,26 @@ class BringTodoListEntity( ) from e await self.coordinator.async_refresh() + + async 
def async_send_message( + self, + message: BringNotificationType, + item: str | None = None, + ) -> None: + """Send a push notification to members of a shared bring list.""" + + try: + await self.coordinator.bring.notify(self._list_uuid, message, item or None) + except BringRequestException as e: + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="notify_request_failed", + ) from e + except ValueError as e: + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="notify_missing_argument_item", + translation_placeholders={ + "service": f"{DOMAIN}.{SERVICE_PUSH_NOTIFICATION}", + }, + ) from e From 830e8d7b946c1e7c37509884e60c82b9c21974e4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Daniel=20Hjelseth=20H=C3=B8yer?= Date: Wed, 24 Apr 2024 20:00:06 +0200 Subject: [PATCH 390/426] Fix statistic bug in Tibber sensor (#116112) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Handle keyError in Tibber sensor Signed-off-by: Daniel Hjelseth Høyer * Constant Signed-off-by: Daniel Hjelseth Høyer --------- Signed-off-by: Daniel Hjelseth Høyer --- homeassistant/components/tibber/sensor.py | 15 ++++++++++++--- 1 file changed, 12 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/tibber/sensor.py b/homeassistant/components/tibber/sensor.py index da2fd881a54..7da0a2b7947 100644 --- a/homeassistant/components/tibber/sensor.py +++ b/homeassistant/components/tibber/sensor.py @@ -53,6 +53,8 @@ from homeassistant.util import Throttle, dt as dt_util from .const import DOMAIN as TIBBER_DOMAIN, MANUFACTURER +FIVE_YEARS = 5 * 365 * 24 + _LOGGER = logging.getLogger(__name__) ICON = "mdi:currency-usd" @@ -724,9 +726,16 @@ class TibberDataCoordinator(DataUpdateCoordinator[None]): # pylint: disable=has None, {"sum"}, ) - first_stat = stat[statistic_id][0] - _sum = cast(float, first_stat["sum"]) - last_stats_time = first_stat["start"] + if statistic_id in stat: + first_stat = 
stat[statistic_id][0] + _sum = cast(float, first_stat["sum"]) + last_stats_time = first_stat["start"] + else: + hourly_data = await home.get_historic_data( + FIVE_YEARS, production=is_production + ) + _sum = 0.0 + last_stats_time = None statistics = [] From 4b53471b6098e0bbf53e71ab29dcad1a22f39136 Mon Sep 17 00:00:00 2001 From: Michael <35783820+mib1185@users.noreply.github.com> Date: Wed, 24 Apr 2024 20:09:40 +0200 Subject: [PATCH 391/426] Bump aiopegelonline to 0.0.10 (#116114) bump aiopegelonline to 0.0.10 --- homeassistant/components/pegel_online/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/pegel_online/manifest.json b/homeassistant/components/pegel_online/manifest.json index d193fd7487a..d51278d0c1b 100644 --- a/homeassistant/components/pegel_online/manifest.json +++ b/homeassistant/components/pegel_online/manifest.json @@ -7,5 +7,5 @@ "integration_type": "service", "iot_class": "cloud_polling", "loggers": ["aiopegelonline"], - "requirements": ["aiopegelonline==0.0.9"] + "requirements": ["aiopegelonline==0.0.10"] } diff --git a/requirements_all.txt b/requirements_all.txt index 75a7411c64b..ee8a074bf6b 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -318,7 +318,7 @@ aioopenexchangerates==0.4.0 aiooui==0.1.5 # homeassistant.components.pegel_online -aiopegelonline==0.0.9 +aiopegelonline==0.0.10 # homeassistant.components.acmeda aiopulse==0.4.4 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 4e8f9ecb69f..2eb9a80281f 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -291,7 +291,7 @@ aioopenexchangerates==0.4.0 aiooui==0.1.5 # homeassistant.components.pegel_online -aiopegelonline==0.0.9 +aiopegelonline==0.0.10 # homeassistant.components.acmeda aiopulse==0.4.4 From f8c38fad0024f38ecd99524427fb3d6054b53708 Mon Sep 17 00:00:00 2001 From: Robert Svensson Date: Wed, 24 Apr 2024 
20:47:22 +0200 Subject: [PATCH 392/426] Split out event handling from Axis hub (#113837) * Split out event handling from Axis hub * Improve test coverage * Mark internal methods with '_' * Rename to event source --- .../components/axis/hub/event_source.py | 93 +++++++++++++++++++ homeassistant/components/axis/hub/hub.py | 77 +++------------ tests/components/axis/conftest.py | 9 +- tests/components/axis/test_hub.py | 15 ++- 4 files changed, 126 insertions(+), 68 deletions(-) create mode 100644 homeassistant/components/axis/hub/event_source.py diff --git a/homeassistant/components/axis/hub/event_source.py b/homeassistant/components/axis/hub/event_source.py new file mode 100644 index 00000000000..7f2bfe7c982 --- /dev/null +++ b/homeassistant/components/axis/hub/event_source.py @@ -0,0 +1,93 @@ +"""Axis network device abstraction.""" + +from __future__ import annotations + +import axis +from axis.errors import Unauthorized +from axis.interfaces.mqtt import mqtt_json_to_event +from axis.models.mqtt import ClientState +from axis.stream_manager import Signal, State + +from homeassistant.components import mqtt +from homeassistant.components.mqtt import DOMAIN as MQTT_DOMAIN +from homeassistant.components.mqtt.models import ReceiveMessage +from homeassistant.config_entries import ConfigEntry +from homeassistant.core import HomeAssistant, callback +from homeassistant.helpers.dispatcher import async_dispatcher_send +from homeassistant.setup import async_when_setup + + +class AxisEventSource: + """Manage connection to event sources from an Axis device.""" + + def __init__( + self, hass: HomeAssistant, config_entry: ConfigEntry, api: axis.AxisDevice + ) -> None: + """Initialize the device.""" + self.hass = hass + self.config_entry = config_entry + self.api = api + + self.signal_reachable = f"axis_reachable_{config_entry.entry_id}" + + self.available = True + + @callback + def setup(self) -> None: + """Set up the device events.""" + 
self.api.stream.connection_status_callback.append(self._connection_status_cb) + self.api.enable_events() + self.api.stream.start() + + if self.api.vapix.mqtt.supported: + async_when_setup(self.hass, MQTT_DOMAIN, self._async_use_mqtt) + + @callback + def teardown(self) -> None: + """Tear down connections.""" + self._disconnect_from_stream() + + @callback + def _disconnect_from_stream(self) -> None: + """Stop stream.""" + if self.api.stream.state != State.STOPPED: + self.api.stream.connection_status_callback.clear() + self.api.stream.stop() + + async def _async_use_mqtt(self, hass: HomeAssistant, component: str) -> None: + """Set up to use MQTT.""" + try: + status = await self.api.vapix.mqtt.get_client_status() + except Unauthorized: + # This means the user has too low privileges + return + + if status.status.state == ClientState.ACTIVE: + self.config_entry.async_on_unload( + await mqtt.async_subscribe( + hass, f"{status.config.device_topic_prefix}/#", self._mqtt_message + ) + ) + + @callback + def _mqtt_message(self, message: ReceiveMessage) -> None: + """Receive Axis MQTT message.""" + self._disconnect_from_stream() + + if message.topic.endswith("event/connection"): + return + + event = mqtt_json_to_event(message.payload) + self.api.event.handler(event) + + @callback + def _connection_status_cb(self, status: Signal) -> None: + """Handle signals of device connection status. + + This is called on every RTSP keep-alive message. + Only signal state change if state change is true. 
+ """ + + if self.available != (status == Signal.PLAYING): + self.available = not self.available + async_dispatcher_send(self.hass, self.signal_reachable) diff --git a/homeassistant/components/axis/hub/hub.py b/homeassistant/components/axis/hub/hub.py index 4abd1358417..4e58e3be7c6 100644 --- a/homeassistant/components/axis/hub/hub.py +++ b/homeassistant/components/axis/hub/hub.py @@ -5,24 +5,17 @@ from __future__ import annotations from typing import Any import axis -from axis.errors import Unauthorized -from axis.interfaces.mqtt import mqtt_json_to_event -from axis.models.mqtt import ClientState -from axis.stream_manager import Signal, State -from homeassistant.components import mqtt -from homeassistant.components.mqtt import DOMAIN as MQTT_DOMAIN -from homeassistant.components.mqtt.models import ReceiveMessage from homeassistant.config_entries import ConfigEntry from homeassistant.core import Event, HomeAssistant, callback from homeassistant.helpers import device_registry as dr from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC, format_mac from homeassistant.helpers.dispatcher import async_dispatcher_send -from homeassistant.setup import async_when_setup from ..const import ATTR_MANUFACTURER, DOMAIN as AXIS_DOMAIN from .config import AxisConfig from .entity_loader import AxisEntityLoader +from .event_source import AxisEventSource class AxisHub: @@ -35,9 +28,9 @@ class AxisHub: self.hass = hass self.config = AxisConfig.from_config_entry(config_entry) self.entity_loader = AxisEntityLoader(self) + self.event_source = AxisEventSource(hass, config_entry, api) self.api = api - self.available = True self.fw_version = api.vapix.firmware_version self.product_type = api.vapix.product_type self.unique_id = format_mac(api.vapix.serial_number) @@ -51,32 +44,23 @@ class AxisHub: hub: AxisHub = hass.data[AXIS_DOMAIN][config_entry.entry_id] return hub + @property + def available(self) -> bool: + """Connection state to the device.""" + return 
self.event_source.available + # Signals @property def signal_reachable(self) -> str: """Device specific event to signal a change in connection status.""" - return f"axis_reachable_{self.config.entry.entry_id}" + return self.event_source.signal_reachable @property def signal_new_address(self) -> str: """Device specific event to signal a change in device address.""" return f"axis_new_address_{self.config.entry.entry_id}" - # Callbacks - - @callback - def connection_status_callback(self, status: Signal) -> None: - """Handle signals of device connection status. - - This is called on every RTSP keep-alive message. - Only signal state change if state change is true. - """ - - if self.available != (status == Signal.PLAYING): - self.available = not self.available - async_dispatcher_send(self.hass, self.signal_reachable) - @staticmethod async def async_new_address_callback( hass: HomeAssistant, config_entry: ConfigEntry @@ -89,6 +73,7 @@ class AxisHub: """ hub = AxisHub.get_hub(hass, config_entry) hub.config = AxisConfig.from_config_entry(config_entry) + hub.event_source.config_entry = config_entry hub.api.config.host = hub.config.host async_dispatcher_send(hass, hub.signal_new_address) @@ -106,57 +91,19 @@ class AxisHub: sw_version=self.fw_version, ) - async def async_use_mqtt(self, hass: HomeAssistant, component: str) -> None: - """Set up to use MQTT.""" - try: - status = await self.api.vapix.mqtt.get_client_status() - except Unauthorized: - # This means the user has too low privileges - return - if status.status.state == ClientState.ACTIVE: - self.config.entry.async_on_unload( - await mqtt.async_subscribe( - hass, f"{status.config.device_topic_prefix}/#", self.mqtt_message - ) - ) - - @callback - def mqtt_message(self, message: ReceiveMessage) -> None: - """Receive Axis MQTT message.""" - self.disconnect_from_stream() - if message.topic.endswith("event/connection"): - return - event = mqtt_json_to_event(message.payload) - self.api.event.handler(event) - # Setup and 
teardown methods @callback def setup(self) -> None: """Set up the device events.""" self.entity_loader.initialize_platforms() - - self.api.stream.connection_status_callback.append( - self.connection_status_callback - ) - self.api.enable_events() - self.api.stream.start() - - if self.api.vapix.mqtt.supported: - async_when_setup(self.hass, MQTT_DOMAIN, self.async_use_mqtt) - - @callback - def disconnect_from_stream(self) -> None: - """Stop stream.""" - if self.api.stream.state != State.STOPPED: - self.api.stream.connection_status_callback.clear() - self.api.stream.stop() + self.event_source.setup() async def shutdown(self, event: Event) -> None: """Stop the event stream.""" - self.disconnect_from_stream() + self.event_source.teardown() @callback def teardown(self) -> None: """Reset this device to default state.""" - self.disconnect_from_stream() + self.event_source.teardown() diff --git a/tests/components/axis/conftest.py b/tests/components/axis/conftest.py index b50a28df49f..7a4e446a0cc 100644 --- a/tests/components/axis/conftest.py +++ b/tests/components/axis/conftest.py @@ -114,6 +114,7 @@ def default_request_fixture( port_management_payload: dict[str, Any], param_properties_payload: dict[str, Any], param_ports_payload: dict[str, Any], + mqtt_status_code: int, ) -> Callable[[str], None]: """Mock default Vapix requests responses.""" @@ -131,7 +132,7 @@ def default_request_fixture( json=port_management_payload, ) respx.post("/axis-cgi/mqtt/client.cgi").respond( - json=MQTT_CLIENT_RESPONSE, + json=MQTT_CLIENT_RESPONSE, status_code=mqtt_status_code ) respx.post("/axis-cgi/streamprofile.cgi").respond( json=STREAM_PROFILES_RESPONSE, @@ -239,6 +240,12 @@ def param_ports_data_fixture() -> dict[str, Any]: return PORTS_RESPONSE +@pytest.fixture(name="mqtt_status_code") +def mqtt_status_code_fixture(): + """Property parameter data.""" + return 200 + + @pytest.fixture(name="setup_default_vapix_requests") def default_vapix_requests_fixture(mock_vapix_requests: Callable[[str], 
None]) -> None: """Mock default Vapix requests responses.""" diff --git a/tests/components/axis/test_hub.py b/tests/components/axis/test_hub.py index 1ae6db05427..5948874f0bf 100644 --- a/tests/components/axis/test_hub.py +++ b/tests/components/axis/test_hub.py @@ -2,7 +2,7 @@ from ipaddress import ip_address from unittest import mock -from unittest.mock import Mock, patch +from unittest.mock import Mock, call, patch import axis as axislib import pytest @@ -91,7 +91,8 @@ async def test_device_support_mqtt( hass: HomeAssistant, mqtt_mock: MqttMockHAClient, setup_config_entry ) -> None: """Successful setup.""" - mqtt_mock.async_subscribe.assert_called_with(f"axis/{MAC}/#", mock.ANY, 0, "utf-8") + mqtt_call = call(f"axis/{MAC}/#", mock.ANY, 0, "utf-8") + assert mqtt_call in mqtt_mock.async_subscribe.call_args_list topic = f"axis/{MAC}/event/tns:onvif/Device/tns:axis/Sensor/PIR/$source/sensor/0" message = ( @@ -109,6 +110,16 @@ async def test_device_support_mqtt( assert pir.name == f"{NAME} PIR 0" +@pytest.mark.parametrize("api_discovery_items", [API_DISCOVERY_MQTT]) +@pytest.mark.parametrize("mqtt_status_code", [401]) +async def test_device_support_mqtt_low_privilege( + hass: HomeAssistant, mqtt_mock: MqttMockHAClient, setup_config_entry +) -> None: + """Successful setup.""" + mqtt_call = call(f"{MAC}/#", mock.ANY, 0, "utf-8") + assert mqtt_call not in mqtt_mock.async_subscribe.call_args_list + + async def test_update_address( hass: HomeAssistant, setup_config_entry, mock_vapix_requests ) -> None: From 79d6c2e75af563f9253daf69c5b82979c18a4052 Mon Sep 17 00:00:00 2001 From: prabhjotsbhatia-ca <56749856+prabhjotsbhatia-ca@users.noreply.github.com> Date: Wed, 24 Apr 2024 14:51:46 -0400 Subject: [PATCH 393/426] Add all supported languages to Google Translate and remove unsupported ones (#107404) * Adding supported language codes from Google Translate Added all languages that Google Translate supports. 
* Corrected alphabetical order of languages * Remove languages not actually supported for speech Previously I added languages supported by Google Translate. Based on comments received, I manually verified each language, and removed languages that are not actually supported for speech in Google Translate. * Add instructions to update the list of supported languages Added instructions as suggested so as to facilitate easier update on this list. * Reformat comment in const.py --------- Co-authored-by: Erik Montnemery --- .../components/google_translate/const.py | 28 ++++++++++++++++--- 1 file changed, 24 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/google_translate/const.py b/homeassistant/components/google_translate/const.py index 76827606816..68d8208f26b 100644 --- a/homeassistant/components/google_translate/const.py +++ b/homeassistant/components/google_translate/const.py @@ -7,8 +7,25 @@ DEFAULT_LANG = "en" DEFAULT_TLD = "com" DOMAIN = "google_translate" +# INSTRUCTIONS TO UPDATE LIST: +# +# Removal: +# Removal is as simple as deleting the line containing the language code no longer +# supported. +# +# Addition: +# In order to add to this list, follow the below steps: +# 1. Find out if the language is supported: Go to Google Translate website and try +# translating any word from English into your desired language. +# If the "speech" icon is grayed out or no speech is generated, the language is +# not supported and cannot be added. Otherwise, proceed: +# 2. Grab the language code from https://cloud.google.com/translate/docs/languages +# 3. Add the language code in SUPPORT_LANGUAGES, making sure to not disturb the +# alphabetical nature of the list. 
+ SUPPORT_LANGUAGES = [ "af", + "am", "ar", "bg", "bn", @@ -20,16 +37,18 @@ SUPPORT_LANGUAGES = [ "de", "el", "en", - "eo", "es", "et", + "eu", "fi", + "fil", "fr", + "gl", "gu", + "ha", "hi", "hr", "hu", - "hy", "id", "is", "it", @@ -40,15 +59,16 @@ SUPPORT_LANGUAGES = [ "kn", "ko", "la", - "lv", "lt", - "mk", + "lv", "ml", "mr", + "ms", "my", "ne", "nl", "no", + "pa", "pl", "pt", "ro", From f2fe62d159e0cd040510424c52ce33967562fc7a Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Wed, 24 Apr 2024 23:16:48 +0200 Subject: [PATCH 394/426] Bump version to 2024.6.0dev0 (#116120) --- .github/workflows/ci.yaml | 2 +- homeassistant/const.py | 2 +- pyproject.toml | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 115c1a932ea..aa91cf97895 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -36,7 +36,7 @@ env: CACHE_VERSION: 8 UV_CACHE_VERSION: 1 MYPY_CACHE_VERSION: 8 - HA_SHORT_VERSION: "2024.5" + HA_SHORT_VERSION: "2024.6" DEFAULT_PYTHON: "3.12" ALL_PYTHON_VERSIONS: "['3.12']" # 10.3 is the oldest supported version diff --git a/homeassistant/const.py b/homeassistant/const.py index ba83eca58d8..45ff6ecf976 100644 --- a/homeassistant/const.py +++ b/homeassistant/const.py @@ -22,7 +22,7 @@ if TYPE_CHECKING: APPLICATION_NAME: Final = "HomeAssistant" MAJOR_VERSION: Final = 2024 -MINOR_VERSION: Final = 5 +MINOR_VERSION: Final = 6 PATCH_VERSION: Final = "0.dev0" __short_version__: Final = f"{MAJOR_VERSION}.{MINOR_VERSION}" __version__: Final = f"{__short_version__}.{PATCH_VERSION}" diff --git a/pyproject.toml b/pyproject.toml index 7e3038f6ee2..70bd5c7dba3 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta" [project] name = "homeassistant" -version = "2024.5.0.dev0" +version = "2024.6.0.dev0" license = {text = "Apache-2.0"} description = "Open-source home automation platform running on Python 3." 
readme = "README.rst" From 12c2ed5c4d9a0b3b277a4fb9c4dadb9794f74283 Mon Sep 17 00:00:00 2001 From: Chris Roberts Date: Thu, 25 Apr 2024 01:25:10 -0500 Subject: [PATCH 395/426] Add play/pause functionality for Vizio Smartcast media_player entities (#108896) * Add play/pause functionality to vizio integration Leverages existing pyvizio functionality. My impression is that it also works for soundbars based on https://github.com/exiva/Vizio_SmartCast_API/issues/19. * Set vizio assumed_state to True The Vizio API is only capable of indicating whether the device is on or off and not whether it's playing/paused/idle. Setting assumed_state to True gives us separate Play and Pause buttons versus the (useless) merged Play/Pause button we would get otherwise. --- homeassistant/components/vizio/const.py | 4 +++- homeassistant/components/vizio/media_player.py | 9 +++++++++ tests/components/vizio/test_media_player.py | 4 ++++ 3 files changed, 16 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/vizio/const.py b/homeassistant/components/vizio/const.py index 12de3af1cb0..03caa723771 100644 --- a/homeassistant/components/vizio/const.py +++ b/homeassistant/components/vizio/const.py @@ -52,7 +52,9 @@ DEVICE_ID = "pyvizio" DOMAIN = "vizio" COMMON_SUPPORTED_COMMANDS = ( - MediaPlayerEntityFeature.SELECT_SOURCE + MediaPlayerEntityFeature.PAUSE + | MediaPlayerEntityFeature.PLAY + | MediaPlayerEntityFeature.SELECT_SOURCE | MediaPlayerEntityFeature.TURN_ON | MediaPlayerEntityFeature.TURN_OFF | MediaPlayerEntityFeature.VOLUME_MUTE diff --git a/homeassistant/components/vizio/media_player.py b/homeassistant/components/vizio/media_player.py index c19c091bb3d..18af2c0dbb2 100644 --- a/homeassistant/components/vizio/media_player.py +++ b/homeassistant/components/vizio/media_player.py @@ -159,6 +159,7 @@ class VizioDevice(MediaPlayerEntity): ) self._device = device self._max_volume = float(device.get_max_volume()) + self._attr_assumed_state = True # Entity class attributes that 
will change with each update (we only include # the ones that are initialized differently from the defaults) @@ -483,3 +484,11 @@ class VizioDevice(MediaPlayerEntity): num = int(self._max_volume * (self._attr_volume_level - volume)) await self._device.vol_down(num=num, log_api_exception=False) self._attr_volume_level = volume + + async def async_media_play(self) -> None: + """Play whatever media is currently active.""" + await self._device.play(log_api_exception=False) + + async def async_media_pause(self) -> None: + """Pause whatever media is currently active.""" + await self._device.pause(log_api_exception=False) diff --git a/tests/components/vizio/test_media_player.py b/tests/components/vizio/test_media_player.py index d5ce18eb8b9..8cc734b9188 100644 --- a/tests/components/vizio/test_media_player.py +++ b/tests/components/vizio/test_media_player.py @@ -28,6 +28,8 @@ from homeassistant.components.media_player import ( ATTR_SOUND_MODE, DOMAIN as MP_DOMAIN, SERVICE_MEDIA_NEXT_TRACK, + SERVICE_MEDIA_PAUSE, + SERVICE_MEDIA_PLAY, SERVICE_MEDIA_PREVIOUS_TRACK, SERVICE_SELECT_SOUND_MODE, SERVICE_SELECT_SOURCE, @@ -443,6 +445,8 @@ async def test_services( "eq", "Music", ) + await _test_service(hass, MP_DOMAIN, "play", SERVICE_MEDIA_PLAY, None) + await _test_service(hass, MP_DOMAIN, "pause", SERVICE_MEDIA_PAUSE, None) async def test_options_update( From 59dc394ac7c77d18e8cb09d1f78051a3030b79f6 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Thu, 25 Apr 2024 10:48:32 +0200 Subject: [PATCH 396/426] Fix language in strict connection guard page (#116154) --- homeassistant/components/http/strict_connection_guard_page.html | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/homeassistant/components/http/strict_connection_guard_page.html b/homeassistant/components/http/strict_connection_guard_page.html index 86ea8e00e90..8567e500c9d 100644 --- a/homeassistant/components/http/strict_connection_guard_page.html +++ 
b/homeassistant/components/http/strict_connection_guard_page.html @@ -123,7 +123,7 @@

You need access

- This device is not known on + This device is not known to Home Assistant.

From 1241d70b3b4737aa0338a46b2dec82991b758dc1 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 25 Apr 2024 10:52:57 +0200 Subject: [PATCH 397/426] Bump actions/checkout from 4.1.3 to 4.1.4 (#116147) Bumps [actions/checkout](https://github.com/actions/checkout) from 4.1.3 to 4.1.4. - [Release notes](https://github.com/actions/checkout/releases) - [Changelog](https://github.com/actions/checkout/blob/main/CHANGELOG.md) - [Commits](https://github.com/actions/checkout/compare/v4.1.3...v4.1.4) --- updated-dependencies: - dependency-name: actions/checkout dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/builder.yml | 12 +++++------ .github/workflows/ci.yaml | 34 +++++++++++++++--------------- .github/workflows/codeql.yml | 2 +- .github/workflows/translations.yml | 2 +- .github/workflows/wheels.yml | 6 +++--- 5 files changed, 28 insertions(+), 28 deletions(-) diff --git a/.github/workflows/builder.yml b/.github/workflows/builder.yml index bc70eafd3f4..6a4a172638f 100644 --- a/.github/workflows/builder.yml +++ b/.github/workflows/builder.yml @@ -27,7 +27,7 @@ jobs: publish: ${{ steps.version.outputs.publish }} steps: - name: Checkout the repository - uses: actions/checkout@v4.1.3 + uses: actions/checkout@v4.1.4 with: fetch-depth: 0 @@ -90,7 +90,7 @@ jobs: arch: ${{ fromJson(needs.init.outputs.architectures) }} steps: - name: Checkout the repository - uses: actions/checkout@v4.1.3 + uses: actions/checkout@v4.1.4 - name: Download nightly wheels of frontend if: needs.init.outputs.channel == 'dev' @@ -242,7 +242,7 @@ jobs: - green steps: - name: Checkout the repository - uses: actions/checkout@v4.1.3 + uses: actions/checkout@v4.1.4 - name: Set build additional args run: | @@ -279,7 +279,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout the 
repository - uses: actions/checkout@v4.1.3 + uses: actions/checkout@v4.1.4 - name: Initialize git uses: home-assistant/actions/helpers/git-init@master @@ -320,7 +320,7 @@ jobs: registry: ["ghcr.io/home-assistant", "docker.io/homeassistant"] steps: - name: Checkout the repository - uses: actions/checkout@v4.1.3 + uses: actions/checkout@v4.1.4 - name: Install Cosign uses: sigstore/cosign-installer@v3.4.0 @@ -450,7 +450,7 @@ jobs: if: github.repository_owner == 'home-assistant' && needs.init.outputs.publish == 'true' steps: - name: Checkout the repository - uses: actions/checkout@v4.1.3 + uses: actions/checkout@v4.1.4 - name: Set up Python ${{ env.DEFAULT_PYTHON }} uses: actions/setup-python@v5.1.0 diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index aa91cf97895..75c64ec8ff5 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -89,7 +89,7 @@ jobs: runs-on: ubuntu-22.04 steps: - name: Check out code from GitHub - uses: actions/checkout@v4.1.3 + uses: actions/checkout@v4.1.4 - name: Generate partial Python venv restore key id: generate_python_cache_key run: >- @@ -224,7 +224,7 @@ jobs: - info steps: - name: Check out code from GitHub - uses: actions/checkout@v4.1.3 + uses: actions/checkout@v4.1.4 - name: Set up Python ${{ env.DEFAULT_PYTHON }} id: python uses: actions/setup-python@v5.1.0 @@ -270,7 +270,7 @@ jobs: - pre-commit steps: - name: Check out code from GitHub - uses: actions/checkout@v4.1.3 + uses: actions/checkout@v4.1.4 - name: Set up Python ${{ env.DEFAULT_PYTHON }} uses: actions/setup-python@v5.1.0 id: python @@ -310,7 +310,7 @@ jobs: - pre-commit steps: - name: Check out code from GitHub - uses: actions/checkout@v4.1.3 + uses: actions/checkout@v4.1.4 - name: Set up Python ${{ env.DEFAULT_PYTHON }} uses: actions/setup-python@v5.1.0 id: python @@ -349,7 +349,7 @@ jobs: - pre-commit steps: - name: Check out code from GitHub - uses: actions/checkout@v4.1.3 + uses: actions/checkout@v4.1.4 - name: Set up Python ${{ 
env.DEFAULT_PYTHON }} uses: actions/setup-python@v5.1.0 id: python @@ -443,7 +443,7 @@ jobs: python-version: ${{ fromJSON(needs.info.outputs.python_versions) }} steps: - name: Check out code from GitHub - uses: actions/checkout@v4.1.3 + uses: actions/checkout@v4.1.4 - name: Set up Python ${{ matrix.python-version }} id: python uses: actions/setup-python@v5.1.0 @@ -520,7 +520,7 @@ jobs: - base steps: - name: Check out code from GitHub - uses: actions/checkout@v4.1.3 + uses: actions/checkout@v4.1.4 - name: Set up Python ${{ env.DEFAULT_PYTHON }} id: python uses: actions/setup-python@v5.1.0 @@ -552,7 +552,7 @@ jobs: - base steps: - name: Check out code from GitHub - uses: actions/checkout@v4.1.3 + uses: actions/checkout@v4.1.4 - name: Set up Python ${{ env.DEFAULT_PYTHON }} id: python uses: actions/setup-python@v5.1.0 @@ -585,7 +585,7 @@ jobs: - base steps: - name: Check out code from GitHub - uses: actions/checkout@v4.1.3 + uses: actions/checkout@v4.1.4 - name: Set up Python ${{ env.DEFAULT_PYTHON }} id: python uses: actions/setup-python@v5.1.0 @@ -629,7 +629,7 @@ jobs: - base steps: - name: Check out code from GitHub - uses: actions/checkout@v4.1.3 + uses: actions/checkout@v4.1.4 - name: Set up Python ${{ env.DEFAULT_PYTHON }} id: python uses: actions/setup-python@v5.1.0 @@ -702,7 +702,7 @@ jobs: ffmpeg \ libgammu-dev - name: Check out code from GitHub - uses: actions/checkout@v4.1.3 + uses: actions/checkout@v4.1.4 - name: Set up Python ${{ env.DEFAULT_PYTHON }} id: python uses: actions/setup-python@v5.1.0 @@ -763,7 +763,7 @@ jobs: ffmpeg \ libgammu-dev - name: Check out code from GitHub - uses: actions/checkout@v4.1.3 + uses: actions/checkout@v4.1.4 - name: Set up Python ${{ matrix.python-version }} id: python uses: actions/setup-python@v5.1.0 @@ -879,7 +879,7 @@ jobs: ffmpeg \ libmariadb-dev-compat - name: Check out code from GitHub - uses: actions/checkout@v4.1.3 + uses: actions/checkout@v4.1.4 - name: Set up Python ${{ matrix.python-version }} id: python uses: 
actions/setup-python@v5.1.0 @@ -1002,7 +1002,7 @@ jobs: ffmpeg \ postgresql-server-dev-14 - name: Check out code from GitHub - uses: actions/checkout@v4.1.3 + uses: actions/checkout@v4.1.4 - name: Set up Python ${{ matrix.python-version }} id: python uses: actions/setup-python@v5.1.0 @@ -1097,7 +1097,7 @@ jobs: timeout-minutes: 10 steps: - name: Check out code from GitHub - uses: actions/checkout@v4.1.3 + uses: actions/checkout@v4.1.4 - name: Download all coverage artifacts uses: actions/download-artifact@v4.1.6 with: @@ -1144,7 +1144,7 @@ jobs: ffmpeg \ libgammu-dev - name: Check out code from GitHub - uses: actions/checkout@v4.1.3 + uses: actions/checkout@v4.1.4 - name: Set up Python ${{ matrix.python-version }} id: python uses: actions/setup-python@v5.1.0 @@ -1231,7 +1231,7 @@ jobs: timeout-minutes: 10 steps: - name: Check out code from GitHub - uses: actions/checkout@v4.1.3 + uses: actions/checkout@v4.1.4 - name: Download all coverage artifacts uses: actions/download-artifact@v4.1.6 with: diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml index d1393c97462..399443d23fb 100644 --- a/.github/workflows/codeql.yml +++ b/.github/workflows/codeql.yml @@ -21,7 +21,7 @@ jobs: steps: - name: Check out code from GitHub - uses: actions/checkout@v4.1.3 + uses: actions/checkout@v4.1.4 - name: Initialize CodeQL uses: github/codeql-action/init@v3.25.2 diff --git a/.github/workflows/translations.yml b/.github/workflows/translations.yml index 3f0559de541..3cf5a7ed089 100644 --- a/.github/workflows/translations.yml +++ b/.github/workflows/translations.yml @@ -19,7 +19,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout the repository - uses: actions/checkout@v4.1.3 + uses: actions/checkout@v4.1.4 - name: Set up Python ${{ env.DEFAULT_PYTHON }} uses: actions/setup-python@v5.1.0 diff --git a/.github/workflows/wheels.yml b/.github/workflows/wheels.yml index 2627ac70795..36a4b0c4032 100644 --- a/.github/workflows/wheels.yml +++ 
b/.github/workflows/wheels.yml @@ -32,7 +32,7 @@ jobs: architectures: ${{ steps.info.outputs.architectures }} steps: - name: Checkout the repository - uses: actions/checkout@v4.1.3 + uses: actions/checkout@v4.1.4 - name: Set up Python ${{ env.DEFAULT_PYTHON }} id: python @@ -118,7 +118,7 @@ jobs: arch: ${{ fromJson(needs.init.outputs.architectures) }} steps: - name: Checkout the repository - uses: actions/checkout@v4.1.3 + uses: actions/checkout@v4.1.4 - name: Download env_file uses: actions/download-artifact@v4.1.6 @@ -156,7 +156,7 @@ jobs: arch: ${{ fromJson(needs.init.outputs.architectures) }} steps: - name: Checkout the repository - uses: actions/checkout@v4.1.3 + uses: actions/checkout@v4.1.4 - name: Download env_file uses: actions/download-artifact@v4.1.6 From ce4db445e81427f6e8bb76d29e86941d4a3a1420 Mon Sep 17 00:00:00 2001 From: Paul Bottein Date: Thu, 25 Apr 2024 11:21:19 +0200 Subject: [PATCH 398/426] Update unlocked icon for locks (#116157) --- homeassistant/components/lock/icons.json | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/lock/icons.json b/homeassistant/components/lock/icons.json index 1bf48f2ab40..0ce2e70d372 100644 --- a/homeassistant/components/lock/icons.json +++ b/homeassistant/components/lock/icons.json @@ -5,7 +5,7 @@ "state": { "jammed": "mdi:lock-alert", "locking": "mdi:lock-clock", - "unlocked": "mdi:lock-open", + "unlocked": "mdi:lock-open-variant", "unlocking": "mdi:lock-clock" } } @@ -13,6 +13,6 @@ "services": { "lock": "mdi:lock", "open": "mdi:door-open", - "unlock": "mdi:lock-open" + "unlock": "mdi:lock-open-variant" } } From 1e1e5ccc7a23773d18ef28084e19063c9491a553 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 25 Apr 2024 11:23:15 +0200 Subject: [PATCH 399/426] Bump actions/download-artifact from 4.1.6 to 4.1.7 (#116148) Bumps [actions/download-artifact](https://github.com/actions/download-artifact) from 4.1.6 to 
4.1.7. - [Release notes](https://github.com/actions/download-artifact/releases) - [Commits](https://github.com/actions/download-artifact/compare/v4.1.6...v4.1.7) --- updated-dependencies: - dependency-name: actions/download-artifact dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/builder.yml | 4 ++-- .github/workflows/ci.yaml | 6 +++--- .github/workflows/wheels.yml | 10 +++++----- 3 files changed, 10 insertions(+), 10 deletions(-) diff --git a/.github/workflows/builder.yml b/.github/workflows/builder.yml index 6a4a172638f..a72c4e75cfe 100644 --- a/.github/workflows/builder.yml +++ b/.github/workflows/builder.yml @@ -175,7 +175,7 @@ jobs: sed -i "s|pykrakenapi|# pykrakenapi|g" requirements_all.txt - name: Download translations - uses: actions/download-artifact@v4.1.6 + uses: actions/download-artifact@v4.1.7 with: name: translations @@ -458,7 +458,7 @@ jobs: python-version: ${{ env.DEFAULT_PYTHON }} - name: Download translations - uses: actions/download-artifact@v4.1.6 + uses: actions/download-artifact@v4.1.7 with: name: translations diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 75c64ec8ff5..580aba9752c 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -785,7 +785,7 @@ jobs: run: | echo "::add-matcher::.github/workflows/matchers/pytest-slow.json" - name: Download pytest_buckets - uses: actions/download-artifact@v4.1.6 + uses: actions/download-artifact@v4.1.7 with: name: pytest_buckets - name: Compile English translations @@ -1099,7 +1099,7 @@ jobs: - name: Check out code from GitHub uses: actions/checkout@v4.1.4 - name: Download all coverage artifacts - uses: actions/download-artifact@v4.1.6 + uses: actions/download-artifact@v4.1.7 with: pattern: coverage-* - name: Upload coverage to Codecov @@ -1233,7 +1233,7 @@ jobs: - name: Check out code from 
GitHub uses: actions/checkout@v4.1.4 - name: Download all coverage artifacts - uses: actions/download-artifact@v4.1.6 + uses: actions/download-artifact@v4.1.7 with: pattern: coverage-* - name: Upload coverage to Codecov diff --git a/.github/workflows/wheels.yml b/.github/workflows/wheels.yml index 36a4b0c4032..4f652b7a0a1 100644 --- a/.github/workflows/wheels.yml +++ b/.github/workflows/wheels.yml @@ -121,12 +121,12 @@ jobs: uses: actions/checkout@v4.1.4 - name: Download env_file - uses: actions/download-artifact@v4.1.6 + uses: actions/download-artifact@v4.1.7 with: name: env_file - name: Download requirements_diff - uses: actions/download-artifact@v4.1.6 + uses: actions/download-artifact@v4.1.7 with: name: requirements_diff @@ -159,17 +159,17 @@ jobs: uses: actions/checkout@v4.1.4 - name: Download env_file - uses: actions/download-artifact@v4.1.6 + uses: actions/download-artifact@v4.1.7 with: name: env_file - name: Download requirements_diff - uses: actions/download-artifact@v4.1.6 + uses: actions/download-artifact@v4.1.7 with: name: requirements_diff - name: Download requirements_all_wheels - uses: actions/download-artifact@v4.1.6 + uses: actions/download-artifact@v4.1.7 with: name: requirements_all_wheels From e2c60e9333e16a3edd5f52d491cffea8ace696d3 Mon Sep 17 00:00:00 2001 From: Marc Mueller <30130371+cdce8p@users.noreply.github.com> Date: Thu, 25 Apr 2024 12:27:34 +0200 Subject: [PATCH 400/426] Update mypy to 1.10.0 (#116158) --- requirements_test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements_test.txt b/requirements_test.txt index 5470bc2a49d..7fa9b3d8c89 100644 --- a/requirements_test.txt +++ b/requirements_test.txt @@ -11,7 +11,7 @@ astroid==3.1.0 coverage==7.5.0 freezegun==1.4.0 mock-open==1.4.0 -mypy-dev==1.10.0a3 +mypy==1.10.0 pre-commit==3.7.0 pydantic==1.10.12 pylint==3.1.0 From 2e88ba40ff952f0140ce26e16b24fe1893e2df2e Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Thu, 25 Apr 2024 13:01:41 +0200 Subject: 
[PATCH 401/426] Fix lying docstring for relative_time template function (#116146) * Fix lying docstring for relative_time template function * Update homeassistant/helpers/template.py Co-authored-by: Martin Hjelmare --------- Co-authored-by: Martin Hjelmare --- homeassistant/helpers/template.py | 3 ++- tests/helpers/test_template.py | 32 +++++++++++++++++++++++++++++++ 2 files changed, 34 insertions(+), 1 deletion(-) diff --git a/homeassistant/helpers/template.py b/homeassistant/helpers/template.py index 335d6842548..731b8f720e4 100644 --- a/homeassistant/helpers/template.py +++ b/homeassistant/helpers/template.py @@ -2477,7 +2477,8 @@ def relative_time(hass: HomeAssistant, value: Any) -> Any: The age can be in second, minute, hour, day, month or year. Only the biggest unit is considered, e.g. if it's 2 days and 3 hours, "2 days" will be returned. - Make sure date is not in the future, or else it will return None. + If the input datetime is in the future, + the input datetime will be returned. If the input are not a datetime object the input will be returned unmodified. 
""" diff --git a/tests/helpers/test_template.py b/tests/helpers/test_template.py index d134570d119..13b55e52bb5 100644 --- a/tests/helpers/test_template.py +++ b/tests/helpers/test_template.py @@ -2307,6 +2307,38 @@ def test_relative_time(mock_is_safe, hass: HomeAssistant) -> None: ).async_render() assert result == "string" + # Test behavior when current time is same as the input time + result = template.Template( + ( + "{{" + " relative_time(" + " strptime(" + ' "2000-01-01 10:00:00 +00:00",' + ' "%Y-%m-%d %H:%M:%S %z"' + " )" + " )" + "}}" + ), + hass, + ).async_render() + assert result == "0 seconds" + + # Test behavior when the input time is in the future + result = template.Template( + ( + "{{" + " relative_time(" + " strptime(" + ' "2000-01-01 11:00:00 +00:00",' + ' "%Y-%m-%d %H:%M:%S %z"' + " )" + " )" + "}}" + ), + hass, + ).async_render() + assert result == "2000-01-01 11:00:00+00:00" + info = template.Template(relative_time_template, hass).async_render_to_info() assert info.has_time is True From 6bff0c384ffb1d26d21b52b19eb92439549beb28 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Thu, 25 Apr 2024 13:02:18 +0200 Subject: [PATCH 402/426] Remove deprecation warnings for relative_time (#116144) * Remove deprecation warnings for relative_time * Update homeassistant/helpers/template.py Co-authored-by: Simon <80467011+sorgfresser@users.noreply.github.com> --------- Co-authored-by: Simon <80467011+sorgfresser@users.noreply.github.com> --- .../components/homeassistant/strings.json | 4 --- homeassistant/helpers/template.py | 26 +++---------------- tests/helpers/test_template.py | 6 +---- 3 files changed, 4 insertions(+), 32 deletions(-) diff --git a/homeassistant/components/homeassistant/strings.json b/homeassistant/components/homeassistant/strings.json index 5cdd47d8be4..09b2f17c947 100644 --- a/homeassistant/components/homeassistant/strings.json +++ b/homeassistant/components/homeassistant/strings.json @@ -56,10 +56,6 @@ "config_entry_reauth": { "title": 
"[%key:common::config_flow::title::reauth%]", "description": "Reauthentication is needed" - }, - "template_function_relative_time_deprecated": { - "title": "The {relative_time} template function is deprecated", - "description": "The {relative_time} template function is deprecated in Home Assistant. Please use the {time_since} or {time_until} template functions instead." } }, "system_health": { diff --git a/homeassistant/helpers/template.py b/homeassistant/helpers/template.py index 731b8f720e4..c12494ba71b 100644 --- a/homeassistant/helpers/template.py +++ b/homeassistant/helpers/template.py @@ -59,7 +59,6 @@ from homeassistant.const import ( UnitOfLength, ) from homeassistant.core import ( - DOMAIN as HA_DOMAIN, Context, HomeAssistant, State, @@ -2481,30 +2480,11 @@ def relative_time(hass: HomeAssistant, value: Any) -> Any: the input datetime will be returned. If the input are not a datetime object the input will be returned unmodified. + + Note: This template function is deprecated in favor of `time_until`, but is still + supported so as not to break old templates. 
""" - def warn_relative_time_deprecated() -> None: - ir = issue_registry.async_get(hass) - issue_id = "template_function_relative_time_deprecated" - if ir.async_get_issue(HA_DOMAIN, issue_id): - return - issue_registry.async_create_issue( - hass, - HA_DOMAIN, - issue_id, - breaks_in_ha_version="2024.11.0", - is_fixable=False, - severity=issue_registry.IssueSeverity.WARNING, - translation_key=issue_id, - translation_placeholders={ - "relative_time": "relative_time()", - "time_since": "time_since()", - "time_until": "time_until()", - }, - ) - _LOGGER.warning("Template function 'relative_time' is deprecated") - - warn_relative_time_deprecated() if (render_info := _render_info.get()) is not None: render_info.has_time = True diff --git a/tests/helpers/test_template.py b/tests/helpers/test_template.py index 13b55e52bb5..1e2e512cf3d 100644 --- a/tests/helpers/test_template.py +++ b/tests/helpers/test_template.py @@ -31,7 +31,7 @@ from homeassistant.const import ( UnitOfTemperature, UnitOfVolume, ) -from homeassistant.core import DOMAIN as HA_DOMAIN, HomeAssistant +from homeassistant.core import HomeAssistant from homeassistant.exceptions import TemplateError from homeassistant.helpers import ( area_registry as ar, @@ -2240,7 +2240,6 @@ def test_relative_time(mock_is_safe, hass: HomeAssistant) -> None: """Test relative_time method.""" hass.config.set_time_zone("UTC") now = datetime.strptime("2000-01-01 10:00:00 +00:00", "%Y-%m-%d %H:%M:%S %z") - issue_registry = ir.async_get(hass) relative_time_template = ( '{{relative_time(strptime("2000-01-01 09:00:00", "%Y-%m-%d %H:%M:%S"))}}' ) @@ -2250,9 +2249,6 @@ def test_relative_time(mock_is_safe, hass: HomeAssistant) -> None: hass, ).async_render() assert result == "1 hour" - assert issue_registry.async_get_issue( - HA_DOMAIN, "template_function_relative_time_deprecated" - ) result = template.Template( ( "{{" From 855bb57d5e234ac24519762656d9ba2595e84d15 Mon Sep 17 00:00:00 2001 From: Jan Bouwhuis Date: Thu, 25 Apr 2024 17:32:42 
+0200 Subject: [PATCH 403/426] Revert "Return specific group state if there is one" (#116176) Revert "Return specific group state if there is one (#115866)" This reverts commit 350ca48d4c10b2105e1e3513da7137498dd6ad83. --- homeassistant/components/group/entity.py | 95 ++++------------------ homeassistant/components/group/registry.py | 14 +--- tests/components/group/test_init.py | 24 +----- 3 files changed, 24 insertions(+), 109 deletions(-) diff --git a/homeassistant/components/group/entity.py b/homeassistant/components/group/entity.py index 5ac913dde8d..a8fd9027984 100644 --- a/homeassistant/components/group/entity.py +++ b/homeassistant/components/group/entity.py @@ -8,7 +8,7 @@ from collections.abc import Callable, Collection, Mapping import logging from typing import Any -from homeassistant.const import ATTR_ASSUMED_STATE, ATTR_ENTITY_ID, STATE_OFF, STATE_ON +from homeassistant.const import ATTR_ASSUMED_STATE, ATTR_ENTITY_ID, STATE_ON from homeassistant.core import ( CALLBACK_TYPE, Event, @@ -131,9 +131,6 @@ class Group(Entity): _unrecorded_attributes = frozenset({ATTR_ENTITY_ID, ATTR_ORDER, ATTR_AUTO}) _attr_should_poll = False - # In case there is only one active domain we use specific ON or OFF - # values, if all ON or OFF states are equal - single_active_domain: str | None tracking: tuple[str, ...] trackable: tuple[str, ...] 
@@ -290,7 +287,6 @@ class Group(Entity): if not entity_ids: self.tracking = () self.trackable = () - self.single_active_domain = None return registry: GroupIntegrationRegistry = self.hass.data[REG_KEY] @@ -298,22 +294,12 @@ class Group(Entity): tracking: list[str] = [] trackable: list[str] = [] - self.single_active_domain = None - multiple_domains: bool = False for ent_id in entity_ids: ent_id_lower = ent_id.lower() domain = split_entity_id(ent_id_lower)[0] tracking.append(ent_id_lower) - if domain in excluded_domains: - continue - - trackable.append(ent_id_lower) - - if not multiple_domains and self.single_active_domain is None: - self.single_active_domain = domain - if self.single_active_domain != domain: - multiple_domains = True - self.single_active_domain = None + if domain not in excluded_domains: + trackable.append(ent_id_lower) self.trackable = tuple(trackable) self.tracking = tuple(tracking) @@ -409,36 +395,10 @@ class Group(Entity): self._on_off[entity_id] = state in registry.on_off_mapping else: entity_on_state = registry.on_states_by_domain[domain] - self._on_states.update(entity_on_state) + if domain in registry.on_states_by_domain: + self._on_states.update(entity_on_state) self._on_off[entity_id] = state in entity_on_state - def _detect_specific_on_off_state(self, group_is_on: bool) -> set[str]: - """Check if a specific ON or OFF state is possible.""" - # In case the group contains entities of the same domain with the same ON - # or an OFF state (one or more domains), we want to use that specific state. - # If we have more then one ON or OFF state we default to STATE_ON or STATE_OFF. 
- registry: GroupIntegrationRegistry = self.hass.data[REG_KEY] - active_on_states: set[str] = set() - active_off_states: set[str] = set() - for entity_id in self.trackable: - if (state := self.hass.states.get(entity_id)) is None: - continue - current_state = state.state - if ( - group_is_on - and (domain_on_states := registry.on_states_by_domain.get(state.domain)) - and current_state in domain_on_states - ): - active_on_states.add(current_state) - # If we have more than one on state, the group state - # will result in STATE_ON and we can stop checking - if len(active_on_states) > 1: - break - elif current_state in registry.off_on_mapping: - active_off_states.add(current_state) - - return active_on_states if group_is_on else active_off_states - @callback def _async_update_group_state(self, tr_state: State | None = None) -> None: """Update group state. @@ -465,48 +425,27 @@ class Group(Entity): elif tr_state.attributes.get(ATTR_ASSUMED_STATE): self._assumed_state = True - # If we do not have an on state for any domains - # we use None (which will be STATE_UNKNOWN) - if (num_on_states := len(self._on_states)) == 0: - self._state = None - return - - group_is_on = self.mode(self._on_off.values()) - + num_on_states = len(self._on_states) # If all the entity domains we are tracking # have the same on state we use this state # and its hass.data[REG_KEY].on_off_mapping to off if num_on_states == 1: - on_state = next(iter(self._on_states)) + on_state = list(self._on_states)[0] + # If we do not have an on state for any domains + # we use None (which will be STATE_UNKNOWN) + elif num_on_states == 0: + self._state = None + return # If the entity domains have more than one - # on state, we use STATE_ON/STATE_OFF, unless there is - # only one specific `on` state in use for one specific domain - elif self.single_active_domain and num_on_states: - active_on_states = self._detect_specific_on_off_state(True) - on_state = ( - list(active_on_states)[0] if len(active_on_states) == 1 
else STATE_ON - ) - elif group_is_on: + # on state, we use STATE_ON/STATE_OFF + else: on_state = STATE_ON + group_is_on = self.mode(self._on_off.values()) if group_is_on: self._state = on_state - return - - registry: GroupIntegrationRegistry = self.hass.data[REG_KEY] - if ( - active_domain := self.single_active_domain - ) and active_domain in registry.off_state_by_domain: - # If there is only one domain used, - # then we return the off state for that domain.s - self._state = registry.off_state_by_domain[active_domain] else: - active_off_states = self._detect_specific_on_off_state(False) - # If there is one off state in use then we return that specific state, - # also if there a multiple domains involved, e.g. - # person and device_tracker, with a shared state. - self._state = ( - list(active_off_states)[0] if len(active_off_states) == 1 else STATE_OFF - ) + registry: GroupIntegrationRegistry = self.hass.data[REG_KEY] + self._state = registry.on_off_mapping[on_state] def async_get_component(hass: HomeAssistant) -> EntityComponent[Group]: diff --git a/homeassistant/components/group/registry.py b/homeassistant/components/group/registry.py index 474448db68a..6cdb929d60c 100644 --- a/homeassistant/components/group/registry.py +++ b/homeassistant/components/group/registry.py @@ -49,12 +49,9 @@ class GroupIntegrationRegistry: def __init__(self) -> None: """Imitialize registry.""" - self.on_off_mapping: dict[str, dict[str | None, str]] = { - STATE_ON: {None: STATE_OFF} - } + self.on_off_mapping: dict[str, str] = {STATE_ON: STATE_OFF} self.off_on_mapping: dict[str, str] = {STATE_OFF: STATE_ON} self.on_states_by_domain: dict[str, set[str]] = {} - self.off_state_by_domain: dict[str, str] = {} self.exclude_domains: set[str] = set() def exclude_domain(self) -> None: @@ -63,14 +60,11 @@ class GroupIntegrationRegistry: def on_off_states(self, on_states: set, off_state: str) -> None: """Register on and off states for the current domain.""" - domain = current_domain.get() for 
on_state in on_states: if on_state not in self.on_off_mapping: - self.on_off_mapping[on_state] = {domain: off_state} - else: - self.on_off_mapping[on_state][domain] = off_state + self.on_off_mapping[on_state] = off_state + if len(on_states) == 1 and off_state not in self.off_on_mapping: self.off_on_mapping[off_state] = list(on_states)[0] - self.on_states_by_domain[domain] = set(on_states) - self.off_state_by_domain[domain] = off_state + self.on_states_by_domain[current_domain.get()] = set(on_states) diff --git a/tests/components/group/test_init.py b/tests/components/group/test_init.py index b9cdfcb1590..d3f2747933e 100644 --- a/tests/components/group/test_init.py +++ b/tests/components/group/test_init.py @@ -9,7 +9,7 @@ from unittest.mock import patch import pytest -from homeassistant.components import group, vacuum +from homeassistant.components import group from homeassistant.const import ( ATTR_ASSUMED_STATE, ATTR_FRIENDLY_NAME, @@ -659,24 +659,6 @@ async def test_is_on(hass: HomeAssistant) -> None: (STATE_ON, True), (STATE_OFF, False), ), - ( - ("vacuum", "vacuum"), - # Cleaning is the only on state - (vacuum.STATE_DOCKED, vacuum.STATE_CLEANING), - # Returning is the only on state - (vacuum.STATE_RETURNING, vacuum.STATE_PAUSED), - (vacuum.STATE_CLEANING, True), - (vacuum.STATE_RETURNING, True), - ), - ( - ("vacuum", "vacuum"), - # Multiple on states, so group state will be STATE_ON - (vacuum.STATE_RETURNING, vacuum.STATE_CLEANING), - # Only off states, so group state will be off - (vacuum.STATE_PAUSED, vacuum.STATE_IDLE), - (STATE_ON, True), - (STATE_OFF, False), - ), ], ) async def test_is_on_and_state_mixed_domains( @@ -1238,7 +1220,7 @@ async def test_group_climate_all_cool(hass: HomeAssistant) -> None: ) await hass.async_block_till_done() - assert hass.states.get("group.group_zero").state == "cool" + assert hass.states.get("group.group_zero").state == STATE_ON async def test_group_climate_all_off(hass: HomeAssistant) -> None: @@ -1352,7 +1334,7 @@ async def 
test_group_vacuum_on(hass: HomeAssistant) -> None: ) await hass.async_block_till_done() - assert hass.states.get("group.group_zero").state == "cleaning" + assert hass.states.get("group.group_zero").state == STATE_ON async def test_device_tracker_not_home(hass: HomeAssistant) -> None: From 98eb9a406730da2540e2b09c4a4cc6f45deb38de Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Thu, 25 Apr 2024 18:15:57 +0200 Subject: [PATCH 404/426] Revert orjson to 3.9.15 due to segmentation faults (#116168) --- homeassistant/package_constraints.txt | 2 +- pyproject.toml | 2 +- requirements.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index b88f2aefffa..aa29713a849 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -39,7 +39,7 @@ ifaddr==0.2.0 Jinja2==3.1.3 lru-dict==1.3.0 mutagen==1.47.0 -orjson==3.10.1 +orjson==3.9.15 packaging>=23.1 paho-mqtt==1.6.1 Pillow==10.3.0 diff --git a/pyproject.toml b/pyproject.toml index 70bd5c7dba3..baf919c2da5 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -53,7 +53,7 @@ dependencies = [ "cryptography==42.0.5", "Pillow==10.3.0", "pyOpenSSL==24.1.0", - "orjson==3.10.1", + "orjson==3.9.15", "packaging>=23.1", "pip>=21.3.1", "psutil-home-assistant==0.0.1", diff --git a/requirements.txt b/requirements.txt index 34ee8237921..44c60aec07a 100644 --- a/requirements.txt +++ b/requirements.txt @@ -28,7 +28,7 @@ PyJWT==2.8.0 cryptography==42.0.5 Pillow==10.3.0 pyOpenSSL==24.1.0 -orjson==3.10.1 +orjson==3.9.15 packaging>=23.1 pip>=21.3.1 psutil-home-assistant==0.0.1 From 0467fca316944938ac70316d2ca1177d4df3e38f Mon Sep 17 00:00:00 2001 From: On Freund Date: Thu, 25 Apr 2024 20:43:31 +0300 Subject: [PATCH 405/426] Bump pyrisco to 0.6.1 (#116182) --- homeassistant/components/risco/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 
deletions(-) diff --git a/homeassistant/components/risco/manifest.json b/homeassistant/components/risco/manifest.json index 4c590b95e52..22e73a10d6d 100644 --- a/homeassistant/components/risco/manifest.json +++ b/homeassistant/components/risco/manifest.json @@ -7,5 +7,5 @@ "iot_class": "local_push", "loggers": ["pyrisco"], "quality_scale": "platinum", - "requirements": ["pyrisco==0.6.0"] + "requirements": ["pyrisco==0.6.1"] } diff --git a/requirements_all.txt b/requirements_all.txt index ee8a074bf6b..de7f9ae0d0f 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2093,7 +2093,7 @@ pyrecswitch==1.0.2 pyrepetierng==0.1.0 # homeassistant.components.risco -pyrisco==0.6.0 +pyrisco==0.6.1 # homeassistant.components.rituals_perfume_genie pyrituals==0.0.6 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 2eb9a80281f..cf74c239ba2 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1635,7 +1635,7 @@ pyqwikswitch==0.93 pyrainbird==4.0.2 # homeassistant.components.risco -pyrisco==0.6.0 +pyrisco==0.6.1 # homeassistant.components.rituals_perfume_genie pyrituals==0.0.6 From 1e95476fa859b9e9bc148f063586784ac60b3b41 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Thu, 25 Apr 2024 12:56:33 -0500 Subject: [PATCH 406/426] Bump govee-ble to 0.31.2 (#116177) changelog: https://github.com/Bluetooth-Devices/govee-ble/compare/v0.31.0...v0.31.2 Fixes some unrelated BLE devices being detected as a GVH5106 --- homeassistant/components/govee_ble/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/govee_ble/manifest.json b/homeassistant/components/govee_ble/manifest.json index 64feedc44c1..98b802f8233 100644 --- a/homeassistant/components/govee_ble/manifest.json +++ b/homeassistant/components/govee_ble/manifest.json @@ -90,5 +90,5 @@ "dependencies": ["bluetooth_adapters"], "documentation": "https://www.home-assistant.io/integrations/govee_ble", "iot_class": "local_push", - "requirements": ["govee-ble==0.31.0"] + "requirements": ["govee-ble==0.31.2"] } diff --git a/requirements_all.txt b/requirements_all.txt index de7f9ae0d0f..3f63deb39a6 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -980,7 +980,7 @@ goslide-api==0.5.1 gotailwind==0.2.2 # homeassistant.components.govee_ble -govee-ble==0.31.0 +govee-ble==0.31.2 # homeassistant.components.govee_light_local govee-local-api==1.4.4 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index cf74c239ba2..3a8e18c50a3 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -803,7 +803,7 @@ googlemaps==2.5.1 gotailwind==0.2.2 # homeassistant.components.govee_ble -govee-ble==0.31.0 +govee-ble==0.31.2 # homeassistant.components.govee_light_local govee-local-api==1.4.4 From 18be38d19f899bf471835a376730ac34403b0b31 Mon Sep 17 00:00:00 2001 From: rappenze Date: Thu, 25 Apr 2024 19:57:15 +0200 Subject: [PATCH 407/426] Bump pyfibaro to 0.7.8 (#116126) --- homeassistant/components/fibaro/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff 
--git a/homeassistant/components/fibaro/manifest.json b/homeassistant/components/fibaro/manifest.json index bb1558f998b..39850672d06 100644 --- a/homeassistant/components/fibaro/manifest.json +++ b/homeassistant/components/fibaro/manifest.json @@ -7,5 +7,5 @@ "integration_type": "hub", "iot_class": "local_push", "loggers": ["pyfibaro"], - "requirements": ["pyfibaro==0.7.7"] + "requirements": ["pyfibaro==0.7.8"] } diff --git a/requirements_all.txt b/requirements_all.txt index 3f63deb39a6..e69bb96e202 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1818,7 +1818,7 @@ pyevilgenius==2.0.0 pyezviz==0.2.1.2 # homeassistant.components.fibaro -pyfibaro==0.7.7 +pyfibaro==0.7.8 # homeassistant.components.fido pyfido==2.1.2 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 3a8e18c50a3..f8afe088cb1 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1417,7 +1417,7 @@ pyevilgenius==2.0.0 pyezviz==0.2.1.2 # homeassistant.components.fibaro -pyfibaro==0.7.7 +pyfibaro==0.7.8 # homeassistant.components.fido pyfido==2.1.2 From 316f58404d68328835a9a716b21606cb98cb805e Mon Sep 17 00:00:00 2001 From: Joakim Plate Date: Thu, 25 Apr 2024 19:58:13 +0200 Subject: [PATCH 408/426] Update rfxtrx to 0.31.1 (#116125) --- homeassistant/components/rfxtrx/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/rfxtrx/manifest.json b/homeassistant/components/rfxtrx/manifest.json index ec902855f27..bb3701e2e31 100644 --- a/homeassistant/components/rfxtrx/manifest.json +++ b/homeassistant/components/rfxtrx/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/rfxtrx", "iot_class": "local_push", "loggers": ["RFXtrx"], - "requirements": ["pyRFXtrx==0.31.0"] + "requirements": ["pyRFXtrx==0.31.1"] } diff --git a/requirements_all.txt b/requirements_all.txt index e69bb96e202..bb5fbd528bf 100644 --- 
a/requirements_all.txt +++ b/requirements_all.txt @@ -1661,7 +1661,7 @@ pyEmby==1.9 pyHik==0.3.2 # homeassistant.components.rfxtrx -pyRFXtrx==0.31.0 +pyRFXtrx==0.31.1 # homeassistant.components.sony_projector pySDCP==1 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index f8afe088cb1..4c6f5d590e9 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1311,7 +1311,7 @@ pyDuotecno==2024.3.2 pyElectra==1.2.0 # homeassistant.components.rfxtrx -pyRFXtrx==0.31.0 +pyRFXtrx==0.31.1 # homeassistant.components.tibber pyTibber==0.28.2 From cc791295871645e38f09b11356e969f5fbdf6d3c Mon Sep 17 00:00:00 2001 From: Luke Lashley Date: Thu, 25 Apr 2024 14:26:11 -0400 Subject: [PATCH 409/426] Make Roborock listener update thread safe (#116184) Co-authored-by: J. Nick Koston --- homeassistant/components/roborock/device.py | 2 +- tests/components/roborock/test_sensor.py | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/roborock/device.py b/homeassistant/components/roborock/device.py index 69384d6e23a..6450d849859 100644 --- a/homeassistant/components/roborock/device.py +++ b/homeassistant/components/roborock/device.py @@ -137,4 +137,4 @@ class RoborockCoordinatedEntity( else: self.coordinator.roborock_device_info.props.consumable = value self.coordinator.data = self.coordinator.roborock_device_info.props - self.async_write_ha_state() + self.schedule_update_ha_state() diff --git a/tests/components/roborock/test_sensor.py b/tests/components/roborock/test_sensor.py index 23d16f643b2..88ed6e1098c 100644 --- a/tests/components/roborock/test_sensor.py +++ b/tests/components/roborock/test_sensor.py @@ -89,6 +89,7 @@ async def test_listener_update( ) ] ) + await hass.async_block_till_done() assert hass.states.get("sensor.roborock_s7_maxv_filter_time_left").state == str( FILTER_REPLACE_TIME - 743 ) From b3124aa7ed115db59f53274ba4ebf932073d89e6 Mon Sep 17 00:00:00 2001 From: Michael Hansen Date: Thu, 25 Apr 2024 
13:35:29 -0500 Subject: [PATCH 410/426] Update Ollama model names list (#116172) --- homeassistant/components/ollama/const.py | 145 ++++++++++++----------- 1 file changed, 78 insertions(+), 67 deletions(-) diff --git a/homeassistant/components/ollama/const.py b/homeassistant/components/ollama/const.py index 853370066dc..e25ae1f0877 100644 --- a/homeassistant/components/ollama/const.py +++ b/homeassistant/components/ollama/const.py @@ -81,75 +81,86 @@ DEFAULT_MAX_HISTORY = 20 MAX_HISTORY_SECONDS = 60 * 60 # 1 hour MODEL_NAMES = [ # https://ollama.com/library - "gemma", - "llama2", - "mistral", - "mixtral", - "llava", - "neural-chat", - "codellama", - "dolphin-mixtral", - "qwen", - "llama2-uncensored", - "mistral-openorca", - "deepseek-coder", - "nous-hermes2", - "phi", - "orca-mini", - "dolphin-mistral", - "wizard-vicuna-uncensored", - "vicuna", - "tinydolphin", - "llama2-chinese", - "nomic-embed-text", - "openhermes", - "zephyr", - "tinyllama", - "openchat", - "wizardcoder", - "starcoder", - "phind-codellama", - "starcoder2", - "yi", - "orca2", - "falcon", - "wizard-math", - "dolphin-phi", - "starling-lm", - "nous-hermes", - "stable-code", - "medllama2", - "bakllava", - "codeup", - "wizardlm-uncensored", - "solar", - "everythinglm", - "sqlcoder", - "dolphincoder", - "nous-hermes2-mixtral", - "stable-beluga", - "yarn-mistral", - "stablelm2", - "samantha-mistral", - "meditron", - "stablelm-zephyr", - "magicoder", - "yarn-llama2", - "llama-pro", - "deepseek-llm", - "wizard-vicuna", - "codebooga", - "mistrallite", - "all-minilm", - "nexusraven", - "open-orca-platypus2", - "goliath", - "notux", - "megadolphin", "alfred", - "xwinlm", - "wizardlm", + "all-minilm", + "bakllava", + "codebooga", + "codegemma", + "codellama", + "codeqwen", + "codeup", + "command-r", + "command-r-plus", + "dbrx", + "deepseek-coder", + "deepseek-llm", + "dolphin-llama3", + "dolphin-mistral", + "dolphin-mixtral", + "dolphin-phi", + "dolphincoder", "duckdb-nsql", + "everythinglm", + "falcon", + 
"gemma", + "goliath", + "llama-pro", + "llama2", + "llama2-chinese", + "llama2-uncensored", + "llama3", + "llava", + "magicoder", + "meditron", + "medllama2", + "megadolphin", + "mistral", + "mistral-openorca", + "mistrallite", + "mixtral", + "mxbai-embed-large", + "neural-chat", + "nexusraven", + "nomic-embed-text", "notus", + "notux", + "nous-hermes", + "nous-hermes2", + "nous-hermes2-mixtral", + "open-orca-platypus2", + "openchat", + "openhermes", + "orca-mini", + "orca2", + "phi", + "phi3", + "phind-codellama", + "qwen", + "samantha-mistral", + "snowflake-arctic-embed", + "solar", + "sqlcoder", + "stable-beluga", + "stable-code", + "stablelm-zephyr", + "stablelm2", + "starcoder", + "starcoder2", + "starling-lm", + "tinydolphin", + "tinyllama", + "vicuna", + "wizard-math", + "wizard-vicuna", + "wizard-vicuna-uncensored", + "wizardcoder", + "wizardlm", + "wizardlm-uncensored", + "wizardlm2", + "xwinlm", + "yarn-llama2", + "yarn-mistral", + "yi", + "zephyr", ] DEFAULT_MODEL = "llama2:latest" From 51bceb1c99a6ac5d0c058b2cd7bba213cb3ea47d Mon Sep 17 00:00:00 2001 From: Maciej Bieniek Date: Thu, 25 Apr 2024 21:06:52 +0200 Subject: [PATCH 411/426] Fix climate entity creation when Shelly WallDisplay uses external relay as actuator (#115216) * Fix climate entity creation when Shelly WallDisplay uses external relay as actuator * More comments * Wrap condition into function --------- Co-authored-by: Maciej Bieniek <478555+bieniu@users.noreply.github.com> --- homeassistant/components/shelly/climate.py | 6 +++- homeassistant/components/shelly/switch.py | 16 ++++++--- homeassistant/components/shelly/utils.py | 5 +++ tests/components/shelly/test_climate.py | 40 +++++++++++++++++++++- tests/components/shelly/test_switch.py | 1 + 5 files changed, 62 insertions(+), 6 deletions(-) diff --git a/homeassistant/components/shelly/climate.py b/homeassistant/components/shelly/climate.py index b368b38820e..81289bc1a9b 100644 --- a/homeassistant/components/shelly/climate.py +++ 
b/homeassistant/components/shelly/climate.py @@ -132,7 +132,11 @@ def async_setup_rpc_entry( climate_ids = [] for id_ in climate_key_ids: climate_ids.append(id_) - + # There are three configuration scenarios for WallDisplay: + # - relay mode (no thermostat) + # - thermostat mode using the internal relay as an actuator + # - thermostat mode using an external (from another device) relay as + # an actuator if is_rpc_thermostat_internal_actuator(coordinator.device.status): # Wall Display relay is used as the thermostat actuator, # we need to remove a switch entity diff --git a/homeassistant/components/shelly/switch.py b/homeassistant/components/shelly/switch.py index 14fec43c58b..81b16d48ab8 100644 --- a/homeassistant/components/shelly/switch.py +++ b/homeassistant/components/shelly/switch.py @@ -43,6 +43,7 @@ from .utils import ( is_block_channel_type_light, is_rpc_channel_type_light, is_rpc_thermostat_internal_actuator, + is_rpc_thermostat_mode, ) @@ -140,12 +141,19 @@ def async_setup_rpc_entry( continue if coordinator.model == MODEL_WALL_DISPLAY: - if not is_rpc_thermostat_internal_actuator(coordinator.device.status): - # Wall Display relay is not used as the thermostat actuator, - # we need to remove a climate entity + # There are three configuration scenarios for WallDisplay: + # - relay mode (no thermostat) + # - thermostat mode using the internal relay as an actuator + # - thermostat mode using an external (from another device) relay as + # an actuator + if not is_rpc_thermostat_mode(id_, coordinator.device.status): + # The device is not in thermostat mode, we need to remove a climate + # entity unique_id = f"{coordinator.mac}-thermostat:{id_}" async_remove_shelly_entity(hass, "climate", unique_id) - else: + elif is_rpc_thermostat_internal_actuator(coordinator.device.status): + # The internal relay is an actuator, skip this ID so as not to create + # a switch entity continue switch_ids.append(id_) diff --git a/homeassistant/components/shelly/utils.py 
b/homeassistant/components/shelly/utils.py index ce98e0d5c12..b7cb2f1476a 100644 --- a/homeassistant/components/shelly/utils.py +++ b/homeassistant/components/shelly/utils.py @@ -500,3 +500,8 @@ def async_remove_shelly_rpc_entities( if entity_id := entity_reg.async_get_entity_id(domain, DOMAIN, f"{mac}-{key}"): LOGGER.debug("Removing entity: %s", entity_id) entity_reg.async_remove(entity_id) + + +def is_rpc_thermostat_mode(ident: int, status: dict[str, Any]) -> bool: + """Return True if 'thermostat:' is present in the status.""" + return f"thermostat:{ident}" in status diff --git a/tests/components/shelly/test_climate.py b/tests/components/shelly/test_climate.py index 9fee3468f11..9946dd7640d 100644 --- a/tests/components/shelly/test_climate.py +++ b/tests/components/shelly/test_climate.py @@ -25,7 +25,12 @@ from homeassistant.components.climate import ( from homeassistant.components.shelly.const import DOMAIN from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN from homeassistant.config_entries import SOURCE_REAUTH, ConfigEntryState -from homeassistant.const import ATTR_ENTITY_ID, ATTR_TEMPERATURE, STATE_UNAVAILABLE +from homeassistant.const import ( + ATTR_ENTITY_ID, + ATTR_TEMPERATURE, + STATE_ON, + STATE_UNAVAILABLE, +) from homeassistant.core import HomeAssistant, State from homeassistant.exceptions import HomeAssistantError, ServiceValidationError from homeassistant.helpers.device_registry import DeviceRegistry @@ -711,3 +716,36 @@ async def test_wall_display_thermostat_mode( entry = entity_registry.async_get(climate_entity_id) assert entry assert entry.unique_id == "123456789ABC-thermostat:0" + + +async def test_wall_display_thermostat_mode_external_actuator( + hass: HomeAssistant, + mock_rpc_device: Mock, + entity_registry: EntityRegistry, + monkeypatch: pytest.MonkeyPatch, +) -> None: + """Test Wall Display in thermostat mode with an external actuator.""" + climate_entity_id = "climate.test_name" + switch_entity_id = "switch.test_switch_0" + 
+ new_status = deepcopy(mock_rpc_device.status) + new_status["sys"]["relay_in_thermostat"] = False + monkeypatch.setattr(mock_rpc_device, "status", new_status) + + await init_integration(hass, 2, model=MODEL_WALL_DISPLAY) + + # the switch entity should be created + state = hass.states.get(switch_entity_id) + assert state + assert state.state == STATE_ON + assert len(hass.states.async_entity_ids(SWITCH_DOMAIN)) == 1 + + # the climate entity should be created + state = hass.states.get(climate_entity_id) + assert state + assert state.state == HVACMode.HEAT + assert len(hass.states.async_entity_ids(CLIMATE_DOMAIN)) == 1 + + entry = entity_registry.async_get(climate_entity_id) + assert entry + assert entry.unique_id == "123456789ABC-thermostat:0" diff --git a/tests/components/shelly/test_switch.py b/tests/components/shelly/test_switch.py index fe2c4354afc..dd214c8841d 100644 --- a/tests/components/shelly/test_switch.py +++ b/tests/components/shelly/test_switch.py @@ -330,6 +330,7 @@ async def test_wall_display_relay_mode( new_status = deepcopy(mock_rpc_device.status) new_status["sys"]["relay_in_thermostat"] = False + new_status.pop("thermostat:0") monkeypatch.setattr(mock_rpc_device, "status", new_status) await init_integration(hass, 2, model=MODEL_WALL_DISPLAY) From 8523df952e1076ffe6189f68c1d2e10c46958331 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Thu, 25 Apr 2024 14:07:07 -0500 Subject: [PATCH 412/426] Fix smartthings doing I/O in the event loop to import platforms (#116190) --- homeassistant/components/smartthings/__init__.py | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/smartthings/__init__.py b/homeassistant/components/smartthings/__init__.py index 8136806cd0b..9bfa11d3293 100644 --- a/homeassistant/components/smartthings/__init__.py +++ b/homeassistant/components/smartthings/__init__.py @@ -28,6 +28,7 @@ from homeassistant.helpers.entity import Entity from homeassistant.helpers.event import async_track_time_interval from homeassistant.helpers.typing import ConfigType from homeassistant.loader import async_get_loaded_integration +from homeassistant.setup import SetupPhases, async_pause_setup from .config_flow import SmartThingsFlowHandler # noqa: F401 from .const import ( @@ -170,7 +171,13 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: ) # Setup device broker - broker = DeviceBroker(hass, entry, token, smart_app, devices, scenes) + with async_pause_setup(hass, SetupPhases.WAIT_IMPORT_PLATFORMS): + # DeviceBroker has a side effect of importing platform + # modules when its created. In the future this should be + # refactored to not do this. 
+ broker = await hass.async_add_import_executor_job( + DeviceBroker, hass, entry, token, smart_app, devices, scenes + ) broker.connect() hass.data[DOMAIN][DATA_BROKERS][entry.entry_id] = broker From 9d33965bc9da4105f75ce70558577b76cd40cf8b Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Thu, 25 Apr 2024 21:20:24 +0200 Subject: [PATCH 413/426] Fix flaky traccar_server tests (#116191) --- .../components/traccar_server/diagnostics.py | 4 +- .../snapshots/test_diagnostics.ambr | 190 +++++++++--------- .../traccar_server/test_diagnostics.py | 14 +- 3 files changed, 110 insertions(+), 98 deletions(-) diff --git a/homeassistant/components/traccar_server/diagnostics.py b/homeassistant/components/traccar_server/diagnostics.py index 80dc7a9c7cd..68f1e4fca8a 100644 --- a/homeassistant/components/traccar_server/diagnostics.py +++ b/homeassistant/components/traccar_server/diagnostics.py @@ -57,7 +57,7 @@ async def async_get_config_entry_diagnostics( "coordinator_data": coordinator.data, "entities": [ { - "enity_id": entity.entity_id, + "entity_id": entity.entity_id, "disabled": entity.disabled, "unit_of_measurement": entity.unit_of_measurement, "state": _entity_state(hass, entity, coordinator), @@ -92,7 +92,7 @@ async def async_get_device_diagnostics( "coordinator_data": coordinator.data, "entities": [ { - "enity_id": entity.entity_id, + "entity_id": entity.entity_id, "disabled": entity.disabled, "unit_of_measurement": entity.unit_of_measurement, "state": _entity_state(hass, entity, coordinator), diff --git a/tests/components/traccar_server/snapshots/test_diagnostics.ambr b/tests/components/traccar_server/snapshots/test_diagnostics.ambr index 89a6416c303..39e67db8df7 100644 --- a/tests/components/traccar_server/snapshots/test_diagnostics.ambr +++ b/tests/components/traccar_server/snapshots/test_diagnostics.ambr @@ -73,7 +73,30 @@ 'entities': list([ dict({ 'disabled': False, - 'enity_id': 'device_tracker.x_wing', + 'entity_id': 'binary_sensor.x_wing_motion', + 'state': 
dict({ + 'attributes': dict({ + 'device_class': 'motion', + 'friendly_name': 'X-Wing Motion', + }), + 'state': 'off', + }), + 'unit_of_measurement': None, + }), + dict({ + 'disabled': False, + 'entity_id': 'binary_sensor.x_wing_status', + 'state': dict({ + 'attributes': dict({ + 'friendly_name': 'X-Wing Status', + }), + 'state': 'on', + }), + 'unit_of_measurement': None, + }), + dict({ + 'disabled': False, + 'entity_id': 'device_tracker.x_wing', 'state': dict({ 'attributes': dict({ 'category': 'starfighter', @@ -92,30 +115,31 @@ }), dict({ 'disabled': False, - 'enity_id': 'binary_sensor.x_wing_motion', + 'entity_id': 'sensor.x_wing_address', 'state': dict({ 'attributes': dict({ - 'device_class': 'motion', - 'friendly_name': 'X-Wing Motion', + 'friendly_name': 'X-Wing Address', }), - 'state': 'off', + 'state': '**REDACTED**', }), 'unit_of_measurement': None, }), dict({ 'disabled': False, - 'enity_id': 'binary_sensor.x_wing_status', + 'entity_id': 'sensor.x_wing_altitude', 'state': dict({ 'attributes': dict({ - 'friendly_name': 'X-Wing Status', + 'friendly_name': 'X-Wing Altitude', + 'state_class': 'measurement', + 'unit_of_measurement': 'm', }), - 'state': 'on', + 'state': '546841384638', }), - 'unit_of_measurement': None, + 'unit_of_measurement': 'm', }), dict({ 'disabled': False, - 'enity_id': 'sensor.x_wing_battery', + 'entity_id': 'sensor.x_wing_battery', 'state': dict({ 'attributes': dict({ 'device_class': 'battery', @@ -129,7 +153,18 @@ }), dict({ 'disabled': False, - 'enity_id': 'sensor.x_wing_speed', + 'entity_id': 'sensor.x_wing_geofence', + 'state': dict({ + 'attributes': dict({ + 'friendly_name': 'X-Wing Geofence', + }), + 'state': 'Tatooine', + }), + 'unit_of_measurement': None, + }), + dict({ + 'disabled': False, + 'entity_id': 'sensor.x_wing_speed', 'state': dict({ 'attributes': dict({ 'device_class': 'speed', @@ -141,41 +176,6 @@ }), 'unit_of_measurement': 'kn', }), - dict({ - 'disabled': False, - 'enity_id': 'sensor.x_wing_altitude', - 'state': 
dict({ - 'attributes': dict({ - 'friendly_name': 'X-Wing Altitude', - 'state_class': 'measurement', - 'unit_of_measurement': 'm', - }), - 'state': '546841384638', - }), - 'unit_of_measurement': 'm', - }), - dict({ - 'disabled': False, - 'enity_id': 'sensor.x_wing_address', - 'state': dict({ - 'attributes': dict({ - 'friendly_name': 'X-Wing Address', - }), - 'state': '**REDACTED**', - }), - 'unit_of_measurement': None, - }), - dict({ - 'disabled': False, - 'enity_id': 'sensor.x_wing_geofence', - 'state': dict({ - 'attributes': dict({ - 'friendly_name': 'X-Wing Geofence', - }), - 'state': 'Tatooine', - }), - 'unit_of_measurement': None, - }), ]), 'subscription_status': 'disconnected', }) @@ -254,51 +254,51 @@ 'entities': list([ dict({ 'disabled': True, - 'enity_id': 'binary_sensor.x_wing_motion', + 'entity_id': 'binary_sensor.x_wing_motion', 'state': None, 'unit_of_measurement': None, }), dict({ 'disabled': True, - 'enity_id': 'binary_sensor.x_wing_status', + 'entity_id': 'binary_sensor.x_wing_status', 'state': None, 'unit_of_measurement': None, }), dict({ 'disabled': True, - 'enity_id': 'sensor.x_wing_battery', + 'entity_id': 'device_tracker.x_wing', 'state': None, - 'unit_of_measurement': '%', + 'unit_of_measurement': None, }), dict({ 'disabled': True, - 'enity_id': 'sensor.x_wing_speed', + 'entity_id': 'sensor.x_wing_address', 'state': None, - 'unit_of_measurement': 'kn', + 'unit_of_measurement': None, }), dict({ 'disabled': True, - 'enity_id': 'sensor.x_wing_altitude', + 'entity_id': 'sensor.x_wing_altitude', 'state': None, 'unit_of_measurement': 'm', }), dict({ 'disabled': True, - 'enity_id': 'sensor.x_wing_address', + 'entity_id': 'sensor.x_wing_battery', + 'state': None, + 'unit_of_measurement': '%', + }), + dict({ + 'disabled': True, + 'entity_id': 'sensor.x_wing_geofence', 'state': None, 'unit_of_measurement': None, }), dict({ 'disabled': True, - 'enity_id': 'sensor.x_wing_geofence', + 'entity_id': 'sensor.x_wing_speed', 'state': None, - 
'unit_of_measurement': None, - }), - dict({ - 'disabled': True, - 'enity_id': 'device_tracker.x_wing', - 'state': None, - 'unit_of_measurement': None, + 'unit_of_measurement': 'kn', }), ]), 'subscription_status': 'disconnected', @@ -378,49 +378,19 @@ 'entities': list([ dict({ 'disabled': True, - 'enity_id': 'binary_sensor.x_wing_motion', + 'entity_id': 'binary_sensor.x_wing_motion', 'state': None, 'unit_of_measurement': None, }), dict({ 'disabled': True, - 'enity_id': 'binary_sensor.x_wing_status', - 'state': None, - 'unit_of_measurement': None, - }), - dict({ - 'disabled': True, - 'enity_id': 'sensor.x_wing_battery', - 'state': None, - 'unit_of_measurement': '%', - }), - dict({ - 'disabled': True, - 'enity_id': 'sensor.x_wing_speed', - 'state': None, - 'unit_of_measurement': 'kn', - }), - dict({ - 'disabled': True, - 'enity_id': 'sensor.x_wing_altitude', - 'state': None, - 'unit_of_measurement': 'm', - }), - dict({ - 'disabled': True, - 'enity_id': 'sensor.x_wing_address', - 'state': None, - 'unit_of_measurement': None, - }), - dict({ - 'disabled': True, - 'enity_id': 'sensor.x_wing_geofence', + 'entity_id': 'binary_sensor.x_wing_status', 'state': None, 'unit_of_measurement': None, }), dict({ 'disabled': False, - 'enity_id': 'device_tracker.x_wing', + 'entity_id': 'device_tracker.x_wing', 'state': dict({ 'attributes': dict({ 'category': 'starfighter', @@ -437,6 +407,36 @@ }), 'unit_of_measurement': None, }), + dict({ + 'disabled': True, + 'entity_id': 'sensor.x_wing_address', + 'state': None, + 'unit_of_measurement': None, + }), + dict({ + 'disabled': True, + 'entity_id': 'sensor.x_wing_altitude', + 'state': None, + 'unit_of_measurement': 'm', + }), + dict({ + 'disabled': True, + 'entity_id': 'sensor.x_wing_battery', + 'state': None, + 'unit_of_measurement': '%', + }), + dict({ + 'disabled': True, + 'entity_id': 'sensor.x_wing_geofence', + 'state': None, + 'unit_of_measurement': None, + }), + dict({ + 'disabled': True, + 'entity_id': 'sensor.x_wing_speed', + 
'state': None, + 'unit_of_measurement': 'kn', + }), ]), 'subscription_status': 'disconnected', }) diff --git a/tests/components/traccar_server/test_diagnostics.py b/tests/components/traccar_server/test_diagnostics.py index 493f0ae92d1..9019cd0ebf1 100644 --- a/tests/components/traccar_server/test_diagnostics.py +++ b/tests/components/traccar_server/test_diagnostics.py @@ -33,6 +33,10 @@ async def test_entry_diagnostics( hass_client, mock_config_entry, ) + # Sort the list of entities + result["entities"] = sorted( + result["entities"], key=lambda entity: entity["entity_id"] + ) assert result == snapshot(name="entry") @@ -64,13 +68,17 @@ async def test_device_diagnostics( device_id=device.id, include_disabled_entities=True, ) - # Enable all entitits to show everything in snapshots + # Enable all entities to show everything in snapshots for entity in entities: entity_registry.async_update_entity(entity.entity_id, disabled_by=None) result = await get_diagnostics_for_device( hass, hass_client, mock_config_entry, device=device ) + # Sort the list of entities + result["entities"] = sorted( + result["entities"], key=lambda entity: entity["entity_id"] + ) assert result == snapshot(name=device.name) @@ -110,5 +118,9 @@ async def test_device_diagnostics_with_disabled_entity( result = await get_diagnostics_for_device( hass, hass_client, mock_config_entry, device=device ) + # Sort the list of entities + result["entities"] = sorted( + result["entities"], key=lambda entity: entity["entity_id"] + ) assert result == snapshot(name=device.name) From a8b41c90c5caa4404273a2685fca727491ba6c75 Mon Sep 17 00:00:00 2001 From: Thomas55555 <59625598+Thomas55555@users.noreply.github.com> Date: Thu, 25 Apr 2024 21:36:00 +0200 Subject: [PATCH 414/426] Bump aioautomower to 2024.4.4 (#116185) --- homeassistant/components/husqvarna_automower/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- .../husqvarna_automower/snapshots/test_diagnostics.ambr | 2 +- 4 files 
changed, 4 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/husqvarna_automower/manifest.json b/homeassistant/components/husqvarna_automower/manifest.json index 147c6dfb6d5..647320a8bf3 100644 --- a/homeassistant/components/husqvarna_automower/manifest.json +++ b/homeassistant/components/husqvarna_automower/manifest.json @@ -7,5 +7,5 @@ "documentation": "https://www.home-assistant.io/integrations/husqvarna_automower", "iot_class": "cloud_push", "loggers": ["aioautomower"], - "requirements": ["aioautomower==2024.4.3"] + "requirements": ["aioautomower==2024.4.4"] } diff --git a/requirements_all.txt b/requirements_all.txt index bb5fbd528bf..20473b70121 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -204,7 +204,7 @@ aioaseko==0.1.1 aioasuswrt==1.4.0 # homeassistant.components.husqvarna_automower -aioautomower==2024.4.3 +aioautomower==2024.4.4 # homeassistant.components.azure_devops aioazuredevops==2.0.0 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 4c6f5d590e9..1ee1c48d223 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -183,7 +183,7 @@ aioaseko==0.1.1 aioasuswrt==1.4.0 # homeassistant.components.husqvarna_automower -aioautomower==2024.4.3 +aioautomower==2024.4.4 # homeassistant.components.azure_devops aioazuredevops==2.0.0 diff --git a/tests/components/husqvarna_automower/snapshots/test_diagnostics.ambr b/tests/components/husqvarna_automower/snapshots/test_diagnostics.ambr index ee951986062..bdbc0a60490 100644 --- a/tests/components/husqvarna_automower/snapshots/test_diagnostics.ambr +++ b/tests/components/husqvarna_automower/snapshots/test_diagnostics.ambr @@ -91,7 +91,7 @@ 'work_areas': dict({ '0': dict({ 'cutting_height': 50, - 'name': None, + 'name': 'my_lawn', }), '123456': dict({ 'cutting_height': 50, From 86494891175ea2f13621947c6e924dae94f3b16b Mon Sep 17 00:00:00 2001 From: Anrijs Date: Thu, 25 Apr 2024 22:38:20 +0300 Subject: [PATCH 415/426] Add support for Aranet 
radiation devices (#115239) * sensor: added radiation dose sensor type and units * Add support for Aranet Radiation devices * Fix Aranet Radiation CI issues * Revert "sensor: added radiation dose sensor type and units" This reverts commit 28736a7da760d3490e879bb7fe5b17f8f2b851f4. * aranet4: bump version to 2.3.3 * aranet radiation: remove removed sesnor consts * aranet radiation: use radioactive icon by default --------- Co-authored-by: Shay Levy --- CODEOWNERS | 4 +- homeassistant/components/aranet/const.py | 1 + homeassistant/components/aranet/icons.json | 12 ++++ homeassistant/components/aranet/manifest.json | 2 +- homeassistant/components/aranet/sensor.py | 24 +++++++- homeassistant/components/aranet/strings.json | 2 +- tests/components/aranet/__init__.py | 8 +++ tests/components/aranet/test_sensor.py | 60 +++++++++++++++++++ 8 files changed, 108 insertions(+), 5 deletions(-) create mode 100644 homeassistant/components/aranet/icons.json diff --git a/CODEOWNERS b/CODEOWNERS index c8a391fd7dc..45d4ad6053e 100644 --- a/CODEOWNERS +++ b/CODEOWNERS @@ -127,8 +127,8 @@ build.json @home-assistant/supervisor /tests/components/aprilaire/ @chamberlain2007 /homeassistant/components/aprs/ @PhilRW /tests/components/aprs/ @PhilRW -/homeassistant/components/aranet/ @aschmitz @thecode -/tests/components/aranet/ @aschmitz @thecode +/homeassistant/components/aranet/ @aschmitz @thecode @anrijs +/tests/components/aranet/ @aschmitz @thecode @anrijs /homeassistant/components/arcam_fmj/ @elupus /tests/components/arcam_fmj/ @elupus /homeassistant/components/arris_tg2492lg/ @vanbalken diff --git a/homeassistant/components/aranet/const.py b/homeassistant/components/aranet/const.py index 056c627daa8..e038a073fd5 100644 --- a/homeassistant/components/aranet/const.py +++ b/homeassistant/components/aranet/const.py @@ -1,3 +1,4 @@ """Constants for the Aranet integration.""" DOMAIN = "aranet" +ARANET_MANUFACTURER_NAME = "SAF Tehnika" diff --git a/homeassistant/components/aranet/icons.json 
b/homeassistant/components/aranet/icons.json new file mode 100644 index 00000000000..6d6e9a83b03 --- /dev/null +++ b/homeassistant/components/aranet/icons.json @@ -0,0 +1,12 @@ +{ + "entity": { + "sensor": { + "radiation_total": { + "default": "mdi:radioactive" + }, + "radiation_rate": { + "default": "mdi:radioactive" + } + } + } +} diff --git a/homeassistant/components/aranet/manifest.json b/homeassistant/components/aranet/manifest.json index 152c56e80f3..a1cd80cc3c7 100644 --- a/homeassistant/components/aranet/manifest.json +++ b/homeassistant/components/aranet/manifest.json @@ -13,7 +13,7 @@ "connectable": false } ], - "codeowners": ["@aschmitz", "@thecode"], + "codeowners": ["@aschmitz", "@thecode", "@anrijs"], "config_flow": true, "dependencies": ["bluetooth_adapters"], "documentation": "https://www.home-assistant.io/integrations/aranet", diff --git a/homeassistant/components/aranet/sensor.py b/homeassistant/components/aranet/sensor.py index b55fe2bc5ce..4509aa66027 100644 --- a/homeassistant/components/aranet/sensor.py +++ b/homeassistant/components/aranet/sensor.py @@ -23,6 +23,7 @@ from homeassistant.components.sensor import ( SensorStateClass, ) from homeassistant.const import ( + ATTR_MANUFACTURER, ATTR_NAME, ATTR_SW_VERSION, CONCENTRATION_PARTS_PER_MILLION, @@ -37,7 +38,7 @@ from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity import EntityDescription from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .const import DOMAIN +from .const import ARANET_MANUFACTURER_NAME, DOMAIN @dataclass(frozen=True) @@ -48,6 +49,7 @@ class AranetSensorEntityDescription(SensorEntityDescription): # Restrict the type to satisfy the type checker and catch attempts # to use UNDEFINED in the entity descriptions. 
name: str | None = None + scale: float | int = 1 SENSOR_DESCRIPTIONS = { @@ -79,6 +81,24 @@ SENSOR_DESCRIPTIONS = { native_unit_of_measurement=CONCENTRATION_PARTS_PER_MILLION, state_class=SensorStateClass.MEASUREMENT, ), + "radiation_rate": AranetSensorEntityDescription( + key="radiation_rate", + translation_key="radiation_rate", + name="Radiation Dose Rate", + native_unit_of_measurement="μSv/h", + state_class=SensorStateClass.MEASUREMENT, + suggested_display_precision=2, + scale=0.001, + ), + "radiation_total": AranetSensorEntityDescription( + key="radiation_total", + translation_key="radiation_total", + name="Radiation Total Dose", + native_unit_of_measurement="mSv", + state_class=SensorStateClass.MEASUREMENT, + suggested_display_precision=4, + scale=0.000001, + ), "battery": AranetSensorEntityDescription( key="battery", name="Battery", @@ -115,6 +135,7 @@ def _sensor_device_info_to_hass( hass_device_info = DeviceInfo({}) if adv.readings and adv.readings.name: hass_device_info[ATTR_NAME] = adv.readings.name + hass_device_info[ATTR_MANUFACTURER] = ARANET_MANUFACTURER_NAME if adv.manufacturer_data: hass_device_info[ATTR_SW_VERSION] = str(adv.manufacturer_data.version) return hass_device_info @@ -132,6 +153,7 @@ def sensor_update_to_bluetooth_data_update( val = getattr(adv.readings, key) if val == -1: continue + val *= desc.scale data[tag] = val names[tag] = desc.name descs[tag] = desc diff --git a/homeassistant/components/aranet/strings.json b/homeassistant/components/aranet/strings.json index ac8d1907770..1cc695637d4 100644 --- a/homeassistant/components/aranet/strings.json +++ b/homeassistant/components/aranet/strings.json @@ -17,7 +17,7 @@ }, "abort": { "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", - "integrations_diabled": "This device doesn't have integrations enabled. Please enable smart home integrations using the app and try again.", + "integrations_disabled": "This device doesn't have integrations enabled. 
Please enable smart home integrations using the app and try again.", "no_devices_found": "No unconfigured Aranet devices found.", "outdated_version": "This device is using outdated firmware. Please update it to at least v1.2.0 and try again." } diff --git a/tests/components/aranet/__init__.py b/tests/components/aranet/__init__.py index 4dc9434bd65..a6b32d56e4c 100644 --- a/tests/components/aranet/__init__.py +++ b/tests/components/aranet/__init__.py @@ -73,3 +73,11 @@ VALID_ARANET2_DATA_SERVICE_INFO = fake_service_info( 1794: b"\x01!\x04\x04\x01\x00\x00\x00\x00\x00\xf0\x01\x00\x00\x0c\x02\x00O\x00<\x00\x01\x00\x80" }, ) + +VALID_ARANET_RADIATION_DATA_SERVICE_INFO = fake_service_info( + "Aranet\u2622 12345", + "0000fce0-0000-1000-8000-00805f9b34fb", + { + 1794: b"\x02!&\x04\x01\x00`-\x00\x00\x08\x98\x05\x00n\x00\x00d\x00,\x01\xfd\x00\xc7" + }, +) diff --git a/tests/components/aranet/test_sensor.py b/tests/components/aranet/test_sensor.py index 20aea65989d..0d57f00fdf4 100644 --- a/tests/components/aranet/test_sensor.py +++ b/tests/components/aranet/test_sensor.py @@ -8,6 +8,7 @@ from homeassistant.core import HomeAssistant from . 
import ( DISABLED_INTEGRATIONS_SERVICE_INFO, VALID_ARANET2_DATA_SERVICE_INFO, + VALID_ARANET_RADIATION_DATA_SERVICE_INFO, VALID_DATA_SERVICE_INFO, ) @@ -15,6 +16,65 @@ from tests.common import MockConfigEntry from tests.components.bluetooth import inject_bluetooth_service_info +async def test_sensors_aranet_radiation( + hass: HomeAssistant, entity_registry_enabled_by_default: None +) -> None: + """Test setting up creates the sensors for Aranet Radiation device.""" + entry = MockConfigEntry( + domain=DOMAIN, + unique_id="aa:bb:cc:dd:ee:ff", + ) + entry.add_to_hass(hass) + + assert await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + + assert len(hass.states.async_all("sensor")) == 0 + inject_bluetooth_service_info(hass, VALID_ARANET_RADIATION_DATA_SERVICE_INFO) + await hass.async_block_till_done() + assert len(hass.states.async_all("sensor")) == 4 + + batt_sensor = hass.states.get("sensor.aranet_12345_battery") + batt_sensor_attrs = batt_sensor.attributes + assert batt_sensor.state == "100" + assert batt_sensor_attrs[ATTR_FRIENDLY_NAME] == "Aranet\u2622 12345 Battery" + assert batt_sensor_attrs[ATTR_UNIT_OF_MEASUREMENT] == "%" + assert batt_sensor_attrs[ATTR_STATE_CLASS] == "measurement" + + humid_sensor = hass.states.get("sensor.aranet_12345_radiation_total_dose") + humid_sensor_attrs = humid_sensor.attributes + assert humid_sensor.state == "0.011616" + assert ( + humid_sensor_attrs[ATTR_FRIENDLY_NAME] + == "Aranet\u2622 12345 Radiation Total Dose" + ) + assert humid_sensor_attrs[ATTR_UNIT_OF_MEASUREMENT] == "mSv" + assert humid_sensor_attrs[ATTR_STATE_CLASS] == "measurement" + + temp_sensor = hass.states.get("sensor.aranet_12345_radiation_dose_rate") + temp_sensor_attrs = temp_sensor.attributes + assert temp_sensor.state == "0.11" + assert ( + temp_sensor_attrs[ATTR_FRIENDLY_NAME] + == "Aranet\u2622 12345 Radiation Dose Rate" + ) + assert temp_sensor_attrs[ATTR_UNIT_OF_MEASUREMENT] == "μSv/h" + assert 
temp_sensor_attrs[ATTR_STATE_CLASS] == "measurement" + + interval_sensor = hass.states.get("sensor.aranet_12345_update_interval") + interval_sensor_attrs = interval_sensor.attributes + assert interval_sensor.state == "300" + assert ( + interval_sensor_attrs[ATTR_FRIENDLY_NAME] + == "Aranet\u2622 12345 Update Interval" + ) + assert interval_sensor_attrs[ATTR_UNIT_OF_MEASUREMENT] == "s" + assert interval_sensor_attrs[ATTR_STATE_CLASS] == "measurement" + + assert await hass.config_entries.async_unload(entry.entry_id) + await hass.async_block_till_done() + + async def test_sensors_aranet2( hass: HomeAssistant, entity_registry_enabled_by_default: None ) -> None: From 1e06054344753cfa24da9d8dd02e767e3ecc4241 Mon Sep 17 00:00:00 2001 From: Patrick Frazer Date: Thu, 25 Apr 2024 16:00:21 -0400 Subject: [PATCH 416/426] Bump dropmqttapi to 1.0.3 (#116179) --- homeassistant/components/drop_connect/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/drop_connect/manifest.json b/homeassistant/components/drop_connect/manifest.json index 5df34fce561..ed34767d6e0 100644 --- a/homeassistant/components/drop_connect/manifest.json +++ b/homeassistant/components/drop_connect/manifest.json @@ -7,5 +7,5 @@ "documentation": "https://www.home-assistant.io/integrations/drop_connect", "iot_class": "local_push", "mqtt": ["drop_connect/discovery/#"], - "requirements": ["dropmqttapi==1.0.2"] + "requirements": ["dropmqttapi==1.0.3"] } diff --git a/requirements_all.txt b/requirements_all.txt index 20473b70121..905ed38fb44 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -738,7 +738,7 @@ dovado==0.4.1 dremel3dpy==2.1.1 # homeassistant.components.drop_connect -dropmqttapi==1.0.2 +dropmqttapi==1.0.3 # homeassistant.components.dsmr dsmr-parser==1.3.1 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 1ee1c48d223..c8864ab9cbb 100644 --- 
a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -610,7 +610,7 @@ discovery30303==0.2.1 dremel3dpy==2.1.1 # homeassistant.components.drop_connect -dropmqttapi==1.0.2 +dropmqttapi==1.0.3 # homeassistant.components.dsmr dsmr-parser==1.3.1 From 860ac450c4fd2d676182ff10e03949129dd1e4b5 Mon Sep 17 00:00:00 2001 From: Joost Lekkerkerker Date: Thu, 25 Apr 2024 22:23:13 +0200 Subject: [PATCH 417/426] Use snapshots in Linear diagnostics tests (#116169) * Use snapshots in Linear diagnostics tests * Use snapshots in Linear diagnostics tests --- .../snapshots/test_diagnostics.ambr | 79 +++++++++++++++++++ .../linear_garage_door/test_diagnostics.py | 44 ++--------- tests/components/linear_garage_door/util.py | 1 + 3 files changed, 86 insertions(+), 38 deletions(-) create mode 100644 tests/components/linear_garage_door/snapshots/test_diagnostics.ambr diff --git a/tests/components/linear_garage_door/snapshots/test_diagnostics.ambr b/tests/components/linear_garage_door/snapshots/test_diagnostics.ambr new file mode 100644 index 00000000000..72886410924 --- /dev/null +++ b/tests/components/linear_garage_door/snapshots/test_diagnostics.ambr @@ -0,0 +1,79 @@ +# serializer version: 1 +# name: test_entry_diagnostics + dict({ + 'coordinator_data': dict({ + 'test1': dict({ + 'name': 'Test Garage 1', + 'subdevices': dict({ + 'GDO': dict({ + 'Open_B': 'true', + 'Open_P': '100', + }), + 'Light': dict({ + 'On_B': 'true', + 'On_P': '100', + }), + }), + }), + 'test2': dict({ + 'name': 'Test Garage 2', + 'subdevices': dict({ + 'GDO': dict({ + 'Open_B': 'false', + 'Open_P': '0', + }), + 'Light': dict({ + 'On_B': 'false', + 'On_P': '0', + }), + }), + }), + 'test3': dict({ + 'name': 'Test Garage 3', + 'subdevices': dict({ + 'GDO': dict({ + 'Open_B': 'false', + 'Opening_P': '0', + }), + 'Light': dict({ + 'On_B': 'false', + 'On_P': '0', + }), + }), + }), + 'test4': dict({ + 'name': 'Test Garage 4', + 'subdevices': dict({ + 'GDO': dict({ + 'Open_B': 'true', + 'Opening_P': '100', + }), + 
'Light': dict({ + 'On_B': 'true', + 'On_P': '100', + }), + }), + }), + }), + 'entry': dict({ + 'data': dict({ + 'device_id': 'test-uuid', + 'email': '**REDACTED**', + 'password': '**REDACTED**', + 'site_id': 'test-site-id', + }), + 'disabled_by': None, + 'domain': 'linear_garage_door', + 'entry_id': 'acefdd4b3a4a0911067d1cf51414201e', + 'minor_version': 1, + 'options': dict({ + }), + 'pref_disable_new_entities': False, + 'pref_disable_polling': False, + 'source': 'user', + 'title': 'Mock Title', + 'unique_id': None, + 'version': 1, + }), + }) +# --- diff --git a/tests/components/linear_garage_door/test_diagnostics.py b/tests/components/linear_garage_door/test_diagnostics.py index 0650196d619..a9565441bbb 100644 --- a/tests/components/linear_garage_door/test_diagnostics.py +++ b/tests/components/linear_garage_door/test_diagnostics.py @@ -1,5 +1,7 @@ """Test diagnostics of Linear Garage Door.""" +from syrupy import SnapshotAssertion + from homeassistant.core import HomeAssistant from .util import async_init_integration @@ -9,45 +11,11 @@ from tests.typing import ClientSessionGenerator async def test_entry_diagnostics( - hass: HomeAssistant, hass_client: ClientSessionGenerator + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + snapshot: SnapshotAssertion, ) -> None: """Test config entry diagnostics.""" entry = await async_init_integration(hass) result = await get_diagnostics_for_config_entry(hass, hass_client, entry) - - assert result["entry"]["data"] == { - "email": "**REDACTED**", - "password": "**REDACTED**", - "site_id": "test-site-id", - "device_id": "test-uuid", - } - assert result["coordinator_data"] == { - "test1": { - "name": "Test Garage 1", - "subdevices": { - "GDO": {"Open_B": "true", "Open_P": "100"}, - "Light": {"On_B": "true", "On_P": "100"}, - }, - }, - "test2": { - "name": "Test Garage 2", - "subdevices": { - "GDO": {"Open_B": "false", "Open_P": "0"}, - "Light": {"On_B": "false", "On_P": "0"}, - }, - }, - "test3": { - "name": "Test Garage 
3", - "subdevices": { - "GDO": {"Open_B": "false", "Opening_P": "0"}, - "Light": {"On_B": "false", "On_P": "0"}, - }, - }, - "test4": { - "name": "Test Garage 4", - "subdevices": { - "GDO": {"Open_B": "true", "Opening_P": "100"}, - "Light": {"On_B": "true", "On_P": "100"}, - }, - }, - } + assert result == snapshot diff --git a/tests/components/linear_garage_door/util.py b/tests/components/linear_garage_door/util.py index 1a849ae2348..30dbdbd06d5 100644 --- a/tests/components/linear_garage_door/util.py +++ b/tests/components/linear_garage_door/util.py @@ -12,6 +12,7 @@ async def async_init_integration(hass: HomeAssistant) -> MockConfigEntry: """Initialize mock integration.""" config_entry = MockConfigEntry( domain=DOMAIN, + entry_id="acefdd4b3a4a0911067d1cf51414201e", data={ "email": "test-email", "password": "test-password", From 421dbe1356358ef3f0481664194132bd7a8acb30 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Thu, 25 Apr 2024 15:37:38 -0500 Subject: [PATCH 418/426] Bump bluetooth-auto-recovery to 1.4.2 (#116192) --- homeassistant/components/bluetooth/manifest.json | 2 +- homeassistant/package_constraints.txt | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/bluetooth/manifest.json b/homeassistant/components/bluetooth/manifest.json index f6adcbed7d8..ed1e11d8ddd 100644 --- a/homeassistant/components/bluetooth/manifest.json +++ b/homeassistant/components/bluetooth/manifest.json @@ -17,7 +17,7 @@ "bleak==0.21.1", "bleak-retry-connector==3.5.0", "bluetooth-adapters==0.19.0", - "bluetooth-auto-recovery==1.4.1", + "bluetooth-auto-recovery==1.4.2", "bluetooth-data-tools==1.19.0", "dbus-fast==2.21.1", "habluetooth==2.8.0" diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index aa29713a849..442db45e714 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -18,7 +18,7 @@ 
bcrypt==4.1.2 bleak-retry-connector==3.5.0 bleak==0.21.1 bluetooth-adapters==0.19.0 -bluetooth-auto-recovery==1.4.1 +bluetooth-auto-recovery==1.4.2 bluetooth-data-tools==1.19.0 cached_ipaddress==0.3.0 certifi>=2021.5.30 diff --git a/requirements_all.txt b/requirements_all.txt index 905ed38fb44..5cfaef1fcb7 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -582,7 +582,7 @@ bluemaestro-ble==0.2.3 bluetooth-adapters==0.19.0 # homeassistant.components.bluetooth -bluetooth-auto-recovery==1.4.1 +bluetooth-auto-recovery==1.4.2 # homeassistant.components.bluetooth # homeassistant.components.ld2410_ble diff --git a/requirements_test_all.txt b/requirements_test_all.txt index c8864ab9cbb..403bb8c965d 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -497,7 +497,7 @@ bluemaestro-ble==0.2.3 bluetooth-adapters==0.19.0 # homeassistant.components.bluetooth -bluetooth-auto-recovery==1.4.1 +bluetooth-auto-recovery==1.4.2 # homeassistant.components.bluetooth # homeassistant.components.ld2410_ble From ccc2f6c5b5d545b7d9ef767ab74d7df3f581e986 Mon Sep 17 00:00:00 2001 From: Thomas55555 <59625598+Thomas55555@users.noreply.github.com> Date: Thu, 25 Apr 2024 22:39:42 +0200 Subject: [PATCH 419/426] Add strict typing to husqvarna automower (#115374) --- .strict-typing | 1 + homeassistant/components/husqvarna_automower/api.py | 3 ++- mypy.ini | 10 ++++++++++ 3 files changed, 13 insertions(+), 1 deletion(-) diff --git a/.strict-typing b/.strict-typing index 5985938885f..584ccc5ee0a 100644 --- a/.strict-typing +++ b/.strict-typing @@ -235,6 +235,7 @@ homeassistant.components.homeworks.* homeassistant.components.http.* homeassistant.components.huawei_lte.* homeassistant.components.humidifier.* +homeassistant.components.husqvarna_automower.* homeassistant.components.hydrawise.* homeassistant.components.hyperion.* homeassistant.components.ibeacon.* diff --git a/homeassistant/components/husqvarna_automower/api.py 
b/homeassistant/components/husqvarna_automower/api.py index e5dc00ad7cb..f1d3e1ef4fa 100644 --- a/homeassistant/components/husqvarna_automower/api.py +++ b/homeassistant/components/husqvarna_automower/api.py @@ -1,6 +1,7 @@ """API for Husqvarna Automower bound to Home Assistant OAuth.""" import logging +from typing import cast from aioautomower.auth import AbstractAuth from aioautomower.const import API_BASE_URL @@ -26,4 +27,4 @@ class AsyncConfigEntryAuth(AbstractAuth): async def async_get_access_token(self) -> str: """Return a valid access token.""" await self._oauth_session.async_ensure_token_valid() - return self._oauth_session.token["access_token"] + return cast(str, self._oauth_session.token["access_token"]) diff --git a/mypy.ini b/mypy.ini index 216d43322a4..611dd176fbf 100644 --- a/mypy.ini +++ b/mypy.ini @@ -2112,6 +2112,16 @@ disallow_untyped_defs = true warn_return_any = true warn_unreachable = true +[mypy-homeassistant.components.husqvarna_automower.*] +check_untyped_defs = true +disallow_incomplete_defs = true +disallow_subclassing_any = true +disallow_untyped_calls = true +disallow_untyped_decorators = true +disallow_untyped_defs = true +warn_return_any = true +warn_unreachable = true + [mypy-homeassistant.components.hydrawise.*] check_untyped_defs = true disallow_incomplete_defs = true From 4a1e1bd1b9e8b91ff4736f2660e3938212e1c8e9 Mon Sep 17 00:00:00 2001 From: Joost Lekkerkerker Date: Thu, 25 Apr 2024 22:57:29 +0200 Subject: [PATCH 420/426] Improve linear coordinator (#116167) * Improve linear coordinator * Fix * Fix --- .../components/linear_garage_door/__init__.py | 2 +- .../linear_garage_door/coordinator.py | 77 ++++++------ .../components/linear_garage_door/cover.py | 110 +++++++----------- .../linear_garage_door/diagnostics.py | 6 +- .../linear_garage_door/test_cover.py | 32 ++--- 5 files changed, 101 insertions(+), 126 deletions(-) diff --git a/homeassistant/components/linear_garage_door/__init__.py 
b/homeassistant/components/linear_garage_door/__init__.py index e21d8eaba58..16e743e00b5 100644 --- a/homeassistant/components/linear_garage_door/__init__.py +++ b/homeassistant/components/linear_garage_door/__init__.py @@ -15,7 +15,7 @@ PLATFORMS: list[Platform] = [Platform.COVER] async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Set up Linear Garage Door from a config entry.""" - coordinator = LinearUpdateCoordinator(hass, entry) + coordinator = LinearUpdateCoordinator(hass) await coordinator.async_config_entry_first_refresh() diff --git a/homeassistant/components/linear_garage_door/coordinator.py b/homeassistant/components/linear_garage_door/coordinator.py index b771b552b62..91ff0165163 100644 --- a/homeassistant/components/linear_garage_door/coordinator.py +++ b/homeassistant/components/linear_garage_door/coordinator.py @@ -2,9 +2,11 @@ from __future__ import annotations +from collections.abc import Awaitable, Callable +from dataclasses import dataclass from datetime import timedelta import logging -from typing import Any +from typing import Any, TypeVar from linear_garage_door import Linear from linear_garage_door.errors import InvalidLoginError @@ -17,46 +19,58 @@ from homeassistant.helpers.update_coordinator import DataUpdateCoordinator _LOGGER = logging.getLogger(__name__) +_T = TypeVar("_T") -class LinearUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]): + +@dataclass +class LinearDevice: + """Linear device dataclass.""" + + name: str + subdevices: dict[str, dict[str, str]] + + +class LinearUpdateCoordinator(DataUpdateCoordinator[dict[str, LinearDevice]]): """DataUpdateCoordinator for Linear.""" - _email: str - _password: str - _device_id: str - _site_id: str - _devices: list[dict[str, list[str] | str]] | None - _linear: Linear + _devices: list[dict[str, Any]] | None = None + config_entry: ConfigEntry - def __init__( - self, - hass: HomeAssistant, - entry: ConfigEntry, - ) -> None: + def __init__(self, hass: 
HomeAssistant) -> None: """Initialize DataUpdateCoordinator for Linear.""" - self._email = entry.data["email"] - self._password = entry.data["password"] - self._device_id = entry.data["device_id"] - self._site_id = entry.data["site_id"] - self._devices = None - super().__init__( hass, _LOGGER, name="Linear Garage Door", update_interval=timedelta(seconds=60), ) + self.site_id = self.config_entry.data["site_id"] - async def _async_update_data(self) -> dict[str, Any]: + async def _async_update_data(self) -> dict[str, LinearDevice]: """Get the data for Linear.""" - linear = Linear() + async def update_data(linear: Linear) -> dict[str, Any]: + if not self._devices: + self._devices = await linear.get_devices(self.site_id) + data = {} + + for device in self._devices: + device_id = str(device["id"]) + state = await linear.get_device_state(device_id) + data[device_id] = LinearDevice(device["name"], state) + return data + + return await self.execute(update_data) + + async def execute(self, func: Callable[[Linear], Awaitable[_T]]) -> _T: + """Execute an API call.""" + linear = Linear() try: await linear.login( - email=self._email, - password=self._password, - device_id=self._device_id, + email=self.config_entry.data["email"], + password=self.config_entry.data["password"], + device_id=self.config_entry.data["device_id"], client_session=async_get_clientsession(self.hass), ) except InvalidLoginError as err: @@ -66,17 +80,6 @@ class LinearUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]): ): raise ConfigEntryAuthFailed from err raise ConfigEntryNotReady from err - - if not self._devices: - self._devices = await linear.get_devices(self._site_id) - - data = {} - - for device in self._devices: - device_id = str(device["id"]) - state = await linear.get_device_state(device_id) - data[device_id] = {"name": device["name"], "subdevices": state} - + result = await func(linear) await linear.close() - - return data + return result diff --git 
a/homeassistant/components/linear_garage_door/cover.py b/homeassistant/components/linear_garage_door/cover.py index 3474e9d3acb..b3d720e531a 100644 --- a/homeassistant/components/linear_garage_door/cover.py +++ b/homeassistant/components/linear_garage_door/cover.py @@ -3,8 +3,6 @@ from datetime import timedelta from typing import Any -from linear_garage_door import Linear - from homeassistant.components.cover import ( CoverDeviceClass, CoverEntity, @@ -12,13 +10,12 @@ from homeassistant.components.cover import ( ) from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant -from homeassistant.helpers.aiohttp_client import async_get_clientsession from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.update_coordinator import CoordinatorEntity from .const import DOMAIN -from .coordinator import LinearUpdateCoordinator +from .coordinator import LinearDevice, LinearUpdateCoordinator SUPPORTED_SUBDEVICES = ["GDO"] PARALLEL_UPDATES = 1 @@ -32,118 +29,89 @@ async def async_setup_entry( ) -> None: """Set up Linear Garage Door cover.""" coordinator: LinearUpdateCoordinator = hass.data[DOMAIN][config_entry.entry_id] - data = coordinator.data - device_list: list[LinearCoverEntity] = [] - - for device_id in data: - device_list.extend( - LinearCoverEntity( - device_id=device_id, - device_name=data[device_id]["name"], - subdevice=subdev, - config_entry=config_entry, - coordinator=coordinator, - ) - for subdev in data[device_id]["subdevices"] - if subdev in SUPPORTED_SUBDEVICES - ) - async_add_entities(device_list) + async_add_entities( + LinearCoverEntity(coordinator, device_id, sub_device_id) + for device_id, device_data in coordinator.data.items() + for sub_device_id in device_data.subdevices + if sub_device_id in SUPPORTED_SUBDEVICES + ) class LinearCoverEntity(CoordinatorEntity[LinearUpdateCoordinator], CoverEntity): """Representation 
of a Linear cover.""" _attr_supported_features = CoverEntityFeature.OPEN | CoverEntityFeature.CLOSE + _attr_has_entity_name = True + _attr_name = None + _attr_device_class = CoverDeviceClass.GARAGE def __init__( self, - device_id: str, - device_name: str, - subdevice: str, - config_entry: ConfigEntry, coordinator: LinearUpdateCoordinator, + device_id: str, + sub_device_id: str, ) -> None: """Init with device ID and name.""" super().__init__(coordinator) - - self._attr_has_entity_name = True - self._attr_name = None self._device_id = device_id - self._device_name = device_name - self._subdevice = subdevice - self._attr_device_class = CoverDeviceClass.GARAGE - self._attr_unique_id = f"{device_id}-{subdevice}" - self._config_entry = config_entry - - def _get_data(self, data_property: str) -> str: - """Get a property of the subdevice.""" - return str( - self.coordinator.data[self._device_id]["subdevices"][self._subdevice].get( - data_property - ) - ) - - @property - def device_info(self) -> DeviceInfo: - """Return device info of a garage door.""" - return DeviceInfo( - identifiers={(DOMAIN, self._device_id)}, - name=self._device_name, + self._sub_device_id = sub_device_id + self._attr_unique_id = f"{device_id}-{sub_device_id}" + self._attr_device_info = DeviceInfo( + identifiers={(DOMAIN, sub_device_id)}, + name=self.linear_device.name, manufacturer="Linear", model="Garage Door Opener", ) + @property + def linear_device(self) -> LinearDevice: + """Return the Linear device.""" + return self.coordinator.data[self._device_id] + + @property + def sub_device(self) -> dict[str, str]: + """Return the subdevice.""" + return self.linear_device.subdevices[self._sub_device_id] + @property def is_closed(self) -> bool: """Return if cover is closed.""" - return bool(self._get_data("Open_B") == "false") + return self.sub_device.get("Open_B") == "false" @property def is_opened(self) -> bool: """Return if cover is open.""" - return bool(self._get_data("Open_B") == "true") + return 
self.sub_device.get("Open_B") == "true" @property def is_opening(self) -> bool: """Return if cover is opening.""" - return bool(self._get_data("Opening_P") == "0") + return self.sub_device.get("Opening_P") == "0" @property def is_closing(self) -> bool: """Return if cover is closing.""" - return bool(self._get_data("Opening_P") == "100") + return self.sub_device.get("Opening_P") == "100" async def async_close_cover(self, **kwargs: Any) -> None: """Close the garage door.""" if self.is_closed: return - linear = Linear() - - await linear.login( - email=self._config_entry.data["email"], - password=self._config_entry.data["password"], - device_id=self._config_entry.data["device_id"], - client_session=async_get_clientsession(self.hass), + await self.coordinator.execute( + lambda linear: linear.operate_device( + self._device_id, self._sub_device_id, "Close" + ) ) - await linear.operate_device(self._device_id, self._subdevice, "Close") - await linear.close() - async def async_open_cover(self, **kwargs: Any) -> None: """Open the garage door.""" if self.is_opened: return - linear = Linear() - - await linear.login( - email=self._config_entry.data["email"], - password=self._config_entry.data["password"], - device_id=self._config_entry.data["device_id"], - client_session=async_get_clientsession(self.hass), + await self.coordinator.execute( + lambda linear: linear.operate_device( + self._device_id, self._sub_device_id, "Open" + ) ) - - await linear.operate_device(self._device_id, self._subdevice, "Open") - await linear.close() diff --git a/homeassistant/components/linear_garage_door/diagnostics.py b/homeassistant/components/linear_garage_door/diagnostics.py index fc4906daa77..21414f02f87 100644 --- a/homeassistant/components/linear_garage_door/diagnostics.py +++ b/homeassistant/components/linear_garage_door/diagnostics.py @@ -2,6 +2,7 @@ from __future__ import annotations +from dataclasses import asdict from typing import Any from homeassistant.components.diagnostics import 
async_redact_data @@ -23,5 +24,8 @@ async def async_get_config_entry_diagnostics( return { "entry": async_redact_data(entry.as_dict(), TO_REDACT), - "coordinator_data": coordinator.data, + "coordinator_data": { + device_id: asdict(device_data) + for device_id, device_data in coordinator.data.items() + }, } diff --git a/tests/components/linear_garage_door/test_cover.py b/tests/components/linear_garage_door/test_cover.py index 9db7b80fd0e..6236d2ba39c 100644 --- a/tests/components/linear_garage_door/test_cover.py +++ b/tests/components/linear_garage_door/test_cover.py @@ -45,7 +45,7 @@ async def test_open_cover(hass: HomeAssistant) -> None: await async_init_integration(hass) with patch( - "homeassistant.components.linear_garage_door.cover.Linear.operate_device" + "homeassistant.components.linear_garage_door.coordinator.Linear.operate_device" ) as operate_device: await hass.services.async_call( COVER_DOMAIN, @@ -58,15 +58,15 @@ async def test_open_cover(hass: HomeAssistant) -> None: with ( patch( - "homeassistant.components.linear_garage_door.cover.Linear.login", + "homeassistant.components.linear_garage_door.coordinator.Linear.login", return_value=True, ), patch( - "homeassistant.components.linear_garage_door.cover.Linear.operate_device", + "homeassistant.components.linear_garage_door.coordinator.Linear.operate_device", return_value=None, ) as operate_device, patch( - "homeassistant.components.linear_garage_door.cover.Linear.close", + "homeassistant.components.linear_garage_door.coordinator.Linear.close", return_value=True, ), ): @@ -80,11 +80,11 @@ async def test_open_cover(hass: HomeAssistant) -> None: assert operate_device.call_count == 1 with ( patch( - "homeassistant.components.linear_garage_door.cover.Linear.login", + "homeassistant.components.linear_garage_door.coordinator.Linear.login", return_value=True, ), patch( - "homeassistant.components.linear_garage_door.cover.Linear.get_devices", + 
"homeassistant.components.linear_garage_door.coordinator.Linear.get_devices", return_value=[ { "id": "test1", @@ -99,7 +99,7 @@ async def test_open_cover(hass: HomeAssistant) -> None: ], ), patch( - "homeassistant.components.linear_garage_door.cover.Linear.get_device_state", + "homeassistant.components.linear_garage_door.coordinator.Linear.get_device_state", side_effect=lambda id: { "test1": { "GDO": {"Open_B": "true", "Open_P": "100"}, @@ -120,7 +120,7 @@ async def test_open_cover(hass: HomeAssistant) -> None: }[id], ), patch( - "homeassistant.components.linear_garage_door.cover.Linear.close", + "homeassistant.components.linear_garage_door.coordinator.Linear.close", return_value=True, ), ): @@ -136,7 +136,7 @@ async def test_close_cover(hass: HomeAssistant) -> None: await async_init_integration(hass) with patch( - "homeassistant.components.linear_garage_door.cover.Linear.operate_device" + "homeassistant.components.linear_garage_door.coordinator.Linear.operate_device" ) as operate_device: await hass.services.async_call( COVER_DOMAIN, @@ -149,15 +149,15 @@ async def test_close_cover(hass: HomeAssistant) -> None: with ( patch( - "homeassistant.components.linear_garage_door.cover.Linear.login", + "homeassistant.components.linear_garage_door.coordinator.Linear.login", return_value=True, ), patch( - "homeassistant.components.linear_garage_door.cover.Linear.operate_device", + "homeassistant.components.linear_garage_door.coordinator.Linear.operate_device", return_value=None, ) as operate_device, patch( - "homeassistant.components.linear_garage_door.cover.Linear.close", + "homeassistant.components.linear_garage_door.coordinator.Linear.close", return_value=True, ), ): @@ -171,11 +171,11 @@ async def test_close_cover(hass: HomeAssistant) -> None: assert operate_device.call_count == 1 with ( patch( - "homeassistant.components.linear_garage_door.cover.Linear.login", + "homeassistant.components.linear_garage_door.coordinator.Linear.login", return_value=True, ), patch( - 
"homeassistant.components.linear_garage_door.cover.Linear.get_devices", + "homeassistant.components.linear_garage_door.coordinator.Linear.get_devices", return_value=[ { "id": "test1", @@ -190,7 +190,7 @@ async def test_close_cover(hass: HomeAssistant) -> None: ], ), patch( - "homeassistant.components.linear_garage_door.cover.Linear.get_device_state", + "homeassistant.components.linear_garage_door.coordinator.Linear.get_device_state", side_effect=lambda id: { "test1": { "GDO": {"Open_B": "true", "Opening_P": "100"}, @@ -211,7 +211,7 @@ async def test_close_cover(hass: HomeAssistant) -> None: }[id], ), patch( - "homeassistant.components.linear_garage_door.cover.Linear.close", + "homeassistant.components.linear_garage_door.coordinator.Linear.close", return_value=True, ), ): From 372c6c7874c15d29522587428e35b2586d866ada Mon Sep 17 00:00:00 2001 From: Jan Bouwhuis Date: Fri, 26 Apr 2024 02:09:54 +0200 Subject: [PATCH 421/426] Use existing monotonic timestamp on mqtt messages for debugging (#116196) --- homeassistant/components/mqtt/client.py | 5 +- homeassistant/components/mqtt/debug_info.py | 16 +++-- homeassistant/components/mqtt/models.py | 3 +- homeassistant/helpers/service_info/mqtt.py | 3 +- tests/common.py | 1 + tests/components/mqtt/test_common.py | 78 ++++++++++----------- 6 files changed, 54 insertions(+), 52 deletions(-) diff --git a/homeassistant/components/mqtt/client.py b/homeassistant/components/mqtt/client.py index 133991ade16..f01b8e80b3d 100644 --- a/homeassistant/components/mqtt/client.py +++ b/homeassistant/components/mqtt/client.py @@ -40,7 +40,6 @@ from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.dispatcher import async_dispatcher_send from homeassistant.helpers.typing import ConfigType from homeassistant.loader import bind_hass -from homeassistant.util import dt as dt_util from homeassistant.util.async_ import create_eager_task from homeassistant.util.logging import catch_log_exception @@ -991,8 +990,6 @@ class 
MQTT: msg.qos, msg.payload[0:8192], ) - timestamp = dt_util.utcnow() - subscriptions = self._matching_subscriptions(topic) msg_cache_by_subscription_topic: dict[str, ReceiveMessage] = {} @@ -1030,7 +1027,7 @@ class MQTT: msg.qos, msg.retain, subscription_topic, - timestamp, + msg.timestamp, ) msg_cache_by_subscription_topic[subscription_topic] = receive_msg else: diff --git a/homeassistant/components/mqtt/debug_info.py b/homeassistant/components/mqtt/debug_info.py index 7ff93a6bd06..e84dedde785 100644 --- a/homeassistant/components/mqtt/debug_info.py +++ b/homeassistant/components/mqtt/debug_info.py @@ -7,6 +7,7 @@ from collections.abc import Callable from dataclasses import dataclass import datetime as dt from functools import wraps +import time from typing import TYPE_CHECKING, Any from homeassistant.core import HomeAssistant @@ -57,7 +58,7 @@ class TimestampedPublishMessage: payload: PublishPayloadType qos: int retain: bool - timestamp: dt.datetime + timestamp: float def log_message( @@ -77,7 +78,7 @@ def log_message( "messages": deque([], STORED_MESSAGES), } msg = TimestampedPublishMessage( - topic, payload, qos, retain, timestamp=dt_util.utcnow() + topic, payload, qos, retain, timestamp=time.monotonic() ) entity_info["transmitted"][topic]["messages"].append(msg) @@ -175,6 +176,7 @@ def remove_trigger_discovery_data( def _info_for_entity(hass: HomeAssistant, entity_id: str) -> dict[str, Any]: entity_info = get_mqtt_data(hass).debug_info_entities[entity_id] + monotonic_time_diff = time.time() - time.monotonic() subscriptions = [ { "topic": topic, @@ -183,7 +185,10 @@ def _info_for_entity(hass: HomeAssistant, entity_id: str) -> dict[str, Any]: "payload": str(msg.payload), "qos": msg.qos, "retain": msg.retain, - "time": msg.timestamp, + "time": dt_util.utc_from_timestamp( + msg.timestamp + monotonic_time_diff, + tz=dt.UTC, + ), "topic": msg.topic, } for msg in subscription["messages"] @@ -199,7 +204,10 @@ def _info_for_entity(hass: HomeAssistant, entity_id: str) 
-> dict[str, Any]: "payload": str(msg.payload), "qos": msg.qos, "retain": msg.retain, - "time": msg.timestamp, + "time": dt_util.utc_from_timestamp( + msg.timestamp + monotonic_time_diff, + tz=dt.UTC, + ), "topic": msg.topic, } for msg in subscription["messages"] diff --git a/homeassistant/components/mqtt/models.py b/homeassistant/components/mqtt/models.py index f53643268e7..17640c3e733 100644 --- a/homeassistant/components/mqtt/models.py +++ b/homeassistant/components/mqtt/models.py @@ -7,7 +7,6 @@ import asyncio from collections import deque from collections.abc import Callable, Coroutine from dataclasses import dataclass, field -import datetime as dt from enum import StrEnum import logging from typing import TYPE_CHECKING, Any, TypedDict @@ -67,7 +66,7 @@ class ReceiveMessage: qos: int retain: bool subscribed_topic: str - timestamp: dt.datetime + timestamp: float AsyncMessageCallbackType = Callable[[ReceiveMessage], Coroutine[Any, Any, None]] diff --git a/homeassistant/helpers/service_info/mqtt.py b/homeassistant/helpers/service_info/mqtt.py index 172a5eeff33..b683745e1c0 100644 --- a/homeassistant/helpers/service_info/mqtt.py +++ b/homeassistant/helpers/service_info/mqtt.py @@ -1,7 +1,6 @@ """MQTT Discovery data.""" from dataclasses import dataclass -import datetime as dt from homeassistant.data_entry_flow import BaseServiceInfo @@ -17,4 +16,4 @@ class MqttServiceInfo(BaseServiceInfo): qos: int retain: bool subscribed_topic: str - timestamp: dt.datetime + timestamp: float diff --git a/tests/common.py b/tests/common.py index b5fe0f7bae1..7bb16ce5c54 100644 --- a/tests/common.py +++ b/tests/common.py @@ -449,6 +449,7 @@ def async_fire_mqtt_message( msg.payload = payload msg.qos = qos msg.retain = retain + msg.timestamp = time.monotonic() mqtt_data: MqttData = hass.data["mqtt"] assert mqtt_data.client diff --git a/tests/components/mqtt/test_common.py b/tests/components/mqtt/test_common.py index e9c3b57777f..ba767f51ac6 100644 --- 
a/tests/components/mqtt/test_common.py +++ b/tests/components/mqtt/test_common.py @@ -3,7 +3,6 @@ from collections.abc import Iterable from contextlib import suppress import copy -from datetime import datetime import json from pathlib import Path from typing import Any @@ -1326,12 +1325,12 @@ async def help_test_entity_debug_info_max_messages( "subscriptions" ] - start_dt = datetime(2019, 1, 1, 0, 0, 0, tzinfo=dt_util.UTC) - with freeze_time(start_dt): + with freeze_time(start_dt := dt_util.utcnow()): for i in range(debug_info.STORED_MESSAGES + 1): async_fire_mqtt_message(hass, "test-topic", f"{i}") - debug_info_data = debug_info.info_for_device(hass, device.id) + debug_info_data = debug_info.info_for_device(hass, device.id) + assert len(debug_info_data["entities"][0]["subscriptions"]) == 1 assert ( len(debug_info_data["entities"][0]["subscriptions"][0]["messages"]) @@ -1401,36 +1400,35 @@ async def help_test_entity_debug_info_message( debug_info_data = debug_info.info_for_device(hass, device.id) - start_dt = datetime(2019, 1, 1, 0, 0, 0, tzinfo=dt_util.UTC) - if state_topic is not None: assert len(debug_info_data["entities"][0]["subscriptions"]) >= 1 assert {"topic": state_topic, "messages": []} in debug_info_data["entities"][0][ "subscriptions" ] - with freeze_time(start_dt): + with freeze_time(start_dt := dt_util.utcnow()): async_fire_mqtt_message(hass, str(state_topic), state_payload) - debug_info_data = debug_info.info_for_device(hass, device.id) - assert len(debug_info_data["entities"][0]["subscriptions"]) >= 1 - assert { - "topic": state_topic, - "messages": [ - { - "payload": str(state_payload), - "qos": 0, - "retain": False, - "time": start_dt, - "topic": state_topic, - } - ], - } in debug_info_data["entities"][0]["subscriptions"] + debug_info_data = debug_info.info_for_device(hass, device.id) + assert len(debug_info_data["entities"][0]["subscriptions"]) >= 1 + assert { + "topic": state_topic, + "messages": [ + { + "payload": str(state_payload), + "qos": 
0, + "retain": False, + "time": start_dt, + "topic": state_topic, + } + ], + } in debug_info_data["entities"][0]["subscriptions"] expected_transmissions = [] - if service: - # Trigger an outgoing MQTT message - with freeze_time(start_dt): + + with freeze_time(start_dt := dt_util.utcnow()): + if service: + # Trigger an outgoing MQTT message if service: service_data = {ATTR_ENTITY_ID: f"{domain}.beer_test"} if service_parameters: @@ -1443,23 +1441,23 @@ async def help_test_entity_debug_info_message( blocking=True, ) - expected_transmissions = [ - { - "topic": command_topic, - "messages": [ - { - "payload": str(command_payload), - "qos": 0, - "retain": False, - "time": start_dt, - "topic": command_topic, - } - ], - } - ] + expected_transmissions = [ + { + "topic": command_topic, + "messages": [ + { + "payload": str(command_payload), + "qos": 0, + "retain": False, + "time": start_dt, + "topic": command_topic, + } + ], + } + ] - debug_info_data = debug_info.info_for_device(hass, device.id) - assert debug_info_data["entities"][0]["transmitted"] == expected_transmissions + debug_info_data = debug_info.info_for_device(hass, device.id) + assert debug_info_data["entities"][0]["transmitted"] == expected_transmissions async def help_test_entity_debug_info_remove( From db8597a742d3bee2abde8286c1e628c21bfc1114 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Fri, 26 Apr 2024 02:12:36 +0200 Subject: [PATCH 422/426] Reduce scope of JSON/XML test fixtures (#116197) --- tests/components/airnow/conftest.py | 2 +- tests/components/airvisual_pro/conftest.py | 2 +- tests/components/awair/conftest.py | 22 +-- tests/components/blueprint/test_importer.py | 2 +- tests/components/evil_genius_labs/conftest.py | 6 +- tests/components/hue/conftest.py | 2 +- tests/components/insteon/test_api_aldb.py | 2 +- .../components/insteon/test_api_properties.py | 4 +- tests/components/insteon/test_api_scenes.py | 2 +- tests/components/mysensors/conftest.py | 38 +++--- 
tests/components/myuplink/conftest.py | 4 +- tests/components/plex/conftest.py | 128 +++++++++--------- tests/components/sensibo/conftest.py | 2 +- tests/components/smhi/conftest.py | 4 +- tests/components/sonos/conftest.py | 2 +- tests/components/soundtouch/conftest.py | 32 ++--- tests/components/tradfri/conftest.py | 4 +- tests/components/yale_smart_alarm/conftest.py | 2 +- tests/components/zwave_js/conftest.py | 128 +++++++++--------- 19 files changed, 194 insertions(+), 194 deletions(-) diff --git a/tests/components/airnow/conftest.py b/tests/components/airnow/conftest.py index 1010a45b8fb..db4400f85d3 100644 --- a/tests/components/airnow/conftest.py +++ b/tests/components/airnow/conftest.py @@ -44,7 +44,7 @@ def options_fixture(hass): } -@pytest.fixture(name="data", scope="session") +@pytest.fixture(name="data", scope="package") def data_fixture(): """Define a fixture for response data.""" return json.loads(load_fixture("response.json", "airnow")) diff --git a/tests/components/airvisual_pro/conftest.py b/tests/components/airvisual_pro/conftest.py index 719b25b3cdf..c90eb432c25 100644 --- a/tests/components/airvisual_pro/conftest.py +++ b/tests/components/airvisual_pro/conftest.py @@ -56,7 +56,7 @@ def disconnect_fixture(): return AsyncMock() -@pytest.fixture(name="data", scope="session") +@pytest.fixture(name="data", scope="package") def data_fixture(): """Define an update coordinator data example.""" return json.loads(load_fixture("data.json", "airvisual_pro")) diff --git a/tests/components/awair/conftest.py b/tests/components/awair/conftest.py index ec15561cc05..91c3d31e35b 100644 --- a/tests/components/awair/conftest.py +++ b/tests/components/awair/conftest.py @@ -7,67 +7,67 @@ import pytest from tests.common import load_fixture -@pytest.fixture(name="cloud_devices", scope="session") +@pytest.fixture(name="cloud_devices", scope="package") def cloud_devices_fixture(): """Fixture representing devices returned by Awair Cloud API.""" return 
json.loads(load_fixture("awair/cloud_devices.json")) -@pytest.fixture(name="local_devices", scope="session") +@pytest.fixture(name="local_devices", scope="package") def local_devices_fixture(): """Fixture representing devices returned by Awair local API.""" return json.loads(load_fixture("awair/local_devices.json")) -@pytest.fixture(name="gen1_data", scope="session") +@pytest.fixture(name="gen1_data", scope="package") def gen1_data_fixture(): """Fixture representing data returned from Gen1 Awair device.""" return json.loads(load_fixture("awair/awair.json")) -@pytest.fixture(name="gen2_data", scope="session") +@pytest.fixture(name="gen2_data", scope="package") def gen2_data_fixture(): """Fixture representing data returned from Gen2 Awair device.""" return json.loads(load_fixture("awair/awair-r2.json")) -@pytest.fixture(name="glow_data", scope="session") +@pytest.fixture(name="glow_data", scope="package") def glow_data_fixture(): """Fixture representing data returned from Awair glow device.""" return json.loads(load_fixture("awair/glow.json")) -@pytest.fixture(name="mint_data", scope="session") +@pytest.fixture(name="mint_data", scope="package") def mint_data_fixture(): """Fixture representing data returned from Awair mint device.""" return json.loads(load_fixture("awair/mint.json")) -@pytest.fixture(name="no_devices", scope="session") +@pytest.fixture(name="no_devices", scope="package") def no_devicess_fixture(): """Fixture representing when no devices are found in Awair's cloud API.""" return json.loads(load_fixture("awair/no_devices.json")) -@pytest.fixture(name="awair_offline", scope="session") +@pytest.fixture(name="awair_offline", scope="package") def awair_offline_fixture(): """Fixture representing when Awair devices are offline.""" return json.loads(load_fixture("awair/awair-offline.json")) -@pytest.fixture(name="omni_data", scope="session") +@pytest.fixture(name="omni_data", scope="package") def omni_data_fixture(): """Fixture representing data returned from 
Awair omni device.""" return json.loads(load_fixture("awair/omni.json")) -@pytest.fixture(name="user", scope="session") +@pytest.fixture(name="user", scope="package") def user_fixture(): """Fixture representing the User object returned from Awair's Cloud API.""" return json.loads(load_fixture("awair/user.json")) -@pytest.fixture(name="local_data", scope="session") +@pytest.fixture(name="local_data", scope="package") def local_data_fixture(): """Fixture representing data returned from Awair local device.""" return json.loads(load_fixture("awair/awair-local.json")) diff --git a/tests/components/blueprint/test_importer.py b/tests/components/blueprint/test_importer.py index 76f3ff36d05..275ee08863e 100644 --- a/tests/components/blueprint/test_importer.py +++ b/tests/components/blueprint/test_importer.py @@ -13,7 +13,7 @@ from tests.common import load_fixture from tests.test_util.aiohttp import AiohttpClientMocker -@pytest.fixture(scope="session") +@pytest.fixture(scope="module") def community_post(): """Topic JSON with a codeblock marked as auto syntax.""" return load_fixture("blueprint/community_post.json") diff --git a/tests/components/evil_genius_labs/conftest.py b/tests/components/evil_genius_labs/conftest.py index 49092da75c7..3941917e130 100644 --- a/tests/components/evil_genius_labs/conftest.py +++ b/tests/components/evil_genius_labs/conftest.py @@ -10,20 +10,20 @@ from homeassistant.setup import async_setup_component from tests.common import MockConfigEntry, load_fixture -@pytest.fixture(scope="session") +@pytest.fixture(scope="package") def all_fixture(): """Fixture data.""" data = json.loads(load_fixture("data.json", "evil_genius_labs")) return {item["name"]: item for item in data} -@pytest.fixture(scope="session") +@pytest.fixture(scope="package") def info_fixture(): """Fixture info.""" return json.loads(load_fixture("info.json", "evil_genius_labs")) -@pytest.fixture(scope="session") +@pytest.fixture(scope="package") def product_fixture(): """Fixture 
info.""" return {"productName": "Fibonacci256"} diff --git a/tests/components/hue/conftest.py b/tests/components/hue/conftest.py index f87faf6294b..ac827d42d95 100644 --- a/tests/components/hue/conftest.py +++ b/tests/components/hue/conftest.py @@ -136,7 +136,7 @@ def create_mock_api_v1(hass): return api -@pytest.fixture(scope="session") +@pytest.fixture(scope="package") def v2_resources_test_data(): """Load V2 resources mock data.""" return json.loads(load_fixture("hue/v2_resources.json")) diff --git a/tests/components/insteon/test_api_aldb.py b/tests/components/insteon/test_api_aldb.py index 4e0df12c6f1..c919e7a9d22 100644 --- a/tests/components/insteon/test_api_aldb.py +++ b/tests/components/insteon/test_api_aldb.py @@ -26,7 +26,7 @@ from tests.common import load_fixture from tests.typing import WebSocketGenerator -@pytest.fixture(name="aldb_data", scope="session") +@pytest.fixture(name="aldb_data", scope="module") def aldb_data_fixture(): """Load the controller state fixture data.""" return json.loads(load_fixture("insteon/aldb_data.json")) diff --git a/tests/components/insteon/test_api_properties.py b/tests/components/insteon/test_api_properties.py index d2a388929b5..74ef759006c 100644 --- a/tests/components/insteon/test_api_properties.py +++ b/tests/components/insteon/test_api_properties.py @@ -29,13 +29,13 @@ from tests.common import load_fixture from tests.typing import WebSocketGenerator -@pytest.fixture(name="kpl_properties_data", scope="session") +@pytest.fixture(name="kpl_properties_data", scope="module") def kpl_properties_data_fixture(): """Load the controller state fixture data.""" return json.loads(load_fixture("insteon/kpl_properties.json")) -@pytest.fixture(name="iolinc_properties_data", scope="session") +@pytest.fixture(name="iolinc_properties_data", scope="module") def iolinc_properties_data_fixture(): """Load the controller state fixture data.""" return json.loads(load_fixture("insteon/iolinc_properties.json")) diff --git 
a/tests/components/insteon/test_api_scenes.py b/tests/components/insteon/test_api_scenes.py index 04fc74c89d1..1b8d4d50f08 100644 --- a/tests/components/insteon/test_api_scenes.py +++ b/tests/components/insteon/test_api_scenes.py @@ -18,7 +18,7 @@ from tests.common import load_fixture from tests.typing import WebSocketGenerator -@pytest.fixture(name="scene_data", scope="session") +@pytest.fixture(name="scene_data", scope="module") def aldb_data_fixture(): """Load the controller state fixture data.""" return json.loads(load_fixture("insteon/scene_data.json")) diff --git a/tests/components/mysensors/conftest.py b/tests/components/mysensors/conftest.py index e18043fda1f..01d6f5d9620 100644 --- a/tests/components/mysensors/conftest.py +++ b/tests/components/mysensors/conftest.py @@ -206,7 +206,7 @@ def update_gateway_nodes( return nodes -@pytest.fixture(name="cover_node_binary_state", scope="session") +@pytest.fixture(name="cover_node_binary_state", scope="package") def cover_node_binary_state_fixture() -> dict: """Load the cover node state.""" return load_nodes_state("cover_node_binary_state.json") @@ -221,7 +221,7 @@ def cover_node_binary( return nodes[1] -@pytest.fixture(name="cover_node_percentage_state", scope="session") +@pytest.fixture(name="cover_node_percentage_state", scope="package") def cover_node_percentage_state_fixture() -> dict: """Load the cover node state.""" return load_nodes_state("cover_node_percentage_state.json") @@ -236,7 +236,7 @@ def cover_node_percentage( return nodes[1] -@pytest.fixture(name="door_sensor_state", scope="session") +@pytest.fixture(name="door_sensor_state", scope="package") def door_sensor_state_fixture() -> dict: """Load the door sensor state.""" return load_nodes_state("door_sensor_state.json") @@ -249,7 +249,7 @@ def door_sensor(gateway_nodes: dict[int, Sensor], door_sensor_state: dict) -> Se return nodes[1] -@pytest.fixture(name="gps_sensor_state", scope="session") +@pytest.fixture(name="gps_sensor_state", scope="package") 
def gps_sensor_state_fixture() -> dict: """Load the gps sensor state.""" return load_nodes_state("gps_sensor_state.json") @@ -262,7 +262,7 @@ def gps_sensor(gateway_nodes: dict[int, Sensor], gps_sensor_state: dict) -> Sens return nodes[1] -@pytest.fixture(name="dimmer_node_state", scope="session") +@pytest.fixture(name="dimmer_node_state", scope="package") def dimmer_node_state_fixture() -> dict: """Load the dimmer node state.""" return load_nodes_state("dimmer_node_state.json") @@ -275,7 +275,7 @@ def dimmer_node(gateway_nodes: dict[int, Sensor], dimmer_node_state: dict) -> Se return nodes[1] -@pytest.fixture(name="hvac_node_auto_state", scope="session") +@pytest.fixture(name="hvac_node_auto_state", scope="package") def hvac_node_auto_state_fixture() -> dict: """Load the hvac node auto state.""" return load_nodes_state("hvac_node_auto_state.json") @@ -290,7 +290,7 @@ def hvac_node_auto( return nodes[1] -@pytest.fixture(name="hvac_node_cool_state", scope="session") +@pytest.fixture(name="hvac_node_cool_state", scope="package") def hvac_node_cool_state_fixture() -> dict: """Load the hvac node cool state.""" return load_nodes_state("hvac_node_cool_state.json") @@ -305,7 +305,7 @@ def hvac_node_cool( return nodes[1] -@pytest.fixture(name="hvac_node_heat_state", scope="session") +@pytest.fixture(name="hvac_node_heat_state", scope="package") def hvac_node_heat_state_fixture() -> dict: """Load the hvac node heat state.""" return load_nodes_state("hvac_node_heat_state.json") @@ -320,7 +320,7 @@ def hvac_node_heat( return nodes[1] -@pytest.fixture(name="power_sensor_state", scope="session") +@pytest.fixture(name="power_sensor_state", scope="package") def power_sensor_state_fixture() -> dict: """Load the power sensor state.""" return load_nodes_state("power_sensor_state.json") @@ -333,7 +333,7 @@ def power_sensor(gateway_nodes: dict[int, Sensor], power_sensor_state: dict) -> return nodes[1] -@pytest.fixture(name="rgb_node_state", scope="session") 
+@pytest.fixture(name="rgb_node_state", scope="package") def rgb_node_state_fixture() -> dict: """Load the rgb node state.""" return load_nodes_state("rgb_node_state.json") @@ -346,7 +346,7 @@ def rgb_node(gateway_nodes: dict[int, Sensor], rgb_node_state: dict) -> Sensor: return nodes[1] -@pytest.fixture(name="rgbw_node_state", scope="session") +@pytest.fixture(name="rgbw_node_state", scope="package") def rgbw_node_state_fixture() -> dict: """Load the rgbw node state.""" return load_nodes_state("rgbw_node_state.json") @@ -359,7 +359,7 @@ def rgbw_node(gateway_nodes: dict[int, Sensor], rgbw_node_state: dict) -> Sensor return nodes[1] -@pytest.fixture(name="energy_sensor_state", scope="session") +@pytest.fixture(name="energy_sensor_state", scope="package") def energy_sensor_state_fixture() -> dict: """Load the energy sensor state.""" return load_nodes_state("energy_sensor_state.json") @@ -374,7 +374,7 @@ def energy_sensor( return nodes[1] -@pytest.fixture(name="sound_sensor_state", scope="session") +@pytest.fixture(name="sound_sensor_state", scope="package") def sound_sensor_state_fixture() -> dict: """Load the sound sensor state.""" return load_nodes_state("sound_sensor_state.json") @@ -387,7 +387,7 @@ def sound_sensor(gateway_nodes: dict[int, Sensor], sound_sensor_state: dict) -> return nodes[1] -@pytest.fixture(name="distance_sensor_state", scope="session") +@pytest.fixture(name="distance_sensor_state", scope="package") def distance_sensor_state_fixture() -> dict: """Load the distance sensor state.""" return load_nodes_state("distance_sensor_state.json") @@ -402,7 +402,7 @@ def distance_sensor( return nodes[1] -@pytest.fixture(name="ir_transceiver_state", scope="session") +@pytest.fixture(name="ir_transceiver_state", scope="package") def ir_transceiver_state_fixture() -> dict: """Load the ir transceiver state.""" return load_nodes_state("ir_transceiver_state.json") @@ -417,7 +417,7 @@ def ir_transceiver( return nodes[1] -@pytest.fixture(name="relay_node_state", 
scope="session") +@pytest.fixture(name="relay_node_state", scope="package") def relay_node_state_fixture() -> dict: """Load the relay node state.""" return load_nodes_state("relay_node_state.json") @@ -430,7 +430,7 @@ def relay_node(gateway_nodes: dict[int, Sensor], relay_node_state: dict) -> Sens return nodes[1] -@pytest.fixture(name="temperature_sensor_state", scope="session") +@pytest.fixture(name="temperature_sensor_state", scope="package") def temperature_sensor_state_fixture() -> dict: """Load the temperature sensor state.""" return load_nodes_state("temperature_sensor_state.json") @@ -445,7 +445,7 @@ def temperature_sensor( return nodes[1] -@pytest.fixture(name="text_node_state", scope="session") +@pytest.fixture(name="text_node_state", scope="package") def text_node_state_fixture() -> dict: """Load the text node state.""" return load_nodes_state("text_node_state.json") @@ -458,7 +458,7 @@ def text_node(gateway_nodes: dict[int, Sensor], text_node_state: dict) -> Sensor return nodes[1] -@pytest.fixture(name="battery_sensor_state", scope="session") +@pytest.fixture(name="battery_sensor_state", scope="package") def battery_sensor_state_fixture() -> dict: """Load the battery sensor state.""" return load_nodes_state("battery_sensor_state.json") diff --git a/tests/components/myuplink/conftest.py b/tests/components/myuplink/conftest.py index e08dc4255be..3ecb7e08356 100644 --- a/tests/components/myuplink/conftest.py +++ b/tests/components/myuplink/conftest.py @@ -71,7 +71,7 @@ async def setup_credentials(hass: HomeAssistant) -> None: # Fixture group for device API endpoint. 
-@pytest.fixture(scope="session") +@pytest.fixture(scope="package") def load_device_file() -> str: """Fixture for loading device file.""" return load_fixture("device.json", DOMAIN) @@ -92,7 +92,7 @@ def load_systems_jv_file(load_systems_file: str) -> dict[str, Any]: return json_loads(load_systems_file) -@pytest.fixture(scope="session") +@pytest.fixture(scope="package") def load_systems_file() -> str: """Load fixture file for systems.""" return load_fixture("systems-2dev.json", DOMAIN) diff --git a/tests/components/plex/conftest.py b/tests/components/plex/conftest.py index 7e82b1c9d26..d00b8eb944b 100644 --- a/tests/components/plex/conftest.py +++ b/tests/components/plex/conftest.py @@ -29,253 +29,253 @@ def mock_setup_entry() -> Generator[AsyncMock, None, None]: yield mock_setup_entry -@pytest.fixture(name="album", scope="session") +@pytest.fixture(name="album", scope="package") def album_fixture(): """Load album payload and return it.""" return load_fixture("plex/album.xml") -@pytest.fixture(name="artist_albums", scope="session") +@pytest.fixture(name="artist_albums", scope="package") def artist_albums_fixture(): """Load artist's albums payload and return it.""" return load_fixture("plex/artist_albums.xml") -@pytest.fixture(name="children_20", scope="session") +@pytest.fixture(name="children_20", scope="package") def children_20_fixture(): """Load children payload for item 20 and return it.""" return load_fixture("plex/children_20.xml") -@pytest.fixture(name="children_30", scope="session") +@pytest.fixture(name="children_30", scope="package") def children_30_fixture(): """Load children payload for item 30 and return it.""" return load_fixture("plex/children_30.xml") -@pytest.fixture(name="children_200", scope="session") +@pytest.fixture(name="children_200", scope="package") def children_200_fixture(): """Load children payload for item 200 and return it.""" return load_fixture("plex/children_200.xml") -@pytest.fixture(name="children_300", scope="session") 
+@pytest.fixture(name="children_300", scope="package") def children_300_fixture(): """Load children payload for item 300 and return it.""" return load_fixture("plex/children_300.xml") -@pytest.fixture(name="empty_library", scope="session") +@pytest.fixture(name="empty_library", scope="package") def empty_library_fixture(): """Load an empty library payload and return it.""" return load_fixture("plex/empty_library.xml") -@pytest.fixture(name="empty_payload", scope="session") +@pytest.fixture(name="empty_payload", scope="package") def empty_payload_fixture(): """Load an empty payload and return it.""" return load_fixture("plex/empty_payload.xml") -@pytest.fixture(name="grandchildren_300", scope="session") +@pytest.fixture(name="grandchildren_300", scope="package") def grandchildren_300_fixture(): """Load grandchildren payload for item 300 and return it.""" return load_fixture("plex/grandchildren_300.xml") -@pytest.fixture(name="library_movies_all", scope="session") +@pytest.fixture(name="library_movies_all", scope="package") def library_movies_all_fixture(): """Load payload for all items in the movies library and return it.""" return load_fixture("plex/library_movies_all.xml") -@pytest.fixture(name="library_movies_metadata", scope="session") +@pytest.fixture(name="library_movies_metadata", scope="package") def library_movies_metadata_fixture(): """Load payload for metadata in the movies library and return it.""" return load_fixture("plex/library_movies_metadata.xml") -@pytest.fixture(name="library_movies_collections", scope="session") +@pytest.fixture(name="library_movies_collections", scope="package") def library_movies_collections_fixture(): """Load payload for collections in the movies library and return it.""" return load_fixture("plex/library_movies_collections.xml") -@pytest.fixture(name="library_tvshows_all", scope="session") +@pytest.fixture(name="library_tvshows_all", scope="package") def library_tvshows_all_fixture(): """Load payload for all items in the 
tvshows library and return it.""" return load_fixture("plex/library_tvshows_all.xml") -@pytest.fixture(name="library_tvshows_metadata", scope="session") +@pytest.fixture(name="library_tvshows_metadata", scope="package") def library_tvshows_metadata_fixture(): """Load payload for metadata in the TV shows library and return it.""" return load_fixture("plex/library_tvshows_metadata.xml") -@pytest.fixture(name="library_tvshows_collections", scope="session") +@pytest.fixture(name="library_tvshows_collections", scope="package") def library_tvshows_collections_fixture(): """Load payload for collections in the TV shows library and return it.""" return load_fixture("plex/library_tvshows_collections.xml") -@pytest.fixture(name="library_music_all", scope="session") +@pytest.fixture(name="library_music_all", scope="package") def library_music_all_fixture(): """Load payload for all items in the music library and return it.""" return load_fixture("plex/library_music_all.xml") -@pytest.fixture(name="library_music_metadata", scope="session") +@pytest.fixture(name="library_music_metadata", scope="package") def library_music_metadata_fixture(): """Load payload for metadata in the music library and return it.""" return load_fixture("plex/library_music_metadata.xml") -@pytest.fixture(name="library_music_collections", scope="session") +@pytest.fixture(name="library_music_collections", scope="package") def library_music_collections_fixture(): """Load payload for collections in the music library and return it.""" return load_fixture("plex/library_music_collections.xml") -@pytest.fixture(name="library_movies_sort", scope="session") +@pytest.fixture(name="library_movies_sort", scope="package") def library_movies_sort_fixture(): """Load sorting payload for movie library and return it.""" return load_fixture("plex/library_movies_sort.xml") -@pytest.fixture(name="library_tvshows_sort", scope="session") +@pytest.fixture(name="library_tvshows_sort", scope="package") def 
library_tvshows_sort_fixture(): """Load sorting payload for tvshow library and return it.""" return load_fixture("plex/library_tvshows_sort.xml") -@pytest.fixture(name="library_music_sort", scope="session") +@pytest.fixture(name="library_music_sort", scope="package") def library_music_sort_fixture(): """Load sorting payload for music library and return it.""" return load_fixture("plex/library_music_sort.xml") -@pytest.fixture(name="library_movies_filtertypes", scope="session") +@pytest.fixture(name="library_movies_filtertypes", scope="package") def library_movies_filtertypes_fixture(): """Load filtertypes payload for movie library and return it.""" return load_fixture("plex/library_movies_filtertypes.xml") -@pytest.fixture(name="library", scope="session") +@pytest.fixture(name="library", scope="package") def library_fixture(): """Load library payload and return it.""" return load_fixture("plex/library.xml") -@pytest.fixture(name="library_movies_size", scope="session") +@pytest.fixture(name="library_movies_size", scope="package") def library_movies_size_fixture(): """Load movie library size payload and return it.""" return load_fixture("plex/library_movies_size.xml") -@pytest.fixture(name="library_music_size", scope="session") +@pytest.fixture(name="library_music_size", scope="package") def library_music_size_fixture(): """Load music library size payload and return it.""" return load_fixture("plex/library_music_size.xml") -@pytest.fixture(name="library_tvshows_size", scope="session") +@pytest.fixture(name="library_tvshows_size", scope="package") def library_tvshows_size_fixture(): """Load tvshow library size payload and return it.""" return load_fixture("plex/library_tvshows_size.xml") -@pytest.fixture(name="library_tvshows_size_episodes", scope="session") +@pytest.fixture(name="library_tvshows_size_episodes", scope="package") def library_tvshows_size_episodes_fixture(): """Load tvshow library size in episodes payload and return it.""" return 
load_fixture("plex/library_tvshows_size_episodes.xml") -@pytest.fixture(name="library_tvshows_size_seasons", scope="session") +@pytest.fixture(name="library_tvshows_size_seasons", scope="package") def library_tvshows_size_seasons_fixture(): """Load tvshow library size in seasons payload and return it.""" return load_fixture("plex/library_tvshows_size_seasons.xml") -@pytest.fixture(name="library_sections", scope="session") +@pytest.fixture(name="library_sections", scope="package") def library_sections_fixture(): """Load library sections payload and return it.""" return load_fixture("plex/library_sections.xml") -@pytest.fixture(name="media_1", scope="session") +@pytest.fixture(name="media_1", scope="package") def media_1_fixture(): """Load media payload for item 1 and return it.""" return load_fixture("plex/media_1.xml") -@pytest.fixture(name="media_30", scope="session") +@pytest.fixture(name="media_30", scope="package") def media_30_fixture(): """Load media payload for item 30 and return it.""" return load_fixture("plex/media_30.xml") -@pytest.fixture(name="media_100", scope="session") +@pytest.fixture(name="media_100", scope="package") def media_100_fixture(): """Load media payload for item 100 and return it.""" return load_fixture("plex/media_100.xml") -@pytest.fixture(name="media_200", scope="session") +@pytest.fixture(name="media_200", scope="package") def media_200_fixture(): """Load media payload for item 200 and return it.""" return load_fixture("plex/media_200.xml") -@pytest.fixture(name="player_plexweb_resources", scope="session") +@pytest.fixture(name="player_plexweb_resources", scope="package") def player_plexweb_resources_fixture(): """Load resources payload for a Plex Web player and return it.""" return load_fixture("plex/player_plexweb_resources.xml") -@pytest.fixture(name="player_plexhtpc_resources", scope="session") +@pytest.fixture(name="player_plexhtpc_resources", scope="package") def player_plexhtpc_resources_fixture(): """Load resources payload 
for a Plex HTPC player and return it.""" return load_fixture("plex/player_plexhtpc_resources.xml") -@pytest.fixture(name="playlists", scope="session") +@pytest.fixture(name="playlists", scope="package") def playlists_fixture(): """Load payload for all playlists and return it.""" return load_fixture("plex/playlists.xml") -@pytest.fixture(name="playlist_500", scope="session") +@pytest.fixture(name="playlist_500", scope="package") def playlist_500_fixture(): """Load payload for playlist 500 and return it.""" return load_fixture("plex/playlist_500.xml") -@pytest.fixture(name="playqueue_created", scope="session") +@pytest.fixture(name="playqueue_created", scope="package") def playqueue_created_fixture(): """Load payload for playqueue creation response and return it.""" return load_fixture("plex/playqueue_created.xml") -@pytest.fixture(name="playqueue_1234", scope="session") +@pytest.fixture(name="playqueue_1234", scope="package") def playqueue_1234_fixture(): """Load payload for playqueue 1234 and return it.""" return load_fixture("plex/playqueue_1234.xml") -@pytest.fixture(name="plex_server_accounts", scope="session") +@pytest.fixture(name="plex_server_accounts", scope="package") def plex_server_accounts_fixture(): """Load payload accounts on the Plex server and return it.""" return load_fixture("plex/plex_server_accounts.xml") -@pytest.fixture(name="plex_server_base", scope="session") +@pytest.fixture(name="plex_server_base", scope="package") def plex_server_base_fixture(): """Load base payload for Plex server info and return it.""" return load_fixture("plex/plex_server_base.xml") -@pytest.fixture(name="plex_server_default", scope="session") +@pytest.fixture(name="plex_server_default", scope="package") def plex_server_default_fixture(plex_server_base): """Load default payload for Plex server info and return it.""" return plex_server_base.format( @@ -283,133 +283,133 @@ def plex_server_default_fixture(plex_server_base): ) -@pytest.fixture(name="plex_server_clients", 
scope="session") +@pytest.fixture(name="plex_server_clients", scope="package") def plex_server_clients_fixture(): """Load available clients payload for Plex server and return it.""" return load_fixture("plex/plex_server_clients.xml") -@pytest.fixture(name="plextv_account", scope="session") +@pytest.fixture(name="plextv_account", scope="package") def plextv_account_fixture(): """Load account info from plex.tv and return it.""" return load_fixture("plex/plextv_account.xml") -@pytest.fixture(name="plextv_resources", scope="session") +@pytest.fixture(name="plextv_resources", scope="package") def plextv_resources_fixture(): """Load single-server payload for plex.tv resources and return it.""" return load_fixture("plex/plextv_resources_one_server.xml") -@pytest.fixture(name="plextv_resources_two_servers", scope="session") +@pytest.fixture(name="plextv_resources_two_servers", scope="package") def plextv_resources_two_servers_fixture(): """Load two-server payload for plex.tv resources and return it.""" return load_fixture("plex/plextv_resources_two_servers.xml") -@pytest.fixture(name="plextv_shared_users", scope="session") +@pytest.fixture(name="plextv_shared_users", scope="package") def plextv_shared_users_fixture(): """Load payload for plex.tv shared users and return it.""" return load_fixture("plex/plextv_shared_users.xml") -@pytest.fixture(name="session_base", scope="session") +@pytest.fixture(name="session_base", scope="package") def session_base_fixture(): """Load the base session payload and return it.""" return load_fixture("plex/session_base.xml") -@pytest.fixture(name="session_default", scope="session") +@pytest.fixture(name="session_default", scope="package") def session_default_fixture(session_base): """Load the default session payload and return it.""" return session_base.format(user_id=1) -@pytest.fixture(name="session_new_user", scope="session") +@pytest.fixture(name="session_new_user", scope="package") def session_new_user_fixture(session_base): """Load the 
new user session payload and return it.""" return session_base.format(user_id=1001) -@pytest.fixture(name="session_photo", scope="session") +@pytest.fixture(name="session_photo", scope="package") def session_photo_fixture(): """Load a photo session payload and return it.""" return load_fixture("plex/session_photo.xml") -@pytest.fixture(name="session_plexweb", scope="session") +@pytest.fixture(name="session_plexweb", scope="package") def session_plexweb_fixture(): """Load a Plex Web session payload and return it.""" return load_fixture("plex/session_plexweb.xml") -@pytest.fixture(name="session_transient", scope="session") +@pytest.fixture(name="session_transient", scope="package") def session_transient_fixture(): """Load a transient session payload and return it.""" return load_fixture("plex/session_transient.xml") -@pytest.fixture(name="session_unknown", scope="session") +@pytest.fixture(name="session_unknown", scope="package") def session_unknown_fixture(): """Load a hypothetical unknown session payload and return it.""" return load_fixture("plex/session_unknown.xml") -@pytest.fixture(name="session_live_tv", scope="session") +@pytest.fixture(name="session_live_tv", scope="package") def session_live_tv_fixture(): """Load a Live TV session payload and return it.""" return load_fixture("plex/session_live_tv.xml") -@pytest.fixture(name="livetv_sessions", scope="session") +@pytest.fixture(name="livetv_sessions", scope="package") def livetv_sessions_fixture(): """Load livetv/sessions payload and return it.""" return load_fixture("plex/livetv_sessions.xml") -@pytest.fixture(name="security_token", scope="session") +@pytest.fixture(name="security_token", scope="package") def security_token_fixture(): """Load a security token payload and return it.""" return load_fixture("plex/security_token.xml") -@pytest.fixture(name="show_seasons", scope="session") +@pytest.fixture(name="show_seasons", scope="package") def show_seasons_fixture(): """Load a show's seasons payload and 
return it.""" return load_fixture("plex/show_seasons.xml") -@pytest.fixture(name="sonos_resources", scope="session") +@pytest.fixture(name="sonos_resources", scope="package") def sonos_resources_fixture(): """Load Sonos resources payload and return it.""" return load_fixture("plex/sonos_resources.xml") -@pytest.fixture(name="hubs", scope="session") +@pytest.fixture(name="hubs", scope="package") def hubs_fixture(): """Load hubs resource payload and return it.""" return load_fixture("plex/hubs.xml") -@pytest.fixture(name="hubs_music_library", scope="session") +@pytest.fixture(name="hubs_music_library", scope="package") def hubs_music_library_fixture(): """Load music library hubs resource payload and return it.""" return load_fixture("plex/hubs_library_section.xml") -@pytest.fixture(name="update_check_nochange", scope="session") +@pytest.fixture(name="update_check_nochange", scope="package") def update_check_fixture_nochange() -> str: """Load a no-change update resource payload and return it.""" return load_fixture("plex/release_nochange.xml") -@pytest.fixture(name="update_check_new", scope="session") +@pytest.fixture(name="update_check_new", scope="package") def update_check_fixture_new() -> str: """Load a changed update resource payload and return it.""" return load_fixture("plex/release_new.xml") -@pytest.fixture(name="update_check_new_not_updatable", scope="session") +@pytest.fixture(name="update_check_new_not_updatable", scope="package") def update_check_fixture_new_not_updatable() -> str: """Load a changed update resource payload (not updatable) and return it.""" return load_fixture("plex/release_new_not_updatable.xml") diff --git a/tests/components/sensibo/conftest.py b/tests/components/sensibo/conftest.py index d98b19c3833..1c835cd8001 100644 --- a/tests/components/sensibo/conftest.py +++ b/tests/components/sensibo/conftest.py @@ -74,7 +74,7 @@ def load_json_from_fixture(load_data: str) -> SensiboData: return json_data -@pytest.fixture(name="load_data", 
scope="session") +@pytest.fixture(name="load_data", scope="package") def load_data_from_fixture() -> str: """Load fixture with fixture data and return.""" return load_fixture("data.json", "sensibo") diff --git a/tests/components/smhi/conftest.py b/tests/components/smhi/conftest.py index df6a81a223d..62da5207565 100644 --- a/tests/components/smhi/conftest.py +++ b/tests/components/smhi/conftest.py @@ -7,13 +7,13 @@ from homeassistant.components.smhi.const import DOMAIN from tests.common import load_fixture -@pytest.fixture(scope="session") +@pytest.fixture(scope="package") def api_response(): """Return an API response.""" return load_fixture("smhi.json", DOMAIN) -@pytest.fixture(scope="session") +@pytest.fixture(scope="package") def api_response_lack_data(): """Return an API response.""" return load_fixture("smhi_short.json", DOMAIN) diff --git a/tests/components/sonos/conftest.py b/tests/components/sonos/conftest.py index 0eb9b497fbd..3da0dd5c983 100644 --- a/tests/components/sonos/conftest.py +++ b/tests/components/sonos/conftest.py @@ -587,7 +587,7 @@ def mock_get_source_ip(mock_get_source_ip): return mock_get_source_ip -@pytest.fixture(name="zgs_discovery", scope="session") +@pytest.fixture(name="zgs_discovery", scope="package") def zgs_discovery_fixture(): """Load ZoneGroupState discovery payload and return it.""" return load_fixture("sonos/zgs_discovery.xml") diff --git a/tests/components/soundtouch/conftest.py b/tests/components/soundtouch/conftest.py index c81d76072d7..5bfeeea5ec5 100644 --- a/tests/components/soundtouch/conftest.py +++ b/tests/components/soundtouch/conftest.py @@ -47,97 +47,97 @@ def device2_config() -> MockConfigEntry: ) -@pytest.fixture(scope="session") +@pytest.fixture(scope="package") def device1_info() -> str: """Load SoundTouch device 1 info response and return it.""" return load_fixture("soundtouch/device1_info.xml") -@pytest.fixture(scope="session") +@pytest.fixture(scope="package") def device1_now_playing_aux() -> str: """Load 
SoundTouch device 1 now_playing response and return it.""" return load_fixture("soundtouch/device1_now_playing_aux.xml") -@pytest.fixture(scope="session") +@pytest.fixture(scope="package") def device1_now_playing_bluetooth() -> str: """Load SoundTouch device 1 now_playing response and return it.""" return load_fixture("soundtouch/device1_now_playing_bluetooth.xml") -@pytest.fixture(scope="session") +@pytest.fixture(scope="package") def device1_now_playing_radio() -> str: """Load SoundTouch device 1 now_playing response and return it.""" return load_fixture("soundtouch/device1_now_playing_radio.xml") -@pytest.fixture(scope="session") +@pytest.fixture(scope="package") def device1_now_playing_standby() -> str: """Load SoundTouch device 1 now_playing response and return it.""" return load_fixture("soundtouch/device1_now_playing_standby.xml") -@pytest.fixture(scope="session") +@pytest.fixture(scope="package") def device1_now_playing_upnp() -> str: """Load SoundTouch device 1 now_playing response and return it.""" return load_fixture("soundtouch/device1_now_playing_upnp.xml") -@pytest.fixture(scope="session") +@pytest.fixture(scope="package") def device1_now_playing_upnp_paused() -> str: """Load SoundTouch device 1 now_playing response and return it.""" return load_fixture("soundtouch/device1_now_playing_upnp_paused.xml") -@pytest.fixture(scope="session") +@pytest.fixture(scope="package") def device1_presets() -> str: """Load SoundTouch device 1 presets response and return it.""" return load_fixture("soundtouch/device1_presets.xml") -@pytest.fixture(scope="session") +@pytest.fixture(scope="package") def device1_volume() -> str: """Load SoundTouch device 1 volume response and return it.""" return load_fixture("soundtouch/device1_volume.xml") -@pytest.fixture(scope="session") +@pytest.fixture(scope="package") def device1_volume_muted() -> str: """Load SoundTouch device 1 volume response and return it.""" return load_fixture("soundtouch/device1_volume_muted.xml") 
-@pytest.fixture(scope="session") +@pytest.fixture(scope="package") def device1_zone_master() -> str: """Load SoundTouch device 1 getZone response and return it.""" return load_fixture("soundtouch/device1_getZone_master.xml") -@pytest.fixture(scope="session") +@pytest.fixture(scope="package") def device2_info() -> str: """Load SoundTouch device 2 info response and return it.""" return load_fixture("soundtouch/device2_info.xml") -@pytest.fixture(scope="session") +@pytest.fixture(scope="package") def device2_volume() -> str: """Load SoundTouch device 2 volume response and return it.""" return load_fixture("soundtouch/device2_volume.xml") -@pytest.fixture(scope="session") +@pytest.fixture(scope="package") def device2_now_playing_standby() -> str: """Load SoundTouch device 2 now_playing response and return it.""" return load_fixture("soundtouch/device2_now_playing_standby.xml") -@pytest.fixture(scope="session") +@pytest.fixture(scope="package") def device2_zone_slave() -> str: """Load SoundTouch device 2 getZone response and return it.""" return load_fixture("soundtouch/device2_getZone_slave.xml") -@pytest.fixture(scope="session") +@pytest.fixture(scope="package") def zone_empty() -> str: """Load empty SoundTouch getZone response and return it.""" return load_fixture("soundtouch/getZone_empty.xml") diff --git a/tests/components/tradfri/conftest.py b/tests/components/tradfri/conftest.py index 9ddac769c1f..73cfea59ce1 100644 --- a/tests/components/tradfri/conftest.py +++ b/tests/components/tradfri/conftest.py @@ -96,13 +96,13 @@ def device( return device -@pytest.fixture(scope="session") +@pytest.fixture(scope="package") def air_purifier() -> str: """Return an air purifier response.""" return load_fixture("air_purifier.json", DOMAIN) -@pytest.fixture(scope="session") +@pytest.fixture(scope="package") def blind() -> str: """Return a blind response.""" return load_fixture("blind.json", DOMAIN) diff --git a/tests/components/yale_smart_alarm/conftest.py 
b/tests/components/yale_smart_alarm/conftest.py index 816fc922411..211367a2922 100644 --- a/tests/components/yale_smart_alarm/conftest.py +++ b/tests/components/yale_smart_alarm/conftest.py @@ -56,7 +56,7 @@ async def load_config_entry( return (config_entry, client) -@pytest.fixture(name="load_json", scope="session") +@pytest.fixture(name="load_json", scope="package") def load_json_from_fixture() -> dict[str, Any]: """Load fixture with json data and return.""" diff --git a/tests/components/zwave_js/conftest.py b/tests/components/zwave_js/conftest.py index dbf7357d4a0..db92b89cf81 100644 --- a/tests/components/zwave_js/conftest.py +++ b/tests/components/zwave_js/conftest.py @@ -241,19 +241,19 @@ def create_backup_fixture(): # State fixtures -@pytest.fixture(name="controller_state", scope="session") +@pytest.fixture(name="controller_state", scope="package") def controller_state_fixture(): """Load the controller state fixture data.""" return json.loads(load_fixture("zwave_js/controller_state.json")) -@pytest.fixture(name="controller_node_state", scope="session") +@pytest.fixture(name="controller_node_state", scope="package") def controller_node_state_fixture(): """Load the controller node state fixture data.""" return json.loads(load_fixture("zwave_js/controller_node_state.json")) -@pytest.fixture(name="version_state", scope="session") +@pytest.fixture(name="version_state", scope="package") def version_state_fixture(): """Load the version state fixture data.""" return { @@ -276,67 +276,67 @@ def log_config_state_fixture(): } -@pytest.fixture(name="config_entry_diagnostics", scope="session") +@pytest.fixture(name="config_entry_diagnostics", scope="package") def config_entry_diagnostics_fixture(): """Load the config entry diagnostics fixture data.""" return json.loads(load_fixture("zwave_js/config_entry_diagnostics.json")) -@pytest.fixture(name="config_entry_diagnostics_redacted", scope="session") +@pytest.fixture(name="config_entry_diagnostics_redacted", 
scope="package") def config_entry_diagnostics_redacted_fixture(): """Load the redacted config entry diagnostics fixture data.""" return json.loads(load_fixture("zwave_js/config_entry_diagnostics_redacted.json")) -@pytest.fixture(name="multisensor_6_state", scope="session") +@pytest.fixture(name="multisensor_6_state", scope="package") def multisensor_6_state_fixture(): """Load the multisensor 6 node state fixture data.""" return json.loads(load_fixture("zwave_js/multisensor_6_state.json")) -@pytest.fixture(name="ecolink_door_sensor_state", scope="session") +@pytest.fixture(name="ecolink_door_sensor_state", scope="package") def ecolink_door_sensor_state_fixture(): """Load the Ecolink Door/Window Sensor node state fixture data.""" return json.loads(load_fixture("zwave_js/ecolink_door_sensor_state.json")) -@pytest.fixture(name="hank_binary_switch_state", scope="session") +@pytest.fixture(name="hank_binary_switch_state", scope="package") def binary_switch_state_fixture(): """Load the hank binary switch node state fixture data.""" return json.loads(load_fixture("zwave_js/hank_binary_switch_state.json")) -@pytest.fixture(name="bulb_6_multi_color_state", scope="session") +@pytest.fixture(name="bulb_6_multi_color_state", scope="package") def bulb_6_multi_color_state_fixture(): """Load the bulb 6 multi-color node state fixture data.""" return json.loads(load_fixture("zwave_js/bulb_6_multi_color_state.json")) -@pytest.fixture(name="light_color_null_values_state", scope="session") +@pytest.fixture(name="light_color_null_values_state", scope="package") def light_color_null_values_state_fixture(): """Load the light color null values node state fixture data.""" return json.loads(load_fixture("zwave_js/light_color_null_values_state.json")) -@pytest.fixture(name="eaton_rf9640_dimmer_state", scope="session") +@pytest.fixture(name="eaton_rf9640_dimmer_state", scope="package") def eaton_rf9640_dimmer_state_fixture(): """Load the eaton rf9640 dimmer node state fixture data.""" return 
json.loads(load_fixture("zwave_js/eaton_rf9640_dimmer_state.json")) -@pytest.fixture(name="lock_schlage_be469_state", scope="session") +@pytest.fixture(name="lock_schlage_be469_state", scope="package") def lock_schlage_be469_state_fixture(): """Load the schlage lock node state fixture data.""" return json.loads(load_fixture("zwave_js/lock_schlage_be469_state.json")) -@pytest.fixture(name="lock_august_asl03_state", scope="session") +@pytest.fixture(name="lock_august_asl03_state", scope="package") def lock_august_asl03_state_fixture(): """Load the August Pro lock node state fixture data.""" return json.loads(load_fixture("zwave_js/lock_august_asl03_state.json")) -@pytest.fixture(name="climate_radio_thermostat_ct100_plus_state", scope="session") +@pytest.fixture(name="climate_radio_thermostat_ct100_plus_state", scope="package") def climate_radio_thermostat_ct100_plus_state_fixture(): """Load the climate radio thermostat ct100 plus node state fixture data.""" return json.loads( @@ -346,7 +346,7 @@ def climate_radio_thermostat_ct100_plus_state_fixture(): @pytest.fixture( name="climate_radio_thermostat_ct100_plus_different_endpoints_state", - scope="session", + scope="package", ) def climate_radio_thermostat_ct100_plus_different_endpoints_state_fixture(): """Load the thermostat fixture state with values on different endpoints. 
@@ -360,13 +360,13 @@ def climate_radio_thermostat_ct100_plus_different_endpoints_state_fixture(): ) -@pytest.fixture(name="climate_adc_t3000_state", scope="session") +@pytest.fixture(name="climate_adc_t3000_state", scope="package") def climate_adc_t3000_state_fixture(): """Load the climate ADC-T3000 node state fixture data.""" return json.loads(load_fixture("zwave_js/climate_adc_t3000_state.json")) -@pytest.fixture(name="climate_airzone_aidoo_control_hvac_unit_state", scope="session") +@pytest.fixture(name="climate_airzone_aidoo_control_hvac_unit_state", scope="package") def climate_airzone_aidoo_control_hvac_unit_state_fixture(): """Load the climate Airzone Aidoo Control HVAC Unit state fixture data.""" return json.loads( @@ -374,37 +374,37 @@ def climate_airzone_aidoo_control_hvac_unit_state_fixture(): ) -@pytest.fixture(name="climate_danfoss_lc_13_state", scope="session") +@pytest.fixture(name="climate_danfoss_lc_13_state", scope="package") def climate_danfoss_lc_13_state_fixture(): """Load Danfoss (LC-13) electronic radiator thermostat node state fixture data.""" return json.loads(load_fixture("zwave_js/climate_danfoss_lc_13_state.json")) -@pytest.fixture(name="climate_eurotronic_spirit_z_state", scope="session") +@pytest.fixture(name="climate_eurotronic_spirit_z_state", scope="package") def climate_eurotronic_spirit_z_state_fixture(): """Load the climate Eurotronic Spirit Z thermostat node state fixture data.""" return json.loads(load_fixture("zwave_js/climate_eurotronic_spirit_z_state.json")) -@pytest.fixture(name="climate_heatit_z_trm6_state", scope="session") +@pytest.fixture(name="climate_heatit_z_trm6_state", scope="package") def climate_heatit_z_trm6_state_fixture(): """Load the climate HEATIT Z-TRM6 thermostat node state fixture data.""" return json.loads(load_fixture("zwave_js/climate_heatit_z_trm6_state.json")) -@pytest.fixture(name="climate_heatit_z_trm3_state", scope="session") +@pytest.fixture(name="climate_heatit_z_trm3_state", scope="package") 
def climate_heatit_z_trm3_state_fixture(): """Load the climate HEATIT Z-TRM3 thermostat node state fixture data.""" return json.loads(load_fixture("zwave_js/climate_heatit_z_trm3_state.json")) -@pytest.fixture(name="climate_heatit_z_trm2fx_state", scope="session") +@pytest.fixture(name="climate_heatit_z_trm2fx_state", scope="package") def climate_heatit_z_trm2fx_state_fixture(): """Load the climate HEATIT Z-TRM2fx thermostat node state fixture data.""" return json.loads(load_fixture("zwave_js/climate_heatit_z_trm2fx_state.json")) -@pytest.fixture(name="climate_heatit_z_trm3_no_value_state", scope="session") +@pytest.fixture(name="climate_heatit_z_trm3_no_value_state", scope="package") def climate_heatit_z_trm3_no_value_state_fixture(): """Load the climate HEATIT Z-TRM3 thermostat node w/no value state fixture data.""" return json.loads( @@ -412,134 +412,134 @@ def climate_heatit_z_trm3_no_value_state_fixture(): ) -@pytest.fixture(name="nortek_thermostat_state", scope="session") +@pytest.fixture(name="nortek_thermostat_state", scope="package") def nortek_thermostat_state_fixture(): """Load the nortek thermostat node state fixture data.""" return json.loads(load_fixture("zwave_js/nortek_thermostat_state.json")) -@pytest.fixture(name="srt321_hrt4_zw_state", scope="session") +@pytest.fixture(name="srt321_hrt4_zw_state", scope="package") def srt321_hrt4_zw_state_fixture(): """Load the climate HRT4-ZW / SRT321 / SRT322 thermostat node state fixture data.""" return json.loads(load_fixture("zwave_js/srt321_hrt4_zw_state.json")) -@pytest.fixture(name="chain_actuator_zws12_state", scope="session") +@pytest.fixture(name="chain_actuator_zws12_state", scope="package") def window_cover_state_fixture(): """Load the window cover node state fixture data.""" return json.loads(load_fixture("zwave_js/chain_actuator_zws12_state.json")) -@pytest.fixture(name="fan_generic_state", scope="session") +@pytest.fixture(name="fan_generic_state", scope="package") def fan_generic_state_fixture(): 
"""Load the fan node state fixture data.""" return json.loads(load_fixture("zwave_js/fan_generic_state.json")) -@pytest.fixture(name="hs_fc200_state", scope="session") +@pytest.fixture(name="hs_fc200_state", scope="package") def hs_fc200_state_fixture(): """Load the HS FC200+ node state fixture data.""" return json.loads(load_fixture("zwave_js/fan_hs_fc200_state.json")) -@pytest.fixture(name="leviton_zw4sf_state", scope="session") +@pytest.fixture(name="leviton_zw4sf_state", scope="package") def leviton_zw4sf_state_fixture(): """Load the Leviton ZW4SF node state fixture data.""" return json.loads(load_fixture("zwave_js/leviton_zw4sf_state.json")) -@pytest.fixture(name="fan_honeywell_39358_state", scope="session") +@pytest.fixture(name="fan_honeywell_39358_state", scope="package") def fan_honeywell_39358_state_fixture(): """Load the fan node state fixture data.""" return json.loads(load_fixture("zwave_js/fan_honeywell_39358_state.json")) -@pytest.fixture(name="gdc_zw062_state", scope="session") +@pytest.fixture(name="gdc_zw062_state", scope="package") def motorized_barrier_cover_state_fixture(): """Load the motorized barrier cover node state fixture data.""" return json.loads(load_fixture("zwave_js/cover_zw062_state.json")) -@pytest.fixture(name="iblinds_v2_state", scope="session") +@pytest.fixture(name="iblinds_v2_state", scope="package") def iblinds_v2_state_fixture(): """Load the iBlinds v2 node state fixture data.""" return json.loads(load_fixture("zwave_js/cover_iblinds_v2_state.json")) -@pytest.fixture(name="iblinds_v3_state", scope="session") +@pytest.fixture(name="iblinds_v3_state", scope="package") def iblinds_v3_state_fixture(): """Load the iBlinds v3 node state fixture data.""" return json.loads(load_fixture("zwave_js/cover_iblinds_v3_state.json")) -@pytest.fixture(name="qubino_shutter_state", scope="session") +@pytest.fixture(name="qubino_shutter_state", scope="package") def qubino_shutter_state_fixture(): """Load the Qubino Shutter node state fixture 
data.""" return json.loads(load_fixture("zwave_js/cover_qubino_shutter_state.json")) -@pytest.fixture(name="aeotec_nano_shutter_state", scope="session") +@pytest.fixture(name="aeotec_nano_shutter_state", scope="package") def aeotec_nano_shutter_state_fixture(): """Load the Aeotec Nano Shutter node state fixture data.""" return json.loads(load_fixture("zwave_js/cover_aeotec_nano_shutter_state.json")) -@pytest.fixture(name="fibaro_fgr222_shutter_state", scope="session") +@pytest.fixture(name="fibaro_fgr222_shutter_state", scope="package") def fibaro_fgr222_shutter_state_fixture(): """Load the Fibaro FGR222 node state fixture data.""" return json.loads(load_fixture("zwave_js/cover_fibaro_fgr222_state.json")) -@pytest.fixture(name="fibaro_fgr223_shutter_state", scope="session") +@pytest.fixture(name="fibaro_fgr223_shutter_state", scope="package") def fibaro_fgr223_shutter_state_fixture(): """Load the Fibaro FGR223 node state fixture data.""" return json.loads(load_fixture("zwave_js/cover_fibaro_fgr223_state.json")) -@pytest.fixture(name="merten_507801_state", scope="session") +@pytest.fixture(name="merten_507801_state", scope="package") def merten_507801_state_fixture(): """Load the Merten 507801 Shutter node state fixture data.""" return json.loads(load_fixture("zwave_js/cover_merten_507801_state.json")) -@pytest.fixture(name="aeon_smart_switch_6_state", scope="session") +@pytest.fixture(name="aeon_smart_switch_6_state", scope="package") def aeon_smart_switch_6_state_fixture(): """Load the AEON Labs (ZW096) Smart Switch 6 node state fixture data.""" return json.loads(load_fixture("zwave_js/aeon_smart_switch_6_state.json")) -@pytest.fixture(name="ge_12730_state", scope="session") +@pytest.fixture(name="ge_12730_state", scope="package") def ge_12730_state_fixture(): """Load the GE 12730 node state fixture data.""" return json.loads(load_fixture("zwave_js/fan_ge_12730_state.json")) -@pytest.fixture(name="aeotec_radiator_thermostat_state", scope="session") 
+@pytest.fixture(name="aeotec_radiator_thermostat_state", scope="package") def aeotec_radiator_thermostat_state_fixture(): """Load the Aeotec Radiator Thermostat node state fixture data.""" return json.loads(load_fixture("zwave_js/aeotec_radiator_thermostat_state.json")) -@pytest.fixture(name="inovelli_lzw36_state", scope="session") +@pytest.fixture(name="inovelli_lzw36_state", scope="package") def inovelli_lzw36_state_fixture(): """Load the Inovelli LZW36 node state fixture data.""" return json.loads(load_fixture("zwave_js/inovelli_lzw36_state.json")) -@pytest.fixture(name="null_name_check_state", scope="session") +@pytest.fixture(name="null_name_check_state", scope="package") def null_name_check_state_fixture(): """Load the null name check node state fixture data.""" return json.loads(load_fixture("zwave_js/null_name_check_state.json")) -@pytest.fixture(name="lock_id_lock_as_id150_state", scope="session") +@pytest.fixture(name="lock_id_lock_as_id150_state", scope="package") def lock_id_lock_as_id150_state_fixture(): """Load the id lock id-150 lock node state fixture data.""" return json.loads(load_fixture("zwave_js/lock_id_lock_as_id150_state.json")) @pytest.fixture( - name="climate_radio_thermostat_ct101_multiple_temp_units_state", scope="session" + name="climate_radio_thermostat_ct101_multiple_temp_units_state", scope="package" ) def climate_radio_thermostat_ct101_multiple_temp_units_state_fixture(): """Load the climate multiple temp units node state fixture data.""" @@ -554,7 +554,7 @@ def climate_radio_thermostat_ct101_multiple_temp_units_state_fixture(): name=( "climate_radio_thermostat_ct100_mode_and_setpoint_on_different_endpoints_state" ), - scope="session", + scope="package", ) def climate_radio_thermostat_ct100_mode_and_setpoint_on_different_endpoints_state_fixture(): """Load climate device w/ mode+setpoint on diff endpoints node state fixture data.""" @@ -565,37 +565,37 @@ def climate_radio_thermostat_ct100_mode_and_setpoint_on_different_endpoints_stat 
) -@pytest.fixture(name="vision_security_zl7432_state", scope="session") +@pytest.fixture(name="vision_security_zl7432_state", scope="package") def vision_security_zl7432_state_fixture(): """Load the vision security zl7432 switch node state fixture data.""" return json.loads(load_fixture("zwave_js/vision_security_zl7432_state.json")) -@pytest.fixture(name="zen_31_state", scope="session") +@pytest.fixture(name="zen_31_state", scope="package") def zem_31_state_fixture(): """Load the zen_31 node state fixture data.""" return json.loads(load_fixture("zwave_js/zen_31_state.json")) -@pytest.fixture(name="wallmote_central_scene_state", scope="session") +@pytest.fixture(name="wallmote_central_scene_state", scope="package") def wallmote_central_scene_state_fixture(): """Load the wallmote central scene node state fixture data.""" return json.loads(load_fixture("zwave_js/wallmote_central_scene_state.json")) -@pytest.fixture(name="ge_in_wall_dimmer_switch_state", scope="session") +@pytest.fixture(name="ge_in_wall_dimmer_switch_state", scope="package") def ge_in_wall_dimmer_switch_state_fixture(): """Load the ge in-wall dimmer switch node state fixture data.""" return json.loads(load_fixture("zwave_js/ge_in_wall_dimmer_switch_state.json")) -@pytest.fixture(name="aeotec_zw164_siren_state", scope="session") +@pytest.fixture(name="aeotec_zw164_siren_state", scope="package") def aeotec_zw164_siren_state_fixture(): """Load the aeotec zw164 siren node state fixture data.""" return json.loads(load_fixture("zwave_js/aeotec_zw164_siren_state.json")) -@pytest.fixture(name="lock_popp_electric_strike_lock_control_state", scope="session") +@pytest.fixture(name="lock_popp_electric_strike_lock_control_state", scope="package") def lock_popp_electric_strike_lock_control_state_fixture(): """Load the popp electric strike lock control node state fixture data.""" return json.loads( @@ -603,73 +603,73 @@ def lock_popp_electric_strike_lock_control_state_fixture(): ) 
-@pytest.fixture(name="fortrezz_ssa1_siren_state", scope="session") +@pytest.fixture(name="fortrezz_ssa1_siren_state", scope="package") def fortrezz_ssa1_siren_state_fixture(): """Load the fortrezz ssa1 siren node state fixture data.""" return json.loads(load_fixture("zwave_js/fortrezz_ssa1_siren_state.json")) -@pytest.fixture(name="fortrezz_ssa3_siren_state", scope="session") +@pytest.fixture(name="fortrezz_ssa3_siren_state", scope="package") def fortrezz_ssa3_siren_state_fixture(): """Load the fortrezz ssa3 siren node state fixture data.""" return json.loads(load_fixture("zwave_js/fortrezz_ssa3_siren_state.json")) -@pytest.fixture(name="zp3111_not_ready_state", scope="session") +@pytest.fixture(name="zp3111_not_ready_state", scope="package") def zp3111_not_ready_state_fixture(): """Load the zp3111 4-in-1 sensor not-ready node state fixture data.""" return json.loads(load_fixture("zwave_js/zp3111-5_not_ready_state.json")) -@pytest.fixture(name="zp3111_state", scope="session") +@pytest.fixture(name="zp3111_state", scope="package") def zp3111_state_fixture(): """Load the zp3111 4-in-1 sensor node state fixture data.""" return json.loads(load_fixture("zwave_js/zp3111-5_state.json")) -@pytest.fixture(name="express_controls_ezmultipli_state", scope="session") +@pytest.fixture(name="express_controls_ezmultipli_state", scope="package") def light_express_controls_ezmultipli_state_fixture(): """Load the Express Controls EZMultiPli node state fixture data.""" return json.loads(load_fixture("zwave_js/express_controls_ezmultipli_state.json")) -@pytest.fixture(name="lock_home_connect_620_state", scope="session") +@pytest.fixture(name="lock_home_connect_620_state", scope="package") def lock_home_connect_620_state_fixture(): """Load the Home Connect 620 lock node state fixture data.""" return json.loads(load_fixture("zwave_js/lock_home_connect_620_state.json")) -@pytest.fixture(name="switch_zooz_zen72_state", scope="session") +@pytest.fixture(name="switch_zooz_zen72_state", 
scope="package") def switch_zooz_zen72_state_fixture(): """Load the Zooz Zen72 switch node state fixture data.""" return json.loads(load_fixture("zwave_js/switch_zooz_zen72_state.json")) -@pytest.fixture(name="indicator_test_state", scope="session") +@pytest.fixture(name="indicator_test_state", scope="package") def indicator_test_state_fixture(): """Load the indicator CC test node state fixture data.""" return json.loads(load_fixture("zwave_js/indicator_test_state.json")) -@pytest.fixture(name="energy_production_state", scope="session") +@pytest.fixture(name="energy_production_state", scope="package") def energy_production_state_fixture(): """Load a mock node with energy production CC state fixture data.""" return json.loads(load_fixture("zwave_js/energy_production_state.json")) -@pytest.fixture(name="nice_ibt4zwave_state", scope="session") +@pytest.fixture(name="nice_ibt4zwave_state", scope="package") def nice_ibt4zwave_state_fixture(): """Load a Nice IBT4ZWAVE cover node state fixture data.""" return json.loads(load_fixture("zwave_js/cover_nice_ibt4zwave_state.json")) -@pytest.fixture(name="logic_group_zdb5100_state", scope="session") +@pytest.fixture(name="logic_group_zdb5100_state", scope="package") def logic_group_zdb5100_state_fixture(): """Load the Logic Group ZDB5100 node state fixture data.""" return json.loads(load_fixture("zwave_js/logic_group_zdb5100_state.json")) -@pytest.fixture(name="central_scene_node_state", scope="session") +@pytest.fixture(name="central_scene_node_state", scope="package") def central_scene_node_state_fixture(): """Load node with Central Scene CC node state fixture data.""" return json.loads(load_fixture("zwave_js/central_scene_node_state.json")) From 764b34ab62564e2adb91c9a8c8ee64497597d99b Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Fri, 26 Apr 2024 05:00:07 +0200 Subject: [PATCH 423/426] Reduce scope of bootstrap test fixture to module (#116195) --- tests/test_bootstrap.py | 2 +- 1 file changed, 1 insertion(+), 1 
deletion(-) diff --git a/tests/test_bootstrap.py b/tests/test_bootstrap.py index 2e35e4ffddb..96caf5d10c8 100644 --- a/tests/test_bootstrap.py +++ b/tests/test_bootstrap.py @@ -44,7 +44,7 @@ async def apply_stop_hass(stop_hass: None) -> None: """Make sure all hass are stopped.""" -@pytest.fixture(scope="session", autouse=True) +@pytest.fixture(scope="module", autouse=True) def mock_http_start_stop() -> Generator[None, None, None]: """Mock HTTP start and stop.""" with ( From 623d34e1e0ed6307cb2b1c74015e1d91fcb9b117 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Fri, 26 Apr 2024 08:38:24 +0200 Subject: [PATCH 424/426] Remove early return when validating entity registry items (#116160) --- homeassistant/helpers/entity_registry.py | 1 - 1 file changed, 1 deletion(-) diff --git a/homeassistant/helpers/entity_registry.py b/homeassistant/helpers/entity_registry.py index 4e77df49ea6..436fc5a18de 100644 --- a/homeassistant/helpers/entity_registry.py +++ b/homeassistant/helpers/entity_registry.py @@ -636,7 +636,6 @@ def _validate_item( unique_id, report_issue, ) - return if ( disabled_by and disabled_by is not UNDEFINED From e662e3b65c98f89819db3768e747c4fd84c0724f Mon Sep 17 00:00:00 2001 From: Sid <27780930+autinerd@users.noreply.github.com> Date: Fri, 26 Apr 2024 08:48:32 +0200 Subject: [PATCH 425/426] Bump ruff to 0.4.2 (#116201) * Bump ruff to 0.4.2 * review comments --- .pre-commit-config.yaml | 2 +- .../components/emoncms_history/__init__.py | 4 +- .../components/lamarzocco/coordinator.py | 2 +- homeassistant/components/nest/media_source.py | 10 ++-- homeassistant/components/netio/switch.py | 2 +- .../components/rss_feed_template/__init__.py | 4 +- homeassistant/components/stream/worker.py | 4 +- homeassistant/components/tedee/coordinator.py | 4 +- homeassistant/components/tedee/lock.py | 6 +-- homeassistant/components/verisure/lock.py | 2 +- homeassistant/helpers/device_registry.py | 2 +- homeassistant/helpers/entity.py | 2 +- homeassistant/util/uuid.py | 2 +- 
pyproject.toml | 2 +- requirements_test_pre_commit.txt | 2 +- tests/components/freebox/conftest.py | 3 +- tests/components/nest/test_media_source.py | 50 ++++++++---------- tests/components/rainbird/conftest.py | 2 +- tests/conftest.py | 6 +-- tests/helpers/test_template.py | 52 +++++++++++++------ 20 files changed, 86 insertions(+), 77 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index ceb8ee7f9c4..40757c09e95 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,6 +1,6 @@ repos: - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.4.1 + rev: v0.4.2 hooks: - id: ruff args: diff --git a/homeassistant/components/emoncms_history/__init__.py b/homeassistant/components/emoncms_history/__init__.py index ab3f2671b99..7de3a4f2ef8 100644 --- a/homeassistant/components/emoncms_history/__init__.py +++ b/homeassistant/components/emoncms_history/__init__.py @@ -86,8 +86,8 @@ def setup(hass: HomeAssistant, config: ConfigType) -> bool: continue if payload_dict: - payload = "{%s}" % ",".join( - f"{key}:{val}" for key, val in payload_dict.items() + payload = "{{{}}}".format( + ",".join(f"{key}:{val}" for key, val in payload_dict.items()) ) send_data( diff --git a/homeassistant/components/lamarzocco/coordinator.py b/homeassistant/components/lamarzocco/coordinator.py index 7901b0bb3fa..412fe9ee3ce 100644 --- a/homeassistant/components/lamarzocco/coordinator.py +++ b/homeassistant/components/lamarzocco/coordinator.py @@ -147,7 +147,7 @@ class LaMarzoccoUpdateCoordinator(DataUpdateCoordinator[None]): raise ConfigEntryAuthFailed(msg) from ex except RequestNotSuccessful as ex: _LOGGER.debug(ex, exc_info=True) - raise UpdateFailed("Querying API failed. Error: %s" % ex) from ex + raise UpdateFailed(f"Querying API failed. 
Error: {ex}") from ex def async_get_ble_device(self) -> BLEDevice | None: """Get a Bleak Client for the machine.""" diff --git a/homeassistant/components/nest/media_source.py b/homeassistant/components/nest/media_source.py index d48006c449d..6c481806e4f 100644 --- a/homeassistant/components/nest/media_source.py +++ b/homeassistant/components/nest/media_source.py @@ -322,7 +322,7 @@ class NestMediaSource(MediaSource): devices = async_get_media_source_devices(self.hass) if not (device := devices.get(media_id.device_id)): raise Unresolvable( - "Unable to find device with identifier: %s" % item.identifier + f"Unable to find device with identifier: {item.identifier}" ) if not media_id.event_token: # The device resolves to the most recent event if available @@ -330,7 +330,7 @@ class NestMediaSource(MediaSource): last_event_id := await _async_get_recent_event_id(media_id, device) ): raise Unresolvable( - "Unable to resolve recent event for device: %s" % item.identifier + f"Unable to resolve recent event for device: {item.identifier}" ) media_id = last_event_id @@ -377,7 +377,7 @@ class NestMediaSource(MediaSource): # Browse either a device or events within a device if not (device := devices.get(media_id.device_id)): raise BrowseError( - "Unable to find device with identiifer: %s" % item.identifier + f"Unable to find device with identiifer: {item.identifier}" ) # Clip previews are a session with multiple possible event types (e.g. 
# person, motion, etc) and a single mp4 @@ -399,7 +399,7 @@ class NestMediaSource(MediaSource): # Browse a specific event if not (single_clip := clips.get(media_id.event_token)): raise BrowseError( - "Unable to find event with identiifer: %s" % item.identifier + f"Unable to find event with identiifer: {item.identifier}" ) return _browse_clip_preview(media_id, device, single_clip) @@ -419,7 +419,7 @@ class NestMediaSource(MediaSource): # Browse a specific event if not (single_image := images.get(media_id.event_token)): raise BrowseError( - "Unable to find event with identiifer: %s" % item.identifier + f"Unable to find event with identiifer: {item.identifier}" ) return _browse_image_event(media_id, device, single_image) diff --git a/homeassistant/components/netio/switch.py b/homeassistant/components/netio/switch.py index 0f0c85c1720..4cc77e44ec4 100644 --- a/homeassistant/components/netio/switch.py +++ b/homeassistant/components/netio/switch.py @@ -165,7 +165,7 @@ class NetioSwitch(SwitchEntity): def _set(self, value): val = list("uuuu") val[int(self.outlet) - 1] = "1" if value else "0" - self.netio.get("port list %s" % "".join(val)) + self.netio.get("port list {}".format("".join(val))) self.netio.states[int(self.outlet) - 1] = value self.schedule_update_ha_state() diff --git a/homeassistant/components/rss_feed_template/__init__.py b/homeassistant/components/rss_feed_template/__init__.py index 8d2e47315ef..debff5a6e96 100644 --- a/homeassistant/components/rss_feed_template/__init__.py +++ b/homeassistant/components/rss_feed_template/__init__.py @@ -91,9 +91,7 @@ class RssView(HomeAssistantView): response += '\n' response += " \n" if self._title is not None: - response += " %s\n" % escape( - self._title.async_render(parse_result=False) - ) + response += f" {escape(self._title.async_render(parse_result=False))}\n" else: response += " Home Assistant\n" diff --git a/homeassistant/components/stream/worker.py b/homeassistant/components/stream/worker.py index 
670d6b93c0e..956c93d01a0 100644 --- a/homeassistant/components/stream/worker.py +++ b/homeassistant/components/stream/worker.py @@ -592,7 +592,7 @@ def stream_worker( except av.AVError as ex: container.close() raise StreamWorkerError( - "Error demuxing stream while finding first packet: %s" % str(ex) + f"Error demuxing stream while finding first packet: {str(ex)}" ) from ex muxer = StreamMuxer( @@ -617,7 +617,7 @@ def stream_worker( except StopIteration as ex: raise StreamEndedError("Stream ended; no additional packets") from ex except av.AVError as ex: - raise StreamWorkerError("Error demuxing stream: %s" % str(ex)) from ex + raise StreamWorkerError(f"Error demuxing stream: {str(ex)}") from ex muxer.mux_packet(packet) diff --git a/homeassistant/components/tedee/coordinator.py b/homeassistant/components/tedee/coordinator.py index f3043b1d78d..069a7893974 100644 --- a/homeassistant/components/tedee/coordinator.py +++ b/homeassistant/components/tedee/coordinator.py @@ -100,9 +100,9 @@ class TedeeApiCoordinator(DataUpdateCoordinator[dict[int, TedeeLock]]): except TedeeDataUpdateException as ex: _LOGGER.debug("Error while updating data: %s", str(ex)) - raise UpdateFailed("Error while updating data: %s" % str(ex)) from ex + raise UpdateFailed(f"Error while updating data: {str(ex)}") from ex except (TedeeClientException, TimeoutError) as ex: - raise UpdateFailed("Querying API failed. Error: %s" % str(ex)) from ex + raise UpdateFailed(f"Querying API failed. 
Error: {str(ex)}") from ex def _async_add_remove_locks(self) -> None: """Add new locks, remove non-existing locks.""" diff --git a/homeassistant/components/tedee/lock.py b/homeassistant/components/tedee/lock.py index a720652bcbc..1c47ff2a6c1 100644 --- a/homeassistant/components/tedee/lock.py +++ b/homeassistant/components/tedee/lock.py @@ -90,7 +90,7 @@ class TedeeLockEntity(TedeeEntity, LockEntity): await self.coordinator.async_request_refresh() except (TedeeClientException, Exception) as ex: raise HomeAssistantError( - "Failed to unlock the door. Lock %s" % self._lock.lock_id + f"Failed to unlock the door. Lock {self._lock.lock_id}" ) from ex async def async_lock(self, **kwargs: Any) -> None: @@ -103,7 +103,7 @@ class TedeeLockEntity(TedeeEntity, LockEntity): await self.coordinator.async_request_refresh() except (TedeeClientException, Exception) as ex: raise HomeAssistantError( - "Failed to lock the door. Lock %s" % self._lock.lock_id + f"Failed to lock the door. Lock {self._lock.lock_id}" ) from ex @@ -125,5 +125,5 @@ class TedeeLockWithLatchEntity(TedeeLockEntity): await self.coordinator.async_request_refresh() except (TedeeClientException, Exception) as ex: raise HomeAssistantError( - "Failed to unlatch the door. Lock %s" % self._lock.lock_id + f"Failed to unlatch the door. 
Lock {self._lock.lock_id}" ) from ex diff --git a/homeassistant/components/verisure/lock.py b/homeassistant/components/verisure/lock.py index 227356a2525..da2bc2ced2b 100644 --- a/homeassistant/components/verisure/lock.py +++ b/homeassistant/components/verisure/lock.py @@ -112,7 +112,7 @@ class VerisureDoorlock(CoordinatorEntity[VerisureDataUpdateCoordinator], LockEnt digits = self.coordinator.entry.options.get( CONF_LOCK_CODE_DIGITS, DEFAULT_LOCK_CODE_DIGITS ) - return "^\\d{%s}$" % digits + return f"^\\d{{{digits}}}$" @property def is_locked(self) -> bool: diff --git a/homeassistant/helpers/device_registry.py b/homeassistant/helpers/device_registry.py index 00d0a0ba62f..aec5dbc6c4a 100644 --- a/homeassistant/helpers/device_registry.py +++ b/homeassistant/helpers/device_registry.py @@ -615,7 +615,7 @@ class DeviceRegistry(BaseRegistry[dict[str, list[dict[str, Any]]]]): return name.format(**translation_placeholders) except KeyError as err: if get_release_channel() is not ReleaseChannel.STABLE: - raise HomeAssistantError("Missing placeholder %s" % err) from err + raise HomeAssistantError(f"Missing placeholder {err}") from err report_issue = async_suggest_report_issue( self.hass, integration_domain=domain ) diff --git a/homeassistant/helpers/entity.py b/homeassistant/helpers/entity.py index a91b4c32d21..a2fc16f8a82 100644 --- a/homeassistant/helpers/entity.py +++ b/homeassistant/helpers/entity.py @@ -660,7 +660,7 @@ class Entity( except KeyError as err: if not self._name_translation_placeholders_reported: if get_release_channel() is not ReleaseChannel.STABLE: - raise HomeAssistantError("Missing placeholder %s" % err) from err + raise HomeAssistantError(f"Missing placeholder {err}") from err report_issue = self._suggest_report_issue() _LOGGER.warning( ( diff --git a/homeassistant/util/uuid.py b/homeassistant/util/uuid.py index d924eab934d..b7e9c2ae4f8 100644 --- a/homeassistant/util/uuid.py +++ b/homeassistant/util/uuid.py @@ -9,4 +9,4 @@ def random_uuid_hex() -> str: 
This uuid should not be used for cryptographically secure operations. """ - return "%032x" % getrandbits(32 * 4) + return f"{getrandbits(32 * 4):032x}" diff --git a/pyproject.toml b/pyproject.toml index baf919c2da5..d3f2af6bbf9 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -659,7 +659,7 @@ filterwarnings = [ ] [tool.ruff] -required-version = ">=0.4.1" +required-version = ">=0.4.2" [tool.ruff.lint] select = [ diff --git a/requirements_test_pre_commit.txt b/requirements_test_pre_commit.txt index 4f21f6d4a0c..05e98a945d2 100644 --- a/requirements_test_pre_commit.txt +++ b/requirements_test_pre_commit.txt @@ -1,5 +1,5 @@ # Automatically generated from .pre-commit-config.yaml by gen_requirements_all.py, do not edit codespell==2.2.6 -ruff==0.4.1 +ruff==0.4.2 yamllint==1.35.1 diff --git a/tests/components/freebox/conftest.py b/tests/components/freebox/conftest.py index cf520043755..2fe4e1b77de 100644 --- a/tests/components/freebox/conftest.py +++ b/tests/components/freebox/conftest.py @@ -108,8 +108,7 @@ def mock_router_bridge_mode(mock_device_registry_devices, router): router().lan.get_hosts_list = AsyncMock( side_effect=HttpRequestError( - "Request failed (APIResponse: %s)" - % json.dumps(DATA_LAN_GET_HOSTS_LIST_MODE_BRIDGE) + f"Request failed (APIResponse: {json.dumps(DATA_LAN_GET_HOSTS_LIST_MODE_BRIDGE)})" ) ) diff --git a/tests/components/nest/test_media_source.py b/tests/components/nest/test_media_source.py index def99633435..419b3648124 100644 --- a/tests/components/nest/test_media_source.py +++ b/tests/components/nest/test_media_source.py @@ -399,7 +399,7 @@ async def test_camera_event( client = await hass_client() response = await client.get(media.url) - assert response.status == HTTPStatus.OK, "Response not matched: %s" % response + assert response.status == HTTPStatus.OK, f"Response not matched: {response}" contents = await response.read() assert contents == IMAGE_BYTES_FROM_EVENT @@ -572,7 +572,7 @@ async def test_multiple_image_events_in_session( client 
= await hass_client() response = await client.get(media.url) - assert response.status == HTTPStatus.OK, "Response not matched: %s" % response + assert response.status == HTTPStatus.OK, f"Response not matched: {response}" contents = await response.read() assert contents == IMAGE_BYTES_FROM_EVENT + b"-2" @@ -585,7 +585,7 @@ async def test_multiple_image_events_in_session( client = await hass_client() response = await client.get(media.url) - assert response.status == HTTPStatus.OK, "Response not matched: %s" % response + assert response.status == HTTPStatus.OK, f"Response not matched: {response}" contents = await response.read() assert contents == IMAGE_BYTES_FROM_EVENT + b"-1" @@ -673,7 +673,7 @@ async def test_multiple_clip_preview_events_in_session( client = await hass_client() response = await client.get(media.url) - assert response.status == HTTPStatus.OK, "Response not matched: %s" % response + assert response.status == HTTPStatus.OK, f"Response not matched: {response}" contents = await response.read() assert contents == IMAGE_BYTES_FROM_EVENT @@ -685,7 +685,7 @@ async def test_multiple_clip_preview_events_in_session( assert media.mime_type == "video/mp4" response = await client.get(media.url) - assert response.status == HTTPStatus.OK, "Response not matched: %s" % response + assert response.status == HTTPStatus.OK, f"Response not matched: {response}" contents = await response.read() assert contents == IMAGE_BYTES_FROM_EVENT @@ -888,7 +888,7 @@ async def test_camera_event_clip_preview( client = await hass_client() response = await client.get(media.url) - assert response.status == HTTPStatus.OK, "Response not matched: %s" % response + assert response.status == HTTPStatus.OK, f"Response not matched: {response}" contents = await response.read() assert contents == mp4.getvalue() @@ -896,7 +896,7 @@ async def test_camera_event_clip_preview( response = await client.get( f"/api/nest/event_media/{device.id}/{event_identifier}/thumbnail" ) - assert response.status == 
HTTPStatus.OK, "Response not matched: %s" % response + assert response.status == HTTPStatus.OK, f"Response not matched: {response}" await response.read() # Animated gif format not tested @@ -907,9 +907,7 @@ async def test_event_media_render_invalid_device_id( await setup_platform() client = await hass_client() response = await client.get("/api/nest/event_media/invalid-device-id") - assert response.status == HTTPStatus.NOT_FOUND, ( - "Response not matched: %s" % response - ) + assert response.status == HTTPStatus.NOT_FOUND, f"Response not matched: {response}" async def test_event_media_render_invalid_event_id( @@ -924,9 +922,7 @@ async def test_event_media_render_invalid_event_id( client = await hass_client() response = await client.get(f"/api/nest/event_media/{device.id}/invalid-event-id") - assert response.status == HTTPStatus.NOT_FOUND, ( - "Response not matched: %s" % response - ) + assert response.status == HTTPStatus.NOT_FOUND, f"Response not matched: {response}" async def test_event_media_failure( @@ -981,9 +977,7 @@ async def test_event_media_failure( # Media is not available to be fetched client = await hass_client() response = await client.get(media.url) - assert response.status == HTTPStatus.NOT_FOUND, ( - "Response not matched: %s" % response - ) + assert response.status == HTTPStatus.NOT_FOUND, f"Response not matched: {response}" async def test_media_permission_unauthorized( @@ -1011,9 +1005,9 @@ async def test_media_permission_unauthorized( client = await hass_client() response = await client.get(media_url) - assert response.status == HTTPStatus.UNAUTHORIZED, ( - "Response not matched: %s" % response - ) + assert ( + response.status == HTTPStatus.UNAUTHORIZED + ), f"Response not matched: {response}" async def test_multiple_devices( @@ -1157,7 +1151,7 @@ async def test_media_store_persistence( # Fetch event media client = await hass_client() response = await client.get(media.url) - assert response.status == HTTPStatus.OK, "Response not matched: %s" % 
response + assert response.status == HTTPStatus.OK, f"Response not matched: {response}" contents = await response.read() assert contents == IMAGE_BYTES_FROM_EVENT @@ -1198,7 +1192,7 @@ async def test_media_store_persistence( # Verify media exists response = await client.get(media.url) - assert response.status == HTTPStatus.OK, "Response not matched: %s" % response + assert response.status == HTTPStatus.OK, f"Response not matched: {response}" contents = await response.read() assert contents == IMAGE_BYTES_FROM_EVENT @@ -1254,9 +1248,7 @@ async def test_media_store_save_filesystem_error( # We fail to retrieve the media from the server since the origin filesystem op failed client = await hass_client() response = await client.get(media.url) - assert response.status == HTTPStatus.NOT_FOUND, ( - "Response not matched: %s" % response - ) + assert response.status == HTTPStatus.NOT_FOUND, f"Response not matched: {response}" async def test_media_store_load_filesystem_error( @@ -1307,9 +1299,9 @@ async def test_media_store_load_filesystem_error( response = await client.get( f"/api/nest/event_media/{device.id}/{event_identifier}" ) - assert response.status == HTTPStatus.NOT_FOUND, ( - "Response not matched: %s" % response - ) + assert ( + response.status == HTTPStatus.NOT_FOUND + ), f"Response not matched: {response}" @pytest.mark.parametrize(("device_traits", "cache_size"), [(BATTERY_CAMERA_TRAITS, 5)]) @@ -1384,7 +1376,7 @@ async def test_camera_event_media_eviction( for i in reversed(range(3, 8)): child_event = next(child_events) response = await client.get(f"/api/nest/event_media/{child_event.identifier}") - assert response.status == HTTPStatus.OK, "Response not matched: %s" % response + assert response.status == HTTPStatus.OK, f"Response not matched: {response}" contents = await response.read() assert contents == f"image-bytes-{i}".encode() await hass.async_block_till_done() @@ -1444,7 +1436,7 @@ async def test_camera_image_resize( client = await hass_client() response = 
await client.get(browse.thumbnail) - assert response.status == HTTPStatus.OK, "Response not matched: %s" % response + assert response.status == HTTPStatus.OK, f"Response not matched: {response}" contents = await response.read() assert contents == IMAGE_BYTES_FROM_EVENT diff --git a/tests/components/rainbird/conftest.py b/tests/components/rainbird/conftest.py index 10101986007..59471f5eed4 100644 --- a/tests/components/rainbird/conftest.py +++ b/tests/components/rainbird/conftest.py @@ -187,7 +187,7 @@ def aioclient_mock(hass: HomeAssistant) -> Generator[AiohttpClientMocker, None, def rainbird_json_response(result: dict[str, str]) -> bytes: """Create a fake API response.""" return encryption.encrypt( - '{"jsonrpc": "2.0", "result": %s, "id": 1} ' % json.dumps(result), + f'{{"jsonrpc": "2.0", "result": {json.dumps(result)}, "id": 1}} ', PASSWORD, ) diff --git a/tests/conftest.py b/tests/conftest.py index 3a95e0e58b3..7efd4246a1f 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -499,7 +499,7 @@ def aiohttp_client( elif isinstance(__param, BaseTestServer): client = TestClient(__param, loop=loop, **kwargs) else: - raise TypeError("Unknown argument type: %r" % type(__param)) + raise TypeError(f"Unknown argument type: {type(__param)!r}") await client.start_server() clients.append(client) @@ -542,8 +542,8 @@ async def hass( else: exceptions.append( Exception( - "Received exception handler without exception, but with message: %s" - % context["message"] + "Received exception handler without exception, " + f"but with message: {context["message"]}" ) ) orig_exception_handler(loop, context) diff --git a/tests/helpers/test_template.py b/tests/helpers/test_template.py index 1e2e512cf3d..ae9dcbe50d5 100644 --- a/tests/helpers/test_template.py +++ b/tests/helpers/test_template.py @@ -707,7 +707,7 @@ def test_multiply(hass: HomeAssistant) -> None: for inp, out in tests.items(): assert ( template.Template( - "{{ %s | multiply(10) | round }}" % inp, hass + f"{{{{ {inp} | 
multiply(10) | round }}}}", hass ).async_render() == out ) @@ -775,7 +775,9 @@ def test_sine(hass: HomeAssistant) -> None: for value, expected in tests: assert ( - template.Template("{{ %s | sin | round(3) }}" % value, hass).async_render() + template.Template( + f"{{{{ {value} | sin | round(3) }}}}", hass + ).async_render() == expected ) assert render(hass, f"{{{{ sin({value}) | round(3) }}}}") == expected @@ -805,7 +807,9 @@ def test_cos(hass: HomeAssistant) -> None: for value, expected in tests: assert ( - template.Template("{{ %s | cos | round(3) }}" % value, hass).async_render() + template.Template( + f"{{{{ {value} | cos | round(3) }}}}", hass + ).async_render() == expected ) assert render(hass, f"{{{{ cos({value}) | round(3) }}}}") == expected @@ -835,7 +839,9 @@ def test_tan(hass: HomeAssistant) -> None: for value, expected in tests: assert ( - template.Template("{{ %s | tan | round(3) }}" % value, hass).async_render() + template.Template( + f"{{{{ {value} | tan | round(3) }}}}", hass + ).async_render() == expected ) assert render(hass, f"{{{{ tan({value}) | round(3) }}}}") == expected @@ -865,7 +871,9 @@ def test_sqrt(hass: HomeAssistant) -> None: for value, expected in tests: assert ( - template.Template("{{ %s | sqrt | round(3) }}" % value, hass).async_render() + template.Template( + f"{{{{ {value} | sqrt | round(3) }}}}", hass + ).async_render() == expected ) assert render(hass, f"{{{{ sqrt({value}) | round(3) }}}}") == expected @@ -895,7 +903,9 @@ def test_arc_sine(hass: HomeAssistant) -> None: for value, expected in tests: assert ( - template.Template("{{ %s | asin | round(3) }}" % value, hass).async_render() + template.Template( + f"{{{{ {value} | asin | round(3) }}}}", hass + ).async_render() == expected ) assert render(hass, f"{{{{ asin({value}) | round(3) }}}}") == expected @@ -909,7 +919,9 @@ def test_arc_sine(hass: HomeAssistant) -> None: for value in invalid_tests: with pytest.raises(TemplateError): - template.Template("{{ %s | asin | round(3) 
}}" % value, hass).async_render() + template.Template( + f"{{{{ {value} | asin | round(3) }}}}", hass + ).async_render() with pytest.raises(TemplateError): assert render(hass, f"{{{{ asin({value}) | round(3) }}}}") @@ -932,7 +944,9 @@ def test_arc_cos(hass: HomeAssistant) -> None: for value, expected in tests: assert ( - template.Template("{{ %s | acos | round(3) }}" % value, hass).async_render() + template.Template( + f"{{{{ {value} | acos | round(3) }}}}", hass + ).async_render() == expected ) assert render(hass, f"{{{{ acos({value}) | round(3) }}}}") == expected @@ -946,7 +960,9 @@ def test_arc_cos(hass: HomeAssistant) -> None: for value in invalid_tests: with pytest.raises(TemplateError): - template.Template("{{ %s | acos | round(3) }}" % value, hass).async_render() + template.Template( + f"{{{{ {value} | acos | round(3) }}}}", hass + ).async_render() with pytest.raises(TemplateError): assert render(hass, f"{{{{ acos({value}) | round(3) }}}}") @@ -973,7 +989,9 @@ def test_arc_tan(hass: HomeAssistant) -> None: for value, expected in tests: assert ( - template.Template("{{ %s | atan | round(3) }}" % value, hass).async_render() + template.Template( + f"{{{{ {value} | atan | round(3) }}}}", hass + ).async_render() == expected ) assert render(hass, f"{{{{ atan({value}) | round(3) }}}}") == expected @@ -1122,7 +1140,7 @@ def test_timestamp_local(hass: HomeAssistant) -> None: for inp, out in tests: assert ( - template.Template("{{ %s | timestamp_local }}" % inp, hass).async_render() + template.Template(f"{{{{ {inp} | timestamp_local }}}}", hass).async_render() == out ) @@ -1133,7 +1151,7 @@ def test_timestamp_local(hass: HomeAssistant) -> None: for inp in invalid_tests: with pytest.raises(TemplateError): - template.Template("{{ %s | timestamp_local }}" % inp, hass).async_render() + template.Template(f"{{{{ {inp} | timestamp_local }}}}", hass).async_render() # Test handling of default return value assert render(hass, "{{ None | timestamp_local(1) }}") == 1 @@ -1616,7 
+1634,7 @@ def test_ordinal(hass: HomeAssistant) -> None: for value, expected in tests: assert ( - template.Template("{{ %s | ordinal }}" % value, hass).async_render() + template.Template(f"{{{{ {value} | ordinal }}}}", hass).async_render() == expected ) @@ -1631,7 +1649,7 @@ def test_timestamp_utc(hass: HomeAssistant) -> None: for inp, out in tests: assert ( - template.Template("{{ %s | timestamp_utc }}" % inp, hass).async_render() + template.Template(f"{{{{ {inp} | timestamp_utc }}}}", hass).async_render() == out ) @@ -1642,7 +1660,7 @@ def test_timestamp_utc(hass: HomeAssistant) -> None: for inp in invalid_tests: with pytest.raises(TemplateError): - template.Template("{{ %s | timestamp_utc }}" % inp, hass).async_render() + template.Template(f"{{{{ {inp} | timestamp_utc }}}}", hass).async_render() # Test handling of default return value assert render(hass, "{{ None | timestamp_utc(1) }}") == 1 @@ -4618,7 +4636,9 @@ def test_closest_function_invalid_state(hass: HomeAssistant) -> None: for state in ("states.zone.non_existing", '"zone.non_existing"'): assert ( - template.Template("{{ closest(%s, states) }}" % state, hass).async_render() + template.Template( + f"{{{{ closest({state}, states) }}}}", hass + ).async_render() is None ) From 49d8ac081154320ceabae3fb005c41f5e29447d1 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 26 Apr 2024 08:50:58 +0200 Subject: [PATCH 426/426] Bump github/codeql-action from 3.25.2 to 3.25.3 (#116209) Bumps [github/codeql-action](https://github.com/github/codeql-action) from 3.25.2 to 3.25.3. - [Release notes](https://github.com/github/codeql-action/releases) - [Changelog](https://github.com/github/codeql-action/blob/main/CHANGELOG.md) - [Commits](https://github.com/github/codeql-action/compare/v3.25.2...v3.25.3) --- updated-dependencies: - dependency-name: github/codeql-action dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/codeql.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml index 399443d23fb..4f624c582d7 100644 --- a/.github/workflows/codeql.yml +++ b/.github/workflows/codeql.yml @@ -24,11 +24,11 @@ jobs: uses: actions/checkout@v4.1.4 - name: Initialize CodeQL - uses: github/codeql-action/init@v3.25.2 + uses: github/codeql-action/init@v3.25.3 with: languages: python - name: Perform CodeQL Analysis - uses: github/codeql-action/analyze@v3.25.2 + uses: github/codeql-action/analyze@v3.25.3 with: category: "/language:python"