Compare commits

...

97 Commits

Author SHA1 Message Date
Paulus Schoutsen
398c7be850 Merge pull request #69935 from home-assistant/rc 2022-04-12 16:19:07 -07:00
Paulus Schoutsen
25fc64a9e0 Guard against non http schemes (#69938) 2022-04-12 15:27:38 -07:00
Paulus Schoutsen
a543160070 Not all music are URLs (#69936) 2022-04-12 15:27:37 -07:00
rappenze
51bfe53444 Fix fibaro light state for rgb lights and HC3 (#69884) 2022-04-12 15:27:36 -07:00
Paulus Schoutsen
cc6afdba3c Bumped version to 2022.4.3 2022-04-12 14:14:13 -07:00
puddly
8a8ee3c732 Downgrade ZHA dependency zigpy-deconz from 0.15.0 to 0.14.0 (#69927) 2022-04-12 14:14:09 -07:00
Erik Montnemery
27721d5b84 Fix adjusting statistics in ft³ (#69913)
Co-authored-by: Paulus Schoutsen <balloob@gmail.com>
2022-04-12 14:14:08 -07:00
Erik Montnemery
fee80a9d4a Fix adjusting 5-minute statistics (#69921) 2022-04-12 14:13:53 -07:00
Franck Nijhof
e49da79d1b Fix climate HVAC device condition (#69908) 2022-04-12 14:13:18 -07:00
epenet
ec541ca7ed Bump renault-api to 0.1.11 (#69900) 2022-04-12 14:13:17 -07:00
epenet
f5bb9e6047 Fix unique id in SamsungTV config flow (#69899)
* Fix unique id in SamsungTV config flow

* coverage

Co-authored-by: J. Nick Koston <nick@koston.org>
2022-04-12 14:13:17 -07:00
Joakim Sørensen
242bd921df Handle add-on issues (#69897) 2022-04-12 14:13:16 -07:00
puddly
ba16156a79 Bump zigpy to 0.44.2 and zha-quirks to 0.0.72 (#69879) 2022-04-12 14:13:15 -07:00
starkillerOG
84d8a7857d Motion blinds fix set absolute position service (#69873) 2022-04-12 14:13:14 -07:00
Erik Montnemery
9607dfe57c Use quickplay when casting splash for mediaplayer.turn_on (#69866) 2022-04-12 14:13:14 -07:00
Allen Porter
aeb8dc2c07 Fix google calendar timestamp out of range (#69863) 2022-04-12 14:13:13 -07:00
Mick Vleeshouwer
71fb2d09b7 Fix #69694 (#69850) 2022-04-12 14:13:12 -07:00
Guido Schmitz
fd8fb59f7a Bump devolo-home-control-api to 0.18.1 (#69840) 2022-04-12 14:13:11 -07:00
David F. Mulcahey
49bf1d6bff Add diagnostics support for ZHA (#69756) 2022-04-12 14:13:10 -07:00
Raj Laud
8bd07bcff2 Handle Squeezebox media ids that are not URLs (#69696) 2022-04-12 14:13:10 -07:00
J. Nick Koston
85bc863830 Fix profiler object growth logging test (#69211) 2022-04-12 14:13:09 -07:00
Marvin Wichmann
094c185dee Update xknx to 0.20.2 (RC) (#69859) 2022-04-11 19:49:18 +02:00
Franck Nijhof
a1fddc3c4d Merge pull request #69835 from home-assistant/rc 2022-04-11 11:02:51 +02:00
Dave T
f6aead6773 Don't test config on yaml import for generic camera (#69714) 2022-04-10 23:15:04 -07:00
Paulus Schoutsen
2fad42ce06 Bumped version to 2022.4.2 2022-04-10 22:59:28 -07:00
J. Nick Koston
3e92659260 Downgrade av to 8.1.0 to fix memory leak (#69833) 2022-04-10 22:59:22 -07:00
jjlawren
02eec73644 Retry on more Plex connection failures during startup (#69822) 2022-04-10 22:59:21 -07:00
jjlawren
8e3e6efb21 Speed up Plex playback for multiple videos (#69821) 2022-04-10 22:59:20 -07:00
Raman Gupta
5d4c1d9fe4 Reduce API limit for tomorrow.io (#69818) 2022-04-10 22:59:20 -07:00
rikroe
2871ac4f8f Fix converting (value, unit) tuples if value is None (#69802)
Co-authored-by: rikroe <rikroe@users.noreply.github.com>
2022-04-10 22:59:19 -07:00
Michael Davie
506f8c1d94 Bump slixmpp to 1.8.2 (#69794) 2022-04-10 22:59:18 -07:00
Allen Porter
5c4df657b2 Bump rtsp-to-webrtc to 0.5.1 (#69776) 2022-04-10 22:59:17 -07:00
Allen Porter
16a1a93332 Handle expired credentials in reauth in google calendar initialization (#69772)
Co-authored-by: Martin Hjelmare <marhje52@gmail.com>
2022-04-10 22:59:16 -07:00
Maximilian
7c06514bb4 Upgrade pynina to 0.1.8 (#69771) 2022-04-10 22:59:15 -07:00
Christopher Bailey
0ebd9e093d Fix unifiprotect for 2.0.0-beta2 of UniFi Protect (#69762) 2022-04-10 22:59:15 -07:00
Mike Fugate
d9253fd310 Fix SleepIQ firmness number step and min values (#69757)
* fix sleepiq firmness number step and min values

* add asserts for min/max/step attributes
2022-04-10 22:59:14 -07:00
Malte Franken
0d7cbb8266 Bump aio_georss_gdacs to 0.7 (#69743) 2022-04-10 22:59:13 -07:00
J. Nick Koston
2ca8a0ef4a Increase tplink effects random seed allowed range to 1-600 (#69725)
* Increase tplink effects random seed allowed range to 1-600

Reported https://community.home-assistant.io/t/tp-link-integration-support-for-kl430-led-light-strip/190635/62?u=bdraco

* cover
2022-04-10 22:59:13 -07:00
Dave T
2c48f28f13 Support webp still image format in generic camera (#69718) 2022-04-10 22:59:12 -07:00
Allen Porter
2298a1fa70 Refresh google calendar tokens with invalid expiration times (#69679)
* Refresh google calendar tokens with invalid expiration times

* Update tests/components/google/conftest.py

Co-authored-by: Martin Hjelmare <marhje52@gmail.com>

* Remove unnecessary async methods in functions being touched already

Co-authored-by: Martin Hjelmare <marhje52@gmail.com>
2022-04-10 22:59:11 -07:00
Shay Levy
87ba8a56ee Fix Shelly gen2 cover unavailable when not calibrated (#69671) 2022-04-10 22:59:10 -07:00
Francois Chagnon
39e4d3e63b Add None guard for zwave_js humidifier entity (#69667)
* Add None guard for humidifier entity is_on

* Add guards in more places

* Update homeassistant/components/zwave_js/humidifier.py

Co-authored-by: Martin Hjelmare <marhje52@gmail.com>

Co-authored-by: Martin Hjelmare <marhje52@gmail.com>
2022-04-10 22:59:10 -07:00
epenet
269405aee0 Suppress Upnp parsing errors in SamsungTV (#69664) 2022-04-10 22:59:09 -07:00
KNXBroker
b1eda25ca3 Fix soundtouch service calls (#69655) 2022-04-10 22:59:08 -07:00
epenet
39e9270b79 Fix upnp subscription in SamsungTV (#69652)
Co-authored-by: J. Nick Koston <nick@koston.org>
2022-04-10 22:59:07 -07:00
starkillerOG
5a408d4083 Fix Netgear switch state update (#69597) 2022-04-10 22:59:07 -07:00
azrdev
509d6ffcb2 Update python-mpd2 to 3.0.5 (#69304) 2022-04-10 22:59:06 -07:00
Paulus Schoutsen
919f4dd719 Merge pull request #69509 from home-assistant/rc 2022-04-07 23:10:23 -07:00
Allen Porter
d9cbbd3b05 Fix bugs calendar oauth token date handling (#69641) 2022-04-07 21:53:56 -07:00
Matt Zimmerman
7e317bed3e [powerwall] Skip backup reserve sensor if data is unavailable (#69637) 2022-04-07 20:34:25 -07:00
David F. Mulcahey
8017cb274e Fix Samjin Multi acceleration in ZHA (#69636) 2022-04-07 20:28:15 -07:00
David F. Mulcahey
4d4eb5c850 Bump ZHA quirks to 0.0.71 (#69633) 2022-04-07 17:16:51 -07:00
puddly
1866e58ac5 Move new zha_event command parameters into a params key to ensure backwards compatibility (#69631) 2022-04-07 15:33:50 -07:00
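The entry above describes a payload-shape change rather than new behavior: parameters added by newer ZHA command handlers are nested under a single params key so existing automations keyed on the old top-level fields keep working. A minimal sketch of the idea, with hypothetical field names (scene_id, the IEEE address) used purely for illustration and not taken from the PR:

# Hedged illustration of the zha_event payload change described in #69631.
# Key names other than "command", "args" and "params" are assumptions.
event_data_before = {
    "device_ieee": "00:0d:6f:00:0a:bc:de:f0",
    "command": "recall",
    "args": [0, 1],
    "scene_id": 1,  # new parameter injected at the top level (breaking)
}

event_data_after = {
    "device_ieee": "00:0d:6f:00:0a:bc:de:f0",
    "command": "recall",
    "args": [0, 1],
    "params": {"scene_id": 1},  # new parameters grouped, existing keys untouched
}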
north3221
b50a78d1d9 Fix tado default overlay for when set pre new overlay feature (#69584) 2022-04-07 15:23:25 -07:00
puddly
88a081be24 Fix ZHA group creation (#69629) 2022-04-07 15:05:11 -07:00
J. Nick Koston
3dd0ddb73e Mark backgrounds optional for tplink random effects (#69622) 2022-04-07 15:05:10 -07:00
Álvaro Fernández Rojas
9063428358 Update aioairzone to v0.3.3 (#69615) 2022-04-07 15:05:09 -07:00
Álvaro Fernández Rojas
ee06b2a1b5 Update aioairzone to v0.3.1 (#68975) 2022-04-07 15:05:08 -07:00
Diogo Gomes
62d67a4287 Fix utility_meter reset service (#69612) 2022-04-07 15:02:49 -07:00
Jason Hunter
0b2f0a9f7c Log which device has the time discrepancy (#69595) 2022-04-07 15:02:49 -07:00
Dave T
7803845af1 Generic fix stream thumbnail (#69378) 2022-04-07 15:02:48 -07:00
J. Nick Koston
2dd3dc2d2d Run energy db calls in the db executor (#69544)
Fixes #69537
2022-04-07 15:26:15 +02:00
J. Nick Koston
ceb8d86a7e Fix registered entities without a category not being exclude-able in the HomeKit UI (#69543) 2022-04-07 15:26:12 +02:00
Joakim Sørensen
e726ef662c Fix adding OS entities for supervised installations (#69539) 2022-04-07 15:26:08 +02:00
Allen Porter
8c9534d2ba Gracefully handle empty summary in google calendar (#69520)
Gracefully handle empty summary in google calendar matching the old behavior
before some code cleanup.
2022-04-07 15:26:04 +02:00
Paulus Schoutsen
5cadea91bb Bumped version to 2022.4.1 2022-04-06 22:36:39 -07:00
J. Nick Koston
f9d447e4cd Fix reloading the sun integration (#69495) 2022-04-06 22:35:55 -07:00
Shay Levy
23bb38c5cf Fix remote_rpi_gpio missing requirement (#69488) 2022-04-06 22:35:55 -07:00
Joakim Sørensen
4c16563675 Bump pyhaversion from 22.04.0 to 22.4.1 (#69486) 2022-04-06 22:35:54 -07:00
J. Nick Koston
9351fcf369 Fix reload race in unifiprotect (#69485)
- The integration already has a reload listener installed
  once it is setup. We should not reload from the config
  flow since they compete
2022-04-06 22:35:53 -07:00
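The reasoning in the commit body follows the standard config-entry pattern: the integration registers its own update listener during setup, so a config/options flow should only update the entry and let that listener perform the reload. A minimal sketch of that pattern, assuming the conventional helper names (async_setup_entry, _async_update_listener) rather than the actual unifiprotect source:

from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant


async def _async_update_listener(hass: HomeAssistant, entry: ConfigEntry) -> None:
    # Reload the entry whenever its data/options change; this is the single
    # place a reload is triggered, so flows must not call async_reload too.
    await hass.config_entries.async_reload(entry.entry_id)


async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    # The listener is detached automatically when the entry is unloaded.
    entry.async_on_unload(entry.add_update_listener(_async_update_listener))
    return True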
Michael
2d74beaa67 Ignore IPv6 link local address on ssdp discovery in Fritz!Smarthome (#69455) 2022-04-06 22:35:52 -07:00
J. Nick Koston
87ab96f9c1 Fix elkm1 connection when panel drops VN request (#69454) 2022-04-06 22:35:52 -07:00
Paulus Schoutsen
0eed329bc8 Fix telegram broadcast (#69452) 2022-04-06 22:35:51 -07:00
Dave T
ea5e894ac7 Continue on template error during yaml import for generic (#69440)
Co-authored-by: Paulus Schoutsen <paulus@home-assistant.io>
2022-04-06 22:35:50 -07:00
Raman Gupta
91d2fafe1d Add comments to zwave_js node metadata WS API (#67210)
* Add comments to zwave_js node metadata WS API

* Add test data
2022-04-06 22:35:50 -07:00
Franck Nijhof
7dd19066e8 Merge pull request #69413 from home-assistant/rc 2022-04-06 15:14:55 +02:00
Franck Nijhof
be3c1055dd Bumped version to 2022.4.0 2022-04-06 14:01:53 +02:00
René Klomp
5a24dbbbf2 Update pysma to 0.6.11 (#69397) 2022-04-06 14:00:49 +02:00
Erik Montnemery
8174b831cf Restore attributes of template binary sensor (#69350) 2022-04-06 14:00:46 +02:00
Raman Gupta
8c794ecf93 Fix regression in zwave_js (#69312)
* Handle unique ID update during discovery step

* Use callback to convert unique IDs to strings

* Adjust test to make sure logic works

* Fix other tests

* Move comment

* Move migration to async_setup

* Remove async_migrate_entry since we take care of it during setup

* Remove unused test
2022-04-06 14:00:42 +02:00
hesselonline
072cd29b90 Fix Wallbox charger status (#68708)
Co-authored-by: Erik Montnemery <erik@montnemery.com>
Co-authored-by: Martin Hjelmare <marhje52@gmail.com>
2022-04-06 14:00:31 +02:00
Paulus Schoutsen
e3b20cf43f Bumped version to 2022.4.0b6 2022-04-05 16:40:33 -07:00
Zack Barett
2296d0fbee 20220405.0 (#69377) 2022-04-05 16:40:27 -07:00
J. Nick Koston
1e6f8fc48a Abort samsungtv discovery of legacy devices when unique id not available (#69376) 2022-04-05 16:40:26 -07:00
Franck Nijhof
4038575806 Disable Spotify Media Player entity by default (#69372) 2022-04-05 16:40:25 -07:00
Johan Nenzén
531aa87170 Bump pyplaato to 0.0.16 (#69361) 2022-04-05 16:40:24 -07:00
Marvin Wichmann
1896e39f60 Update XKNX to version 0.20.1 (#69353) 2022-04-05 16:40:23 -07:00
starkillerOG
a42327ffce bump pynetgear to 0.9.4 (#69346)
* Bump home-assistant/wheels from 2022.01.0 to 2022.01.1

Bumps [home-assistant/wheels](https://github.com/home-assistant/wheels) from 2022.01.0 to 2022.01.1.
- [Release notes](https://github.com/home-assistant/wheels/releases)
- [Commits](https://github.com/home-assistant/wheels/compare/2022.01.0...2022.01.1)

---
updated-dependencies:
- dependency-name: home-assistant/wheels
  dependency-type: direct:production
...

Signed-off-by: dependabot[bot] <support@github.com>

* Bump home-assistant/wheels from 2022.01.1 to 2022.01.2

Bumps [home-assistant/wheels](https://github.com/home-assistant/wheels) from 2022.01.1 to 2022.01.2.
- [Release notes](https://github.com/home-assistant/wheels/releases)
- [Commits](https://github.com/home-assistant/wheels/compare/2022.01.1...2022.01.2)

---
updated-dependencies:
- dependency-name: home-assistant/wheels
  dependency-type: direct:production
...

Signed-off-by: dependabot[bot] <support@github.com>

* Bump home-assistant/builder from 2021.12.0 to 2022.01.0

Bumps [home-assistant/builder](https://github.com/home-assistant/builder) from 2021.12.0 to 2022.01.0.
- [Release notes](https://github.com/home-assistant/builder/releases)
- [Commits](https://github.com/home-assistant/builder/compare/2021.12.0...2022.01.0)

---
updated-dependencies:
- dependency-name: home-assistant/builder
  dependency-type: direct:production
...

Signed-off-by: dependabot[bot] <support@github.com>

* bump pynetgear to 0.9.4

Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2022-04-05 16:40:22 -07:00
shbatm
def04f1ae8 Bump PyISY to 3.0.6 to fix group statuses (#69345) 2022-04-05 16:40:21 -07:00
J. Nick Koston
a39a6fce2a Fix roomba doing I/O in the event loop (#69339) 2022-04-05 16:40:21 -07:00
J. Nick Koston
7b36434101 Try exact match first for update state (#69335)
- Exact matches are much cheaper than creating an AwesomeVersion
  and calling the __gt__ method, and since most of the time the
  result is expected to be off, we want to optimize for this case
2022-04-05 16:40:20 -07:00
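The note above is a micro-optimization in the update state check: identical version strings can never indicate an available update, so a plain string comparison short-circuits before any version objects are built. A minimal sketch of the idea (not the actual update component code), using the awesomeversion package the note refers to:

from awesomeversion import AwesomeVersion


def update_available(installed: str, latest: str) -> bool:
    # Cheap path first: an exact string match means no update and avoids
    # constructing AwesomeVersion objects or calling __gt__ at all.
    if installed == latest:
        return False
    # Only fall back to proper version comparison when the strings differ.
    return AwesomeVersion(latest) > AwesomeVersion(installed)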
Philip Allgaier
a3ac495e03 Prevent issues with config update of "Timer" integration (unknown "restore" key) (#69332) 2022-04-05 16:40:19 -07:00
Joakim Sørensen
186d8c9d50 Bump pyhaversion from 22.02.0 to 22.04.0 (#69329) 2022-04-05 16:40:19 -07:00
Martin Hjelmare
e94fad469f Use recorder executor in demo (#69327) 2022-04-05 16:40:18 -07:00
J. Nick Koston
90d5bd12fb Ensure state is restored when turning on tplink lights without a color mode (#69308) 2022-04-05 16:40:17 -07:00
MoellerDi
685af1dd5c Fix "Camera not found" error in microsoft_face integration (#69295) 2022-04-05 16:40:17 -07:00
Erik Montnemery
44fefa42a8 Improve integration translation strings (#69246)
* Improve integration translation strings

* Update
2022-04-05 16:40:16 -07:00
154 changed files with 1718 additions and 590 deletions

View File

@@ -3,7 +3,7 @@
"name": "Airzone",
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/airzone",
"requirements": ["aioairzone==0.2.3"],
"requirements": ["aioairzone==0.3.3"],
"codeowners": ["@Noltari"],
"iot_class": "local_polling",
"loggers": ["aioairzone"]

View File

@@ -46,6 +46,17 @@ class BMWSensorEntityDescription(SensorEntityDescription):
value: Callable = lambda x, y: x
def convert_and_round(
state: tuple,
converter: Callable[[float | None, str], float],
precision: int,
) -> float | None:
"""Safely convert and round a value from a Tuple[value, unit]."""
if state[0] is None:
return None
return round(converter(state[0], UNIT_MAP.get(state[1], state[1])), precision)
SENSOR_TYPES: dict[str, BMWSensorEntityDescription] = {
# --- Generic ---
"charging_start_time": BMWSensorEntityDescription(
@@ -78,45 +89,35 @@ SENSOR_TYPES: dict[str, BMWSensorEntityDescription] = {
icon="mdi:speedometer",
unit_metric=LENGTH_KILOMETERS,
unit_imperial=LENGTH_MILES,
value=lambda x, hass: round(
hass.config.units.length(x[0], UNIT_MAP.get(x[1], x[1])), 2
),
value=lambda x, hass: convert_and_round(x, hass.config.units.length, 2),
),
"remaining_range_total": BMWSensorEntityDescription(
key="remaining_range_total",
icon="mdi:map-marker-distance",
unit_metric=LENGTH_KILOMETERS,
unit_imperial=LENGTH_MILES,
value=lambda x, hass: round(
hass.config.units.length(x[0], UNIT_MAP.get(x[1], x[1])), 2
),
value=lambda x, hass: convert_and_round(x, hass.config.units.length, 2),
),
"remaining_range_electric": BMWSensorEntityDescription(
key="remaining_range_electric",
icon="mdi:map-marker-distance",
unit_metric=LENGTH_KILOMETERS,
unit_imperial=LENGTH_MILES,
value=lambda x, hass: round(
hass.config.units.length(x[0], UNIT_MAP.get(x[1], x[1])), 2
),
value=lambda x, hass: convert_and_round(x, hass.config.units.length, 2),
),
"remaining_range_fuel": BMWSensorEntityDescription(
key="remaining_range_fuel",
icon="mdi:map-marker-distance",
unit_metric=LENGTH_KILOMETERS,
unit_imperial=LENGTH_MILES,
value=lambda x, hass: round(
hass.config.units.length(x[0], UNIT_MAP.get(x[1], x[1])), 2
),
value=lambda x, hass: convert_and_round(x, hass.config.units.length, 2),
),
"remaining_fuel": BMWSensorEntityDescription(
key="remaining_fuel",
icon="mdi:gas-station",
unit_metric=VOLUME_LITERS,
unit_imperial=VOLUME_GALLONS,
value=lambda x, hass: round(
hass.config.units.volume(x[0], UNIT_MAP.get(x[1], x[1])), 2
),
value=lambda x, hass: convert_and_round(x, hass.config.units.volume, 2),
),
"fuel_percent": BMWSensorEntityDescription(
key="fuel_percent",

View File

@@ -469,7 +469,8 @@ class CastMediaPlayerEntity(CastDevice, MediaPlayerEntity):
# The only way we can turn the Chromecast on is by launching an app
if self._chromecast.cast_type == pychromecast.const.CAST_TYPE_CHROMECAST:
self._chromecast.play_media(CAST_SPLASH, "image/png")
app_data = {"media_id": CAST_SPLASH, "media_type": "image/png"}
quick_play(self._chromecast, "default_media_receiver", app_data)
else:
self._chromecast.start_app(pychromecast.config.APP_MEDIA_RECEIVER)

View File

@@ -75,15 +75,19 @@ def async_condition_from_config(
hass: HomeAssistant, config: ConfigType
) -> condition.ConditionCheckerType:
"""Create a function to test a device condition."""
if config[CONF_TYPE] == "is_hvac_mode":
attribute = const.ATTR_HVAC_MODE
else:
attribute = const.ATTR_PRESET_MODE
def test_is_state(hass: HomeAssistant, variables: TemplateVarsType) -> bool:
"""Test if an entity is a certain state."""
state = hass.states.get(config[ATTR_ENTITY_ID])
return state.attributes.get(attribute) == config[attribute] if state else False
if (state := hass.states.get(config[ATTR_ENTITY_ID])) is None:
return False
if config[CONF_TYPE] == "is_hvac_mode":
return state.state == config[const.ATTR_HVAC_MODE]
return (
state.attributes.get(const.ATTR_PRESET_MODE)
== config[const.ATTR_PRESET_MODE]
)
return test_is_state

View File

@@ -5,6 +5,7 @@ from random import random
from homeassistant import config_entries, setup
from homeassistant.components import persistent_notification
from homeassistant.components.recorder import get_instance
from homeassistant.components.recorder.statistics import (
async_add_external_statistics,
get_last_statistics,
@@ -245,7 +246,7 @@ async def _insert_statistics(hass):
}
statistic_id = f"{DOMAIN}:energy_consumption"
sum_ = 0
last_stats = await hass.async_add_executor_job(
last_stats = await get_instance(hass).async_add_executor_job(
get_last_statistics, hass, 1, statistic_id, True
)
if "domain:energy_consumption" in last_stats:

View File

@@ -66,9 +66,9 @@ class DevoloBinaryDeviceEntity(DevoloDeviceEntity, BinarySensorEntity):
self, homecontrol: HomeControl, device_instance: Zwave, element_uid: str
) -> None:
"""Initialize a devolo binary sensor."""
self._binary_sensor_property = device_instance.binary_sensor_property.get(
self._binary_sensor_property = device_instance.binary_sensor_property[
element_uid
)
]
super().__init__(
homecontrol=homecontrol,
@@ -82,10 +82,12 @@ class DevoloBinaryDeviceEntity(DevoloDeviceEntity, BinarySensorEntity):
)
if self._attr_device_class is None:
if device_instance.binary_sensor_property.get(element_uid).sub_type != "":
self._attr_name += f" {device_instance.binary_sensor_property.get(element_uid).sub_type}"
if device_instance.binary_sensor_property[element_uid].sub_type != "":
self._attr_name += (
f" {device_instance.binary_sensor_property[element_uid].sub_type}"
)
else:
self._attr_name += f" {device_instance.binary_sensor_property.get(element_uid).sensor_type}"
self._attr_name += f" {device_instance.binary_sensor_property[element_uid].sensor_type}"
self._value = self._binary_sensor_property.state
@@ -114,9 +116,9 @@ class DevoloRemoteControl(DevoloDeviceEntity, BinarySensorEntity):
key: int,
) -> None:
"""Initialize a devolo remote control."""
self._remote_control_property = device_instance.remote_control_property.get(
self._remote_control_property = device_instance.remote_control_property[
element_uid
)
]
super().__init__(
homecontrol=homecontrol,

View File

@@ -63,7 +63,7 @@ class DevoloCoverDeviceEntity(DevoloMultiLevelSwitchDeviceEntity, CoverEntity):
@property
def current_cover_position(self) -> int:
"""Return the current position. 0 is closed. 100 is open."""
return self._value
return int(self._value)
@property
def is_closed(self) -> bool:

View File

@@ -46,7 +46,7 @@ class DevoloDeviceEntity(Entity):
self.subscriber: Subscriber | None = None
self.sync_callback = self._sync
self._value: int
self._value: float
async def async_added_to_hass(self) -> None:
"""Call when entity is added to hass."""

View File

@@ -2,7 +2,7 @@
"domain": "devolo_home_control",
"name": "devolo Home Control",
"documentation": "https://www.home-assistant.io/integrations/devolo_home_control",
"requirements": ["devolo-home-control-api==0.17.4"],
"requirements": ["devolo-home-control-api==0.18.1"],
"after_dependencies": ["zeroconf"],
"config_flow": true,
"codeowners": ["@2Fake", "@Shutgun"],

View File

@@ -83,7 +83,7 @@ class DevoloMultiLevelDeviceEntity(DevoloDeviceEntity, SensorEntity):
"""Abstract representation of a multi level sensor within devolo Home Control."""
@property
def native_value(self) -> int:
def native_value(self) -> float:
"""Return the state of the sensor."""
return self._value

View File

@@ -54,8 +54,8 @@ class DevoloSirenDeviceEntity(DevoloMultiLevelSwitchDeviceEntity, SirenEntity):
)
self._attr_available_tones = [
*range(
self._multi_level_switch_property.min,
self._multi_level_switch_property.max + 1,
int(self._multi_level_switch_property.min),
int(self._multi_level_switch_property.max) + 1,
)
]
self._attr_supported_features = (

View File

@@ -50,9 +50,9 @@ class DevoloSwitch(DevoloDeviceEntity, SwitchEntity):
device_instance=device_instance,
element_uid=element_uid,
)
self._binary_switch_property = self._device_instance.binary_switch_property.get(
self._attr_unique_id
)
self._binary_switch_property = self._device_instance.binary_switch_property[
self._attr_unique_id # type: ignore[index]
]
self._attr_is_on = self._binary_switch_property.state
def turn_on(self, **kwargs: Any) -> None:

View File

@@ -363,6 +363,9 @@ async def async_wait_for_elk_to_sync(
# VN is the first command sent for panel, when we get
# it back we know we are logged in either with or without a password
elk.add_handler("VN", first_response)
# Some panels do not respond to the vn request so we
# check for lw as well
elk.add_handler("LW", first_response)
elk.add_handler("sync_complete", sync_complete)
for name, event, timeout in (
("login", login_event, login_timeout),

View File

@@ -489,7 +489,7 @@ async def async_validate(hass: HomeAssistant) -> EnergyPreferencesValidation:
# Fetch the needed statistics metadata
statistics_metadata.update(
await hass.async_add_executor_job(
await recorder.get_instance(hass).async_add_executor_job(
functools.partial(
recorder.statistics.get_metadata,
hass,

View File

@@ -260,7 +260,7 @@ async def ws_get_fossil_energy_consumption(
statistic_ids.append(msg["co2_statistic_id"])
# Fetch energy + CO2 statistics
statistics = await hass.async_add_executor_job(
statistics = await recorder.get_instance(hass).async_add_executor_job(
recorder.statistics.statistics_during_period,
hass,
start_time,

View File

@@ -2,6 +2,7 @@
from __future__ import annotations
import asyncio
from contextlib import suppress
from functools import partial
from homeassistant.components.light import (
@@ -198,16 +199,21 @@ class FibaroLight(FibaroDevice, LightEntity):
Dimmable and RGB lights can be on based on different
properties, so we need to check here several values.
JSON for HC2 always uses strings, HC3 uses int for integers.
"""
props = self.fibaro_device.properties
if self.current_binary_state:
return True
if "brightness" in props and props.brightness != "0":
return True
if "currentProgram" in props and props.currentProgram != "0":
return True
if "currentProgramID" in props and props.currentProgramID != "0":
return True
with suppress(ValueError, TypeError):
if "brightness" in props and int(props.brightness) != 0:
return True
with suppress(ValueError, TypeError):
if "currentProgram" in props and int(props.currentProgram) != 0:
return True
with suppress(ValueError, TypeError):
if "currentProgramID" in props and int(props.currentProgramID) != 0:
return True
return False

View File

@@ -1,6 +1,7 @@
"""Config flow for AVM FRITZ!SmartHome."""
from __future__ import annotations
import ipaddress
from typing import Any
from urllib.parse import urlparse
@@ -120,6 +121,12 @@ class FritzboxConfigFlow(ConfigFlow, domain=DOMAIN):
assert isinstance(host, str)
self.context[CONF_HOST] = host
if (
ipaddress.ip_address(host).version == 6
and ipaddress.ip_address(host).is_link_local
):
return self.async_abort(reason="ignore_ip6_link_local")
if uuid := discovery_info.upnp.get(ssdp.ATTR_UPNP_UDN):
if uuid.startswith("uuid:"):
uuid = uuid[5:]

View File

@@ -28,6 +28,7 @@
"abort": {
"already_in_progress": "[%key:common::config_flow::abort::already_in_progress%]",
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
"ignore_ip6_link_local": "IPv6 link local address is not supported.",
"no_devices_found": "[%key:common::config_flow::abort::no_devices_found%]",
"not_supported": "Connected to AVM FRITZ!Box but it's unable to control Smart Home devices.",
"reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]"

View File

@@ -3,6 +3,7 @@
"abort": {
"already_configured": "Device is already configured",
"already_in_progress": "Configuration flow is already in progress",
"ignore_ip6_link_local": "IPv6 link local address is not supported.",
"no_devices_found": "No devices found on the network",
"not_supported": "Connected to AVM FRITZ!Box but it's unable to control Smart Home devices.",
"reauth_successful": "Re-authentication was successful"

View File

@@ -2,7 +2,7 @@
"domain": "frontend",
"name": "Home Assistant Frontend",
"documentation": "https://www.home-assistant.io/integrations/frontend",
"requirements": ["home-assistant-frontend==20220401.0"],
"requirements": ["home-assistant-frontend==20220405.0"],
"dependencies": [
"api",
"auth",

View File

@@ -3,7 +3,7 @@
"name": "Global Disaster Alert and Coordination System (GDACS)",
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/gdacs",
"requirements": ["aio_georss_gdacs==0.5"],
"requirements": ["aio_georss_gdacs==0.7"],
"codeowners": ["@exxamalte"],
"quality_scale": "platinum",
"iot_class": "cloud_polling"

View File

@@ -58,7 +58,7 @@ DEFAULT_DATA = {
CONF_VERIFY_SSL: True,
}
SUPPORTED_IMAGE_TYPES = {"png", "jpeg", "gif", "svg+xml"}
SUPPORTED_IMAGE_TYPES = {"png", "jpeg", "gif", "svg+xml", "webp"}
def build_schema(
@@ -109,6 +109,20 @@ def build_schema(
return vol.Schema(spec)
def build_schema_content_type(user_input: dict[str, Any] | MappingProxyType[str, Any]):
"""Create schema for conditional 2nd page specifying stream content_type."""
return vol.Schema(
{
vol.Required(
CONF_CONTENT_TYPE,
description={
"suggested_value": user_input.get(CONF_CONTENT_TYPE, "image/jpeg")
},
): str,
}
)
def get_image_type(image):
"""Get the format of downloaded bytes that could be an image."""
fmt = None
@@ -129,14 +143,14 @@ async def async_test_still(hass, info) -> tuple[dict[str, str], str | None]:
"""Verify that the still image is valid before we create an entity."""
fmt = None
if not (url := info.get(CONF_STILL_IMAGE_URL)):
return {}, None
return {}, info.get(CONF_CONTENT_TYPE, "image/jpeg")
if not isinstance(url, template_helper.Template) and url:
url = cv.template(url)
url.hass = hass
try:
url = url.async_render(parse_result=False)
except TemplateError as err:
_LOGGER.error("Error parsing template %s: %s", url, err)
_LOGGER.warning("Problem rendering template %s: %s", url, err)
return {CONF_STILL_IMAGE_URL: "template_error"}, None
verify_ssl = info.get(CONF_VERIFY_SSL)
auth = generate_auth(info)
@@ -228,6 +242,11 @@ class GenericIPCamConfigFlow(ConfigFlow, domain=DOMAIN):
VERSION = 1
def __init__(self):
"""Initialize Generic ConfigFlow."""
self.cached_user_input: dict[str, Any] = {}
self.cached_title = ""
@staticmethod
def async_get_options_flow(
config_entry: ConfigEntry,
@@ -238,8 +257,8 @@ class GenericIPCamConfigFlow(ConfigFlow, domain=DOMAIN):
def check_for_existing(self, options):
"""Check whether an existing entry is using the same URLs."""
return any(
entry.options[CONF_STILL_IMAGE_URL] == options[CONF_STILL_IMAGE_URL]
and entry.options[CONF_STREAM_SOURCE] == options[CONF_STREAM_SOURCE]
entry.options.get(CONF_STILL_IMAGE_URL) == options.get(CONF_STILL_IMAGE_URL)
and entry.options.get(CONF_STREAM_SOURCE) == options.get(CONF_STREAM_SOURCE)
for entry in self._async_current_entries()
)
@@ -264,10 +283,17 @@ class GenericIPCamConfigFlow(ConfigFlow, domain=DOMAIN):
if not errors:
user_input[CONF_CONTENT_TYPE] = still_format
user_input[CONF_LIMIT_REFETCH_TO_URL_CHANGE] = False
await self.async_set_unique_id(self.flow_id)
return self.async_create_entry(
title=name, data={}, options=user_input
)
if user_input.get(CONF_STILL_IMAGE_URL):
await self.async_set_unique_id(self.flow_id)
return self.async_create_entry(
title=name, data={}, options=user_input
)
# If user didn't specify a still image URL,
# we can't (yet) autodetect it from the stream.
# Show a conditional 2nd page to ask them the content type.
self.cached_user_input = user_input
self.cached_title = name
return await self.async_step_content_type()
else:
user_input = DEFAULT_DATA.copy()
@@ -277,13 +303,28 @@ class GenericIPCamConfigFlow(ConfigFlow, domain=DOMAIN):
errors=errors,
)
async def async_step_content_type(
self, user_input: dict[str, Any] | None = None
) -> FlowResult:
"""Handle the user's choice for stream content_type."""
if user_input is not None:
user_input = self.cached_user_input | user_input
await self.async_set_unique_id(self.flow_id)
return self.async_create_entry(
title=self.cached_title, data={}, options=user_input
)
return self.async_show_form(
step_id="content_type",
data_schema=build_schema_content_type({}),
errors={},
)
async def async_step_import(self, import_config) -> FlowResult:
"""Handle config import from yaml."""
# abort if we've already got this one.
if self.check_for_existing(import_config):
return self.async_abort(reason="already_exists")
errors, still_format = await async_test_still(self.hass, import_config)
errors = errors | await async_test_stream(self.hass, import_config)
# Don't bother testing the still or stream details on yaml import.
still_url = import_config.get(CONF_STILL_IMAGE_URL)
stream_url = import_config.get(CONF_STREAM_SOURCE)
name = import_config.get(
@@ -291,15 +332,10 @@ class GenericIPCamConfigFlow(ConfigFlow, domain=DOMAIN):
)
if CONF_LIMIT_REFETCH_TO_URL_CHANGE not in import_config:
import_config[CONF_LIMIT_REFETCH_TO_URL_CHANGE] = False
if not errors:
import_config[CONF_CONTENT_TYPE] = still_format
await self.async_set_unique_id(self.flow_id)
return self.async_create_entry(title=name, data={}, options=import_config)
_LOGGER.error(
"Error importing generic IP camera platform config: unexpected error '%s'",
list(errors.values()),
)
return self.async_abort(reason="unknown")
still_format = import_config.get(CONF_CONTENT_TYPE, "image/jpeg")
import_config[CONF_CONTENT_TYPE] = still_format
await self.async_set_unique_id(self.flow_id)
return self.async_create_entry(title=name, data={}, options=import_config)
class GenericOptionsFlowHandler(OptionsFlow):
@@ -308,6 +344,8 @@ class GenericOptionsFlowHandler(OptionsFlow):
def __init__(self, config_entry: ConfigEntry) -> None:
"""Initialize Generic IP Camera options flow."""
self.config_entry = config_entry
self.cached_user_input: dict[str, Any] = {}
self.cached_title = ""
async def async_step_init(
self, user_input: dict[str, Any] | None = None
@@ -316,29 +354,52 @@ class GenericOptionsFlowHandler(OptionsFlow):
errors: dict[str, str] = {}
if user_input is not None:
errors, still_format = await async_test_still(self.hass, user_input)
errors, still_format = await async_test_still(
self.hass, self.config_entry.options | user_input
)
errors = errors | await async_test_stream(self.hass, user_input)
still_url = user_input.get(CONF_STILL_IMAGE_URL)
stream_url = user_input.get(CONF_STREAM_SOURCE)
if not errors:
return self.async_create_entry(
title=slug_url(still_url) or slug_url(stream_url) or DEFAULT_NAME,
data={
CONF_AUTHENTICATION: user_input.get(CONF_AUTHENTICATION),
CONF_STREAM_SOURCE: user_input.get(CONF_STREAM_SOURCE),
CONF_PASSWORD: user_input.get(CONF_PASSWORD),
CONF_STILL_IMAGE_URL: user_input.get(CONF_STILL_IMAGE_URL),
CONF_CONTENT_TYPE: still_format,
CONF_USERNAME: user_input.get(CONF_USERNAME),
CONF_LIMIT_REFETCH_TO_URL_CHANGE: user_input[
CONF_LIMIT_REFETCH_TO_URL_CHANGE
],
CONF_FRAMERATE: user_input[CONF_FRAMERATE],
CONF_VERIFY_SSL: user_input[CONF_VERIFY_SSL],
},
)
title = slug_url(still_url) or slug_url(stream_url) or DEFAULT_NAME
data = {
CONF_AUTHENTICATION: user_input.get(CONF_AUTHENTICATION),
CONF_STREAM_SOURCE: user_input.get(CONF_STREAM_SOURCE),
CONF_PASSWORD: user_input.get(CONF_PASSWORD),
CONF_STILL_IMAGE_URL: user_input.get(CONF_STILL_IMAGE_URL),
CONF_CONTENT_TYPE: still_format
or self.config_entry.options.get(CONF_CONTENT_TYPE),
CONF_USERNAME: user_input.get(CONF_USERNAME),
CONF_LIMIT_REFETCH_TO_URL_CHANGE: user_input[
CONF_LIMIT_REFETCH_TO_URL_CHANGE
],
CONF_FRAMERATE: user_input[CONF_FRAMERATE],
CONF_VERIFY_SSL: user_input[CONF_VERIFY_SSL],
}
if still_url:
return self.async_create_entry(
title=title,
data=data,
)
self.cached_title = title
self.cached_user_input = data
return await self.async_step_content_type()
return self.async_show_form(
step_id="init",
data_schema=build_schema(user_input or self.config_entry.options, True),
errors=errors,
)
async def async_step_content_type(
self, user_input: dict[str, Any] | None = None
) -> FlowResult:
"""Handle the user's choice for stream content_type."""
if user_input is not None:
user_input = self.cached_user_input | user_input
return self.async_create_entry(title=self.cached_title, data=user_input)
return self.async_show_form(
step_id="content_type",
data_schema=build_schema_content_type(self.cached_user_input),
errors={},
)

View File

@@ -2,7 +2,7 @@
"domain": "generic",
"name": "Generic Camera",
"config_flow": true,
"requirements": ["av==9.0.0", "pillow==9.0.1"],
"requirements": ["av==8.1.0", "pillow==9.0.1"],
"documentation": "https://www.home-assistant.io/integrations/generic",
"codeowners": ["@davet2001"],
"iot_class": "local_push"

View File

@@ -30,11 +30,16 @@
"limit_refetch_to_url_change": "Limit refetch to url change",
"password": "[%key:common::config_flow::data::password%]",
"username": "[%key:common::config_flow::data::username%]",
"content_type": "Content Type",
"framerate": "Frame Rate (Hz)",
"verify_ssl": "[%key:common::config_flow::data::verify_ssl%]"
}
},
"content_type": {
"description": "Specify the content type for the stream.",
"data": {
"content_type": "Content Type"
}
},
"confirm": {
"description": "[%key:common::config_flow::description::confirm_setup%]"
}
@@ -51,10 +56,15 @@
"limit_refetch_to_url_change": "[%key:component::generic::config::step::user::data::limit_refetch_to_url_change%]",
"password": "[%key:common::config_flow::data::password%]",
"username": "[%key:common::config_flow::data::username%]",
"content_type": "[%key:component::generic::config::step::user::data::content_type%]",
"framerate": "[%key:component::generic::config::step::user::data::framerate%]",
"verify_ssl": "[%key:common::config_flow::data::verify_ssl%]"
}
},
"content_type": {
"description": "[%key:component::generic::config::step::content_type::description%]",
"data": {
"content_type": "[%key:component::generic::config::step::content_type::data::content_type%]"
}
}
},
"error": {

View File

@@ -23,10 +23,15 @@
"confirm": {
"description": "Do you want to start set up?"
},
"content_type": {
"data": {
"content_type": "Content Type"
},
"description": "Specify the content type for the stream."
},
"user": {
"data": {
"authentication": "Authentication",
"content_type": "Content Type",
"framerate": "Frame Rate (Hz)",
"limit_refetch_to_url_change": "Limit refetch to url change",
"password": "Password",
@@ -57,10 +62,15 @@
"unknown": "Unexpected error"
},
"step": {
"content_type": {
"data": {
"content_type": "Content Type"
},
"description": "Specify the content type for the stream."
},
"init": {
"data": {
"authentication": "Authentication",
"content_type": "Content Type",
"framerate": "Frame Rate (Hz)",
"limit_refetch_to_url_change": "Limit refetch to url change",
"password": "Password",

View File

@@ -7,6 +7,7 @@ from datetime import datetime, timedelta
import logging
from typing import Any
import aiohttp
from httplib2.error import ServerNotFoundError
from oauth2client.file import Storage
import voluptuous as vol
@@ -24,7 +25,11 @@ from homeassistant.const import (
CONF_OFFSET,
)
from homeassistant.core import HomeAssistant, ServiceCall
from homeassistant.exceptions import ConfigEntryAuthFailed, HomeAssistantError
from homeassistant.exceptions import (
ConfigEntryAuthFailed,
ConfigEntryNotReady,
HomeAssistantError,
)
from homeassistant.helpers import config_entry_oauth2_flow
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.dispatcher import async_dispatcher_send
@@ -185,8 +190,23 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
hass, entry
)
)
assert isinstance(implementation, DeviceAuth)
session = config_entry_oauth2_flow.OAuth2Session(hass, entry, implementation)
# Force a token refresh to fix a bug where tokens were persisted with
# expires_in (relative time delta) and expires_at (absolute time) swapped.
# A google session token typically only lasts a few days between refresh.
now = datetime.now()
if session.token["expires_at"] >= (now + timedelta(days=365)).timestamp():
session.token["expires_in"] = 0
session.token["expires_at"] = now.timestamp()
try:
await session.async_ensure_token_valid()
except aiohttp.ClientResponseError as err:
if 400 <= err.status < 500:
raise ConfigEntryAuthFailed from err
raise ConfigEntryNotReady from err
except aiohttp.ClientError as err:
raise ConfigEntryNotReady from err
required_scope = hass.data[DOMAIN][DATA_CONFIG][CONF_CALENDAR_ACCESS].scope
if required_scope not in session.token.get("scope", []):
raise ConfigEntryAuthFailed(

View File

@@ -5,6 +5,7 @@ from __future__ import annotations
from collections.abc import Awaitable, Callable
import datetime
import logging
import time
from typing import Any
from googleapiclient import discovery as google_discovery
@@ -58,7 +59,7 @@ class DeviceAuth(config_entry_oauth2_flow.LocalOAuth2Implementation):
"refresh_token": creds.refresh_token,
"scope": " ".join(creds.scopes),
"token_type": "Bearer",
"expires_in": creds.token_expiry.timestamp(),
"expires_in": creds.token_expiry.timestamp() - time.time(),
}
@@ -157,16 +158,16 @@ def _async_google_creds(hass: HomeAssistant, token: dict[str, Any]) -> Credentia
client_id=conf[CONF_CLIENT_ID],
client_secret=conf[CONF_CLIENT_SECRET],
refresh_token=token["refresh_token"],
token_expiry=token["expires_at"],
token_expiry=datetime.datetime.fromtimestamp(token["expires_at"]),
token_uri=oauth2client.GOOGLE_TOKEN_URI,
scopes=[conf[CONF_CALENDAR_ACCESS].scope],
user_agent=None,
)
def _api_time_format(time: datetime.datetime | None) -> str | None:
def _api_time_format(date_time: datetime.datetime | None) -> str | None:
"""Convert a datetime to the api string format."""
return time.isoformat("T") if time else None
return date_time.isoformat("T") if date_time else None
class GoogleCalendarService:

View File

@@ -183,7 +183,9 @@ class GoogleCalendarEventDevice(CalendarEventDevice):
valid_items = filter(self._event_filter, items)
self._event = copy.deepcopy(next(valid_items, None))
if self._event:
(summary, offset) = extract_offset(self._event["summary"], self._offset)
(summary, offset) = extract_offset(
self._event.get("summary", ""), self._offset
)
self._event["summary"] = summary
self._offset_reached = is_offset_reached(
get_date(self._event["start"]), offset

View File

@@ -34,7 +34,7 @@ class OAuth2FlowHandler(
return logging.getLogger(__name__)
async def async_step_import(self, info: dict[str, Any]) -> FlowResult:
"""Import existing auth from Nest."""
"""Import existing auth into a new config entry."""
if self._async_current_entries():
return self.async_abort(reason="single_instance_allowed")
implementations = await config_entry_oauth2_flow.async_get_implementations(

View File

@@ -6,7 +6,7 @@
},
"reauth_confirm": {
"title": "[%key:common::config_flow::title::reauth%]",
"description": "The Nest integration needs to re-authenticate your account"
"description": "The Google Calendar integration needs to re-authenticate your account"
},
"auth": {
"title": "Link Google Account"

View File

@@ -3,7 +3,7 @@
"abort": {
"already_configured": "Account is already configured",
"already_in_progress": "Configuration flow is already in progress",
"code_expired": "Authentication code expired, please try again.",
"code_expired": "Authentication code expired or credential setup is invalid, please try again.",
"invalid_access_token": "Invalid access token",
"missing_configuration": "The component is not configured. Please follow the documentation.",
"oauth_error": "Received invalid token data.",
@@ -23,7 +23,7 @@
"title": "Pick Authentication Method"
},
"reauth_confirm": {
"description": "The Nest integration needs to re-authenticate your account",
"description": "The Google Calendar integration needs to re-authenticate your account",
"title": "Reauthenticate Integration"
}
}

View File

@@ -711,7 +711,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
dev_reg = await async_get_registry(hass)
coordinator = HassioDataUpdateCoordinator(hass, entry, dev_reg)
hass.data[ADDONS_COORDINATOR] = coordinator
await coordinator.async_refresh()
await coordinator.async_config_entry_first_refresh()
hass.config_entries.async_setup_platforms(entry, PLATFORMS)
@@ -824,7 +824,7 @@ class HassioDataUpdateCoordinator(DataUpdateCoordinator):
self.data = {}
self.entry_id = config_entry.entry_id
self.dev_reg = dev_reg
self.is_hass_os = "hassos" in get_info(self.hass)
self.is_hass_os = (get_info(self.hass) or {}).get("hassos") is not None
async def _async_update_data(self) -> dict[str, Any]:
"""Update data via library."""
@@ -848,8 +848,8 @@ class HassioDataUpdateCoordinator(DataUpdateCoordinator):
new_data[DATA_KEY_ADDONS] = {
addon[ATTR_SLUG]: {
**addon,
**((addons_stats or {}).get(addon[ATTR_SLUG], {})),
ATTR_AUTO_UPDATE: addons_info.get(addon[ATTR_SLUG], {}).get(
**((addons_stats or {}).get(addon[ATTR_SLUG]) or {}),
ATTR_AUTO_UPDATE: (addons_info.get(addon[ATTR_SLUG]) or {}).get(
ATTR_AUTO_UPDATE, False
),
ATTR_CHANGELOG: (addons_changelogs or {}).get(addon[ATTR_SLUG]),
@@ -891,6 +891,12 @@ class HassioDataUpdateCoordinator(DataUpdateCoordinator):
if stale_addons := supervisor_addon_devices - set(new_data[DATA_KEY_ADDONS]):
async_remove_addons_from_dev_reg(self.dev_reg, stale_addons)
if not self.is_hass_os and (
dev := self.dev_reg.async_get_device({(DOMAIN, "OS")})
):
# Remove the OS device if it exists and the installation is not hassos
self.dev_reg.async_remove_device(dev.id)
# If there are new add-ons, we should reload the config entry so we can
# create new devices and entities. We can return an empty dict because
# coordinator will be recreated.
@@ -946,15 +952,27 @@ class HassioDataUpdateCoordinator(DataUpdateCoordinator):
async def _update_addon_stats(self, slug):
"""Update single addon stats."""
stats = await self.hassio.get_addon_stats(slug)
return (slug, stats)
try:
stats = await self.hassio.get_addon_stats(slug)
return (slug, stats)
except HassioAPIError as err:
_LOGGER.warning("Could not fetch stats for %s: %s", slug, err)
return (slug, None)
async def _update_addon_changelog(self, slug):
"""Return the changelog for an add-on."""
changelog = await self.hassio.get_addon_changelog(slug)
return (slug, changelog)
try:
changelog = await self.hassio.get_addon_changelog(slug)
return (slug, changelog)
except HassioAPIError as err:
_LOGGER.warning("Could not fetch changelog for %s: %s", slug, err)
return (slug, None)
async def _update_addon_info(self, slug):
"""Return the info for an add-on."""
info = await self.hassio.get_addon_info(slug)
return (slug, info)
try:
info = await self.hassio.get_addon_info(slug)
return (slug, info)
except HassioAPIError as err:
_LOGGER.warning("Could not fetch info for %s: %s", slug, err)
return (slug, None)

View File

@@ -652,7 +652,7 @@ def _exclude_by_entity_registry(
(entry := ent_reg.async_get(entity_id))
and (
entry.hidden_by is not None
or (not include_entity_category or entry.entity_category is not None)
or (not include_entity_category and entry.entity_category is not None)
)
)

View File

@@ -83,7 +83,7 @@ CONFIG_SCHEMA = vol.Schema(
{"select": {"options": UNIT_PREFIXES}}
),
vol.Required(CONF_UNIT_TIME, default=TIME_HOURS): selector.selector(
{"select": {"options": TIME_UNITS}}
{"select": {"options": TIME_UNITS, "mode": "dropdown"}}
),
}
)

View File

@@ -1,26 +1,34 @@
{
"title": "Integration - Riemann sum integral sensor",
"config": {
"step": {
"user": {
"title": "New Integration sensor",
"description": "Precision controls the number of decimal digits in the output.\nThe sum will be scaled according to the selected metric prefix and integration time.",
"title": "Add Riemann sum integral sensor",
"description": "Create a sensor that calculates a Riemann sum to estimate the integral of a sensor.",
"data": {
"method": "Integration method",
"name": "Name",
"round": "Precision",
"source": "Input sensor",
"unit_prefix": "Metric prefix",
"unit_time": "Integration time"
"unit_time": "Time unit"
},
"data_description": {
"round": "Controls the number of decimal digits in the output.",
"unit_prefix": "The output will be scaled according to the selected metric prefix.",
"unit_time": "The output will be scaled according to the selected time unit."
}
}
}
},
"options": {
"step": {
"options": {
"description": "Precision controls the number of decimal digits in the output.",
"init": {
"data": {
"round": "[%key:component::integration::config::step::user::data::round%]"
},
"data_description": {
"round": "[%key:component::integration::config::step::user::data_description::round%]"
}
}
}

View File

@@ -8,21 +8,29 @@
"round": "Precision",
"source": "Input sensor",
"unit_prefix": "Metric prefix",
"unit_time": "Integration time"
"unit_time": "Time unit"
},
"description": "Precision controls the number of decimal digits in the output.\nThe sum will be scaled according to the selected metric prefix and integration time.",
"title": "New Integration sensor"
"data_description": {
"round": "Controls the number of decimal digits in the output.",
"unit_prefix": "The output will be scaled according to the selected metric prefix.",
"unit_time": "The output will be scaled according to the selected time unit."
},
"description": "Create a sensor that calculates a Riemann sum to estimate the integral of a sensor.",
"title": "Add Riemann sum integral sensor"
}
}
},
"options": {
"step": {
"options": {
"init": {
"data": {
"round": "Precision"
},
"description": "Precision controls the number of decimal digits in the output."
"data_description": {
"round": "Controls the number of decimal digits in the output."
}
}
}
}
},
"title": "Integration - Riemann sum integral sensor"
}

View File

@@ -2,7 +2,7 @@
"domain": "isy994",
"name": "Universal Devices ISY994",
"documentation": "https://www.home-assistant.io/integrations/isy994",
"requirements": ["pyisy==3.0.5"],
"requirements": ["pyisy==3.0.6"],
"codeowners": ["@bdraco", "@shbatm"],
"config_flow": true,
"ssdp": [

View File

@@ -296,9 +296,3 @@ class KNXClimate(KnxEntity, ClimateEntity):
await super().async_added_to_hass()
if self._device.mode is not None:
self._device.mode.register_device_updated_cb(self.after_update_callback)
async def async_will_remove_from_hass(self) -> None:
"""Disconnect device object when removed."""
await super().async_will_remove_from_hass()
if self._device.mode is not None:
self._device.mode.unregister_device_updated_cb(self.after_update_callback)

View File

@@ -45,4 +45,5 @@ class KnxEntity(Entity):
async def async_will_remove_from_hass(self) -> None:
"""Disconnect device object when removed."""
self._device.unregister_device_updated_cb(self.after_update_callback)
# will also remove callbacks
self._device.shutdown()

View File

@@ -3,7 +3,7 @@
"name": "KNX",
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/knx",
"requirements": ["xknx==0.20.0"],
"requirements": ["xknx==0.20.2"],
"codeowners": ["@Julius2342", "@farmio", "@marvin-w"],
"quality_scale": "silver",
"iot_class": "local_push",

View File

@@ -32,6 +32,9 @@ def async_process_play_media_url(
"""Update a media URL with authentication if it points at Home Assistant."""
parsed = yarl.URL(media_content_id)
if parsed.scheme and parsed.scheme not in ("http", "https"):
return media_content_id
if parsed.is_absolute():
if not is_hass_url(hass, media_content_id):
return media_content_id

View File

@@ -10,6 +10,7 @@ from aiohttp.hdrs import CONTENT_TYPE
import async_timeout
import voluptuous as vol
from homeassistant.components import camera
from homeassistant.const import ATTR_NAME, CONF_API_KEY, CONF_TIMEOUT, CONTENT_TYPE_JSON
from homeassistant.core import HomeAssistant, ServiceCall
from homeassistant.exceptions import HomeAssistantError
@@ -181,7 +182,6 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
p_id = face.store[g_id].get(service.data[ATTR_PERSON])
camera_entity = service.data[ATTR_CAMERA_ENTITY]
camera = hass.components.camera
try:
image = await camera.async_get_image(hass, camera_entity)

View File

@@ -155,7 +155,7 @@ async def async_setup_entry(
platform.async_register_entity_service(
SERVICE_SET_ABSOLUTE_POSITION,
SET_ABSOLUTE_POSITION_SCHEMA,
SERVICE_SET_ABSOLUTE_POSITION,
"async_set_absolute_position",
)

View File

@@ -2,7 +2,7 @@
"domain": "mpd",
"name": "Music Player Daemon (MPD)",
"documentation": "https://www.home-assistant.io/integrations/mpd",
"requirements": ["python-mpd2==3.0.4"],
"requirements": ["python-mpd2==3.0.5"],
"codeowners": ["@fabaff"],
"iot_class": "local_polling",
"loggers": ["mpd"]

View File

@@ -463,7 +463,7 @@ class MpdDevice(MediaPlayerEntity):
if media_source.is_media_source_id(media_id):
media_type = MEDIA_TYPE_MUSIC
play_item = await media_source.async_resolve_media(self.hass, media_id)
media_id = play_item.url
media_id = async_process_play_media_url(self.hass, play_item.url)
if media_type == MEDIA_TYPE_PLAYLIST:
_LOGGER.debug("Playing playlist: %s", media_id)
@@ -476,8 +476,6 @@ class MpdDevice(MediaPlayerEntity):
await self._client.load(media_id)
await self._client.play()
else:
media_id = async_process_play_media_url(self.hass, media_id)
await self._client.clear()
self._currentplaylist = None
await self._client.add(media_id)

View File

@@ -2,7 +2,7 @@
"domain": "netgear",
"name": "NETGEAR",
"documentation": "https://www.home-assistant.io/integrations/netgear",
"requirements": ["pynetgear==0.9.3"],
"requirements": ["pynetgear==0.9.4"],
"codeowners": ["@hacf-fr", "@Quentame", "@starkillerOG"],
"iot_class": "local_polling",
"config_flow": true,

View File

@@ -90,10 +90,12 @@ class NetgearAllowBlock(NetgearDeviceEntity, SwitchEntity):
async def async_turn_on(self, **kwargs):
"""Turn the switch on."""
await self._router.async_allow_block_device(self._mac, ALLOW)
await self.coordinator.async_request_refresh()
async def async_turn_off(self, **kwargs):
"""Turn the switch off."""
await self._router.async_allow_block_device(self._mac, BLOCK)
await self.coordinator.async_request_refresh()
@callback
def async_update_device(self) -> None:

View File

@@ -3,7 +3,7 @@
"name": "NINA",
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/nina",
"requirements": ["pynina==0.1.7"],
"requirements": ["pynina==0.1.8"],
"dependencies": [],
"codeowners": ["@DeerMaximum"],
"iot_class": "cloud_polling",

View File

@@ -204,9 +204,10 @@ class ONVIFDevice:
if self._dt_diff_seconds > 5:
LOGGER.warning(
"The date/time on the device (UTC) is '%s', "
"The date/time on %s (UTC) is '%s', "
"which is different from the system '%s', "
"this could lead to authentication issues",
self.name,
cam_date_utc,
system_date,
)

View File

@@ -39,8 +39,7 @@ def _select_option_open_closed_pedestrian(
OverkizCommandParam.CLOSED: OverkizCommand.CLOSE,
OverkizCommandParam.OPEN: OverkizCommand.OPEN,
OverkizCommandParam.PEDESTRIAN: OverkizCommand.SET_PEDESTRIAN_POSITION,
}[OverkizCommandParam(option)],
None,
}[OverkizCommandParam(option)]
)

View File

@@ -6,7 +6,7 @@
"dependencies": ["webhook"],
"after_dependencies": ["cloud"],
"codeowners": ["@JohNan"],
"requirements": ["pyplaato==0.0.15"],
"requirements": ["pyplaato==0.0.16"],
"iot_class": "cloud_push",
"loggers": ["pyplaato"]
}

View File

@@ -159,7 +159,8 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
entry.data[CONF_SERVER],
error,
)
return False
# Retry as setups behind a proxy can return transient 404 or 502 errors
raise ConfigEntryNotReady from error
_LOGGER.debug(
"Connected to: %s (%s)", plex_server.friendly_name, plex_server.url_in_use

View File

@@ -173,7 +173,9 @@ def process_plex_payload(
media = plex_server.lookup_media(content_type, **search_query)
if supports_playqueues and (isinstance(media, list) or shuffle):
playqueue = plex_server.create_playqueue(media, shuffle=shuffle)
playqueue = plex_server.create_playqueue(
media, includeRelated=0, shuffle=shuffle
)
return PlexMediaSearchResult(playqueue, content)
return PlexMediaSearchResult(media, content)

View File

@@ -223,13 +223,18 @@ def call_base_info(power_wall: Powerwall, host: str) -> PowerwallBaseInfo:
def _fetch_powerwall_data(power_wall: Powerwall) -> PowerwallData:
"""Process and update powerwall data."""
try:
backup_reserve = power_wall.get_backup_reserve_percentage()
except MissingAttributeError:
backup_reserve = None
return PowerwallData(
charge=power_wall.get_charge(),
site_master=power_wall.get_sitemaster(),
meters=power_wall.get_meters(),
grid_services_active=power_wall.is_grid_services_active(),
grid_status=power_wall.get_grid_status(),
backup_reserve=power_wall.get_backup_reserve_percentage(),
backup_reserve=backup_reserve,
)

View File

@@ -38,7 +38,7 @@ class PowerwallData:
meters: MetersAggregates
grid_services_active: bool
grid_status: GridStatus
backup_reserve: float
backup_reserve: float | None
class PowerwallRuntimeData(TypedDict):

View File

@@ -117,9 +117,11 @@ async def async_setup_entry(
data: PowerwallData = coordinator.data
entities: list[PowerWallEntity] = [
PowerWallChargeSensor(powerwall_data),
PowerWallBackupReserveSensor(powerwall_data),
]
if data.backup_reserve is not None:
entities.append(PowerWallBackupReserveSensor(powerwall_data))
for meter in data.meters.meters:
entities.append(PowerWallExportSensor(powerwall_data, meter))
entities.append(PowerWallImportSensor(powerwall_data, meter))
@@ -190,8 +192,10 @@ class PowerWallBackupReserveSensor(PowerWallEntity, SensorEntity):
return f"{self.base_unique_id}_backup_reserve"
@property
def native_value(self) -> int:
def native_value(self) -> int | None:
"""Get the current value in percentage."""
if self.data.backup_reserve is None:
return None
return round(self.data.backup_reserve)

View File

@@ -12,7 +12,7 @@ import logging
import os
import re
from statistics import mean
from typing import TYPE_CHECKING, Any, Literal
from typing import TYPE_CHECKING, Any, Literal, overload
from sqlalchemy import bindparam, func
from sqlalchemy.exc import SQLAlchemyError, StatementError
@@ -125,9 +125,9 @@ STATISTICS_META_BAKERY = "recorder_statistics_meta_bakery"
STATISTICS_SHORT_TERM_BAKERY = "recorder_statistics_short_term_bakery"
# Convert pressure and temperature statistics from the native unit used for statistics
# to the units configured by the user
UNIT_CONVERSIONS = {
# Convert pressure, temperature and volume statistics from the normalized unit used for
# statistics to the unit configured by the user
STATISTIC_UNIT_TO_DISPLAY_UNIT_CONVERSIONS = {
PRESSURE_PA: lambda x, units: pressure_util.convert(
x, PRESSURE_PA, units.pressure_unit
)
@@ -145,6 +145,17 @@ UNIT_CONVERSIONS = {
else None,
}
# Convert volume statistics from the display unit configured by the user
# to the normalized unit used for statistics
# This is used to support adjusting statistics in the display unit
DISPLAY_UNIT_TO_STATISTIC_UNIT_CONVERSIONS: dict[
str, Callable[[float, UnitSystem], float]
] = {
VOLUME_CUBIC_FEET: lambda x, units: volume_util.convert(
x, _configured_unit(VOLUME_CUBIC_METERS, units), VOLUME_CUBIC_METERS
),
}
_LOGGER = logging.getLogger(__name__)
@@ -721,7 +732,17 @@ def get_metadata(
)
@overload
def _configured_unit(unit: None, units: UnitSystem) -> None:
...
@overload
def _configured_unit(unit: str, units: UnitSystem) -> str:
...
def _configured_unit(unit: str | None, units: UnitSystem) -> str | None:
"""Return the pressure and temperature units configured by the user."""
if unit == PRESSURE_PA:
return units.pressure_unit
@@ -1163,7 +1184,7 @@ def _sorted_statistics_to_dict(
statistic_id = metadata[meta_id]["statistic_id"]
convert: Callable[[Any, Any], float | None]
if convert_units:
convert = UNIT_CONVERSIONS.get(unit, lambda x, units: x) # type: ignore[arg-type,no-any-return]
convert = STATISTIC_UNIT_TO_DISPLAY_UNIT_CONVERSIONS.get(unit, lambda x, units: x) # type: ignore[arg-type,no-any-return]
else:
convert = no_conversion
ent_results = result[meta_id]
@@ -1323,17 +1344,26 @@ def adjust_statistics(
if statistic_id not in metadata:
return True
tables: tuple[type[Statistics | StatisticsShortTerm], ...] = (
Statistics,
units = instance.hass.config.units
statistic_unit = metadata[statistic_id][1]["unit_of_measurement"]
display_unit = _configured_unit(statistic_unit, units)
convert = DISPLAY_UNIT_TO_STATISTIC_UNIT_CONVERSIONS.get(display_unit, lambda x, units: x) # type: ignore[arg-type]
sum_adjustment = convert(sum_adjustment, units)
_adjust_sum_statistics(
session,
StatisticsShortTerm,
metadata[statistic_id][0],
start_time,
sum_adjustment,
)
_adjust_sum_statistics(
session,
Statistics,
metadata[statistic_id][0],
start_time.replace(minute=0),
sum_adjustment,
)
for table in tables:
_adjust_sum_statistics(
session,
table,
metadata[statistic_id][0],
start_time,
sum_adjustment,
)
return True
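
A hedged sketch of the `@overload` pattern used in the recorder hunk above: a single runtime implementation is kept, while the overloads let a type checker narrow a None argument to None and a str argument to str. `UnitSystem` is stubbed here and the unit constants are plain strings; only the typing shape of `_configured_unit` is being illustrated.

```python
from __future__ import annotations

from typing import overload


class UnitSystem:
    """Hypothetical stub carrying the user's configured display units."""

    pressure_unit = "hPa"
    temperature_unit = "°C"


@overload
def configured_unit(unit: None, units: UnitSystem) -> None:
    ...


@overload
def configured_unit(unit: str, units: UnitSystem) -> str:
    ...


def configured_unit(unit: str | None, units: UnitSystem) -> str | None:
    """Map a normalized statistic unit to the user-configured display unit."""
    if unit == "Pa":
        return units.pressure_unit
    if unit == "°C":
        return units.temperature_unit
    return unit


print(configured_unit("Pa", UnitSystem()))   # hPa
print(configured_unit(None, UnitSystem()))   # None
```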

View File

@@ -2,8 +2,8 @@
"domain": "remote_rpi_gpio",
"name": "remote_rpi_gpio",
"documentation": "https://www.home-assistant.io/integrations/remote_rpi_gpio",
"requirements": ["gpiozero==1.5.1"],
"requirements": ["gpiozero==1.6.2", "pigpio==1.78"],
"codeowners": [],
"iot_class": "local_push",
"loggers": ["gpiozero"]
"loggers": ["gpiozero", "pigpio"]
}

View File

@@ -3,7 +3,7 @@
"name": "Renault",
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/renault",
"requirements": ["renault-api==0.1.10"],
"requirements": ["renault-api==0.1.11"],
"codeowners": ["@epenet"],
"iot_class": "cloud_polling",
"loggers": ["renault_api"],

View File

@@ -1,5 +1,6 @@
"""The roomba component."""
import asyncio
from functools import partial
import logging
import async_timeout
@@ -42,12 +43,15 @@ async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> b
},
)
roomba = RoombaFactory.create_roomba(
address=config_entry.data[CONF_HOST],
blid=config_entry.data[CONF_BLID],
password=config_entry.data[CONF_PASSWORD],
continuous=config_entry.options[CONF_CONTINUOUS],
delay=config_entry.options[CONF_DELAY],
roomba = await hass.async_add_executor_job(
partial(
RoombaFactory.create_roomba,
address=config_entry.data[CONF_HOST],
blid=config_entry.data[CONF_BLID],
password=config_entry.data[CONF_PASSWORD],
continuous=config_entry.options[CONF_CONTINUOUS],
delay=config_entry.options[CONF_DELAY],
)
)
try:
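
A minimal sketch of why `functools.partial` appears in the Roomba hunks above: running a blocking factory in the executor only forwards positional arguments, so a call that needs keyword arguments is wrapped in `partial()` first. `RoombaFactory` below is a placeholder for the real roombapy factory, and `run_in_executor` stands in for `hass.async_add_executor_job`.

```python
import asyncio
from functools import partial


class RoombaFactory:
    """Hypothetical stand-in for roombapy's blocking factory."""

    @staticmethod
    def create_roomba(*, address, blid, password, continuous, delay):
        # Pretend this performs blocking network / TLS setup.
        return f"roomba@{address}"


async def setup() -> str:
    loop = asyncio.get_running_loop()
    # Keyword arguments are bound by partial(), then the call runs off-loop.
    return await loop.run_in_executor(
        None,
        partial(
            RoombaFactory.create_roomba,
            address="192.0.2.10",
            blid="blid",
            password="secret",
            continuous=True,
            delay=1,
        ),
    )


print(asyncio.run(setup()))  # roomba@192.0.2.10
```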

View File

@@ -1,6 +1,7 @@
"""Config flow to configure roomba component."""
import asyncio
from functools import partial
from roombapy import RoombaFactory
from roombapy.discovery import RoombaDiscovery
@@ -41,12 +42,15 @@ async def validate_input(hass: core.HomeAssistant, data):
Data has the keys from DATA_SCHEMA with values provided by the user.
"""
roomba = RoombaFactory.create_roomba(
address=data[CONF_HOST],
blid=data[CONF_BLID],
password=data[CONF_PASSWORD],
continuous=False,
delay=data[CONF_DELAY],
roomba = await hass.async_add_executor_job(
partial(
RoombaFactory.create_roomba,
address=data[CONF_HOST],
blid=data[CONF_BLID],
password=data[CONF_PASSWORD],
continuous=False,
delay=data[CONF_DELAY],
)
)
info = await async_connect_or_timeout(hass, roomba)

View File

@@ -3,7 +3,7 @@
"name": "RTSPtoWebRTC",
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/rtsp_to_webrtc",
"requirements": ["rtsp-to-webrtc==0.5.0"],
"requirements": ["rtsp-to-webrtc==0.5.1"],
"dependencies": ["camera"],
"codeowners": ["@allenporter"],
"iot_class": "local_push",

View File

@@ -363,9 +363,8 @@ class SamsungTVConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
if not entry:
return None
entry_kw_args: dict = {}
if (
self.unique_id
and entry.unique_id is None
if self.unique_id and (
entry.unique_id is None
or (is_unique_match and self.unique_id != entry.unique_id)
):
entry_kw_args["unique_id"] = self.unique_id
@@ -469,6 +468,13 @@ class SamsungTVConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
await self._async_set_unique_id_from_udn()
self._async_update_and_abort_for_matching_unique_id()
self._async_abort_if_host_already_in_progress()
if self._method == METHOD_LEGACY and discovery_info.ssdp_st in (
UPNP_SVC_RENDERING_CONTROL,
UPNP_SVC_MAIN_TV_AGENT,
):
# The UDN we use for the unique id cannot be determined
# from device_info for legacy devices
return self.async_abort(reason="not_supported")
self.context["title_placeholders"] = {"device": self._title}
return await self.async_step_confirm()

View File

@@ -214,13 +214,19 @@ class SamsungTVDevice(MediaPlayerEntity):
)
if self._attr_state != STATE_ON:
if self._dmr_device and self._dmr_device.is_subscribed:
await self._dmr_device.async_unsubscribe_services()
return
startup_tasks: list[Coroutine[Any, Any, None]] = []
startup_tasks: list[Coroutine[Any, Any, Any]] = []
if not self._app_list_event.is_set():
startup_tasks.append(self._async_startup_app_list())
if self._dmr_device and not self._dmr_device.is_subscribed:
startup_tasks.append(
self._dmr_device.async_subscribe_services(auto_resubscribe=True)
)
if not self._dmr_device and self._ssdp_rendering_control_location:
startup_tasks.append(self._async_startup_dmr())
@@ -273,7 +279,10 @@ class SamsungTVDevice(MediaPlayerEntity):
if self._dmr_device is None:
session = async_get_clientsession(self.hass)
upnp_requester = AiohttpSessionRequester(session)
upnp_factory = UpnpFactory(upnp_requester)
# Set non_strict to avoid invalid data sent by Samsung TV:
# Got invalid value for <UpnpStateVariable(PlaybackStorageMedium, string)>:
# NETWORK,NONE
upnp_factory = UpnpFactory(upnp_requester, non_strict=True)
upnp_device: UpnpDevice | None = None
with contextlib.suppress(UpnpConnectionError):
upnp_device = await upnp_factory.async_create_device(

View File

@@ -157,9 +157,6 @@ class RpcShellyCover(ShellyRpcEntity, CoverEntity):
@property
def is_closed(self) -> bool | None:
"""If cover is closed."""
if not self.status["pos_control"]:
return None
return cast(bool, self.status["state"] == "closed")
@property

View File

@@ -130,6 +130,7 @@ async def async_setup_entry(
class SleepIQNumberEntity(SleepIQBedEntity, NumberEntity):
"""Representation of a SleepIQ number entity."""
entity_description: SleepIQNumberEntityDescription
_attr_icon = "mdi:bed"
def __init__(
@@ -140,7 +141,7 @@ class SleepIQNumberEntity(SleepIQBedEntity, NumberEntity):
description: SleepIQNumberEntityDescription,
) -> None:
"""Initialize the number."""
self.description = description
self.entity_description = description
self.device = device
self._attr_name = description.get_name_fn(bed, device)
@@ -151,10 +152,10 @@ class SleepIQNumberEntity(SleepIQBedEntity, NumberEntity):
@callback
def _async_update_attrs(self) -> None:
"""Update number attributes."""
self._attr_value = float(self.description.value_fn(self.device))
self._attr_value = float(self.entity_description.value_fn(self.device))
async def async_set_value(self, value: float) -> None:
"""Set the number value."""
await self.description.set_value_fn(self.device, int(value))
await self.entity_description.set_value_fn(self.device, int(value))
self._attr_value = value
self.async_write_ha_state()
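
A hedged sketch of the convention restored in the SleepIQ hunk above: the per-entity callables live on a description dataclass, and the entity stores it under the standard `entity_description` attribute instead of an ad-hoc name. The `Actuator` class below is a stand-in, not the real asyncsleepiq model.

```python
from __future__ import annotations

from collections.abc import Callable
from dataclasses import dataclass


@dataclass(frozen=True)
class NumberDescription:
    key: str
    value_fn: Callable[["Actuator"], int]
    set_value_fn: Callable[["Actuator", int], None]


class Actuator:
    """Hypothetical device with a single adjustable position."""

    position = 50

    def set_position(self, value: int) -> None:
        self.position = value


FIRMNESS = NumberDescription(
    key="firmness",
    value_fn=lambda device: device.position,
    set_value_fn=lambda device, value: device.set_position(value),
)


class FirmnessNumber:
    entity_description: NumberDescription

    def __init__(self, device: Actuator, description: NumberDescription) -> None:
        self.entity_description = description
        self.device = device

    @property
    def value(self) -> float:
        return float(self.entity_description.value_fn(self.device))

    def set_value(self, value: float) -> None:
        self.entity_description.set_value_fn(self.device, int(value))


number = FirmnessNumber(Actuator(), FIRMNESS)
number.set_value(75)
print(number.value)  # 75.0
```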

View File

@@ -3,7 +3,7 @@
"name": "SMA Solar",
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/sma",
"requirements": ["pysma==0.6.10"],
"requirements": ["pysma==0.6.11"],
"codeowners": ["@kellerza", "@rklomp"],
"iot_class": "local_polling",
"loggers": ["pysma"]

View File

@@ -28,10 +28,10 @@ create_zone:
description: Name of slaves entities to add to the new zone.
required: true
selector:
target:
entity:
integration: soundtouch
domain: media_player
entity:
multiple: true
integration: soundtouch
domain: media_player
add_zone_slave:
name: Add zone slave
@@ -50,10 +50,10 @@ add_zone_slave:
description: Name of slaves entities to add to the existing zone.
required: true
selector:
target:
entity:
integration: soundtouch
domain: media_player
entity:
multiple: true
integration: soundtouch
domain: media_player
remove_zone_slave:
name: Remove zone slave
@@ -72,7 +72,7 @@ remove_zone_slave:
description: Name of slaves entities to remove from the existing zone.
required: true
selector:
target:
entity:
integration: soundtouch
domain: media_player
entity:
multiple: true
integration: soundtouch
domain: media_player

View File

@@ -47,7 +47,7 @@ from .util import fetch_image_url
_LOGGER = logging.getLogger(__name__)
SCAN_INTERVAL = timedelta(seconds=30)
SCAN_INTERVAL = timedelta(minutes=1)
SUPPORT_SPOTIFY = (
SUPPORT_BROWSE_MEDIA
@@ -117,6 +117,7 @@ class SpotifyMediaPlayer(MediaPlayerEntity):
_attr_icon = "mdi:spotify"
_attr_media_content_type = MEDIA_TYPE_MUSIC
_attr_media_image_remotely_accessible = False
_attr_entity_registry_enabled_default = False
def __init__(
self,

View File

@@ -5,3 +5,4 @@ KNOWN_PLAYERS = "known_players"
PLAYER_DISCOVERY_UNSUB = "player_discovery_unsub"
DISCOVERY_TASK = "discovery_task"
DEFAULT_PORT = 9000
SQUEEZEBOX_SOURCE_STRINGS = ("source:", "wavin:", "spotify:")

View File

@@ -63,7 +63,13 @@ from .browse_media import (
library_payload,
media_source_content_filter,
)
from .const import DISCOVERY_TASK, DOMAIN, KNOWN_PLAYERS, PLAYER_DISCOVERY_UNSUB
from .const import (
DISCOVERY_TASK,
DOMAIN,
KNOWN_PLAYERS,
PLAYER_DISCOVERY_UNSUB,
SQUEEZEBOX_SOURCE_STRINGS,
)
SERVICE_CALL_METHOD = "call_method"
SERVICE_CALL_QUERY = "call_query"
@@ -475,7 +481,9 @@ class SqueezeBoxEntity(MediaPlayerEntity):
media_id = play_item.url
if media_type in MEDIA_TYPE_MUSIC:
media_id = async_process_play_media_url(self.hass, media_id)
if not media_id.startswith(SQUEEZEBOX_SOURCE_STRINGS):
# do not process special squeezebox "source" media ids
media_id = async_process_play_media_url(self.hass, media_id)
await self._player.async_load_url(media_id, cmd)
return
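
A small sketch of the prefix guard added in the Squeezebox hunk above: `str.startswith` accepts a tuple, so special "source" style media ids bypass URL processing while everything else is still resolved. `process_url` is a placeholder for `async_process_play_media_url`.

```python
SQUEEZEBOX_SOURCE_STRINGS = ("source:", "wavin:", "spotify:")


def resolve_media_id(media_id: str, process_url) -> str:
    """Resolve a media id, leaving special squeezebox source ids untouched."""
    if media_id.startswith(SQUEEZEBOX_SOURCE_STRINGS):
        return media_id
    return process_url(media_id)


# str.upper stands in for the real URL-processing callable.
assert resolve_media_id("spotify:track:123", str.upper) == "spotify:track:123"
assert resolve_media_id("http://example/x.mp3", str.upper) == "HTTP://EXAMPLE/X.MP3"
```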

View File

@@ -2,7 +2,7 @@
"domain": "stream",
"name": "Stream",
"documentation": "https://www.home-assistant.io/integrations/stream",
"requirements": ["PyTurboJPEG==1.6.6", "av==9.0.0"],
"requirements": ["PyTurboJPEG==1.6.6", "av==8.1.0"],
"dependencies": ["http"],
"codeowners": ["@hunterjm", "@uvjustin", "@allenporter"],
"quality_scale": "internal",

View File

@@ -130,9 +130,12 @@ class Sun(Entity):
self._config_listener = self.hass.bus.async_listen(
EVENT_CORE_CONFIG_UPDATE, self.update_location
)
self._loaded_listener = self.hass.bus.async_listen(
EVENT_COMPONENT_LOADED, self.loading_complete
)
if DOMAIN in hass.config.components:
self.update_location()
else:
self._loaded_listener = self.hass.bus.async_listen(
EVENT_COMPONENT_LOADED, self.loading_complete
)
@callback
def loading_complete(self, event_: Event) -> None:
@@ -158,6 +161,7 @@ class Sun(Entity):
"""Remove the loaded listener."""
if self._loaded_listener:
self._loaded_listener()
self._loaded_listener = None
@callback
def remove_listeners(self):

View File

@@ -18,7 +18,10 @@ from homeassistant.util import Throttle
from .const import (
CONF_FALLBACK,
CONST_OVERLAY_MANUAL,
CONST_OVERLAY_TADO_DEFAULT,
CONST_OVERLAY_TADO_MODE,
CONST_OVERLAY_TADO_OPTIONS,
DATA,
DOMAIN,
INSIDE_TEMPERATURE_MEASUREMENT,
@@ -51,7 +54,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
username = entry.data[CONF_USERNAME]
password = entry.data[CONF_PASSWORD]
fallback = entry.options.get(CONF_FALLBACK, CONST_OVERLAY_TADO_MODE)
fallback = entry.options.get(CONF_FALLBACK, CONST_OVERLAY_TADO_DEFAULT)
tadoconnector = TadoConnector(hass, username, password, fallback)
@@ -99,7 +102,16 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
def _async_import_options_from_data_if_missing(hass: HomeAssistant, entry: ConfigEntry):
options = dict(entry.options)
if CONF_FALLBACK not in options:
options[CONF_FALLBACK] = entry.data.get(CONF_FALLBACK, CONST_OVERLAY_TADO_MODE)
options[CONF_FALLBACK] = entry.data.get(
CONF_FALLBACK, CONST_OVERLAY_TADO_DEFAULT
)
hass.config_entries.async_update_entry(entry, options=options)
if options[CONF_FALLBACK] not in CONST_OVERLAY_TADO_OPTIONS:
if options[CONF_FALLBACK]:
options[CONF_FALLBACK] = CONST_OVERLAY_TADO_MODE
else:
options[CONF_FALLBACK] = CONST_OVERLAY_MANUAL
hass.config_entries.async_update_entry(entry, options=options)

View File

@@ -11,7 +11,13 @@ from homeassistant.const import CONF_PASSWORD, CONF_USERNAME
from homeassistant.core import callback
from homeassistant.data_entry_flow import FlowResult
from .const import CONF_FALLBACK, CONST_OVERLAY_TADO_OPTIONS, DOMAIN, UNIQUE_ID
from .const import (
CONF_FALLBACK,
CONST_OVERLAY_TADO_DEFAULT,
CONST_OVERLAY_TADO_OPTIONS,
DOMAIN,
UNIQUE_ID,
)
_LOGGER = logging.getLogger(__name__)
@@ -126,7 +132,10 @@ class OptionsFlowHandler(config_entries.OptionsFlow):
data_schema = vol.Schema(
{
vol.Optional(
CONF_FALLBACK, default=self.config_entry.options.get(CONF_FALLBACK)
CONF_FALLBACK,
default=self.config_entry.options.get(
CONF_FALLBACK, CONST_OVERLAY_TADO_DEFAULT
),
): vol.In(CONST_OVERLAY_TADO_OPTIONS),
}
)

View File

@@ -1,17 +1,6 @@
"""Support for Telegram bot to send messages only."""
import logging
from . import initialize_bot
_LOGGER = logging.getLogger(__name__)
async def async_setup_platform(hass, config):
async def async_setup_platform(hass, bot, config):
"""Set up the Telegram broadcast platform."""
bot = initialize_bot(config)
bot_config = await hass.async_add_executor_job(bot.getMe)
_LOGGER.debug(
"Telegram broadcast platform setup with bot %s", bot_config["username"]
)
return True

View File

@@ -331,6 +331,7 @@ class TriggerBinarySensorEntity(TriggerEntity, BinarySensorEntity, RestoreEntity
and self._state is None
):
self._state = last_state.state == STATE_ON
self.restore_attributes(last_state)
if CONF_AUTO_OFF not in self._config:
return

View File

@@ -4,8 +4,16 @@ from __future__ import annotations
import logging
from typing import Any
from homeassistant.const import CONF_DEVICE_CLASS, CONF_ICON, CONF_NAME, CONF_UNIQUE_ID
from homeassistant.core import HomeAssistant, callback
from homeassistant.const import (
ATTR_ENTITY_PICTURE,
ATTR_FRIENDLY_NAME,
ATTR_ICON,
CONF_DEVICE_CLASS,
CONF_ICON,
CONF_NAME,
CONF_UNIQUE_ID,
)
from homeassistant.core import HomeAssistant, State, callback
from homeassistant.exceptions import TemplateError
from homeassistant.helpers import template
from homeassistant.helpers.update_coordinator import CoordinatorEntity
@@ -13,6 +21,12 @@ from homeassistant.helpers.update_coordinator import CoordinatorEntity
from . import TriggerUpdateCoordinator
from .const import CONF_ATTRIBUTES, CONF_AVAILABILITY, CONF_PICTURE
CONF_TO_ATTRIBUTE = {
CONF_ICON: ATTR_ICON,
CONF_NAME: ATTR_FRIENDLY_NAME,
CONF_PICTURE: ATTR_ENTITY_PICTURE,
}
class TriggerEntity(CoordinatorEntity[TriggerUpdateCoordinator]):
"""Template entity based on trigger data."""
@@ -45,10 +59,10 @@ class TriggerEntity(CoordinatorEntity[TriggerUpdateCoordinator]):
self._to_render_complex: list[str] = []
for itm in (
CONF_NAME,
CONF_ICON,
CONF_PICTURE,
CONF_AVAILABILITY,
CONF_ICON,
CONF_NAME,
CONF_PICTURE,
):
if itm not in config:
continue
@@ -115,6 +129,21 @@ class TriggerEntity(CoordinatorEntity[TriggerUpdateCoordinator]):
if self.coordinator.data is not None:
self._process_data()
def restore_attributes(self, last_state: State) -> None:
"""Restore attributes."""
for conf_key, attr in CONF_TO_ATTRIBUTE.items():
if conf_key not in self._config or attr not in last_state.attributes:
continue
self._rendered[conf_key] = last_state.attributes[attr]
if CONF_ATTRIBUTES in self._config:
extra_state_attributes = {}
for attr in self._config[CONF_ATTRIBUTES]:
if attr not in last_state.attributes:
continue
extra_state_attributes[attr] = last_state.attributes[attr]
self._rendered[CONF_ATTRIBUTES] = extra_state_attributes
@callback
def _process_data(self) -> None:
"""Process new data."""

View File

@@ -388,5 +388,5 @@ class Timer(RestoreEntity):
"""Handle when the config is updated."""
self._config = config
self._duration = cv.time_period_str(config[CONF_DURATION])
self._restore = config[CONF_RESTORE]
self._restore = config.get(CONF_RESTORE, DEFAULT_RESTORE)
self.async_write_ha_state()

View File

@@ -29,7 +29,7 @@ INTEGRATION_NAME = "Tomorrow.io"
DEFAULT_NAME = INTEGRATION_NAME
ATTRIBUTION = "Powered by Tomorrow.io"
MAX_REQUESTS_PER_DAY = 500
MAX_REQUESTS_PER_DAY = 100
CLEAR_CONDITIONS = {"night": ATTR_CONDITION_CLEAR_NIGHT, "day": ATTR_CONDITION_SUNNY}

View File

@@ -101,9 +101,9 @@ RANDOM_EFFECT_DICT: Final = {
cv.ensure_list_csv, [vol.Coerce(int)], HSV_SEQUENCE
),
vol.Optional("random_seed", default=100): vol.All(
vol.Coerce(int), vol.Range(min=1, max=100)
vol.Coerce(int), vol.Range(min=1, max=600)
),
vol.Required("backgrounds"): vol.All(
vol.Optional("backgrounds"): vol.All(
cv.ensure_list,
vol.Length(min=1, max=16),
[vol.All(vol.Coerce(tuple), HSV_SEQUENCE)],
@@ -318,15 +318,6 @@ class TPLinkSmartLightStrip(TPLinkSmartBulb):
device: SmartLightStrip
def __init__(
self,
device: SmartLightStrip,
coordinator: TPLinkDataUpdateCoordinator,
) -> None:
"""Initialize the smart light strip."""
super().__init__(device, coordinator)
self._last_custom_effect: dict[str, Any] = {}
@property
def supported_features(self) -> int:
"""Flag supported features."""
@@ -351,6 +342,11 @@ class TPLinkSmartLightStrip(TPLinkSmartBulb):
"""Turn the light on."""
brightness, transition = self._async_extract_brightness_transition(**kwargs)
if ATTR_COLOR_TEMP in kwargs:
if self.effect:
# If there is an effect in progress
# we have to set an HSV value to clear the effect
# before we can set a color temp
await self.device.set_hsv(0, 0, brightness)
await self._async_set_color_temp(
int(kwargs[ATTR_COLOR_TEMP]), brightness, transition
)
@@ -358,20 +354,6 @@ class TPLinkSmartLightStrip(TPLinkSmartBulb):
await self._async_set_hsv(kwargs[ATTR_HS_COLOR], brightness, transition)
elif ATTR_EFFECT in kwargs:
await self.device.set_effect(kwargs[ATTR_EFFECT])
elif (
self.device.is_off
and self.device.effect
and self.device.effect["enable"] == 0
and self.device.effect["name"]
):
if not self.device.effect["custom"]:
await self.device.set_effect(self.device.effect["name"])
elif self._last_custom_effect:
await self.device.set_custom_effect(self._last_custom_effect)
# The device does not remember custom effects
# so we must set a default value or it can never turn back on
else:
await self.device.set_hsv(0, 0, 100, transition=transition)
else:
await self._async_turn_on_with_brightness(brightness, transition)
@@ -384,7 +366,7 @@ class TPLinkSmartLightStrip(TPLinkSmartBulb):
fadeoff: int,
init_states: tuple[int, int, int],
random_seed: int,
backgrounds: Sequence[tuple[int, int, int]],
backgrounds: Sequence[tuple[int, int, int]] | None = None,
hue_range: tuple[int, int] | None = None,
saturation_range: tuple[int, int] | None = None,
brightness_range: tuple[int, int] | None = None,
@@ -396,8 +378,9 @@ class TPLinkSmartLightStrip(TPLinkSmartBulb):
"type": "random",
"init_states": [init_states],
"random_seed": random_seed,
"backgrounds": backgrounds,
}
if backgrounds:
effect["backgrounds"] = backgrounds
if fadeoff:
effect["fadeoff"] = fadeoff
if hue_range:
@@ -412,7 +395,6 @@ class TPLinkSmartLightStrip(TPLinkSmartBulb):
if transition_range:
effect["transition_range"] = transition_range
effect["transition"] = 0
self._last_custom_effect = effect
await self.device.set_custom_effect(effect)
async def async_set_sequence_effect(
@@ -434,5 +416,4 @@ class TPLinkSmartLightStrip(TPLinkSmartBulb):
"spread": spread,
"direction": direction,
}
self._last_custom_effect = effect
await self.device.set_custom_effect(effect)

View File

@@ -3,7 +3,7 @@
"name": "TP-Link Kasa Smart",
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/tplink",
"requirements": ["python-kasa==0.4.2"],
"requirements": ["python-kasa==0.4.3"],
"codeowners": ["@rytilahti", "@thegardenmonkey"],
"dependencies": ["network"],
"quality_scale": "platinum",

View File

@@ -93,7 +93,7 @@ random_effect:
- [199, 89, 50]
- [160, 50, 50]
- [180, 100, 50]
required: true
required: false
selector:
object:
segments:
@@ -180,4 +180,4 @@ random_effect:
number:
min: 1
step: 1
max: 100
max: 600

View File

@@ -116,9 +116,6 @@ class ProtectFlowHandler(config_entries.ConfigFlow, domain=DOMAIN):
self.hass.config_entries.async_update_entry(
entry, data={**entry.data, CONF_HOST: new_host}
)
self.hass.async_create_task(
self.hass.config_entries.async_reload(entry.entry_id)
)
return self.async_abort(reason="already_configured")
if entry_host in (direct_connect_domain, source_ip) or (
entry_has_direct_connect

View File

@@ -3,7 +3,7 @@
"name": "UniFi Protect",
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/unifiprotect",
"requirements": ["pyunifiprotect==3.2.0", "unifi-discovery==1.1.2"],
"requirements": ["pyunifiprotect==3.3.0", "unifi-discovery==1.1.2"],
"dependencies": ["http"],
"codeowners": ["@briis", "@AngellusMortis", "@bdraco"],
"quality_scale": "platinum",

View File

@@ -137,7 +137,7 @@ CAMERA_SWITCHES: tuple[ProtectSwitchEntityDescription, ...] = (
name="Detections: Person",
icon="mdi:walk",
entity_category=EntityCategory.CONFIG,
ufp_required_field="feature_flags.has_smart_detect",
ufp_required_field="can_detect_person",
ufp_value="is_person_detection_on",
ufp_set_method="set_person_detection",
),
@@ -146,10 +146,19 @@ CAMERA_SWITCHES: tuple[ProtectSwitchEntityDescription, ...] = (
name="Detections: Vehicle",
icon="mdi:car",
entity_category=EntityCategory.CONFIG,
ufp_required_field="feature_flags.has_smart_detect",
ufp_required_field="can_detect_vehicle",
ufp_value="is_vehicle_detection_on",
ufp_set_method="set_vehicle_detection",
),
ProtectSwitchEntityDescription(
key="smart_face",
name="Detections: Face",
icon="mdi:human-greeting",
entity_category=EntityCategory.CONFIG,
ufp_required_field="can_detect_face",
ufp_value="is_face_detection_on",
ufp_set_method="set_face_detection",
),
)
SENSE_SWITCHES: tuple[ProtectSwitchEntityDescription, ...] = (

View File

@@ -327,13 +327,15 @@ class UpdateEntity(RestoreEntity):
if latest_version == self.__skipped_version:
return STATE_OFF
if latest_version == installed_version:
return STATE_OFF
try:
newer = AwesomeVersion(latest_version) > installed_version
return STATE_ON if newer else STATE_OFF
except AwesomeVersionCompareException:
# Can't compare versions, fallback to exact match
return STATE_OFF if latest_version == installed_version else STATE_ON
# Can't compare versions, already tried exact match
return STATE_ON
@final
@property
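
A hedged sketch of the comparison logic in the update entity hunk above, using the awesomeversion package the diff already relies on (the import path is assumed to match): an exact match is checked first, then a semantic comparison, and an incomparable pair is treated as "update available" because equality was already ruled out.

```python
from awesomeversion import AwesomeVersion, AwesomeVersionCompareException


def update_available(installed: str, latest: str) -> bool:
    """Return True when the latest version should be offered as an update."""
    if latest == installed:
        return False
    try:
        return AwesomeVersion(latest) > installed
    except AwesomeVersionCompareException:
        # Versions cannot be compared; the exact match was already tried.
        return True


print(update_available("2022.4.2", "2022.4.3"))  # True
print(update_available("2022.4.3", "2022.4.3"))  # False
```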

View File

@@ -106,27 +106,24 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
async def async_reset_meters(service_call):
"""Reset all sensors of a meter."""
entity_id = service_call.data["entity_id"]
meters = service_call.data["entity_id"]
domain = split_entity_id(entity_id)[0]
if domain == DOMAIN:
for entity in hass.data[DATA_LEGACY_COMPONENT].entities:
if entity_id == entity.entity_id:
_LOGGER.debug(
"forward reset meter from %s to %s",
entity_id,
entity.tracked_entity_id,
)
entity_id = entity.tracked_entity_id
_LOGGER.debug("reset meter %s", entity_id)
async_dispatcher_send(hass, SIGNAL_RESET_METER, entity_id)
for meter in meters:
_LOGGER.debug("resetting meter %s", meter)
domain, entity = split_entity_id(meter)
# backward compatibility up to 2022.07:
if domain == DOMAIN:
async_dispatcher_send(
hass, SIGNAL_RESET_METER, f"{SELECT_DOMAIN}.{entity}"
)
else:
async_dispatcher_send(hass, SIGNAL_RESET_METER, meter)
hass.services.async_register(
DOMAIN,
SERVICE_RESET,
async_reset_meters,
vol.Schema({ATTR_ENTITY_ID: cv.entity_id}),
vol.Schema({ATTR_ENTITY_ID: vol.All(cv.ensure_list, [cv.entity_id])}),
)
if DOMAIN not in config:
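
A sketch of the schema change in the utility_meter hunk above: `vol.All(cv.ensure_list, [cv.entity_id])` coerces a single entity id into a list so the reset service accepts one meter or several. The validators here are simplified stand-ins; Home Assistant's `cv.ensure_list` and `cv.entity_id` are stricter.

```python
import voluptuous as vol


def ensure_list(value):
    """Wrap a scalar in a list, mirroring cv.ensure_list."""
    if value is None:
        return []
    return value if isinstance(value, list) else [value]


def entity_id(value: str) -> str:
    """Very loose entity id check, standing in for cv.entity_id."""
    if value.count(".") != 1:
        raise vol.Invalid(f"invalid entity id: {value}")
    return value.lower()


SERVICE_SCHEMA = vol.Schema({"entity_id": vol.All(ensure_list, [entity_id])})

print(SERVICE_SCHEMA({"entity_id": "utility_meter.energy"}))
# -> {'entity_id': ['utility_meter.energy']}
print(SERVICE_SCHEMA({"entity_id": ["sensor.a", "sensor.b"]}))
# -> {'entity_id': ['sensor.a', 'sensor.b']}
```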

View File

@@ -6,7 +6,6 @@ reset:
target:
entity:
domain: select
integration: utility_meter
next_tariff:
name: Next Tariff

View File

@@ -2,7 +2,7 @@
"domain": "version",
"name": "Version",
"documentation": "https://www.home-assistant.io/integrations/version",
"requirements": ["pyhaversion==22.02.0"],
"requirements": ["pyhaversion==22.4.1"],
"codeowners": ["@fabaff", "@ludeeus"],
"quality_scale": "internal",
"iot_class": "local_push",

View File

@@ -29,6 +29,8 @@ from .const import (
CONF_SERIAL_NUMBER_KEY,
CONF_SOFTWARE_KEY,
CONF_STATION,
CONF_STATUS_DESCRIPTION_KEY,
CONF_STATUS_ID_KEY,
DOMAIN,
)
@@ -37,6 +39,39 @@ _LOGGER = logging.getLogger(__name__)
PLATFORMS = [Platform.SENSOR, Platform.NUMBER, Platform.LOCK]
UPDATE_INTERVAL = 30
# Translation of StatusId based on Wallbox portal code:
# https://my.wallbox.com/src/utilities/charger/chargerStatuses.js
CHARGER_STATUS: dict[int, str] = {
0: "Disconnected",
14: "Error",
15: "Error",
161: "Ready",
162: "Ready",
163: "Disconnected",
164: "Waiting",
165: "Locked",
166: "Updating",
177: "Scheduled",
178: "Paused",
179: "Scheduled",
180: "Waiting for car demand",
181: "Waiting for car demand",
182: "Paused",
183: "Waiting in queue by Power Sharing",
184: "Waiting in queue by Power Sharing",
185: "Waiting in queue by Power Boost",
186: "Waiting in queue by Power Boost",
187: "Waiting MID failed",
188: "Waiting MID safety margin exceeded",
189: "Waiting in queue by Eco-Smart",
193: "Charging",
194: "Charging",
195: "Charging",
196: "Discharging",
209: "Locked",
210: "Locked",
}
class WallboxCoordinator(DataUpdateCoordinator[dict[str, Any]]):
"""Wallbox Coordinator class."""
@@ -86,6 +121,9 @@ class WallboxCoordinator(DataUpdateCoordinator[dict[str, Any]]):
data[CONF_LOCKED_UNLOCKED_KEY] = data[CONF_DATA_KEY][
CONF_LOCKED_UNLOCKED_KEY
]
data[CONF_STATUS_DESCRIPTION_KEY] = CHARGER_STATUS.get(
data[CONF_STATUS_ID_KEY], "Unknown"
)
return data

View File

@@ -21,5 +21,6 @@ CONF_MAX_CHARGING_CURRENT_KEY = "max_charging_current"
CONF_LOCKED_UNLOCKED_KEY = "locked"
CONF_NAME_KEY = "name"
CONF_STATE_OF_CHARGE_KEY = "state_of_charge"
CONF_STATUS_ID_KEY = "status_id"
CONF_STATUS_DESCRIPTION_KEY = "status_description"
CONF_CONNECTIONS = "connections"

View File

@@ -2,7 +2,7 @@
"domain": "xmpp",
"name": "Jabber (XMPP)",
"documentation": "https://www.home-assistant.io/integrations/xmpp",
"requirements": ["slixmpp==1.8.0.1"],
"requirements": ["slixmpp==1.8.2"],
"codeowners": ["@fabaff", "@flowolf"],
"iot_class": "cloud_push",
"loggers": ["pyasn1", "slixmpp"]

View File

@@ -232,7 +232,7 @@ GROUP_MEMBER_SCHEMA = vol.All(
vol.Schema(
{
vol.Required(ATTR_IEEE): IEEE_SCHEMA,
vol.Required(ATTR_ENDPOINT_ID): int,
vol.Required(ATTR_ENDPOINT_ID): vol.Coerce(int),
}
),
_cv_group_member,
@@ -244,8 +244,8 @@ CLUSTER_BINDING_SCHEMA = vol.All(
{
vol.Required(ATTR_NAME): cv.string,
vol.Required(ATTR_TYPE): cv.string,
vol.Required(ATTR_ID): int,
vol.Required(ATTR_ENDPOINT_ID): int,
vol.Required(ATTR_ID): vol.Coerce(int),
vol.Required(ATTR_ENDPOINT_ID): vol.Coerce(int),
}
),
_cv_cluster_binding,

View File

@@ -8,7 +8,12 @@ import logging
from typing import Any
import zigpy.exceptions
from zigpy.zcl.foundation import ConfigureReportingResponseRecord, Status
from zigpy.zcl.foundation import (
CommandSchema,
ConfigureReportingResponseRecord,
Status,
ZCLAttributeDef,
)
from homeassistant.const import ATTR_COMMAND
from homeassistant.core import callback
@@ -20,6 +25,7 @@ from ..const import (
ATTR_ATTRIBUTE_ID,
ATTR_ATTRIBUTE_NAME,
ATTR_CLUSTER_ID,
ATTR_PARAMS,
ATTR_TYPE,
ATTR_UNIQUE_ID,
ATTR_VALUE,
@@ -111,7 +117,11 @@ class ZigbeeChannel(LogMixin):
if not hasattr(self, "_value_attribute") and self.REPORT_CONFIG:
attr = self.REPORT_CONFIG[0].get("attr")
if isinstance(attr, str):
self.value_attribute = self.cluster.attributes_by_name.get(attr)
attribute: ZCLAttributeDef = self.cluster.attributes_by_name.get(attr)
if attribute is not None:
self.value_attribute = attribute.id
else:
self.value_attribute = None
else:
self.value_attribute = attr
self._status = ChannelStatus.CREATED
@@ -354,14 +364,27 @@ class ZigbeeChannel(LogMixin):
"""Handle ZDO commands on this cluster."""
@callback
def zha_send_event(self, command: str, args: int | dict) -> None:
def zha_send_event(self, command: str, arg: list | dict | CommandSchema) -> None:
"""Relay events to hass."""
if isinstance(arg, CommandSchema):
args = [a for a in arg if a is not None]
params = arg.as_dict()
elif isinstance(arg, (list, dict)):
# Quirks can directly send lists and dicts to ZHA this way
args = arg
params = {}
else:
raise TypeError(f"Unexpected zha_send_event {command!r} argument: {arg!r}")
self._ch_pool.zha_send_event(
{
ATTR_UNIQUE_ID: self.unique_id,
ATTR_CLUSTER_ID: self.cluster.cluster_id,
ATTR_COMMAND: command,
# Maintain backwards compatibility with the old zigpy response format
ATTR_ARGS: args,
ATTR_PARAMS: params,
}
)
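
A minimal sketch of the dispatch introduced in the ZHA channel hunk above: a typed command response yields both a positional "args" list (for backwards compatibility) and a named "params" dict, while plain lists and dicts from quirks pass through unchanged. `CommandResponse` is a hypothetical stand-in for zigpy's `CommandSchema` instances.

```python
from __future__ import annotations

from dataclasses import asdict, dataclass
from typing import Any


@dataclass
class CommandResponse:
    """Stand-in for a typed zigpy command schema instance."""

    level: int | None = None
    transition_time: int | None = None

    def as_dict(self) -> dict[str, Any]:
        return asdict(self)

    def __iter__(self):
        return iter((self.level, self.transition_time))


def build_event(command: str, arg: Any) -> dict[str, Any]:
    """Build the event payload with both legacy args and named params."""
    if isinstance(arg, CommandResponse):
        args = [value for value in arg if value is not None]
        params = arg.as_dict()
    elif isinstance(arg, (list, dict)):
        args, params = arg, {}
    else:
        raise TypeError(f"Unexpected argument for {command!r}: {arg!r}")
    return {"command": command, "args": args, "params": params}


print(build_event("move_to_level", CommandResponse(level=254, transition_time=10)))
```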

View File

@@ -43,6 +43,7 @@ ATTR_NEIGHBORS = "neighbors"
ATTR_NODE_DESCRIPTOR = "node_descriptor"
ATTR_NWK = "nwk"
ATTR_OUT_CLUSTERS = "out_clusters"
ATTR_PARAMS = "params"
ATTR_POWER_SOURCE = "power_source"
ATTR_PROFILE_ID = "profile_id"
ATTR_QUIRK_APPLIED = "quirk_applied"

View File

@@ -661,7 +661,11 @@ class ZHADevice(LogMixin):
async def async_add_to_group(self, group_id: int) -> None:
"""Add this device to the provided zigbee group."""
try:
await self._zigpy_device.add_to_group(group_id)
# A group name is required. However, the spec also explicitly states that
# the group name can be ignored by the receiving device if a device cannot
# store it, so we cannot rely on it existing after being written. This is
# only done to make the ZCL command valid.
await self._zigpy_device.add_to_group(group_id, name=f"0x{group_id:04X}")
except (zigpy.exceptions.ZigbeeException, asyncio.TimeoutError) as ex:
self.debug(
"Failed to add device '%s' to group: 0x%04x ex: %s",
@@ -687,7 +691,9 @@ class ZHADevice(LogMixin):
) -> None:
"""Add the device endpoint to the provided zigbee group."""
try:
await self._zigpy_device.endpoints[endpoint_id].add_to_group(group_id)
await self._zigpy_device.endpoints[endpoint_id].add_to_group(
group_id, name=f"0x{group_id:04X}"
)
except (zigpy.exceptions.ZigbeeException, asyncio.TimeoutError) as ex:
self.debug(
"Failed to add endpoint: %s for device: '%s' to group: 0x%04x ex: %s",

View File

@@ -2,7 +2,6 @@
from __future__ import annotations
import asyncio
import collections
import logging
from typing import TYPE_CHECKING, Any, NamedTuple
@@ -30,9 +29,12 @@ class GroupMember(NamedTuple):
endpoint_id: int
GroupEntityReference = collections.namedtuple(
"GroupEntityReference", "name original_name entity_id"
)
class GroupEntityReference(NamedTuple):
"""Reference to a group entity."""
name: str
original_name: str
entity_id: int
class ZHAGroupMember(LogMixin):
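
A side-by-side sketch of the modernization in the hunk above: the functional `collections.namedtuple` is replaced by a `typing.NamedTuple` class, which keeps tuple behaviour but adds field annotations that type checkers can see. Field types below are illustrative.

```python
import collections
from typing import NamedTuple

LegacyRef = collections.namedtuple("LegacyRef", "name original_name entity_id")


class GroupEntityReference(NamedTuple):
    """Reference to a group entity."""

    name: str
    original_name: str
    entity_id: str


legacy = LegacyRef("Kitchen", "kitchen_light", "light.kitchen")
typed = GroupEntityReference("Kitchen", "kitchen_light", "light.kitchen")
assert tuple(legacy) == tuple(typed)  # same tuple behaviour, better typing
```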

View File

@@ -0,0 +1,79 @@
"""Provides diagnostics for ZHA."""
from __future__ import annotations
import dataclasses
from typing import Any
import bellows
import pkg_resources
import zigpy
from zigpy.config import CONF_NWK_EXTENDED_PAN_ID
import zigpy_deconz
import zigpy_xbee
import zigpy_zigate
import zigpy_znp
from homeassistant.components.diagnostics.util import async_redact_data
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_UNIQUE_ID
from homeassistant.core import HomeAssistant
from homeassistant.helpers import device_registry as dr
from .core.const import ATTR_IEEE, DATA_ZHA, DATA_ZHA_CONFIG, DATA_ZHA_GATEWAY
from .core.device import ZHADevice
from .core.gateway import ZHAGateway
from .core.helpers import async_get_zha_device
KEYS_TO_REDACT = {
ATTR_IEEE,
CONF_UNIQUE_ID,
"network_key",
CONF_NWK_EXTENDED_PAN_ID,
}
def shallow_asdict(obj: Any) -> dict:
"""Return a shallow copy of a dataclass as a dict."""
if hasattr(obj, "__dataclass_fields__"):
result = {}
for field in dataclasses.fields(obj):
result[field.name] = shallow_asdict(getattr(obj, field.name))
return result
if hasattr(obj, "as_dict"):
return obj.as_dict()
return obj
async def async_get_config_entry_diagnostics(
hass: HomeAssistant, config_entry: ConfigEntry
) -> dict:
"""Return diagnostics for a config entry."""
config: dict = hass.data[DATA_ZHA][DATA_ZHA_CONFIG]
gateway: ZHAGateway = hass.data[DATA_ZHA][DATA_ZHA_GATEWAY]
return async_redact_data(
{
"config": config,
"config_entry": config_entry.as_dict(),
"application_state": shallow_asdict(gateway.application_controller.state),
"versions": {
"bellows": bellows.__version__,
"zigpy": zigpy.__version__,
"zigpy_deconz": zigpy_deconz.__version__,
"zigpy_xbee": zigpy_xbee.__version__,
"zigpy_znp": zigpy_znp.__version__,
"zigpy_zigate": zigpy_zigate.__version__,
"zhaquirks": pkg_resources.get_distribution("zha-quirks").version,
},
},
KEYS_TO_REDACT,
)
async def async_get_device_diagnostics(
hass: HomeAssistant, config_entry: ConfigEntry, device: dr.DeviceEntry
) -> dict:
"""Return diagnostics for a device."""
zha_device: ZHADevice = await async_get_zha_device(hass, device.id)
return async_redact_data(zha_device.zha_device_info, KEYS_TO_REDACT)
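
A hedged sketch of the `shallow_asdict` helper added above: unlike `dataclasses.asdict`, it only descends into dataclass fields and otherwise defers to an object's own `as_dict()`, so library objects keep control over their serialization. `redact()` is a simplified stand-in for the diagnostics helper `async_redact_data`, and `NetworkInfo` is a hypothetical example payload.

```python
from __future__ import annotations

import dataclasses
from typing import Any


def shallow_asdict(obj: Any) -> Any:
    """Convert dataclasses recursively, otherwise defer to as_dict()."""
    if dataclasses.is_dataclass(obj) and not isinstance(obj, type):
        return {
            field.name: shallow_asdict(getattr(obj, field.name))
            for field in dataclasses.fields(obj)
        }
    if hasattr(obj, "as_dict"):
        return obj.as_dict()
    return obj


def redact(data: Any, keys: set[str]) -> Any:
    """Replace sensitive values, mimicking async_redact_data."""
    if isinstance(data, dict):
        return {
            key: "**REDACTED**" if key in keys else redact(value, keys)
            for key, value in data.items()
        }
    if isinstance(data, list):
        return [redact(item, keys) for item in data]
    return data


@dataclasses.dataclass
class NetworkInfo:
    pan_id: int
    network_key: str


print(redact(shallow_asdict(NetworkInfo(0x1A62, "aa:bb")), {"network_key"}))
# -> {'pan_id': 6754, 'network_key': '**REDACTED**'}
```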

View File

@@ -7,9 +7,9 @@
"bellows==0.29.0",
"pyserial==3.5",
"pyserial-asyncio==0.6",
"zha-quirks==0.0.69",
"zigpy-deconz==0.15.0",
"zigpy==0.44.1",
"zha-quirks==0.0.72",
"zigpy-deconz==0.14.0",
"zigpy==0.44.2",
"zigpy-xbee==0.14.0",
"zigpy-zigate==0.8.0",
"zigpy-znp==0.7.0"

Some files were not shown because too many files have changed in this diff.