mirror of
https://github.com/home-assistant/core.git
synced 2026-01-05 23:35:24 +01:00
Compare commits: 158 commits, 2022.2.0b4 ... 2022.2.5
Commits (SHA1):
a96b91d120 2e6ee5165e 7dd7c1dadd 4c548af6ef 200e07b8d6 ae5a885387 bebdaacf47 339fc0a2af
f44ca5f9d5 a869c1bc88 d5443b8dee 6ec09320dd 550f80ddd2 23d2168952 c1cb0a0f8e e53227be79
c8c1543b26 715fe95abd 02cb879717 9734216215 0f06ebde06 7195372616 ac63a7e01e f08ebf5b7e
49d6048278 ceae63d457 f170aba0cc 66e076b57f 1338b347b5 9b471ab653 e90a6bbe1c aa9965675d
ad3b2f02b4 0dbe9b7cf4 b9d346baed 7791711603 fdfffcb73e 8e6bd840a4 619a52a387 a4d59aa599
4ba494f5cd 7a7f9deb89 5786f68bb7 bccfe6646e fc7ea6e1b3 058420bb2f 9695235920 57526bd21f
eff9690c8a d754ea1645 5f6214ede7 0f02ae981d 51abdf9c63 1a2e9aaaed 56d1fc6dad 5a44f8eadd
609661a862 27dbf98dae 6cf2665200 5aa02b884e 84b2ec2244 35f2536d46 e6e95a1131 ea1245f308
9cd6bb7335 4e3cd1471a 67a9932c5c 0efa276fca c6d5a0842b b004c5deb6 06b6b176db 9eeaec4f79
3babc43fa5 a670317b80 f44f1f0c4a b450a41d7b 88c3ab1113 f1c8fc241a e5b9d5baa3 3c43089cc2
c8827e00b3 3a1a12b13e 2928935838 b9a37e2c3e 92f4f99d41 e32a54eecc 2bb65ecf38 afbc55b181
931c27f452 689133976a faa8ac692e ec0b0e41a1 6550d04313 8d33964e4d d195e8a1b4 a8b29c4be9
f7ec373aab 1ae2bfcc89 1155d229f3 2f638a6b5e 580573fcb3 8851af7dba ec2e450442 dacf5957d2
5190282b4d 51c6cac74d 1809489421 690764ec84 2b0e828736 91023cf132 fcd14e2830 40a174cc70
95d4be375c 37f9c833c0 b902c59504 ba237fd383 b687f68d53 f3c39d8dca 19fff6489b 4f8752b351
03bd3f5001 055382c84c 68651be2cc 09c2c129b9 7fe1b85495 5082582769 b7c7571a39 63a90b7226
5735762af2 90127d04fa 114da0bd4f 5c3d4cb9a5 3f8d2f3102 eea9e26ef5 649b4ce329 1facd0edd4
1fbd624a24 0a000babc9 74632d26fa 87b20c6abe ea511357b6 00b2c85e98 961cf15e6e 7117395489
5dc92bb2ce 0519b29501 4f8e19ed4a fd7f66fbdc 9294319048 c1019394ed 0885d48186 13ad1cc56c
c5d68f8669 2757976a5a 73750d8a25 2eef05eb84 3446c95cd3 cdcbb87d97
@@ -27,6 +27,7 @@ omit =
     homeassistant/components/adguard/sensor.py
     homeassistant/components/adguard/switch.py
     homeassistant/components/ads/*
+    homeassistant/components/advantage_air/diagnostics.py
     homeassistant/components/aemet/weather_update_coordinator.py
     homeassistant/components/aftership/*
     homeassistant/components/agent_dvr/alarm_control_panel.py
@@ -375,6 +376,7 @@ omit =
     homeassistant/components/fritz/common.py
     homeassistant/components/fritz/const.py
     homeassistant/components/fritz/device_tracker.py
+    homeassistant/components/fritz/diagnostics.py
     homeassistant/components/fritz/sensor.py
     homeassistant/components/fritz/services.py
     homeassistant/components/fritz/switch.py
@@ -462,6 +464,7 @@ omit =
     homeassistant/components/homematic/*
     homeassistant/components/home_plus_control/api.py
     homeassistant/components/home_plus_control/switch.py
+    homeassistant/components/homewizard/diagnostics.py
     homeassistant/components/homeworks/*
     homeassistant/components/honeywell/__init__.py
     homeassistant/components/honeywell/climate.py
@@ -944,6 +947,7 @@ omit =
     homeassistant/components/sabnzbd/*
     homeassistant/components/saj/sensor.py
     homeassistant/components/samsungtv/bridge.py
+    homeassistant/components/samsungtv/diagnostics.py
     homeassistant/components/satel_integra/*
     homeassistant/components/schluter/*
     homeassistant/components/scrape/sensor.py
@@ -17,7 +17,7 @@ def timeout(
        loop = asyncio.get_running_loop()
    else:
        report(
-           "called async_timeout.timeout with loop keyword argument. The loop keyword argument is deprecated and calls will fail after Home Assistant 2022.2",
+           "called async_timeout.timeout with loop keyword argument. The loop keyword argument is deprecated and calls will fail after Home Assistant 2022.3",
            error_if_core=False,
        )
    if delay is not None:
@@ -30,7 +30,7 @@ def timeout(
 def current_task(loop: asyncio.AbstractEventLoop) -> asyncio.Task[Any] | None:
    """Backwards compatible current_task."""
    report(
-       "called async_timeout.current_task. The current_task call is deprecated and calls will fail after Home Assistant 2022.2; use asyncio.current_task instead",
+       "called async_timeout.current_task. The current_task call is deprecated and calls will fail after Home Assistant 2022.3; use asyncio.current_task instead",
        error_if_core=False,
    )
    return asyncio.current_task()
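Both report() strings above point callers at the same migration; a minimal sketch of the before/after usage, assuming the third-party async_timeout package (this example is not part of the diff):

import asyncio
import async_timeout

async def fetch_with_deadline() -> None:
    # Old style (deprecated): async_timeout.timeout(10, loop=loop)
    # New style: no loop argument; the running loop is picked up implicitly.
    async with async_timeout.timeout(10):
        await asyncio.sleep(5)

    # Old style (deprecated): async_timeout.current_task(loop)
    # New style: use asyncio directly.
    task = asyncio.current_task()
    print(task)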
@@ -17,7 +17,12 @@ from homeassistant.components.weather import (
     WeatherEntity,
 )
 from homeassistant.config_entries import ConfigEntry
-from homeassistant.const import CONF_NAME, TEMP_CELSIUS, TEMP_FAHRENHEIT
+from homeassistant.const import (
+    CONF_NAME,
+    SPEED_MILES_PER_HOUR,
+    TEMP_CELSIUS,
+    TEMP_FAHRENHEIT,
+)
 from homeassistant.core import HomeAssistant
 from homeassistant.helpers.device_registry import DeviceEntryType
 from homeassistant.helpers.entity import DeviceInfo
@@ -62,6 +67,13 @@ class AccuWeatherEntity(CoordinatorEntity, WeatherEntity):
         """Initialize."""
         super().__init__(coordinator)
         self._unit_system = API_METRIC if coordinator.is_metric else API_IMPERIAL
+        wind_speed_unit = self.coordinator.data["Wind"]["Speed"][self._unit_system][
+            "Unit"
+        ]
+        if wind_speed_unit == "mi/h":
+            self._attr_wind_speed_unit = SPEED_MILES_PER_HOUR
+        else:
+            self._attr_wind_speed_unit = wind_speed_unit
         self._attr_name = name
         self._attr_unique_id = coordinator.location_key
         self._attr_temperature_unit = (
@@ -80,8 +80,8 @@ class AdGuardHomeFlowHandler(ConfigFlow, domain=DOMAIN):
             adguard = AdGuardHome(
                 user_input[CONF_HOST],
                 port=user_input[CONF_PORT],
-                username=username,  # type:ignore[arg-type]
-                password=password,  # type:ignore[arg-type]
+                username=username,
+                password=password,
                 tls=user_input[CONF_SSL],
                 verify_ssl=user_input[CONF_VERIFY_SSL],
                 session=session,

@@ -3,7 +3,7 @@
   "name": "AdGuard Home",
   "config_flow": true,
   "documentation": "https://www.home-assistant.io/integrations/adguard",
-  "requirements": ["adguardhome==0.5.0"],
+  "requirements": ["adguardhome==0.5.1"],
   "codeowners": ["@frenck"],
   "iot_class": "local_polling"
 }
homeassistant/components/advantage_air/diagnostics.py (new file, 25 lines)
@@ -0,0 +1,25 @@
+"""Provides diagnostics for Advantage Air."""
+from __future__ import annotations
+
+from typing import Any
+
+from homeassistant.components.diagnostics import async_redact_data
+from homeassistant.config_entries import ConfigEntry
+from homeassistant.core import HomeAssistant
+
+from .const import DOMAIN as ADVANTAGE_AIR_DOMAIN
+
+TO_REDACT = ["dealerPhoneNumber", "latitude", "logoPIN", "longitude", "postCode"]
+
+
+async def async_get_config_entry_diagnostics(
+    hass: HomeAssistant, config_entry: ConfigEntry
+) -> dict[str, Any]:
+    """Return diagnostics for a config entry."""
+    data = hass.data[ADVANTAGE_AIR_DOMAIN][config_entry.entry_id]["coordinator"].data
+
+    # Return only the relevant children
+    return {
+        "aircons": data["aircons"],
+        "system": async_redact_data(data["system"], TO_REDACT),
+    }
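For context on the new file above: async_redact_data replaces the values of the listed keys. A small illustrative sketch with hypothetical input data (assuming the diagnostics helper substitutes its standard **REDACTED** placeholder):

from homeassistant.components.diagnostics import async_redact_data

# Hypothetical sample payload, not real coordinator data.
system = {"name": "MyPlace", "postCode": "2000", "logoPIN": "1234"}
print(async_redact_data(system, ["postCode", "logoPIN"]))
# Expected shape: {"name": "MyPlace", "postCode": "**REDACTED**", "logoPIN": "**REDACTED**"}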
@@ -2,7 +2,7 @@
   "domain": "aladdin_connect",
   "name": "Aladdin Connect",
   "documentation": "https://www.home-assistant.io/integrations/aladdin_connect",
-  "requirements": ["aladdin_connect==0.3"],
+  "requirements": ["aladdin_connect==0.4"],
   "codeowners": [],
   "iot_class": "cloud_polling"
 }
@@ -1,6 +1,8 @@
 """Alexa related errors."""
 from __future__ import annotations

+from typing import Literal
+
 from homeassistant.exceptions import HomeAssistantError

 from .const import API_TEMP_UNITS
@@ -58,6 +60,30 @@ class AlexaInvalidValueError(AlexaError):
     error_type = "INVALID_VALUE"


+class AlexaInteralError(AlexaError):
+    """Class to represent internal errors."""
+
+    namespace = "Alexa"
+    error_type = "INTERNAL_ERROR"
+
+
+class AlexaNotSupportedInCurrentMode(AlexaError):
+    """The device is not in the correct mode to support this command."""
+
+    namespace = "Alexa"
+    error_type = "NOT_SUPPORTED_IN_CURRENT_MODE"
+
+    def __init__(
+        self,
+        endpoint_id: str,
+        current_mode: Literal["COLOR", "ASLEEP", "NOT_PROVISIONED", "OTHER"],
+    ) -> None:
+        """Initialize invalid endpoint error."""
+        msg = f"Not supported while in {current_mode} mode"
+        AlexaError.__init__(self, msg, {"currentDeviceMode": current_mode})
+        self.endpoint_id = endpoint_id
+
+
 class AlexaUnsupportedThermostatModeError(AlexaError):
     """Class to represent UnsupportedThermostatMode errors."""
@@ -212,20 +212,14 @@ async def async_api_adjust_brightness(hass, config, directive, context):
     entity = directive.entity
     brightness_delta = int(directive.payload["brightnessDelta"])

-    # read current state
-    try:
-        current = math.floor(
-            int(entity.attributes.get(light.ATTR_BRIGHTNESS)) / 255 * 100
-        )
-    except ZeroDivisionError:
-        current = 0
-
-    # set brightness
-    brightness = max(0, brightness_delta + current)
     await hass.services.async_call(
         entity.domain,
         SERVICE_TURN_ON,
-        {ATTR_ENTITY_ID: entity.entity_id, light.ATTR_BRIGHTNESS_PCT: brightness},
+        {
+            ATTR_ENTITY_ID: entity.entity_id,
+            light.ATTR_BRIGHTNESS_STEP_PCT: brightness_delta,
+        },
        blocking=False,
        context=context,
    )
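The rewrite above stops computing an absolute brightness from the current state and instead passes the relative delta straight through, letting the light integration apply and clamp it. A minimal sketch of the resulting service call, with a hypothetical entity ID:

from homeassistant.core import HomeAssistant

async def dim_a_little(hass: HomeAssistant) -> None:
    # Relative adjustment: light.turn_on clamps the result itself,
    # so no read-modify-write of the current brightness is needed.
    await hass.services.async_call(
        "light",
        "turn_on",
        {"entity_id": "light.living_room", "brightness_step_pct": -10},
        blocking=False,
    )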
@@ -48,8 +48,18 @@ async def async_handle_message(hass, config, request, context=None, enabled=True
         response = directive.error()
     except AlexaError as err:
         response = directive.error(
-            error_type=err.error_type, error_message=err.error_message
+            error_type=err.error_type,
+            error_message=err.error_message,
+            payload=err.payload,
         )
+    except Exception:  # pylint: disable=broad-except
+        _LOGGER.exception(
+            "Uncaught exception processing Alexa %s/%s request (%s)",
+            directive.namespace,
+            directive.name,
+            directive.entity_id or "-",
+        )
+        response = directive.error(error_message="Unknown error")

     request_info = {"namespace": directive.namespace, "name": directive.name}

@@ -515,8 +515,8 @@ class AmcrestCam(Camera):
         max_tries = 3
         for tries in range(max_tries, 0, -1):
             try:
-                await getattr(self, f"_set_{func}")(value)
-                new_value = await getattr(self, f"_get_{func}")()
+                await getattr(self, f"_async_set_{func}")(value)
+                new_value = await getattr(self, f"_async_get_{func}")()
                 if new_value != value:
                     raise AmcrestCommandFailed
             except (AmcrestError, AmcrestCommandFailed) as error:

@@ -2,7 +2,7 @@
   "domain": "amcrest",
   "name": "Amcrest",
   "documentation": "https://www.home-assistant.io/integrations/amcrest",
-  "requirements": ["amcrest==1.9.3"],
+  "requirements": ["amcrest==1.9.4"],
   "dependencies": ["ffmpeg"],
   "codeowners": ["@flacjacket"],
   "iot_class": "local_polling"
@@ -18,6 +18,7 @@ from homeassistant.const import (
 from homeassistant.core import HomeAssistant
 from homeassistant.exceptions import ConfigEntryNotReady
 from homeassistant.helpers import entity_registry as er
+from homeassistant.helpers.device_registry import format_mac
 from homeassistant.helpers.dispatcher import async_dispatcher_send
 from homeassistant.helpers.storage import STORAGE_DIR
 from homeassistant.helpers.typing import ConfigType
@@ -33,16 +34,30 @@ from .const import (
     DEVICE_ANDROIDTV,
     DEVICE_FIRETV,
     DOMAIN,
+    PROP_ETHMAC,
     PROP_SERIALNO,
+    PROP_WIFIMAC,
     SIGNAL_CONFIG_ENTITY,
 )

 PLATFORMS = [Platform.MEDIA_PLAYER]
 RELOAD_OPTIONS = [CONF_STATE_DETECTION_RULES]

+_INVALID_MACS = {"ff:ff:ff:ff:ff:ff"}
+
 _LOGGER = logging.getLogger(__name__)


+def get_androidtv_mac(dev_props):
+    """Return formatted mac from device properties."""
+    for prop_mac in (PROP_ETHMAC, PROP_WIFIMAC):
+        if if_mac := dev_props.get(prop_mac):
+            mac = format_mac(if_mac)
+            if mac not in _INVALID_MACS:
+                return mac
+    return None
+
+
 def _setup_androidtv(hass, config):
     """Generate an ADB key (if needed) and load it."""
     adbkey = config.get(CONF_ADBKEY, hass.config.path(STORAGE_DIR, "androidtv_adbkey"))
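A short illustration of the new helper's behavior; the property values are hypothetical, and the broadcast MAC is rejected because it is listed in _INVALID_MACS:

from homeassistant.components.androidtv import get_androidtv_mac
from homeassistant.components.androidtv.const import PROP_ETHMAC, PROP_WIFIMAC

# Ethernet MAC is the invalid broadcast placeholder, so the Wi-Fi MAC wins.
props = {PROP_ETHMAC: "ff:ff:ff:ff:ff:ff", PROP_WIFIMAC: "AA:BB:CC:DD:EE:01"}
assert get_androidtv_mac(props) == "aa:bb:cc:dd:ee:01"  # format_mac lowercases

# No usable interface MAC at all -> None instead of a bogus unique_id.
assert get_androidtv_mac({PROP_ETHMAC: "ff:ff:ff:ff:ff:ff"}) is None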
@@ -11,9 +11,8 @@ from homeassistant import config_entries
 from homeassistant.const import CONF_DEVICE_CLASS, CONF_HOST, CONF_NAME, CONF_PORT
 from homeassistant.core import callback
 from homeassistant.helpers import config_validation as cv
-from homeassistant.helpers.device_registry import format_mac

-from . import async_connect_androidtv
+from . import async_connect_androidtv, get_androidtv_mac
 from .const import (
     CONF_ADB_SERVER_IP,
     CONF_ADB_SERVER_PORT,
@@ -124,9 +123,15 @@ class AndroidTVFlowHandler(config_entries.ConfigFlow, domain=DOMAIN):
             return RESULT_CONN_ERROR, None

         dev_prop = aftv.device_properties
-        unique_id = format_mac(
-            dev_prop.get(PROP_ETHMAC) or dev_prop.get(PROP_WIFIMAC, "")
+        _LOGGER.info(
+            "Android TV at %s: %s = %r, %s = %r",
+            user_input[CONF_HOST],
+            PROP_ETHMAC,
+            dev_prop.get(PROP_ETHMAC),
+            PROP_WIFIMAC,
+            dev_prop.get(PROP_WIFIMAC),
         )
+        unique_id = get_androidtv_mac(dev_prop)
         await aftv.adb_close()
         return None, unique_id

@@ -4,7 +4,7 @@
   "documentation": "https://www.home-assistant.io/integrations/androidtv",
   "requirements": [
     "adb-shell[async]==0.4.0",
-    "androidtv[async]==0.0.60",
+    "androidtv[async]==0.0.63",
     "pure-python-adb[async]==0.3.0.dev0"
   ],
   "codeowners": ["@JeffLIrion", "@ollo69"],
@@ -51,12 +51,13 @@ from homeassistant.const import (
 )
 from homeassistant.core import HomeAssistant
 from homeassistant.helpers import config_validation as cv, entity_platform
-from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC, format_mac
+from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC
 from homeassistant.helpers.dispatcher import async_dispatcher_connect
 from homeassistant.helpers.entity import DeviceInfo
 from homeassistant.helpers.entity_platform import AddEntitiesCallback
 from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType

+from . import get_androidtv_mac
 from .const import (
     ANDROID_DEV,
     ANDROID_DEV_OPT,
@@ -80,8 +81,6 @@ from .const import (
     DEVICE_ANDROIDTV,
     DEVICE_CLASSES,
     DOMAIN,
-    PROP_ETHMAC,
-    PROP_WIFIMAC,
     SIGNAL_CONFIG_ENTITY,
 )
@@ -343,7 +342,7 @@ class ADBDevice(MediaPlayerEntity):
             self._attr_device_info[ATTR_MANUFACTURER] = manufacturer
         if sw_version := info.get(ATTR_SW_VERSION):
             self._attr_device_info[ATTR_SW_VERSION] = sw_version
-        if mac := format_mac(info.get(PROP_ETHMAC) or info.get(PROP_WIFIMAC, "")):
+        if mac := get_androidtv_mac(info):
             self._attr_device_info[ATTR_CONNECTIONS] = {(CONNECTION_NETWORK_MAC, mac)}

         self._app_id_to_name = {}
@@ -179,7 +179,6 @@ class AppleTVManager:
     def _handle_disconnect(self):
         """Handle that the device disconnected and restart connect loop."""
         if self.atv:
-            self.atv.listener = None
             self.atv.close()
             self.atv = None
         self._dispatch_send(SIGNAL_DISCONNECTED)
@@ -196,8 +195,6 @@ class AppleTVManager:
         self._is_on = False
         try:
             if self.atv:
-                self.atv.push_updater.listener = None
-                self.atv.push_updater.stop()
                 self.atv.close()
                 self.atv = None
             if self._task:

@@ -162,15 +162,15 @@ class AppleTvMediaPlayer(AppleTVEntity, MediaPlayerEntity):
         except exceptions.ProtocolError:
             _LOGGER.exception("Failed to update app list")
         else:
-            self._app_list = {app.name: app.identifier for app in apps}
+            self._app_list = {
+                app.name: app.identifier
+                for app in sorted(apps, key=lambda app: app.name.lower())
+            }
             self.async_write_ha_state()

     @callback
     def async_device_disconnected(self):
         """Handle when connection was lost to device."""
-        self.atv.push_updater.stop()
-        self.atv.push_updater.listener = None
-        self.atv.power.listener = None
         self._attr_supported_features = SUPPORT_APPLE_TV

     @property
@@ -43,7 +43,9 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
         return await async_setup_august(hass, entry, august_gateway)
     except (RequireValidation, InvalidAuth) as err:
         raise ConfigEntryAuthFailed from err
-    except (ClientResponseError, CannotConnect, asyncio.TimeoutError) as err:
+    except asyncio.TimeoutError as err:
+        raise ConfigEntryNotReady("Timed out connecting to august api") from err
+    except (AugustApiAIOHTTPError, ClientResponseError, CannotConnect) as err:
         raise ConfigEntryNotReady from err
@@ -141,15 +143,34 @@ class AugustData(AugustSubscriberMixin):
         self._pubnub_unsub = async_create_pubnub(user_data["UserID"], pubnub)

         if self._locks_by_id:
-            tasks = []
-            for lock_id in self._locks_by_id:
-                detail = self._device_detail_by_id[lock_id]
-                tasks.append(
-                    self.async_status_async(
-                        lock_id, bool(detail.bridge and detail.bridge.hyper_bridge)
-                    )
-                )
-            await asyncio.gather(*tasks)
+            # Do not prevent setup as the sync can timeout
+            # but it is not a fatal error as the lock
+            # will recover automatically when it comes back online.
+            asyncio.create_task(self._async_initial_sync())
+
+    async def _async_initial_sync(self):
+        """Attempt to request an initial sync."""
+        # We don't care if this fails because we only want to wake
+        # locks that are actually online anyways and they will be
+        # awake when they come back online
+        for result in await asyncio.gather(
+            *[
+                self.async_status_async(
+                    device_id, bool(detail.bridge and detail.bridge.hyper_bridge)
+                )
+                for device_id, detail in self._device_detail_by_id.items()
+                if device_id in self._locks_by_id
+            ],
+            return_exceptions=True,
+        ):
+            if isinstance(result, Exception) and not isinstance(
+                result, (asyncio.TimeoutError, ClientResponseError, CannotConnect)
+            ):
+                _LOGGER.warning(
+                    "Unexpected exception during initial sync: %s",
+                    result,
+                    exc_info=result,
+                )

     @callback
     def async_pubnub_message(self, device_id, date_time, message):
@@ -185,12 +206,28 @@ class AugustData(AugustSubscriberMixin):
         await self._async_refresh_device_detail_by_ids(self._subscriptions.keys())

     async def _async_refresh_device_detail_by_ids(self, device_ids_list):
-        await asyncio.gather(
-            *(
-                self._async_refresh_device_detail_by_id(device_id)
-                for device_id in device_ids_list
-            )
-        )
+        """Refresh each device in sequence.
+
+        This used to be a gather but it was less reliable with august's
+        recent api changes.
+
+        The august api has been timing out for some devices so
+        we want the ones that it isn't timing out for to keep working.
+        """
+        for device_id in device_ids_list:
+            try:
+                await self._async_refresh_device_detail_by_id(device_id)
+            except asyncio.TimeoutError:
+                _LOGGER.warning(
+                    "Timed out calling august api during refresh of device: %s",
+                    device_id,
+                )
+            except (ClientResponseError, CannotConnect) as err:
+                _LOGGER.warning(
+                    "Error from august api during refresh of device: %s",
+                    device_id,
+                    exc_info=err,
+                )

     async def _async_refresh_device_detail_by_id(self, device_id):
         if device_id in self._locks_by_id:

@@ -4,7 +4,7 @@ from datetime import timedelta

 from homeassistant.const import Platform

-DEFAULT_TIMEOUT = 10
+DEFAULT_TIMEOUT = 15

 CONF_ACCESS_TOKEN_CACHE_FILE = "access_token_cache_file"
 CONF_LOGIN_METHOD = "login_method"

@@ -2,7 +2,7 @@
   "domain": "august",
   "name": "August",
   "documentation": "https://www.home-assistant.io/integrations/august",
-  "requirements": ["yalexs==1.1.19"],
+  "requirements": ["yalexs==1.1.20"],
   "codeowners": ["@bdraco"],
   "dhcp": [
     {
@@ -3,7 +3,7 @@
   "name": "Bosch SHC",
   "config_flow": true,
   "documentation": "https://www.home-assistant.io/integrations/bosch_shc",
-  "requirements": ["boschshcpy==0.2.28"],
+  "requirements": ["boschshcpy==0.2.29"],
   "zeroconf": [{ "type": "_http._tcp.local.", "name": "bosch shc*" }],
   "iot_class": "local_push",
   "codeowners": ["@tschamm"],

@@ -113,8 +113,9 @@ class ButtonEntity(RestoreEntity):
         self.async_write_ha_state()
         await self.async_press()

-    async def async_added_to_hass(self) -> None:
+    async def async_internal_added_to_hass(self) -> None:
         """Call when the button is added to hass."""
+        await super().async_internal_added_to_hass()
         state = await self.async_get_last_state()
         if state is not None and state.state is not None:
             self.__last_pressed = dt_util.parse_datetime(state.state)

@@ -232,7 +232,11 @@ class WebDavCalendarData:
                 new_events.append(new_event)
             elif _start_of_tomorrow <= start_dt:
                 break
-        vevents = [event.instance.vevent for event in results + new_events]
+        vevents = [
+            event.instance.vevent
+            for event in results + new_events
+            if hasattr(event.instance, "vevent")
+        ]

         # dtstart can be a date or datetime depending if the event lasts a
         # whole day. Convert everything to datetime to be able to sort it
@@ -1,12 +1,21 @@
 """Component to embed Google Cast."""
-import logging
+from __future__ import annotations

+import logging
+from typing import Protocol
+
+from pychromecast import Chromecast
 import voluptuous as vol

+from homeassistant.components.media_player import BrowseMedia
 from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry
 from homeassistant.const import Platform
 from homeassistant.core import HomeAssistant
+from homeassistant.exceptions import HomeAssistantError
 from homeassistant.helpers import config_validation as cv
+from homeassistant.helpers.integration_platform import (
+    async_process_integration_platforms,
+)
 from homeassistant.helpers.typing import ConfigType

 from . import home_assistant_cast
@@ -49,9 +58,58 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
     """Set up Cast from a config entry."""
     await home_assistant_cast.async_setup_ha_cast(hass, entry)
     hass.config_entries.async_setup_platforms(entry, PLATFORMS)
+    hass.data[DOMAIN] = {}
+    await async_process_integration_platforms(hass, DOMAIN, _register_cast_platform)
     return True


+class CastProtocol(Protocol):
+    """Define the format of cast platforms."""
+
+    async def async_get_media_browser_root_object(
+        self, hass: HomeAssistant, cast_type: str
+    ) -> list[BrowseMedia]:
+        """Create a list of root objects for media browsing."""
+
+    async def async_browse_media(
+        self,
+        hass: HomeAssistant,
+        media_content_type: str,
+        media_content_id: str,
+        cast_type: str,
+    ) -> BrowseMedia | None:
+        """Browse media.
+
+        Return a BrowseMedia object or None if the media does not belong to this platform.
+        """
+
+    async def async_play_media(
+        self,
+        hass: HomeAssistant,
+        cast_entity_id: str,
+        chromecast: Chromecast,
+        media_type: str,
+        media_id: str,
+    ) -> bool:
+        """Play media.
+
+        Return True if the media is played by the platform, False if not.
+        """
+
+
+async def _register_cast_platform(
+    hass: HomeAssistant, integration_domain: str, platform: CastProtocol
+):
+    """Register a cast platform."""
+    if (
+        not hasattr(platform, "async_get_media_browser_root_object")
+        or not hasattr(platform, "async_browse_media")
+        or not hasattr(platform, "async_play_media")
+    ):
+        raise HomeAssistantError(f"Invalid cast platform {platform}")
+    hass.data[DOMAIN][integration_domain] = platform


 async def async_remove_entry(hass: HomeAssistant, entry: ConfigEntry) -> None:
     """Remove Home Assistant Cast user."""
     await home_assistant_cast.async_remove_user(hass, entry)
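The hasattr checks above mean a cast platform is, in effect, any module providing these three coroutines. A minimal sketch of such a module, assuming it lives at the integration's cast.py; the "example" content type, IDs, and titles are hypothetical:

from __future__ import annotations

from pychromecast import Chromecast

from homeassistant.components.media_player import BrowseMedia
from homeassistant.core import HomeAssistant


async def async_get_media_browser_root_object(
    hass: HomeAssistant, cast_type: str
) -> list[BrowseMedia]:
    """Offer one root entry in the cast media browser."""
    return [
        BrowseMedia(
            title="Example Library",
            media_class="app",
            media_content_id="",
            media_content_type="example",
            can_play=False,
            can_expand=True,
        )
    ]


async def async_browse_media(
    hass: HomeAssistant,
    media_content_type: str,
    media_content_id: str,
    cast_type: str,
) -> BrowseMedia | None:
    """Claim only our own content type; returning None lets other platforms try."""
    if media_content_type != "example":
        return None
    return BrowseMedia(
        title="Example item",
        media_class="music",
        media_content_id="example://song-1",
        media_content_type="example",
        can_play=True,
        can_expand=False,
    )


async def async_play_media(
    hass: HomeAssistant,
    cast_entity_id: str,
    chromecast: Chromecast,
    media_type: str,
    media_id: str,
) -> bool:
    """Return True only when this platform handled the request."""
    if not media_id.startswith("example://"):
        return False
    # ... start playback on `chromecast` here ...
    return True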
@@ -11,7 +11,6 @@ from urllib.parse import quote
 import pychromecast
 from pychromecast.controllers.homeassistant import HomeAssistantController
 from pychromecast.controllers.multizone import MultizoneManager
-from pychromecast.controllers.plex import PlexController
 from pychromecast.controllers.receiver import VOLUME_CONTROL_TYPE_FIXED
 from pychromecast.quick_play import quick_play
 from pychromecast.socket_client import (
@@ -20,7 +19,7 @@ from pychromecast.socket_client import (
 )
 import voluptuous as vol

-from homeassistant.components import media_source, plex, zeroconf
+from homeassistant.components import media_source, zeroconf
 from homeassistant.components.http.auth import async_sign_path
 from homeassistant.components.media_player import (
     BrowseError,
@@ -29,7 +28,6 @@ from homeassistant.components.media_player import (
 )
 from homeassistant.components.media_player.const import (
     ATTR_MEDIA_EXTRA,
-    MEDIA_CLASS_APP,
     MEDIA_CLASS_DIRECTORY,
     MEDIA_TYPE_MOVIE,
     MEDIA_TYPE_MUSIC,
@@ -47,13 +45,12 @@ from homeassistant.components.media_player.const import (
     SUPPORT_VOLUME_MUTE,
     SUPPORT_VOLUME_SET,
 )
-from homeassistant.components.plex.const import PLEX_URI_SCHEME
-from homeassistant.components.plex.services import lookup_plex_media
 from homeassistant.config_entries import ConfigEntry
 from homeassistant.const import (
     CAST_APP_ID_HOMEASSISTANT_LOVELACE,
     EVENT_HOMEASSISTANT_STOP,
     STATE_IDLE,
     STATE_OFF,
     STATE_PAUSED,
     STATE_PLAYING,
 )
@@ -463,21 +460,15 @@ class CastDevice(MediaPlayerEntity):
     async def _async_root_payload(self, content_filter):
         """Generate root node."""
         children = []
-        # Add external sources
-        if "plex" in self.hass.config.components:
-            children.append(
-                BrowseMedia(
-                    title="Plex",
-                    media_class=MEDIA_CLASS_APP,
-                    media_content_id="",
-                    media_content_type="plex",
-                    thumbnail="https://brands.home-assistant.io/_/plex/logo.png",
-                    can_play=False,
-                    can_expand=True,
-                )
-            )
+        # Add media browsers
+        for platform in self.hass.data[CAST_DOMAIN].values():
+            children.extend(
+                await platform.async_get_media_browser_root_object(
+                    self.hass, self._chromecast.cast_type
+                )
+            )

-        # Add local media source
+        # Add media sources
         try:
             result = await media_source.async_browse_media(
                 self.hass, None, content_filter=content_filter
@@ -508,7 +499,10 @@ class CastDevice(MediaPlayerEntity):
         """Implement the websocket media browsing helper."""
         content_filter = None

-        if self._chromecast.cast_type == pychromecast.const.CAST_TYPE_AUDIO:
+        if self._chromecast.cast_type in (
+            pychromecast.const.CAST_TYPE_AUDIO,
+            pychromecast.const.CAST_TYPE_GROUP,
+        ):

             def audio_content_filter(item):
                 """Filter non audio content."""
@@ -519,14 +513,15 @@ class CastDevice(MediaPlayerEntity):
         if media_content_id is None:
             return await self._async_root_payload(content_filter)

-        if plex.is_plex_media_id(media_content_id):
-            return await plex.async_browse_media(
-                self.hass, media_content_type, media_content_id, platform=CAST_DOMAIN
-            )
-        if media_content_type == "plex":
-            return await plex.async_browse_media(
-                self.hass, None, None, platform=CAST_DOMAIN
-            )
+        for platform in self.hass.data[CAST_DOMAIN].values():
+            browse_media = await platform.async_browse_media(
+                self.hass,
+                media_content_type,
+                media_content_id,
+                self._chromecast.cast_type,
+            )
+            if browse_media:
+                return browse_media

         return await media_source.async_browse_media(
             self.hass, media_content_id, content_filter=content_filter
@@ -556,7 +551,7 @@ class CastDevice(MediaPlayerEntity):
         extra = kwargs.get(ATTR_MEDIA_EXTRA, {})
         metadata = extra.get("metadata")

-        # We do not want this to be forwarded to a group
+        # Handle media supported by a known cast app
         if media_type == CAST_DOMAIN:
             try:
                 app_data = json.loads(media_id)
@@ -588,23 +583,21 @@ class CastDevice(MediaPlayerEntity):
                 )
             except NotImplementedError:
                 _LOGGER.error("App %s not supported", app_name)
-                return
+            return

-        # Handle plex
-        elif media_id and media_id.startswith(PLEX_URI_SCHEME):
-            media_id = media_id[len(PLEX_URI_SCHEME) :]
-            media = await self.hass.async_add_executor_job(
-                lookup_plex_media, self.hass, media_type, media_id
-            )
-            if media is None:
-                return
-            controller = PlexController()
-            self._chromecast.register_handler(controller)
-            await self.hass.async_add_executor_job(controller.play_media, media)
-        else:
-            app_data = {"media_id": media_id, "media_type": media_type, **extra}
-            await self.hass.async_add_executor_job(
-                quick_play, self._chromecast, "default_media_receiver", app_data
-            )
+        # Try the cast platforms
+        for platform in self.hass.data[CAST_DOMAIN].values():
+            result = await platform.async_play_media(
+                self.hass, self.entity_id, self._chromecast, media_type, media_id
+            )
+            if result:
+                return
+
+        # Default to play with the default media receiver
+        app_data = {"media_id": media_id, "media_type": media_type, **extra}
+        await self.hass.async_add_executor_job(
+            quick_play, self._chromecast, "default_media_receiver", app_data
+        )

     def _media_status(self):
         """
@@ -644,7 +637,7 @@ class CastDevice(MediaPlayerEntity):
                 return STATE_PLAYING
             return STATE_IDLE
         if self._chromecast is not None and self._chromecast.is_idle:
             return STATE_IDLE
-        return STATE_OFF
+        return None

     @property
@@ -1,5 +1,4 @@
 """Provide configuration end points for Automations."""
-from collections import OrderedDict
 import uuid

 from homeassistant.components.automation.config import (
@@ -52,7 +51,18 @@ class EditAutomationConfigView(EditIdBasedConfigView):

     def _write_value(self, hass, data, config_key, new_value):
         """Set value."""
-        index = None
+        updated_value = {CONF_ID: config_key}
+
+        # Iterate through some keys that we want to have ordered in the output
+        for key in ("alias", "description", "trigger", "condition", "action"):
+            if key in new_value:
+                updated_value[key] = new_value[key]
+
+        # We cover all current fields above, but just in case we start
+        # supporting more fields in the future.
+        updated_value.update(new_value)
+
+        updated = False
         for index, cur_value in enumerate(data):
             # When people copy paste their automations to the config file,
             # they sometimes forget to add IDs. Fix it here.
@@ -60,23 +70,8 @@ class EditAutomationConfigView(EditIdBasedConfigView):
                 cur_value[CONF_ID] = uuid.uuid4().hex

             elif cur_value[CONF_ID] == config_key:
-                break
-        else:
-            cur_value = OrderedDict()
-            cur_value[CONF_ID] = config_key
-            index = len(data)
-            data.append(cur_value)
-
-        # Iterate through some keys that we want to have ordered in the output
-        updated_value = OrderedDict()
-        for key in ("id", "alias", "description", "trigger", "condition", "action"):
-            if key in cur_value:
-                updated_value[key] = cur_value[key]
-            if key in new_value:
-                updated_value[key] = new_value[key]
-
-        # We cover all current fields above, but just in case we start
-        # supporting more fields in the future.
-        updated_value.update(cur_value)
-        updated_value.update(new_value)
-        data[index] = updated_value
+                data[index] = updated_value
+                updated = True
+
+        if not updated:
+            data.append(updated_value)

@@ -3,6 +3,7 @@ from __future__ import annotations

 from http import HTTPStatus

+from aiohttp import web
 import aiohttp.web_exceptions
 import voluptuous as vol

@@ -11,7 +12,7 @@ from homeassistant.auth.permissions.const import CAT_CONFIG_ENTRIES, POLICY_EDIT
 from homeassistant.components import websocket_api
 from homeassistant.components.http import HomeAssistantView
 from homeassistant.core import HomeAssistant, callback
-from homeassistant.exceptions import Unauthorized
+from homeassistant.exceptions import DependencyError, Unauthorized
 from homeassistant.helpers.data_entry_flow import (
     FlowManagerIndexView,
     FlowManagerResourceView,
@@ -127,7 +128,13 @@ class ConfigManagerFlowIndexView(FlowManagerIndexView):
             raise Unauthorized(perm_category=CAT_CONFIG_ENTRIES, permission="add")

         # pylint: disable=no-value-for-parameter
-        return await super().post(request)
+        try:
+            return await super().post(request)
+        except DependencyError as exc:
+            return web.Response(
+                text=f"Failed dependencies {', '.join(exc.failed_dependencies)}",
+                status=HTTPStatus.BAD_REQUEST,
+            )

     def _prepare_result_json(self, result):
         """Convert result to JSON."""

@@ -62,6 +62,9 @@ async def websocket_update_device(hass, connection, msg):
     msg.pop("type")
     msg_id = msg.pop("id")

+    if msg.get("disabled_by") is not None:
+        msg["disabled_by"] = DeviceEntryDisabler(msg["disabled_by"])
+
     entry = registry.async_update_device(**msg)

     connection.send_message(websocket_api.result_message(msg_id, _entry_dict(entry)))

@@ -47,8 +47,8 @@ class EditSceneConfigView(EditIdBasedConfigView):

     def _write_value(self, hass, data, config_key, new_value):
         """Set value."""
-        # Iterate through some keys that we want to have ordered in the output
         updated_value = {CONF_ID: config_key}
+        # Iterate through some keys that we want to have ordered in the output
         for key in ("name", "entities"):
             if key in new_value:
                 updated_value[key] = new_value[key]
@@ -7,7 +7,6 @@
     "cloud",
     "counter",
     "dhcp",
-    "diagnostics",
     "energy",
     "frontend",
     "history",

@@ -179,6 +179,7 @@ class WatcherBase:
             lowercase_hostname,
         )

+        matched_domains = set()
         for entry in self._integration_matchers:
             if MAC_ADDRESS in entry and not fnmatch.fnmatch(
                 uppercase_mac, entry[MAC_ADDRESS]
@@ -191,6 +192,11 @@ class WatcherBase:
                 continue

             _LOGGER.debug("Matched %s against %s", data, entry)
+            if entry["domain"] in matched_domains:
+                # Only match once per domain
+                continue
+
+            matched_domains.add(entry["domain"])
             discovery_flow.async_create_flow(
                 self.hass,
                 entry["domain"],
@@ -33,6 +33,13 @@ DATA_SCHEMA = vol.Schema(
         vol.Required(CONF_HOSTNAME, default=DEFAULT_HOSTNAME): cv.string,
     }
 )
+DATA_SCHEMA_ADV = vol.Schema(
+    {
+        vol.Required(CONF_HOSTNAME, default=DEFAULT_HOSTNAME): cv.string,
+        vol.Optional(CONF_RESOLVER, default=DEFAULT_RESOLVER): cv.string,
+        vol.Optional(CONF_RESOLVER_IPV6, default=DEFAULT_RESOLVER_IPV6): cv.string,
+    }
+)


 async def async_validate_hostname(
@@ -94,8 +101,8 @@ class DnsIPConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):

             hostname = user_input[CONF_HOSTNAME]
             name = DEFAULT_NAME if hostname == DEFAULT_HOSTNAME else hostname
-            resolver = DEFAULT_RESOLVER
-            resolver_ipv6 = DEFAULT_RESOLVER_IPV6
+            resolver = user_input.get(CONF_RESOLVER, DEFAULT_RESOLVER)
+            resolver_ipv6 = user_input.get(CONF_RESOLVER_IPV6, DEFAULT_RESOLVER_IPV6)

             validate = await async_validate_hostname(hostname, resolver, resolver_ipv6)
@@ -110,13 +117,21 @@ class DnsIPConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
                 data={
                     CONF_HOSTNAME: hostname,
                     CONF_NAME: name,
-                    CONF_RESOLVER: resolver,
-                    CONF_RESOLVER_IPV6: resolver_ipv6,
                     CONF_IPV4: validate[CONF_IPV4],
                     CONF_IPV6: validate[CONF_IPV6],
                 },
+                options={
+                    CONF_RESOLVER: resolver,
+                    CONF_RESOLVER_IPV6: resolver_ipv6,
+                },
             )

+        if self.show_advanced_options is True:
+            return self.async_show_form(
+                step_id="user",
+                data_schema=DATA_SCHEMA_ADV,
+                errors=errors,
+            )
         return self.async_show_form(
             step_id="user",
             data_schema=DATA_SCHEMA,

@@ -79,10 +79,8 @@ async def async_setup_entry(
     hostname = entry.data[CONF_HOSTNAME]
     name = entry.data[CONF_NAME]

-    resolver_ipv4 = entry.options.get(CONF_RESOLVER, entry.data[CONF_RESOLVER])
-    resolver_ipv6 = entry.options.get(
-        CONF_RESOLVER_IPV6, entry.data[CONF_RESOLVER_IPV6]
-    )
+    resolver_ipv4 = entry.options[CONF_RESOLVER]
+    resolver_ipv6 = entry.options[CONF_RESOLVER_IPV6]
     entities = []
     if entry.data[CONF_IPV4]:
         entities.append(WanIpSensor(name, hostname, resolver_ipv4, False))

@@ -3,7 +3,9 @@
     "step": {
       "user": {
         "data": {
-          "hostname": "The hostname for which to perform the DNS query"
+          "hostname": "The hostname for which to perform the DNS query",
+          "resolver": "Resolver for IPV4 lookup",
+          "resolver_ipv6": "Resolver for IPV6 lookup"
         }
       }
     },

@@ -6,7 +6,9 @@
     "step": {
       "user": {
         "data": {
-          "hostname": "The hostname for which to perform the DNS query"
+          "hostname": "The hostname for which to perform the DNS query",
+          "resolver": "Resolver for IPV4 lookup",
+          "resolver_ipv6": "Resolver for IPV6 lookup"
        }
      }
    }

@@ -2,7 +2,7 @@
   "domain": "doods",
   "name": "DOODS - Dedicated Open Object Detection Service",
   "documentation": "https://www.home-assistant.io/integrations/doods",
-  "requirements": ["pydoods==1.0.2", "pillow==9.0.0"],
+  "requirements": ["pydoods==1.0.2", "pillow==9.0.1"],
   "codeowners": [],
   "iot_class": "local_polling"
 }

@@ -59,7 +59,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
             update_interval=DEFAULT_UPDATE_INTERVAL,
             update_method=partial(async_update, api_category),
         )
-        data_init_tasks.append(coordinator.async_refresh())
+        data_init_tasks.append(coordinator.async_config_entry_first_refresh())

     await asyncio.gather(*data_init_tasks)
     hass.data.setdefault(DOMAIN, {})
@@ -4,7 +4,7 @@
   "config_flow": true,
   "dependencies": ["network"],
   "documentation": "https://www.home-assistant.io/integrations/flux_led",
-  "requirements": ["flux_led==0.28.17"],
+  "requirements": ["flux_led==0.28.22"],
   "quality_scale": "platinum",
   "codeowners": ["@icemanch", "@bdraco"],
   "iot_class": "local_push",

@@ -1,11 +1,7 @@
 """Support for AVM Fritz!Box functions."""
 import logging

-from fritzconnection.core.exceptions import (
-    FritzConnectionException,
-    FritzResourceError,
-    FritzSecurityError,
-)
+from fritzconnection.core.exceptions import FritzSecurityError

 from homeassistant.config_entries import ConfigEntry
 from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_PORT, CONF_USERNAME
@@ -13,7 +9,7 @@ from homeassistant.core import HomeAssistant
 from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady

 from .common import AvmWrapper, FritzData
-from .const import DATA_FRITZ, DOMAIN, PLATFORMS
+from .const import DATA_FRITZ, DOMAIN, FRITZ_EXCEPTIONS, PLATFORMS
 from .services import async_setup_services, async_unload_services

 _LOGGER = logging.getLogger(__name__)
@@ -34,7 +30,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
         await avm_wrapper.async_setup(entry.options)
     except FritzSecurityError as ex:
         raise ConfigEntryAuthFailed from ex
-    except (FritzConnectionException, FritzResourceError) as ex:
+    except FRITZ_EXCEPTIONS as ex:
         raise ConfigEntryNotReady from ex

     hass.data.setdefault(DOMAIN, {})

@@ -12,10 +12,7 @@ from typing import Any, TypedDict, cast
 from fritzconnection import FritzConnection
 from fritzconnection.core.exceptions import (
-    FritzActionError,
-    FritzActionFailedError,
     FritzConnectionException,
-    FritzInternalError,
-    FritzLookUpError,
     FritzSecurityError,
-    FritzServiceError,
 )
@@ -46,6 +43,7 @@ from .const import (
     DEFAULT_PORT,
     DEFAULT_USERNAME,
     DOMAIN,
+    FRITZ_EXCEPTIONS,
     SERVICE_CLEANUP,
     SERVICE_REBOOT,
     SERVICE_RECONNECT,
@@ -157,7 +155,8 @@ class FritzBoxTools(update_coordinator.DataUpdateCoordinator):
         self.hass = hass
         self.host = host
         self.mesh_role = MeshRoles.NONE
-        self.device_is_router: bool = True
+        self.device_conn_type: str | None = None
+        self.device_is_router: bool = False
         self.password = password
         self.port = port
         self.username = username
@@ -188,9 +187,26 @@ class FritzBoxTools(update_coordinator.DataUpdateCoordinator):
             _LOGGER.error("Unable to establish a connection with %s", self.host)
             return

+        _LOGGER.debug(
+            "detected services on %s %s",
+            self.host,
+            list(self.connection.services.keys()),
+        )
+
         self.fritz_hosts = FritzHosts(fc=self.connection)
         self.fritz_status = FritzStatus(fc=self.connection)
         info = self.connection.call_action("DeviceInfo:1", "GetInfo")
+
+        _LOGGER.debug(
+            "gathered device info of %s %s",
+            self.host,
+            {
+                **info,
+                "NewDeviceLog": "***omitted***",
+                "NewSerialNumber": "***omitted***",
+            },
+        )
+
         if not self._unique_id:
             self._unique_id = info["NewSerialNumber"]
@@ -198,15 +214,23 @@ class FritzBoxTools(update_coordinator.DataUpdateCoordinator):
         self._current_firmware = info.get("NewSoftwareVersion")

         self._update_available, self._latest_firmware = self._update_device_info()
-        self.device_is_router = "WANIPConn1" in self.connection.services
+        if "Layer3Forwarding1" in self.connection.services:
+            if connection_type := self.connection.call_action(
+                "Layer3Forwarding1", "GetDefaultConnectionService"
+            ).get("NewDefaultConnectionService"):
+                # Return NewDefaultConnectionService sample: "1.WANPPPConnection.1"
+                self.device_conn_type = connection_type[2:][:-2]
+                self.device_is_router = self.connection.call_action(
+                    self.device_conn_type, "GetInfo"
+                ).get("NewEnable")

     @callback
     async def _async_update_data(self) -> None:
         """Update FritzboxTools data."""
         try:
             await self.async_scan_devices()
-        except (FritzSecurityError, FritzConnectionException) as ex:
-            raise update_coordinator.UpdateFailed from ex
+        except FRITZ_EXCEPTIONS as ex:
+            raise update_coordinator.UpdateFailed(ex) from ex

     @property
     def unique_id(self) -> str:
@@ -279,11 +303,19 @@ class FritzBoxTools(update_coordinator.DataUpdateCoordinator):

     def _get_wan_access(self, ip_address: str) -> bool | None:
         """Get WAN access rule for given IP address."""
-        return not self.connection.call_action(
-            "X_AVM-DE_HostFilter:1",
-            "GetWANAccessByIP",
-            NewIPv4Address=ip_address,
-        ).get("NewDisallow")
+        try:
+            return not self.connection.call_action(
+                "X_AVM-DE_HostFilter:1",
+                "GetWANAccessByIP",
+                NewIPv4Address=ip_address,
+            ).get("NewDisallow")
+        except FRITZ_EXCEPTIONS as ex:
+            _LOGGER.debug(
+                "could not get WAN access rule for client device with IP '%s', error: %s",
+                ip_address,
+                ex,
+            )
+            return None

     async def async_scan_devices(self, now: datetime | None = None) -> None:
         """Wrap up FritzboxTools class scan."""
@@ -357,13 +389,14 @@ class FritzBoxTools(update_coordinator.DataUpdateCoordinator):

             dev_info: Device = hosts[dev_mac]

-            if dev_info.ip_address:
-                dev_info.wan_access = self._get_wan_access(dev_info.ip_address)
-
             for link in interf["node_links"]:
                 intf = mesh_intf.get(link["node_interface_1_uid"])
                 if intf is not None:
+                    if intf["op_mode"] != "AP_GUEST" and dev_info.ip_address:
+                        dev_info.wan_access = self._get_wan_access(
+                            dev_info.ip_address
+                        )
                     if intf["op_mode"] == "AP_GUEST":
                         dev_info.wan_access = None

                     dev_info.connected_to = intf["device"]
                     dev_info.connection_type = intf["type"]
@@ -529,13 +562,7 @@ class AvmWrapper(FritzBoxTools):
                 "Authorization Error: Please check the provided credentials and verify that you can log into the web interface",
                 exc_info=True,
             )
-        except (
-            FritzActionError,
-            FritzActionFailedError,
-            FritzInternalError,
-            FritzServiceError,
-            FritzLookUpError,
-        ):
+        except FRITZ_EXCEPTIONS:
             _LOGGER.error(
                 "Service/Action Error: cannot execute service %s with action %s",
                 service_name,
@@ -549,11 +576,11 @@ class AvmWrapper(FritzBoxTools):
             )
             return {}

-    async def async_get_wan_dsl_interface_config(self) -> dict[str, Any]:
-        """Call WANDSLInterfaceConfig service."""
+    async def async_get_wan_link_properties(self) -> dict[str, Any]:
+        """Call WANCommonInterfaceConfig service."""

         return await self.hass.async_add_executor_job(
-            partial(self.get_wan_dsl_interface_config)
+            partial(self.get_wan_link_properties)
         )

     async def async_get_port_mapping(self, con_type: str, index: int) -> dict[str, Any]:
@@ -653,10 +680,12 @@ class AvmWrapper(FritzBoxTools):

         return self._service_call_action("WLANConfiguration", str(index), "GetInfo")

-    def get_wan_dsl_interface_config(self) -> dict[str, Any]:
-        """Call WANDSLInterfaceConfig service."""
+    def get_wan_link_properties(self) -> dict[str, Any]:
+        """Call WANCommonInterfaceConfig service."""

-        return self._service_call_action("WANDSLInterfaceConfig", "1", "GetInfo")
+        return self._service_call_action(
+            "WANCommonInterfaceConfig", "1", "GetCommonLinkProperties"
+        )

     def set_wlan_configuration(self, index: int, turn_on: bool) -> dict[str, Any]:
         """Call SetEnable action from WLANConfiguration service."""
@@ -2,6 +2,14 @@

 from typing import Literal

+from fritzconnection.core.exceptions import (
+    FritzActionError,
+    FritzActionFailedError,
+    FritzInternalError,
+    FritzLookUpError,
+    FritzServiceError,
+)
+
 from homeassistant.backports.enum import StrEnum
 from homeassistant.const import Platform
@@ -47,3 +55,11 @@ SWITCH_TYPE_PORTFORWARD = "PortForward"
 SWITCH_TYPE_WIFINETWORK = "WiFiNetwork"

 UPTIME_DEVIATION = 5
+
+FRITZ_EXCEPTIONS = (
+    FritzActionError,
+    FritzActionFailedError,
+    FritzInternalError,
+    FritzServiceError,
+    FritzLookUpError,
+)
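Collecting the fritzconnection error classes into FRITZ_EXCEPTIONS lets every call site in the component catch them as one tuple; a minimal sketch of the pattern, with a hypothetical TR-064 service name:

from fritzconnection import FritzConnection
from homeassistant.components.fritz.const import FRITZ_EXCEPTIONS

def safe_call(connection: FritzConnection) -> dict | None:
    try:
        return connection.call_action("SomeService1", "GetInfo")
    except FRITZ_EXCEPTIONS:
        # Service missing or the call failed on this device model.
        return None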
homeassistant/components/fritz/diagnostics.py (new file, 49 lines)
@@ -0,0 +1,49 @@
+"""Diagnostics support for AVM FRITZ!Box."""
+from __future__ import annotations
+
+from homeassistant.components.diagnostics import async_redact_data
+from homeassistant.config_entries import ConfigEntry
+from homeassistant.const import CONF_PASSWORD, CONF_USERNAME
+from homeassistant.core import HomeAssistant
+
+from .common import AvmWrapper
+from .const import DOMAIN
+
+TO_REDACT = {CONF_USERNAME, CONF_PASSWORD}
+
+
+async def async_get_config_entry_diagnostics(
+    hass: HomeAssistant, entry: ConfigEntry
+) -> dict:
+    """Return diagnostics for a config entry."""
+    avm_wrapper: AvmWrapper = hass.data[DOMAIN][entry.entry_id]
+
+    diag_data = {
+        "entry": async_redact_data(entry.as_dict(), TO_REDACT),
+        "device_info": {
+            "model": avm_wrapper.model,
+            "current_firmware": avm_wrapper.current_firmware,
+            "latest_firmware": avm_wrapper.latest_firmware,
+            "update_available": avm_wrapper.update_available,
+            "connection_type": avm_wrapper.device_conn_type,
+            "is_router": avm_wrapper.device_is_router,
+            "mesh_role": avm_wrapper.mesh_role,
+            "last_update success": avm_wrapper.last_update_success,
+            "last_exception": avm_wrapper.last_exception,
+            "discovered_services": list(avm_wrapper.connection.services),
+            "client_devices": [
+                {
+                    "connected_to": device.connected_to,
+                    "connection_type": device.connection_type,
+                    "hostname": device.hostname,
+                    "is_connected": device.is_connected,
+                    "last_activity": device.last_activity,
+                    "wan_access": device.wan_access,
+                }
+                for _, device in avm_wrapper.devices.items()
+            ],
+            "wan_link_properties": await avm_wrapper.async_get_wan_link_properties(),
+        },
+    }
+
+    return diag_data
@@ -277,10 +277,14 @@ async def async_setup_entry(
     _LOGGER.debug("Setting up FRITZ!Box sensors")
     avm_wrapper: AvmWrapper = hass.data[DOMAIN][entry.entry_id]

-    dsl: bool = False
-    dslinterface = await avm_wrapper.async_get_wan_dsl_interface_config()
-    if dslinterface:
-        dsl = dslinterface["NewEnable"]
+    link_properties = await avm_wrapper.async_get_wan_link_properties()
+    dsl: bool = link_properties.get("NewWANAccessType") == "DSL"
+
+    _LOGGER.debug(
+        "WANAccessType of FritzBox %s is '%s'",
+        avm_wrapper.host,
+        link_properties.get("NewWANAccessType"),
+    )

     entities = [
         FritzBoxSensor(avm_wrapper, entry.title, description)

@@ -81,16 +81,12 @@ def port_entities_list(

     _LOGGER.debug("Setting up %s switches", SWITCH_TYPE_PORTFORWARD)
     entities_list: list[FritzBoxPortSwitch] = []
-    connection_type = avm_wrapper.get_default_connection()
-    if not connection_type:
+    if not avm_wrapper.device_conn_type:
         _LOGGER.debug("The FRITZ!Box has no %s options", SWITCH_TYPE_PORTFORWARD)
         return []

-    # Return NewDefaultConnectionService sample: "1.WANPPPConnection.1"
-    con_type: str = connection_type["NewDefaultConnectionService"][2:][:-2]
-
     # Query port forwardings and setup a switch for each forward for the current device
-    resp = avm_wrapper.get_num_port_mapping(con_type)
+    resp = avm_wrapper.get_num_port_mapping(avm_wrapper.device_conn_type)
     if not resp:
         _LOGGER.debug("The FRITZ!Box has no %s options", SWITCH_TYPE_DEFLECTION)
         return []
@@ -107,7 +103,7 @@ def port_entities_list(

     for i in range(port_forwards_count):

-        portmap = avm_wrapper.get_port_mapping(con_type, i)
+        portmap = avm_wrapper.get_port_mapping(avm_wrapper.device_conn_type, i)
         if not portmap:
             _LOGGER.debug("The FRITZ!Box has no %s options", SWITCH_TYPE_DEFLECTION)
             continue
@@ -133,7 +129,7 @@ def port_entities_list(
                 portmap,
                 port_name,
                 i,
-                con_type,
+                avm_wrapper.device_conn_type,
             )
         )
@@ -3,13 +3,14 @@
   "name": "Home Assistant Frontend",
   "documentation": "https://www.home-assistant.io/integrations/frontend",
   "requirements": [
-    "home-assistant-frontend==20220127.0"
+    "home-assistant-frontend==20220203.0"
   ],
   "dependencies": [
     "api",
     "auth",
     "config",
     "device_automation",
+    "diagnostics",
     "http",
     "lovelace",
     "onboarding",

@@ -96,7 +96,7 @@ class GenericCamera(Camera):
         if self._stream_source is not None:
             self._stream_source.hass = hass
         self._limit_refetch = device_info[CONF_LIMIT_REFETCH_TO_URL_CHANGE]
-        self._attr_frames_interval = 1 / device_info[CONF_FRAMERATE]
+        self._attr_frame_interval = 1 / device_info[CONF_FRAMERATE]
         self._supported_features = SUPPORT_STREAM if self._stream_source else 0
         self.content_type = device_info[CONF_CONTENT_TYPE]
         self.verify_ssl = device_info[CONF_VERIFY_SSL]
@@ -10,7 +10,6 @@ from aiogithubapi import (
GitHubException,
GitHubLoginDeviceModel,
GitHubLoginOauthModel,
GitHubRepositoryModel,
)
from aiogithubapi.const import OAUTH_USER_LOGIN
import voluptuous as vol
@@ -34,11 +33,12 @@ from .const import (
)


async def starred_repositories(hass: HomeAssistant, access_token: str) -> list[str]:
"""Return a list of repositories that the user has starred."""
async def get_repositories(hass: HomeAssistant, access_token: str) -> list[str]:
"""Return a list of repositories that the user owns or has starred."""
client = GitHubAPI(token=access_token, session=async_get_clientsession(hass))
repositories = set()

async def _get_starred() -> list[GitHubRepositoryModel] | None:
async def _get_starred_repositories() -> None:
response = await client.user.starred(**{"params": {"per_page": 100}})
if not response.is_last_page:
results = await asyncio.gather(
@@ -54,16 +54,44 @@ async def starred_repositories(hass: HomeAssistant, access_token: str) -> list[str]:
for result in results:
response.data.extend(result.data)

return response.data
repositories.update(response.data)

async def _get_personal_repositories() -> None:
response = await client.user.repos(**{"params": {"per_page": 100}})
if not response.is_last_page:
results = await asyncio.gather(
*(
client.user.repos(
**{"params": {"per_page": 100, "page": page_number}},
)
for page_number in range(
response.next_page_number, response.last_page_number + 1
)
)
)
for result in results:
response.data.extend(result.data)

repositories.update(response.data)

try:
result = await _get_starred()
await asyncio.gather(
*(
_get_starred_repositories(),
_get_personal_repositories(),
)
)

except GitHubException:
return DEFAULT_REPOSITORIES

if not result or len(result) == 0:
if len(repositories) == 0:
return DEFAULT_REPOSITORIES
return sorted((repo.full_name for repo in result), key=str.casefold)

return sorted(
(repo.full_name for repo in repositories),
key=str.casefold,
)

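Both helpers above share one fan-out pattern: request page 1, then fetch every remaining page concurrently and merge the results. A standalone sketch of that pattern, with a hypothetical fetch_page coroutine standing in for client.user.starred / client.user.repos (only the paging attributes shown in the diff are assumed):

import asyncio

async def fetch_all_pages(fetch_page):
    """Collect every page of a paginated API, fanning out after page 1."""
    first = await fetch_page(1)  # the first response carries the paging metadata
    items = list(first.data)
    if not first.is_last_page:
        remaining = await asyncio.gather(
            *(
                fetch_page(page_number)
                for page_number in range(
                    first.next_page_number, first.last_page_number + 1
                )
            )
        )
        for response in remaining:
            items.extend(response.data)
    return items
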
class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
@@ -153,9 +181,7 @@ class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
assert self._login is not None

if not user_input:
repositories = await starred_repositories(
self.hass, self._login.access_token
)
repositories = await get_repositories(self.hass, self._login.access_token)
return self.async_show_form(
step_id="repositories",
data_schema=vol.Schema(
@@ -205,7 +231,7 @@ class OptionsFlowHandler(config_entries.OptionsFlow):
configured_repositories: list[str] = self.config_entry.options[
CONF_REPOSITORIES
]
repositories = await starred_repositories(
repositories = await get_repositories(
self.hass, self.config_entry.data[CONF_ACCESS_TOKEN]
)

@@ -3,7 +3,7 @@
"name": "GitHub",
"documentation": "https://www.home-assistant.io/integrations/github",
"requirements": [
"aiogithubapi==22.1.0"
"aiogithubapi==22.2.0"
],
"codeowners": [
"@timmo001",

@@ -68,6 +68,9 @@ class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
self._abort_if_unique_id_configured(
updates={CONF_IP_ADDRESS: self.discovery_info[CONF_IP_ADDRESS]}
)
self._async_abort_entries_match(
{CONF_IP_ADDRESS: self.discovery_info[CONF_IP_ADDRESS]}
)
else:
self._abort_if_unique_id_configured()

@@ -103,6 +106,9 @@ class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
CONF_IP_ADDRESS: discovery_info.ip,
CONF_PORT: DEFAULT_PORT,
}
await self._async_set_unique_id(
async_get_pin_from_uid(discovery_info.macaddress.replace(":", "").upper())
)
return await self._async_handle_discovery()

async def async_step_zeroconf(

homeassistant/components/homekit/diagnostics.py (new file, 44 lines)
@@ -0,0 +1,44 @@
"""Diagnostics support for HomeKit."""
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Any
|
||||
|
||||
from pyhap.accessory_driver import AccessoryDriver
|
||||
from pyhap.state import State
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.core import HomeAssistant
|
||||
|
||||
from . import HomeKit
|
||||
from .const import DOMAIN, HOMEKIT
|
||||
|
||||
|
||||
async def async_get_config_entry_diagnostics(
|
||||
hass: HomeAssistant, entry: ConfigEntry
|
||||
) -> dict[str, Any]:
|
||||
"""Return diagnostics for a config entry."""
|
||||
homekit: HomeKit = hass.data[DOMAIN][entry.entry_id][HOMEKIT]
|
||||
driver: AccessoryDriver = homekit.driver
|
||||
data: dict[str, Any] = {
|
||||
"status": homekit.status,
|
||||
"config-entry": {
|
||||
"title": entry.title,
|
||||
"version": entry.version,
|
||||
"data": dict(entry.data),
|
||||
"options": dict(entry.options),
|
||||
},
|
||||
}
|
||||
if not driver:
|
||||
return data
|
||||
data.update(driver.get_accessories())
|
||||
state: State = driver.state
|
||||
data.update(
|
||||
{
|
||||
"client_properties": {
|
||||
str(client): props for client, props in state.client_properties.items()
|
||||
},
|
||||
"config_version": state.config_version,
|
||||
"pairing_id": state.mac,
|
||||
}
|
||||
)
|
||||
return data
|
||||
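For orientation, the merged payload roughly takes this shape; the accessories key comes from pyhap's accessory dump, and every value below is illustrative rather than taken from the diff:

# Illustrative only: plausible diagnostics output for a bridge with one accessory.
diagnostics = {
    "status": 1,                                   # homekit.status
    "config-entry": {
        "title": "HomeKit Bridge",
        "version": 1,
        "data": {},
        "options": {},
    },
    "accessories": [{"aid": 1, "services": []}],   # from driver.get_accessories()
    "client_properties": {"<client-uuid>": {}},    # stringified pairing clients
    "config_version": 2,
    "pairing_id": "AA:BB:CC:DD:EE:FF",             # state.mac
}
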
@@ -1,4 +1,6 @@
"""Class to hold all light accessories."""
from __future__ import annotations

import logging
import math

@@ -12,12 +14,13 @@ from homeassistant.components.light import (
ATTR_HS_COLOR,
ATTR_MAX_MIREDS,
ATTR_MIN_MIREDS,
ATTR_RGB_COLOR,
ATTR_RGBW_COLOR,
ATTR_RGBWW_COLOR,
ATTR_SUPPORTED_COLOR_MODES,
ATTR_WHITE,
COLOR_MODE_RGBW,
COLOR_MODE_RGBWW,
COLOR_MODE_WHITE,
DOMAIN,
brightness_supported,
color_supported,
@@ -32,9 +35,9 @@ from homeassistant.const import (
from homeassistant.core import callback
from homeassistant.helpers.event import async_call_later
from homeassistant.util.color import (
color_hsv_to_RGB,
color_temperature_mired_to_kelvin,
color_temperature_to_hs,
color_temperature_to_rgbww,
)

from .accessories import TYPES, HomeAccessory
@@ -51,12 +54,13 @@ from .const import (

_LOGGER = logging.getLogger(__name__)

RGB_COLOR = "rgb_color"

CHANGE_COALESCE_TIME_WINDOW = 0.01

DEFAULT_MIN_MIREDS = 153
DEFAULT_MAX_MIREDS = 500

COLOR_MODES_WITH_WHITES = {COLOR_MODE_RGBW, COLOR_MODE_RGBWW}
COLOR_MODES_WITH_WHITES = {COLOR_MODE_RGBW, COLOR_MODE_RGBWW, COLOR_MODE_WHITE}


@TYPES.register("Light")
@@ -79,8 +83,12 @@ class Light(HomeAccessory):
self.color_modes = color_modes = (
attributes.get(ATTR_SUPPORTED_COLOR_MODES) or []
)
self._previous_color_mode = attributes.get(ATTR_COLOR_MODE)
self.color_supported = color_supported(color_modes)
self.color_temp_supported = color_temp_supported(color_modes)
self.rgbw_supported = COLOR_MODE_RGBW in color_modes
self.rgbww_supported = COLOR_MODE_RGBWW in color_modes
self.white_supported = COLOR_MODE_WHITE in color_modes
self.brightness_supported = brightness_supported(color_modes)

if self.brightness_supported:
@@ -89,7 +97,9 @@ class Light(HomeAccessory):
if self.color_supported:
self.chars.extend([CHAR_HUE, CHAR_SATURATION])

if self.color_temp_supported:
if self.color_temp_supported or COLOR_MODES_WITH_WHITES.intersection(
self.color_modes
):
self.chars.append(CHAR_COLOR_TEMPERATURE)

serv_light = self.add_preload_service(SERV_LIGHTBULB, self.chars)
@@ -101,13 +111,22 @@ class Light(HomeAccessory):
# to set to the correct initial value.
self.char_brightness = serv_light.configure_char(CHAR_BRIGHTNESS, value=100)

if self.color_temp_supported:
min_mireds = math.floor(attributes.get(ATTR_MIN_MIREDS, 153))
max_mireds = math.ceil(attributes.get(ATTR_MAX_MIREDS, 500))
if CHAR_COLOR_TEMPERATURE in self.chars:
self.min_mireds = math.floor(
attributes.get(ATTR_MIN_MIREDS, DEFAULT_MIN_MIREDS)
)
self.max_mireds = math.ceil(
attributes.get(ATTR_MAX_MIREDS, DEFAULT_MAX_MIREDS)
)
if not self.color_temp_supported and not self.rgbww_supported:
self.max_mireds = self.min_mireds
self.char_color_temp = serv_light.configure_char(
CHAR_COLOR_TEMPERATURE,
value=min_mireds,
properties={PROP_MIN_VALUE: min_mireds, PROP_MAX_VALUE: max_mireds},
value=self.min_mireds,
properties={
PROP_MIN_VALUE: self.min_mireds,
PROP_MAX_VALUE: self.max_mireds,
},
)

if self.color_supported:
@@ -165,33 +184,32 @@ class Light(HomeAccessory):
)
return

# Handle white channels
if CHAR_COLOR_TEMPERATURE in char_values:
params[ATTR_COLOR_TEMP] = char_values[CHAR_COLOR_TEMPERATURE]
events.append(f"color temperature at {params[ATTR_COLOR_TEMP]}")
temp = char_values[CHAR_COLOR_TEMPERATURE]
events.append(f"color temperature at {temp}")
bright_val = round(
((brightness_pct or self.char_brightness.value) * 255) / 100
)
if self.color_temp_supported:
params[ATTR_COLOR_TEMP] = temp
elif self.rgbww_supported:
params[ATTR_RGBWW_COLOR] = color_temperature_to_rgbww(
temp, bright_val, self.min_mireds, self.max_mireds
)
elif self.rgbw_supported:
params[ATTR_RGBW_COLOR] = (*(0,) * 3, bright_val)
elif self.white_supported:
params[ATTR_WHITE] = bright_val

elif (
CHAR_HUE in char_values
or CHAR_SATURATION in char_values
# If we are adjusting brightness we need to send the full RGBW/RGBWW values
# since HomeKit does not support RGBW/RGBWW
or brightness_pct
and COLOR_MODES_WITH_WHITES.intersection(self.color_modes)
):
elif CHAR_HUE in char_values or CHAR_SATURATION in char_values:
hue_sat = (
char_values.get(CHAR_HUE, self.char_hue.value),
char_values.get(CHAR_SATURATION, self.char_saturation.value),
)
_LOGGER.debug("%s: Set hs_color to %s", self.entity_id, hue_sat)
events.append(f"set color at {hue_sat}")
# HomeKit doesn't support RGBW/RGBWW so we need to remove any white values
if COLOR_MODE_RGBWW in self.color_modes:
val = brightness_pct or self.char_brightness.value
params[ATTR_RGBWW_COLOR] = (*color_hsv_to_RGB(*hue_sat, val), 0, 0)
elif COLOR_MODE_RGBW in self.color_modes:
val = brightness_pct or self.char_brightness.value
params[ATTR_RGBW_COLOR] = (*color_hsv_to_RGB(*hue_sat, val), 0)
else:
params[ATTR_HS_COLOR] = hue_sat
params[ATTR_HS_COLOR] = hue_sat

if (
brightness_pct
@@ -200,6 +218,9 @@ class Light(HomeAccessory):
):
params[ATTR_BRIGHTNESS_PCT] = brightness_pct

_LOGGER.debug(
"Calling light service with params: %s -> %s", char_values, params
)
self.async_call_service(DOMAIN, service, params, ", ".join(events))

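The fallback chain above reuses color_temperature_to_rgbww (imported earlier in this diff) so a color-temperature request still lands on lights that only expose RGBWW channels. A minimal sketch with made-up values:

from homeassistant.util.color import color_temperature_to_rgbww

# Hypothetical request: mid-range temperature at full brightness on a light
# spanning the default 153-500 mireds range defined above.
rgbww = color_temperature_to_rgbww(326, 255, 153, 500)
# Result is a 5-tuple (r, g, b, cold_white, warm_white) suitable for ATTR_RGBWW_COLOR.
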
@callback
@@ -210,52 +231,59 @@ class Light(HomeAccessory):
attributes = new_state.attributes
color_mode = attributes.get(ATTR_COLOR_MODE)
self.char_on.set_value(int(state == STATE_ON))
color_mode_changed = self._previous_color_mode != color_mode
self._previous_color_mode = color_mode

# Handle Brightness
if self.brightness_supported:
if (
color_mode
and COLOR_MODES_WITH_WHITES.intersection({color_mode})
and (rgb_color := attributes.get(ATTR_RGB_COLOR))
):
# HomeKit doesn't support RGBW/RGBWW so we need to
# give it the color brightness only
brightness = max(rgb_color)
else:
brightness = attributes.get(ATTR_BRIGHTNESS)
if isinstance(brightness, (int, float)):
brightness = round(brightness / 255 * 100, 0)
# The homeassistant component might report its brightness as 0 but is
# not off. But 0 is a special value in homekit. When you turn on a
# homekit accessory it will try to restore the last brightness state
# which will be the last value saved by char_brightness.set_value.
# But if it is set to 0, HomeKit will update the brightness to 100 as
# it thinks 0 is off.
#
# Therefore, if the brightness is 0 and the device is still on,
# the brightness is mapped to 1 otherwise the update is ignored in
# order to avoid this incorrect behavior.
if brightness == 0 and state == STATE_ON:
brightness = 1
self.char_brightness.set_value(brightness)
if (
self.brightness_supported
and (brightness := attributes.get(ATTR_BRIGHTNESS)) is not None
and isinstance(brightness, (int, float))
):
brightness = round(brightness / 255 * 100, 0)
# The homeassistant component might report its brightness as 0 but is
# not off. But 0 is a special value in homekit. When you turn on a
# homekit accessory it will try to restore the last brightness state
# which will be the last value saved by char_brightness.set_value.
# But if it is set to 0, HomeKit will update the brightness to 100 as
# it thinks 0 is off.
#
# Therefore, if the brightness is 0 and the device is still on,
# the brightness is mapped to 1 otherwise the update is ignored in
# order to avoid this incorrect behavior.
if brightness == 0 and state == STATE_ON:
brightness = 1
self.char_brightness.set_value(brightness)
if color_mode_changed:
self.char_brightness.notify()

# Handle Color - color must always be set before color temperature
# or the iOS UI will not display it correctly.
if self.color_supported:
if ATTR_COLOR_TEMP in attributes:
if color_temp := attributes.get(ATTR_COLOR_TEMP):
hue, saturation = color_temperature_to_hs(
color_temperature_mired_to_kelvin(
new_state.attributes[ATTR_COLOR_TEMP]
)
color_temperature_mired_to_kelvin(color_temp)
)
elif color_mode == COLOR_MODE_WHITE:
hue, saturation = 0, 0
else:
hue, saturation = attributes.get(ATTR_HS_COLOR, (None, None))
if isinstance(hue, (int, float)) and isinstance(saturation, (int, float)):
self.char_hue.set_value(round(hue, 0))
self.char_saturation.set_value(round(saturation, 0))
if color_mode_changed:
# If the color temp changed, be sure to force the color to update
self.char_hue.notify()
self.char_saturation.notify()

# Handle color temperature
if self.color_temp_supported:
color_temp = attributes.get(ATTR_COLOR_TEMP)
# Handle white channels
if CHAR_COLOR_TEMPERATURE in self.chars:
color_temp = None
if self.color_temp_supported:
color_temp = attributes.get(ATTR_COLOR_TEMP)
elif color_mode == COLOR_MODE_WHITE:
color_temp = self.min_mireds
if isinstance(color_temp, (int, float)):
self.char_color_temp.set_value(round(color_temp, 0))
if color_mode_changed:
self.char_color_temp.notify()

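The brightness handling in both the old and new branches reduces to the same conversion: scale Home Assistant's 0-255 range onto HomeKit's 0-100 scale, then clamp 0 to 1 while the light is on so HomeKit's restore logic is never fed its special "off" value. As a standalone sketch (hypothetical helper, not part of the accessory class):

def homekit_brightness(ha_brightness: float, is_on: bool) -> int:
    """Map HA brightness (0-255) to HomeKit percent (0-100)."""
    pct = round(ha_brightness / 255 * 100)
    if pct == 0 and is_on:
        # 0 would be stored as the restore value and later read back as "off"
        return 1
    return pct

assert homekit_brightness(255, True) == 100
assert homekit_brightness(1, True) == 1  # rounds to 0, clamped back to 1
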
@@ -3,7 +3,7 @@
"name": "HomematicIP Cloud",
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/homematicip_cloud",
"requirements": ["homematicip==1.0.1"],
"requirements": ["homematicip==1.0.2"],
"codeowners": [],
"quality_scale": "platinum",
"iot_class": "cloud_push"

@@ -3,10 +3,11 @@ import logging

from aiohwenergy import DisabledError

from homeassistant.config_entries import ConfigEntry
from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry
from homeassistant.const import CONF_IP_ADDRESS
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.helpers import entity_registry as er
from homeassistant.helpers.update_coordinator import UpdateFailed

from .const import DOMAIN, PLATFORMS
@@ -20,6 +21,51 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:

_LOGGER.debug("__init__ async_setup_entry")

# Migrate `homewizard_energy` (custom_component) to `homewizard`
if entry.source == SOURCE_IMPORT and "old_config_entry_id" in entry.data:
# Remove the old config entry ID from the entry data so we don't try this again
# on the next setup
data = entry.data.copy()
old_config_entry_id = data.pop("old_config_entry_id")

hass.config_entries.async_update_entry(entry, data=data)
_LOGGER.debug(
(
"Setting up imported homewizard_energy entry %s for the first time as "
"homewizard entry %s"
),
old_config_entry_id,
entry.entry_id,
)

ent_reg = er.async_get(hass)
for entity in er.async_entries_for_config_entry(ent_reg, old_config_entry_id):
_LOGGER.debug("Removing %s", entity.entity_id)
ent_reg.async_remove(entity.entity_id)

_LOGGER.debug("Re-creating %s for the new config entry", entity.entity_id)
# We will precreate the entity so that any customizations can be preserved
new_entity = ent_reg.async_get_or_create(
entity.domain,
DOMAIN,
entity.unique_id,
suggested_object_id=entity.entity_id.split(".")[1],
disabled_by=entity.disabled_by,
config_entry=entry,
original_name=entity.original_name,
original_icon=entity.original_icon,
)
_LOGGER.debug("Re-created %s", new_entity.entity_id)

# If there are customizations on the old entity, apply them to the new one
if entity.name or entity.icon:
ent_reg.async_update_entity(
new_entity.entity_id, name=entity.name, icon=entity.icon
)

# Remove the old config entry; only now is the entry fully migrated
hass.async_create_task(hass.config_entries.async_remove(old_config_entry_id))

# Create coordinator
coordinator = Coordinator(hass, entry.data[CONF_IP_ADDRESS])
try:

@@ -28,6 +28,21 @@ class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
"""Initialize the HomeWizard config flow."""
self.config: dict[str, str | int] = {}

async def async_step_import(self, import_config: dict) -> FlowResult:
"""Handle a flow initiated by older `homewizard_energy` component."""
_LOGGER.debug("config_flow async_step_import")

self.hass.components.persistent_notification.async_create(
(
"The custom integration of HomeWizard Energy has been migrated to core. "
"You can safely remove the custom integration from the custom_integrations folder."
),
"HomeWizard Energy",
f"homewizard_energy_to_{DOMAIN}",
)

return await self.async_step_user({CONF_IP_ADDRESS: import_config["host"]})

async def async_step_user(
self, user_input: dict[str, Any] | None = None
) -> FlowResult:
@@ -59,12 +74,17 @@ class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
}
)

data: dict[str, str] = {CONF_IP_ADDRESS: user_input[CONF_IP_ADDRESS]}

if self.source == config_entries.SOURCE_IMPORT:
old_config_entry_id = self.context["old_config_entry_id"]
assert self.hass.config_entries.async_get_entry(old_config_entry_id)
data["old_config_entry_id"] = old_config_entry_id

# Add entry
return self.async_create_entry(
title=f"{device_info[CONF_PRODUCT_NAME]} ({device_info[CONF_SERIAL]})",
data={
CONF_IP_ADDRESS: user_input[CONF_IP_ADDRESS],
},
data=data,
)

async def async_step_zeroconf(

@@ -8,6 +8,7 @@ import aiohwenergy
import async_timeout

from homeassistant.core import HomeAssistant
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed

from .const import DOMAIN, UPDATE_INTERVAL, DeviceResponseEntry
@@ -28,7 +29,9 @@ class HWEnergyDeviceUpdateCoordinator(DataUpdateCoordinator[DeviceResponseEntry]
"""Initialize Update Coordinator."""

super().__init__(hass, _LOGGER, name=DOMAIN, update_interval=UPDATE_INTERVAL)
self.api = aiohwenergy.HomeWizardEnergy(host)

session = async_get_clientsession(hass)
self.api = aiohwenergy.HomeWizardEnergy(host, clientsession=session)

async def _async_update_data(self) -> DeviceResponseEntry:
"""Fetch all device and sensor data from api."""

homeassistant/components/homewizard/diagnostics.py (new file, 34 lines)
@@ -0,0 +1,34 @@
"""Diagnostics support for P1 Monitor."""
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Any
|
||||
|
||||
from homeassistant.components.diagnostics import async_redact_data
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import CONF_IP_ADDRESS
|
||||
from homeassistant.core import HomeAssistant
|
||||
|
||||
from .const import DOMAIN
|
||||
from .coordinator import HWEnergyDeviceUpdateCoordinator
|
||||
|
||||
TO_REDACT = {CONF_IP_ADDRESS, "serial", "wifi_ssid"}
|
||||
|
||||
|
||||
async def async_get_config_entry_diagnostics(
|
||||
hass: HomeAssistant, entry: ConfigEntry
|
||||
) -> dict[str, Any]:
|
||||
"""Return diagnostics for a config entry."""
|
||||
coordinator: HWEnergyDeviceUpdateCoordinator = hass.data[DOMAIN][entry.entry_id]
|
||||
|
||||
meter_data = {
|
||||
"device": coordinator.api.device.todict(),
|
||||
"data": coordinator.api.data.todict(),
|
||||
"state": coordinator.api.state.todict()
|
||||
if coordinator.api.state is not None
|
||||
else None,
|
||||
}
|
||||
|
||||
return {
|
||||
"entry": async_redact_data(entry.data, TO_REDACT),
|
||||
"data": async_redact_data(meter_data, TO_REDACT),
|
||||
}
|
||||
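async_redact_data walks the structure and masks the value of every key found in TO_REDACT, nested ones included; roughly (the **REDACTED** marker is the helper's convention, shown here from memory rather than from the diff):

from homeassistant.components.diagnostics import async_redact_data

sample = {
    "ip_address": "192.0.2.10",  # hypothetical values
    "meter": {"serial": "ABC123", "model": "P1"},
}
redacted = async_redact_data(sample, {"ip_address", "serial"})
# {'ip_address': '**REDACTED**', 'meter': {'serial': '**REDACTED**', 'model': 'P1'}}
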
@@ -5,7 +5,7 @@
"codeowners": ["@DCSBL"],
"dependencies": [],
"requirements": [
"aiohwenergy==0.7.0"
"aiohwenergy==0.8.0"
],
"zeroconf": ["_hwenergy._tcp.local."],
"config_flow": true,

@@ -242,7 +242,7 @@ class HoneywellUSThermostat(ClimateEntity):
# Get current mode
mode = self._device.system_mode
# Set hold if this is not the case
if getattr(self._device, f"hold_{mode}") is False:
if getattr(self._device, f"hold_{mode}", None) is False:
# Get next period key
next_period_key = f"{mode.capitalize()}NextPeriod"
# Get next period raw value

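The small-looking change matters: without a default, getattr raises AttributeError as soon as the device lacks a hold_<mode> attribute, while the None fallback makes the `is False` check fail closed. Illustrated on a toy object:

class FakeDevice:
    hold_heat = False  # hypothetical device with no hold_cool attribute

device = FakeDevice()
# Old form: getattr(device, "hold_cool") would raise AttributeError.
assert getattr(device, "hold_cool", None) is None   # comparison simply skips
assert getattr(device, "hold_heat", None) is False  # hold gets set as before
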
@@ -49,11 +49,12 @@ class HueBridge:
self.logger = logging.getLogger(__name__)
# store actual api connection to bridge as api
app_key: str = self.config_entry.data[CONF_API_KEY]
websession = aiohttp_client.async_get_clientsession(hass)
if self.api_version == 1:
self.api = HueBridgeV1(self.host, app_key, websession)
self.api = HueBridgeV1(
self.host, app_key, aiohttp_client.async_get_clientsession(hass)
)
else:
self.api = HueBridgeV2(self.host, app_key, websession)
self.api = HueBridgeV2(self.host, app_key)
# store (this) bridge object in hass data
hass.data.setdefault(DOMAIN, {})[self.config_entry.entry_id] = self

@@ -3,7 +3,7 @@
"name": "Philips Hue",
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/hue",
"requirements": ["aiohue==3.0.11"],
"requirements": ["aiohue==4.0.1"],
"ssdp": [
{
"manufacturer": "Royal Philips Electronics",

@@ -76,7 +76,6 @@ async def handle_v2_migration(hass: core.HomeAssistant, entry: ConfigEntry) -> N
"""Perform migration of devices and entities to V2 Id's."""
host = entry.data[CONF_HOST]
api_key = entry.data[CONF_API_KEY]
websession = aiohttp_client.async_get_clientsession(hass)
dev_reg = async_get_device_registry(hass)
ent_reg = async_get_entity_registry(hass)
LOGGER.info("Start of migration of devices and entities to support API schema 2")
@@ -93,7 +92,7 @@ async def handle_v2_migration(hass: core.HomeAssistant, entry: ConfigEntry) -> N
dev_ids[normalized_mac] = hass_dev.id

# initialize bridge connection just for the migration
async with HueBridgeV2(host, api_key, websession) as api:
async with HueBridgeV2(host, api_key) as api:

sensor_class_mapping = {
SensorDeviceClass.BATTERY.value: ResourceTypes.DEVICE_POWER,

@@ -3,7 +3,7 @@
"name": "Image",
"config_flow": false,
"documentation": "https://www.home-assistant.io/integrations/image",
"requirements": ["pillow==9.0.0"],
"requirements": ["pillow==9.0.1"],
"dependencies": ["http"],
"codeowners": ["@home-assistant/core"],
"quality_scale": "internal"

@@ -3,7 +3,7 @@
"name": "IntelliFire",
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/intellifire",
"requirements": ["intellifire4py==0.5"],
"requirements": ["intellifire4py==0.6"],
"dependencies": [],
"codeowners": ["@jeeftor"],
"iot_class": "local_polling"

@@ -4,7 +4,7 @@
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/knx",
"requirements": [
"xknx==0.19.1"
"xknx==0.19.2"
],
"codeowners": [
"@Julius2342",

@@ -35,7 +35,7 @@ from homeassistant.const import (
Platform,
)
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import ENTITY_CATEGORIES_SCHEMA
from homeassistant.helpers.entity import validate_entity_category

from .const import (
CONF_INVERT,
@@ -320,7 +320,7 @@ class BinarySensorSchema(KNXPlatformSchema):
),
vol.Optional(CONF_DEVICE_CLASS): BINARY_SENSOR_DEVICE_CLASSES_SCHEMA,
vol.Optional(CONF_RESET_AFTER): cv.positive_float,
vol.Optional(CONF_ENTITY_CATEGORY): ENTITY_CATEGORIES_SCHEMA,
vol.Optional(CONF_ENTITY_CATEGORY): validate_entity_category,
}
),
)
@@ -356,7 +356,7 @@ class ButtonSchema(KNXPlatformSchema):
vol.Exclusive(
CONF_TYPE, "length_or_type", msg=length_or_type_msg
): object,
vol.Optional(CONF_ENTITY_CATEGORY): ENTITY_CATEGORIES_SCHEMA,
vol.Optional(CONF_ENTITY_CATEGORY): validate_entity_category,
}
),
vol.Any(
@@ -500,7 +500,7 @@ class ClimateSchema(KNXPlatformSchema):
): vol.In(HVAC_MODES),
vol.Optional(CONF_MIN_TEMP): vol.Coerce(float),
vol.Optional(CONF_MAX_TEMP): vol.Coerce(float),
vol.Optional(CONF_ENTITY_CATEGORY): ENTITY_CATEGORIES_SCHEMA,
vol.Optional(CONF_ENTITY_CATEGORY): validate_entity_category,
}
),
)
@@ -555,7 +555,7 @@ class CoverSchema(KNXPlatformSchema):
vol.Optional(CONF_INVERT_POSITION, default=False): cv.boolean,
vol.Optional(CONF_INVERT_ANGLE, default=False): cv.boolean,
vol.Optional(CONF_DEVICE_CLASS): COVER_DEVICE_CLASSES_SCHEMA,
vol.Optional(CONF_ENTITY_CATEGORY): ENTITY_CATEGORIES_SCHEMA,
vol.Optional(CONF_ENTITY_CATEGORY): validate_entity_category,
}
),
)
@@ -618,7 +618,7 @@ class FanSchema(KNXPlatformSchema):
vol.Optional(CONF_OSCILLATION_ADDRESS): ga_list_validator,
vol.Optional(CONF_OSCILLATION_STATE_ADDRESS): ga_list_validator,
vol.Optional(CONF_MAX_STEP): cv.byte,
vol.Optional(CONF_ENTITY_CATEGORY): ENTITY_CATEGORIES_SCHEMA,
vol.Optional(CONF_ENTITY_CATEGORY): validate_entity_category,
}
)

@@ -722,7 +722,7 @@ class LightSchema(KNXPlatformSchema):
vol.Optional(CONF_MAX_KELVIN, default=DEFAULT_MAX_KELVIN): vol.All(
vol.Coerce(int), vol.Range(min=1)
),
vol.Optional(CONF_ENTITY_CATEGORY): ENTITY_CATEGORIES_SCHEMA,
vol.Optional(CONF_ENTITY_CATEGORY): validate_entity_category,
}
),
vol.Any(
@@ -802,7 +802,7 @@ class NumberSchema(KNXPlatformSchema):
vol.Optional(CONF_MAX): vol.Coerce(float),
vol.Optional(CONF_MIN): vol.Coerce(float),
vol.Optional(CONF_STEP): cv.positive_float,
vol.Optional(CONF_ENTITY_CATEGORY): ENTITY_CATEGORIES_SCHEMA,
vol.Optional(CONF_ENTITY_CATEGORY): validate_entity_category,
}
),
number_limit_sub_validator,
@@ -824,7 +824,7 @@ class SceneSchema(KNXPlatformSchema):
vol.Required(CONF_SCENE_NUMBER): vol.All(
vol.Coerce(int), vol.Range(min=1, max=64)
),
vol.Optional(CONF_ENTITY_CATEGORY): ENTITY_CATEGORIES_SCHEMA,
vol.Optional(CONF_ENTITY_CATEGORY): validate_entity_category,
}
)

@@ -855,7 +855,7 @@ class SelectSchema(KNXPlatformSchema):
],
vol.Required(KNX_ADDRESS): ga_list_validator,
vol.Optional(CONF_STATE_ADDRESS): ga_list_validator,
vol.Optional(CONF_ENTITY_CATEGORY): ENTITY_CATEGORIES_SCHEMA,
vol.Optional(CONF_ENTITY_CATEGORY): validate_entity_category,
}
),
select_options_sub_validator,
@@ -880,7 +880,7 @@ class SensorSchema(KNXPlatformSchema):
vol.Optional(CONF_STATE_CLASS): STATE_CLASSES_SCHEMA,
vol.Required(CONF_TYPE): sensor_type_validator,
vol.Required(CONF_STATE_ADDRESS): ga_list_validator,
vol.Optional(CONF_ENTITY_CATEGORY): ENTITY_CATEGORIES_SCHEMA,
vol.Optional(CONF_ENTITY_CATEGORY): validate_entity_category,
}
)

@@ -901,7 +901,7 @@ class SwitchSchema(KNXPlatformSchema):
vol.Optional(CONF_RESPOND_TO_READ, default=False): cv.boolean,
vol.Required(KNX_ADDRESS): ga_list_validator,
vol.Optional(CONF_STATE_ADDRESS): ga_list_validator,
vol.Optional(CONF_ENTITY_CATEGORY): ENTITY_CATEGORIES_SCHEMA,
vol.Optional(CONF_ENTITY_CATEGORY): validate_entity_category,
}
)

@@ -948,7 +948,7 @@ class WeatherSchema(KNXPlatformSchema):
vol.Optional(CONF_KNX_DAY_NIGHT_ADDRESS): ga_list_validator,
vol.Optional(CONF_KNX_AIR_PRESSURE_ADDRESS): ga_list_validator,
vol.Optional(CONF_KNX_HUMIDITY_ADDRESS): ga_list_validator,
vol.Optional(CONF_ENTITY_CATEGORY): ENTITY_CATEGORIES_SCHEMA,
vol.Optional(CONF_ENTITY_CATEGORY): validate_entity_category,
}
),
)

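Every one of these schema entries swaps the shared ENTITY_CATEGORIES_SCHEMA for the validate_entity_category callable. Assuming it behaves like Home Assistant's other enum validators (coercing a string to the EntityCategory enum and rejecting anything else), the net effect on a schema is:

import voluptuous as vol
from homeassistant.helpers.entity import validate_entity_category

schema = vol.Schema({vol.Optional("entity_category"): validate_entity_category})

schema({"entity_category": "diagnostic"})  # coerced to EntityCategory.DIAGNOSTIC
schema({"entity_category": "bogus"})       # raises vol.Invalid instead of passing through
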
@@ -227,7 +227,7 @@ def _async_subscribe_pico_remote_events(
action = ACTION_RELEASE

type_ = device["type"]
name = device["name"]
area, name = device["name"].split("_", 1)
button_number = device["button_number"]
# The original implementation used LIP instead of LEAP
# so we need to convert the button number to maintain compat
@@ -252,7 +252,7 @@ def _async_subscribe_pico_remote_events(
ATTR_BUTTON_NUMBER: lip_button_number,
ATTR_LEAP_BUTTON_NUMBER: button_number,
ATTR_DEVICE_NAME: name,
ATTR_AREA_NAME: name.split("_")[0],
ATTR_AREA_NAME: area,
ATTR_ACTION: action,
},
)

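The fix performs one left split on the raw device name, so the event no longer reports the area-prefixed string as the device name. With a hypothetical bridge-reported name:

device_name = "Master Bedroom_Pico Remote"  # hypothetical value of device["name"]

# New behavior: a single split("_", 1) yields both fields
area, name = device_name.split("_", 1)
assert (area, name) == ("Master Bedroom", "Pico Remote")

# Old behavior: the full string was the device name and the area was re-derived
old_name = device_name
old_area = old_name.split("_")[0]
assert (old_area, old_name) == ("Master Bedroom", "Master Bedroom_Pico Remote")
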
@@ -2,7 +2,7 @@
"domain": "lutron_caseta",
"name": "Lutron Caséta",
"documentation": "https://www.home-assistant.io/integrations/lutron_caseta",
"requirements": ["pylutron-caseta==0.13.0"],
"requirements": ["pylutron-caseta==0.13.1"],
"config_flow": true,
"zeroconf": ["_leap._tcp.local."],
"homekit": {

@@ -44,7 +44,7 @@ from homeassistant.helpers import (
template,
)
from homeassistant.helpers.dispatcher import async_dispatcher_send
from homeassistant.helpers.entity import ENTITY_CATEGORIES_SCHEMA
from homeassistant.helpers.entity import validate_entity_category
from homeassistant.util.decorator import Registry

from .const import (
@@ -423,7 +423,7 @@ def _validate_state_class_sensor(value: dict):
vol.Optional(ATTR_SENSOR_STATE, default=None): vol.Any(
None, bool, str, int, float
),
vol.Optional(ATTR_SENSOR_ENTITY_CATEGORY): ENTITY_CATEGORIES_SCHEMA,
vol.Optional(ATTR_SENSOR_ENTITY_CATEGORY): validate_entity_category,
vol.Optional(ATTR_SENSOR_ICON, default="mdi:cellphone"): cv.icon,
vol.Optional(ATTR_SENSOR_STATE_CLASS): vol.In(SENSOSR_STATE_CLASSES),
},

@@ -222,17 +222,15 @@ class MotionEyeOptionsFlow(OptionsFlow):

if self.show_advanced_options:
# The input URL is not validated as being a URL, to allow for the possibility
# the template input won't be a valid URL until after it's rendered.
schema.update(
{
vol.Required(
CONF_STREAM_URL_TEMPLATE,
default=self._config_entry.options.get(
CONF_STREAM_URL_TEMPLATE,
"",
),
): str
# the template input won't be a valid URL until after it's rendered
stream_kwargs = {}
if CONF_STREAM_URL_TEMPLATE in self._config_entry.options:
stream_kwargs["description"] = {
"suggested_value": self._config_entry.options[
CONF_STREAM_URL_TEMPLATE
]
}
)

schema[vol.Optional(CONF_STREAM_URL_TEMPLATE, **stream_kwargs)] = str

return self.async_show_form(step_id="init", data_schema=vol.Schema(schema))

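The rewrite trades a required field with a default for an optional field carrying a suggested_value description — the usual config-flow pattern when a value should pre-fill the form without being forced back into the saved options. A minimal sketch:

import voluptuous as vol

saved_options = {"stream_url_template": "http://cam/{{ name }}"}  # hypothetical

kwargs = {}
if "stream_url_template" in saved_options:
    # Pre-fills the form field; an untouched or cleared field stays absent
    kwargs["description"] = {"suggested_value": saved_options["stream_url_template"]}

schema = vol.Schema({vol.Optional("stream_url_template", **kwargs): str})
assert schema({}) == {}  # no default is injected into the result
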
@@ -577,6 +577,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
websocket_api.async_register_command(hass, websocket_subscribe)
websocket_api.async_register_command(hass, websocket_remove_device)
websocket_api.async_register_command(hass, websocket_mqtt_info)
debug_info.initialize(hass)

if conf is None:
# If we have a config entry, setup is done by that config entry.

@@ -20,6 +20,8 @@ from homeassistant.const import (
CONF_PAYLOAD_OFF,
CONF_PAYLOAD_ON,
CONF_VALUE_TEMPLATE,
STATE_UNAVAILABLE,
STATE_UNKNOWN,
)
from homeassistant.core import HomeAssistant, callback
import homeassistant.helpers.config_validation as cv
@@ -27,6 +29,7 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback
import homeassistant.helpers.event as evt
from homeassistant.helpers.event import async_track_point_in_utc_time
from homeassistant.helpers.reload import async_setup_reload_service
from homeassistant.helpers.restore_state import RestoreEntity
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
from homeassistant.util import dt as dt_util

@@ -95,7 +98,7 @@ async def _async_setup_entity(
async_add_entities([MqttBinarySensor(hass, config, config_entry, discovery_data)])


class MqttBinarySensor(MqttEntity, BinarySensorEntity):
class MqttBinarySensor(MqttEntity, BinarySensorEntity, RestoreEntity):
"""Representation of a binary sensor that is updated by MQTT."""

_entity_id_format = binary_sensor.ENTITY_ID_FORMAT
@@ -113,6 +116,46 @@ class MqttBinarySensor(MqttEntity, BinarySensorEntity):

MqttEntity.__init__(self, hass, config, config_entry, discovery_data)

async def async_added_to_hass(self) -> None:
"""Restore state for entities with expire_after set."""
await super().async_added_to_hass()
if (
(expire_after := self._config.get(CONF_EXPIRE_AFTER)) is not None
and expire_after > 0
and (last_state := await self.async_get_last_state()) is not None
and last_state.state not in [STATE_UNKNOWN, STATE_UNAVAILABLE]
):
expiration_at = last_state.last_changed + timedelta(seconds=expire_after)
if expiration_at < (time_now := dt_util.utcnow()):
# Skip reactivating the binary_sensor
_LOGGER.debug("Skip state recovery after reload for %s", self.entity_id)
return
self._expired = False
self._state = last_state.state

if self._expiration_trigger:
# We might have set up a trigger already after subscribing from
# super().async_added_to_hass()
self._expiration_trigger()
self._expiration_trigger = async_track_point_in_utc_time(
self.hass, self._value_is_expired, expiration_at
)
_LOGGER.debug(
"State recovered after reload for %s, remaining time before expiring %s",
self.entity_id,
expiration_at - time_now,
)

async def async_will_remove_from_hass(self) -> None:
"""Remove expire triggers."""
# Clean up expire triggers
if self._expiration_trigger:
_LOGGER.debug("Clean up expire after trigger for %s", self.entity_id)
self._expiration_trigger()
self._expiration_trigger = None
self._expired = False
await MqttEntity.async_will_remove_from_hass(self)

@staticmethod
def config_schema():
"""Return the config schema."""
@@ -150,7 +193,6 @@ class MqttBinarySensor(MqttEntity, BinarySensorEntity):
# Reset old trigger
if self._expiration_trigger:
self._expiration_trigger()
self._expiration_trigger = None

# Set new trigger
expiration_at = dt_util.utcnow() + timedelta(seconds=expire_after)

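The restore logic above (mirrored for MQTT sensors further down) reduces to a single comparison: if the saved state's expiry instant is already in the past, the entity stays expired; otherwise a timer is re-armed for the remainder. Condensed, with hypothetical timestamps:

from datetime import datetime, timedelta, timezone

expire_after = 120  # seconds, from the entity configuration
last_changed = datetime(2022, 2, 1, 12, 0, tzinfo=timezone.utc)  # saved state
now = datetime(2022, 2, 1, 12, 1, tzinfo=timezone.utc)           # after reload

expiration_at = last_changed + timedelta(seconds=expire_after)
if expiration_at < now:
    print("already expired; skip state recovery")
else:
    print(f"recover state, re-arm timer with {expiration_at - now} left")  # 0:01:00
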
@@ -15,6 +15,11 @@ DATA_MQTT_DEBUG_INFO = "mqtt_debug_info"
STORED_MESSAGES = 10


def initialize(hass: HomeAssistant):
"""Initialize MQTT debug info."""
hass.data[DATA_MQTT_DEBUG_INFO] = {"entities": {}, "triggers": {}}


def log_messages(
hass: HomeAssistant, entity_id: str
) -> Callable[[MessageCallbackType], MessageCallbackType]:
@@ -45,9 +50,7 @@ def log_messages(
def add_subscription(hass, message_callback, subscription):
"""Prepare debug data for subscription."""
if entity_id := getattr(message_callback, "__entity_id", None):
debug_info = hass.data.setdefault(
DATA_MQTT_DEBUG_INFO, {"entities": {}, "triggers": {}}
)
debug_info = hass.data[DATA_MQTT_DEBUG_INFO]
entity_info = debug_info["entities"].setdefault(
entity_id, {"subscriptions": {}, "discovery_data": {}}
)
@@ -76,9 +79,7 @@ def remove_subscription(hass, message_callback, subscription):

def add_entity_discovery_data(hass, discovery_data, entity_id):
"""Add discovery data."""
debug_info = hass.data.setdefault(
DATA_MQTT_DEBUG_INFO, {"entities": {}, "triggers": {}}
)
debug_info = hass.data[DATA_MQTT_DEBUG_INFO]
entity_info = debug_info["entities"].setdefault(
entity_id, {"subscriptions": {}, "discovery_data": {}}
)
@@ -93,14 +94,13 @@ def update_entity_discovery_data(hass, discovery_payload, entity_id):

def remove_entity_data(hass, entity_id):
"""Remove discovery data."""
hass.data[DATA_MQTT_DEBUG_INFO]["entities"].pop(entity_id)
if entity_id in hass.data[DATA_MQTT_DEBUG_INFO]["entities"]:
hass.data[DATA_MQTT_DEBUG_INFO]["entities"].pop(entity_id)


def add_trigger_discovery_data(hass, discovery_hash, discovery_data, device_id):
"""Add discovery data."""
debug_info = hass.data.setdefault(
DATA_MQTT_DEBUG_INFO, {"entities": {}, "triggers": {}}
)
debug_info = hass.data[DATA_MQTT_DEBUG_INFO]
debug_info["triggers"][discovery_hash] = {
"device_id": device_id,
"discovery_data": discovery_data,
@@ -126,9 +126,7 @@ async def info_for_device(hass, device_id):
entries = hass.helpers.entity_registry.async_entries_for_device(
entity_registry, device_id, include_disabled_entities=True
)
mqtt_debug_info = hass.data.setdefault(
DATA_MQTT_DEBUG_INFO, {"entities": {}, "triggers": {}}
)
mqtt_debug_info = hass.data[DATA_MQTT_DEBUG_INFO]
for entry in entries:
if entry.entity_id not in mqtt_debug_info["entities"]:
continue
@@ -139,7 +137,7 @@ async def info_for_device(hass, device_id):
"topic": topic,
"messages": [
{
"payload": msg.payload,
"payload": str(msg.payload),
"qos": msg.qos,
"retain": msg.retain,
"time": msg.timestamp,

@@ -30,11 +30,11 @@ from homeassistant.helpers.dispatcher import (
async_dispatcher_send,
)
from homeassistant.helpers.entity import (
ENTITY_CATEGORIES_SCHEMA,
DeviceInfo,
Entity,
EntityCategory,
async_generate_entity_id,
validate_entity_category,
)
from homeassistant.helpers.typing import ConfigType

@@ -191,7 +191,7 @@ MQTT_ENTITY_COMMON_SCHEMA = MQTT_AVAILABILITY_SCHEMA.extend(
{
vol.Optional(CONF_DEVICE): MQTT_ENTITY_DEVICE_INFO_SCHEMA,
vol.Optional(CONF_ENABLED_BY_DEFAULT, default=True): cv.boolean,
vol.Optional(CONF_ENTITY_CATEGORY): ENTITY_CATEGORIES_SCHEMA,
vol.Optional(CONF_ENTITY_CATEGORY): validate_entity_category,
vol.Optional(CONF_ICON): cv.icon,
vol.Optional(CONF_JSON_ATTRS_TOPIC): valid_subscribe_topic,
vol.Optional(CONF_JSON_ATTRS_TEMPLATE): cv.template,
@@ -523,6 +523,11 @@ class MqttDiscoveryUpdate(Entity):
async def async_removed_from_registry(self) -> None:
"""Clear retained discovery topic in broker."""
if not self._removed_from_hass:
# Stop subscribing to discovery updates to not trigger when we clear the
# discovery topic
self._cleanup_discovery_on_remove()

# Clear the discovery topic so the entity is not rediscovered after a restart
discovery_topic = self._discovery_data[ATTR_DISCOVERY_TOPIC]
publish(self.hass, discovery_topic, "", retain=True)

@@ -544,7 +549,6 @@ class MqttDiscoveryUpdate(Entity):
def _cleanup_discovery_on_remove(self) -> None:
"""Stop listening to signal and cleanup discovery data."""
if self._discovery_data and not self._removed_from_hass:
debug_info.remove_entity_data(self.hass, self.entity_id)
clear_discovery_hash(self.hass, self._discovery_data[ATTR_DISCOVERY_HASH])
self._removed_from_hass = True

@@ -672,6 +676,7 @@ class MqttEntity(
await MqttAttributes.async_will_remove_from_hass(self)
await MqttAvailability.async_will_remove_from_hass(self)
await MqttDiscoveryUpdate.async_will_remove_from_hass(self)
debug_info.remove_entity_data(self.hass, self.entity_id)

@staticmethod
@abstractmethod

@@ -23,12 +23,15 @@ from homeassistant.const import (
CONF_NAME,
CONF_UNIT_OF_MEASUREMENT,
CONF_VALUE_TEMPLATE,
STATE_UNAVAILABLE,
STATE_UNKNOWN,
)
from homeassistant.core import HomeAssistant, callback
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.event import async_track_point_in_utc_time
from homeassistant.helpers.reload import async_setup_reload_service
from homeassistant.helpers.restore_state import RestoreEntity
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
from homeassistant.util import dt as dt_util

@@ -140,7 +143,7 @@ async def _async_setup_entity(
async_add_entities([MqttSensor(hass, config, config_entry, discovery_data)])


class MqttSensor(MqttEntity, SensorEntity):
class MqttSensor(MqttEntity, SensorEntity, RestoreEntity):
"""Representation of a sensor that can be updated using MQTT."""

_entity_id_format = ENTITY_ID_FORMAT
@@ -160,6 +163,46 @@ class MqttSensor(MqttEntity, SensorEntity):

MqttEntity.__init__(self, hass, config, config_entry, discovery_data)

async def async_added_to_hass(self) -> None:
"""Restore state for entities with expire_after set."""
await super().async_added_to_hass()
if (
(expire_after := self._config.get(CONF_EXPIRE_AFTER)) is not None
and expire_after > 0
and (last_state := await self.async_get_last_state()) is not None
and last_state.state not in [STATE_UNKNOWN, STATE_UNAVAILABLE]
):
expiration_at = last_state.last_changed + timedelta(seconds=expire_after)
if expiration_at < (time_now := dt_util.utcnow()):
# Skip reactivating the sensor
_LOGGER.debug("Skip state recovery after reload for %s", self.entity_id)
return
self._expired = False
self._state = last_state.state

if self._expiration_trigger:
# We might have set up a trigger already after subscribing from
# super().async_added_to_hass()
self._expiration_trigger()
self._expiration_trigger = async_track_point_in_utc_time(
self.hass, self._value_is_expired, expiration_at
)
_LOGGER.debug(
"State recovered after reload for %s, remaining time before expiring %s",
self.entity_id,
expiration_at - time_now,
)

async def async_will_remove_from_hass(self) -> None:
"""Remove expire triggers."""
# Clean up expire triggers
if self._expiration_trigger:
_LOGGER.debug("Clean up expire after trigger for %s", self.entity_id)
self._expiration_trigger()
self._expiration_trigger = None
self._expired = False
await MqttEntity.async_will_remove_from_hass(self)

@staticmethod
def config_schema():
"""Return the config schema."""
@@ -188,7 +231,6 @@ class MqttSensor(MqttEntity, SensorEntity):
# Reset old trigger
if self._expiration_trigger:
self._expiration_trigger()
self._expiration_trigger = None

# Set new trigger
expiration_at = dt_util.utcnow() + timedelta(seconds=expire_after)
@@ -197,7 +239,7 @@ class MqttSensor(MqttEntity, SensorEntity):
self.hass, self._value_is_expired, expiration_at
)

payload = self._template(msg.payload)
payload = self._template(msg.payload, default=self._state)

if payload is not None and self.device_class in (
SensorDeviceClass.DATE,

@@ -12,7 +12,7 @@ from google_nest_sdm.exceptions import ApiException
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant

from .const import DATA_SUBSCRIBER, DOMAIN
from .const import DATA_SDM, DATA_SUBSCRIBER, DOMAIN

REDACT_DEVICE_TRAITS = {InfoTrait.NAME}

@@ -21,6 +21,9 @@ async def async_get_config_entry_diagnostics(
hass: HomeAssistant, config_entry: ConfigEntry
) -> dict:
"""Return diagnostics for a config entry."""
if DATA_SDM not in config_entry.data:
return {}

if DATA_SUBSCRIBER not in hass.data[DOMAIN]:
return {"error": "No subscriber configured"}

@@ -3,7 +3,7 @@
"name": "Netatmo",
"documentation": "https://www.home-assistant.io/integrations/netatmo",
"requirements": [
"pyatmo==6.2.2"
"pyatmo==6.2.4"
],
"after_dependencies": [
"cloud",

@@ -2,7 +2,7 @@
"domain": "netgear",
"name": "NETGEAR",
"documentation": "https://www.home-assistant.io/integrations/netgear",
"requirements": ["pynetgear==0.9.0"],
"requirements": ["pynetgear==0.9.1"],
"codeowners": ["@hacf-fr", "@Quentame", "@starkillerOG"],
"iot_class": "local_polling",
"config_flow": true,

@@ -70,7 +70,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:

try:
await openuv.async_update()
except OpenUvError as err:
except HomeAssistantError as err:
LOGGER.error("Config entry failed: %s", err)
raise ConfigEntryNotReady from err

@@ -4,7 +4,7 @@
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/overkiz",
"requirements": [
"pyoverkiz==1.3.1"
"pyoverkiz==1.3.2"
],
"zeroconf": [
{

@@ -121,14 +121,22 @@ async def async_setup_entry(
if coordinator.data:
if coordinator.data.electricity:
for description in SENSOR_TYPES_ELECTRICITY:
if description.key == KEY_LAST_ELECTRICITY_COST:
if (
description.key == KEY_LAST_ELECTRICITY_COST
and coordinator.data.electricity[-1] is not None
and coordinator.data.electricity[-1].cost is not None
):
description.native_unit_of_measurement = (
coordinator.data.electricity[-1].cost.currency_unit
)
entities.append(OVOEnergySensor(coordinator, description, client))
if coordinator.data.gas:
for description in SENSOR_TYPES_GAS:
if description.key == KEY_LAST_GAS_COST:
if (
description.key == KEY_LAST_GAS_COST
and coordinator.data.gas[-1] is not None
and coordinator.data.gas[-1].cost is not None
):
description.native_unit_of_measurement = coordinator.data.gas[
-1
].cost.currency_unit

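The added conditions make the currency-unit assignment depend on the latest reading actually carrying cost data, instead of assuming it. The guard chain in isolation, with hypothetical reading objects:

from dataclasses import dataclass

@dataclass
class Cost:
    currency_unit: str

@dataclass
class Reading:
    cost: Cost | None

readings = [Reading(cost=None)]  # e.g. a plan that reports usage without cost

last = readings[-1]
if last is not None and last.cost is not None:
    unit = last.cost.currency_unit
else:
    unit = None  # leave the unit unset rather than raise AttributeError
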
homeassistant/components/plex/cast.py (new file, 77 lines)
@@ -0,0 +1,77 @@
"""Google Cast support for the Plex component."""
|
||||
from __future__ import annotations
|
||||
|
||||
from pychromecast import Chromecast
|
||||
from pychromecast.controllers.plex import PlexController
|
||||
|
||||
from homeassistant.components.cast.const import DOMAIN as CAST_DOMAIN
|
||||
from homeassistant.components.media_player import BrowseMedia
|
||||
from homeassistant.components.media_player.const import MEDIA_CLASS_APP
|
||||
from homeassistant.core import HomeAssistant
|
||||
|
||||
from . import async_browse_media as async_browse_plex_media, is_plex_media_id
|
||||
from .const import PLEX_URI_SCHEME
|
||||
from .services import lookup_plex_media
|
||||
|
||||
|
||||
async def async_get_media_browser_root_object(
|
||||
hass: HomeAssistant, cast_type: str
|
||||
) -> list[BrowseMedia]:
|
||||
"""Create a root object for media browsing."""
|
||||
return [
|
||||
BrowseMedia(
|
||||
title="Plex",
|
||||
media_class=MEDIA_CLASS_APP,
|
||||
media_content_id="",
|
||||
media_content_type="plex",
|
||||
thumbnail="https://brands.home-assistant.io/_/plex/logo.png",
|
||||
can_play=False,
|
||||
can_expand=True,
|
||||
)
|
||||
]
|
||||
|
||||
|
||||
async def async_browse_media(
|
||||
hass: HomeAssistant,
|
||||
media_content_type: str,
|
||||
media_content_id: str,
|
||||
cast_type: str,
|
||||
) -> BrowseMedia | None:
|
||||
"""Browse media."""
|
||||
if is_plex_media_id(media_content_id):
|
||||
return await async_browse_plex_media(
|
||||
hass, media_content_type, media_content_id, platform=CAST_DOMAIN
|
||||
)
|
||||
if media_content_type == "plex":
|
||||
return await async_browse_plex_media(hass, None, None, platform=CAST_DOMAIN)
|
||||
return None
|
||||
|
||||
|
||||
def _play_media(
|
||||
hass: HomeAssistant, chromecast: Chromecast, media_type: str, media_id: str
|
||||
) -> None:
|
||||
"""Play media."""
|
||||
media_id = media_id[len(PLEX_URI_SCHEME) :]
|
||||
media = lookup_plex_media(hass, media_type, media_id)
|
||||
if media is None:
|
||||
return
|
||||
controller = PlexController()
|
||||
chromecast.register_handler(controller)
|
||||
controller.play_media(media)
|
||||
|
||||
|
||||
async def async_play_media(
|
||||
hass: HomeAssistant,
|
||||
cast_entity_id: str,
|
||||
chromecast: Chromecast,
|
||||
media_type: str,
|
||||
media_id: str,
|
||||
) -> bool:
|
||||
"""Play media."""
|
||||
if media_id and media_id.startswith(PLEX_URI_SCHEME):
|
||||
await hass.async_add_executor_job(
|
||||
_play_media, hass, chromecast, media_type, media_id
|
||||
)
|
||||
return True
|
||||
|
||||
return False
|
||||
@@ -4,7 +4,7 @@
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/plex",
"requirements": [
"plexapi==4.9.1",
"plexapi==4.9.2",
"plexauth==0.0.6",
"plexwebsocket==0.0.13"
],

@@ -2,6 +2,6 @@
"domain": "proxy",
"name": "Camera Proxy",
"documentation": "https://www.home-assistant.io/integrations/proxy",
"requirements": ["pillow==9.0.0"],
"requirements": ["pillow==9.0.1"],
"codeowners": []
}

@@ -3,7 +3,7 @@
"name": "Sony PlayStation 4",
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/ps4",
"requirements": ["pyps4-2ndscreen==1.2.0"],
"requirements": ["pyps4-2ndscreen==1.3.1"],
"codeowners": ["@ktnrg45"],
"iot_class": "local_polling"
}

@@ -12,7 +12,7 @@ from homeassistant.core import HomeAssistant
from homeassistant.data_entry_flow import FlowResult
from homeassistant.helpers.aiohttp_client import async_get_clientsession

from .const import CONF_SYSTEM_ID, DOMAIN
from .const import CONF_SYSTEM_ID, DOMAIN, LOGGER


async def validate_input(hass: HomeAssistant, *, api_key: str, system_id: int) -> None:
@@ -50,6 +50,7 @@ class PVOutputFlowHandler(ConfigFlow, domain=DOMAIN):
except PVOutputAuthenticationError:
errors["base"] = "invalid_auth"
except PVOutputError:
LOGGER.exception("Cannot connect to PVOutput")
errors["base"] = "cannot_connect"
else:
await self.async_set_unique_id(str(user_input[CONF_SYSTEM_ID]))

@@ -4,7 +4,7 @@
"documentation": "https://www.home-assistant.io/integrations/pvoutput",
"config_flow": true,
"codeowners": ["@fabaff", "@frenck"],
"requirements": ["pvo==0.2.0"],
"requirements": ["pvo==0.2.1"],
"iot_class": "cloud_polling",
"quality_scale": "platinum"
}

@@ -2,7 +2,7 @@
"domain": "qrcode",
"name": "QR Code",
"documentation": "https://www.home-assistant.io/integrations/qrcode",
"requirements": ["pillow==9.0.0", "pyzbar==0.1.7"],
"requirements": ["pillow==9.0.1", "pyzbar==0.1.7"],
"codeowners": [],
"iot_class": "calculated"
}

@@ -6,6 +6,7 @@ import logging
|
||||
import sqlalchemy
|
||||
from sqlalchemy import ForeignKeyConstraint, MetaData, Table, func, text
|
||||
from sqlalchemy.exc import (
|
||||
DatabaseError,
|
||||
InternalError,
|
||||
OperationalError,
|
||||
ProgrammingError,
|
||||
@@ -68,20 +69,18 @@ def schema_is_current(current_version):

 def migrate_schema(instance, current_version):
     """Check if the schema needs to be upgraded."""
-    with session_scope(session=instance.get_session()) as session:
-        _LOGGER.warning(
-            "Database is about to upgrade. Schema version: %s", current_version
-        )
-        for version in range(current_version, SCHEMA_VERSION):
-            new_version = version + 1
-            _LOGGER.info("Upgrading recorder db schema to version %s", new_version)
-            _apply_update(instance, session, new_version, current_version)
+    _LOGGER.warning("Database is about to upgrade. Schema version: %s", current_version)
+    for version in range(current_version, SCHEMA_VERSION):
+        new_version = version + 1
+        _LOGGER.info("Upgrading recorder db schema to version %s", new_version)
+        _apply_update(instance, new_version, current_version)
+        with session_scope(session=instance.get_session()) as session:
             session.add(SchemaChanges(schema_version=new_version))

-            _LOGGER.info("Upgrade to version %s done", new_version)
+        _LOGGER.info("Upgrade to version %s done", new_version)


-def _create_index(connection, table_name, index_name):
+def _create_index(instance, table_name, index_name):
     """Create an index for the specified table.

     The index name should match the name given for the index
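The rewritten migrate_schema no longer holds a single session open across the whole upgrade: each schema step is applied and its new version number is committed in its own short-lived session, so an interrupted migration resumes from the last completed step. A toy sketch of that commit-per-step shape, with a dummy session standing in for SQLAlchemy's:

from contextlib import contextmanager


class DummySession:
    """Stand-in for a SQLAlchemy session."""

    def commit(self) -> None:
        print("committed")

    def rollback(self) -> None:
        print("rolled back")


@contextmanager
def session_scope():
    """Commit on success, roll back on error (the shape of recorder's helper)."""
    session = DummySession()
    try:
        yield session
        session.commit()
    except Exception:
        session.rollback()
        raise


def apply_update(version: int) -> None:
    print(f"applying schema version {version}")


SCHEMA_VERSION = 24
current_version = 21
for version in range(current_version, SCHEMA_VERSION):
    new_version = version + 1
    apply_update(new_version)
    with session_scope() as session:
        # Record progress immediately, one transaction per step.
        print(f"recording schema_version={new_version}")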
@@ -102,18 +101,20 @@ def _create_index(connection, table_name, index_name):
         "be patient!",
         index_name,
     )
-    try:
-        index.create(connection)
-    except (InternalError, ProgrammingError, OperationalError) as err:
-        raise_if_exception_missing_str(err, ["already exists", "duplicate"])
-        _LOGGER.warning(
-            "Index %s already exists on %s, continuing", index_name, table_name
-        )
+    with session_scope(session=instance.get_session()) as session:
+        try:
+            connection = session.connection()
+            index.create(connection)
+        except (InternalError, OperationalError, ProgrammingError) as err:
+            raise_if_exception_missing_str(err, ["already exists", "duplicate"])
+            _LOGGER.warning(
+                "Index %s already exists on %s, continuing", index_name, table_name
+            )

     _LOGGER.debug("Finished creating %s", index_name)


-def _drop_index(connection, table_name, index_name):
+def _drop_index(instance, table_name, index_name):
     """Drop an index from a specified table.

     There is no universal way to do something like `DROP INDEX IF EXISTS`
@@ -128,42 +129,48 @@ def _drop_index(connection, table_name, index_name):
     success = False

     # Engines like DB2/Oracle
-    try:
-        connection.execute(text(f"DROP INDEX {index_name}"))
-    except SQLAlchemyError:
-        pass
-    else:
-        success = True
+    with session_scope(session=instance.get_session()) as session:
+        try:
+            connection = session.connection()
+            connection.execute(text(f"DROP INDEX {index_name}"))
+        except SQLAlchemyError:
+            pass
+        else:
+            success = True

     # Engines like SQLite, SQL Server
     if not success:
-        try:
-            connection.execute(
-                text(
-                    "DROP INDEX {table}.{index}".format(
-                        index=index_name, table=table_name
+        with session_scope(session=instance.get_session()) as session:
+            try:
+                connection = session.connection()
+                connection.execute(
+                    text(
+                        "DROP INDEX {table}.{index}".format(
+                            index=index_name, table=table_name
+                        )
                     )
                 )
-            )
-        except SQLAlchemyError:
-            pass
-        else:
-            success = True
+            except SQLAlchemyError:
+                pass
+            else:
+                success = True

     if not success:
         # Engines like MySQL, MS Access
-        try:
-            connection.execute(
-                text(
-                    "DROP INDEX {index} ON {table}".format(
-                        index=index_name, table=table_name
+        with session_scope(session=instance.get_session()) as session:
+            try:
+                connection = session.connection()
+                connection.execute(
+                    text(
+                        "DROP INDEX {index} ON {table}".format(
+                            index=index_name, table=table_name
+                        )
                    )
                )
-            )
-        except SQLAlchemyError:
-            pass
-        else:
-            success = True
+            except SQLAlchemyError:
+                pass
+            else:
+                success = True

     if success:
         _LOGGER.debug(
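Because there is no portable DROP INDEX IF EXISTS, _drop_index tries each dialect's syntax in turn and treats any SQLAlchemyError as "wrong dialect, try the next form". The same fallback chain can be demonstrated with the standard-library sqlite3 module (SQLite accepts the first form, so the later ones never run here):

import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE states (entity_id TEXT)")
conn.execute("CREATE INDEX ix_states_entity_id ON states (entity_id)")

success = False
statements = [
    "DROP INDEX ix_states_entity_id",            # DB2/Oracle style
    "DROP INDEX states.ix_states_entity_id",     # SQLite/SQL Server style
    "DROP INDEX ix_states_entity_id ON states",  # MySQL/MS Access style
]
for statement in statements:
    try:
        conn.execute(statement)
    except sqlite3.Error:
        continue  # this dialect does not accept the form, try the next one
    success = True
    break

print("dropped" if success else "could not drop")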
@@ -184,7 +191,7 @@ def _drop_index(connection, table_name, index_name)
     )


-def _add_columns(connection, table_name, columns_def):
+def _add_columns(instance, table_name, columns_def):
     """Add columns to a table."""
     _LOGGER.warning(
         "Adding columns %s to table %s. Note: this can take several "
@@ -196,39 +203,43 @@ def _add_columns(connection, table_name, columns_def):

     columns_def = [f"ADD {col_def}" for col_def in columns_def]

-    try:
-        connection.execute(
-            text(
-                "ALTER TABLE {table} {columns_def}".format(
-                    table=table_name, columns_def=", ".join(columns_def)
-                )
-            )
-        )
-        return
-    except (InternalError, OperationalError):
-        # Some engines support adding all columns at once,
-        # this error is when they don't
-        _LOGGER.info("Unable to use quick column add. Adding 1 by 1")
-
-    for column_def in columns_def:
-        try:
-            connection.execute(
-                text(
-                    "ALTER TABLE {table} {column_def}".format(
-                        table=table_name, column_def=column_def
-                    )
-                )
-            )
-        except (InternalError, OperationalError) as err:
-            raise_if_exception_missing_str(err, ["already exists", "duplicate"])
-            _LOGGER.warning(
-                "Column %s already exists on %s, continuing",
-                column_def.split(" ")[1],
-                table_name,
-            )
+    with session_scope(session=instance.get_session()) as session:
+        try:
+            connection = session.connection()
+            connection.execute(
+                text(
+                    "ALTER TABLE {table} {columns_def}".format(
+                        table=table_name, columns_def=", ".join(columns_def)
+                    )
+                )
+            )
+            return
+        except (InternalError, OperationalError, ProgrammingError):
+            # Some engines support adding all columns at once,
+            # this error is when they don't
+            _LOGGER.info("Unable to use quick column add. Adding 1 by 1")
+
+    for column_def in columns_def:
+        with session_scope(session=instance.get_session()) as session:
+            try:
+                connection = session.connection()
+                connection.execute(
+                    text(
+                        "ALTER TABLE {table} {column_def}".format(
+                            table=table_name, column_def=column_def
+                        )
+                    )
+                )
+            except (InternalError, OperationalError, ProgrammingError) as err:
+                raise_if_exception_missing_str(err, ["already exists", "duplicate"])
+                _LOGGER.warning(
+                    "Column %s already exists on %s, continuing",
+                    column_def.split(" ")[1],
+                    table_name,
+                )


-def _modify_columns(connection, engine, table_name, columns_def):
+def _modify_columns(instance, engine, table_name, columns_def):
     """Modify columns in a table."""
     if engine.dialect.name == "sqlite":
         _LOGGER.debug(
@@ -260,34 +271,38 @@ def _modify_columns(connection, engine, table_name, columns_def):
     else:
         columns_def = [f"MODIFY {col_def}" for col_def in columns_def]

-    try:
-        connection.execute(
-            text(
-                "ALTER TABLE {table} {columns_def}".format(
-                    table=table_name, columns_def=", ".join(columns_def)
-                )
-            )
-        )
-        return
-    except (InternalError, OperationalError):
-        _LOGGER.info("Unable to use quick column modify. Modifying 1 by 1")
-
-    for column_def in columns_def:
-        try:
-            connection.execute(
-                text(
-                    "ALTER TABLE {table} {column_def}".format(
-                        table=table_name, column_def=column_def
-                    )
-                )
-            )
-        except (InternalError, OperationalError):
-            _LOGGER.exception(
-                "Could not modify column %s in table %s", column_def, table_name
-            )
+    with session_scope(session=instance.get_session()) as session:
+        try:
+            connection = session.connection()
+            connection.execute(
+                text(
+                    "ALTER TABLE {table} {columns_def}".format(
+                        table=table_name, columns_def=", ".join(columns_def)
+                    )
+                )
+            )
+            return
+        except (InternalError, OperationalError):
+            _LOGGER.info("Unable to use quick column modify. Modifying 1 by 1")
+
+    for column_def in columns_def:
+        with session_scope(session=instance.get_session()) as session:
+            try:
+                connection = session.connection()
+                connection.execute(
+                    text(
+                        "ALTER TABLE {table} {column_def}".format(
+                            table=table_name, column_def=column_def
+                        )
+                    )
+                )
+            except (InternalError, OperationalError):
+                _LOGGER.exception(
+                    "Could not modify column %s in table %s", column_def, table_name
+                )


-def _update_states_table_with_foreign_key_options(connection, engine):
+def _update_states_table_with_foreign_key_options(instance, engine):
     """Add the options to foreign key constraints."""
     inspector = sqlalchemy.inspect(engine)
     alters = []
@@ -315,18 +330,20 @@ def _update_states_table_with_foreign_key_options(connection, engine):
     )

     for alter in alters:
-        try:
-            connection.execute(DropConstraint(alter["old_fk"]))
-            for fkc in states_key_constraints:
-                if fkc.column_keys == alter["columns"]:
-                    connection.execute(AddConstraint(fkc))
-        except (InternalError, OperationalError):
-            _LOGGER.exception(
-                "Could not update foreign options in %s table", TABLE_STATES
-            )
+        with session_scope(session=instance.get_session()) as session:
+            try:
+                connection = session.connection()
+                connection.execute(DropConstraint(alter["old_fk"]))
+                for fkc in states_key_constraints:
+                    if fkc.column_keys == alter["columns"]:
+                        connection.execute(AddConstraint(fkc))
+            except (InternalError, OperationalError):
+                _LOGGER.exception(
+                    "Could not update foreign options in %s table", TABLE_STATES
+                )


-def _drop_foreign_key_constraints(connection, engine, table, columns):
+def _drop_foreign_key_constraints(instance, engine, table, columns):
     """Drop foreign key constraints for a table on specific columns."""
     inspector = sqlalchemy.inspect(engine)
     drops = []
@@ -344,27 +361,28 @@ def _drop_foreign_key_constraints(connection, engine, table, columns):
     )

     for drop in drops:
-        try:
-            connection.execute(DropConstraint(drop))
-        except (InternalError, OperationalError):
-            _LOGGER.exception(
-                "Could not drop foreign constraints in %s table on %s",
-                TABLE_STATES,
-                columns,
-            )
+        with session_scope(session=instance.get_session()) as session:
+            try:
+                connection = session.connection()
+                connection.execute(DropConstraint(drop))
+            except (InternalError, OperationalError):
+                _LOGGER.exception(
+                    "Could not drop foreign constraints in %s table on %s",
+                    TABLE_STATES,
+                    columns,
+                )


-def _apply_update(instance, session, new_version, old_version):  # noqa: C901
+def _apply_update(instance, new_version, old_version):  # noqa: C901
     """Perform operations to bring schema up to date."""
     engine = instance.engine
-    connection = session.connection()
     if new_version == 1:
-        _create_index(connection, "events", "ix_events_time_fired")
+        _create_index(instance, "events", "ix_events_time_fired")
     elif new_version == 2:
         # Create compound start/end index for recorder_runs
-        _create_index(connection, "recorder_runs", "ix_recorder_runs_start_end")
+        _create_index(instance, "recorder_runs", "ix_recorder_runs_start_end")
         # Create indexes for states
-        _create_index(connection, "states", "ix_states_last_updated")
+        _create_index(instance, "states", "ix_states_last_updated")
     elif new_version == 3:
         # There used to be a new index here, but it was removed in version 4.
         pass
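_apply_update is a long if/elif ladder keyed on the target version; each branch only knows how to go from version N-1 to N, and migrate_schema walks the versions in order. A compact sketch of the same stepwise-upgrade idea using a plain dict of upgrade callables (the step bodies here are illustrative placeholders, not recorder's real DDL):

from typing import Callable

UPGRADES: dict[int, Callable[[], None]] = {
    1: lambda: print("create ix_events_time_fired"),
    2: lambda: print("create recorder_runs and states indexes"),
    3: lambda: print("no-op: the index added here was removed again in version 4"),
}
SCHEMA_VERSION = max(UPGRADES)


def migrate(current_version: int) -> None:
    """Apply every missing step in order; versions are never skipped."""
    for version in range(current_version, SCHEMA_VERSION):
        new_version = version + 1
        if new_version not in UPGRADES:
            raise ValueError(f"No schema migration defined for version {new_version}")
        UPGRADES[new_version]()
        print(f"schema now at version {new_version}")


migrate(0)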
@@ -374,41 +392,41 @@ def _apply_update(instance, session, new_version, old_version):  # noqa: C901

         if old_version == 3:
             # Remove index that was added in version 3
-            _drop_index(connection, "states", "ix_states_created_domain")
+            _drop_index(instance, "states", "ix_states_created_domain")
         if old_version == 2:
             # Remove index that was added in version 2
-            _drop_index(connection, "states", "ix_states_entity_id_created")
+            _drop_index(instance, "states", "ix_states_entity_id_created")

         # Remove indexes that were added in version 0
-        _drop_index(connection, "states", "states__state_changes")
-        _drop_index(connection, "states", "states__significant_changes")
-        _drop_index(connection, "states", "ix_states_entity_id_created")
+        _drop_index(instance, "states", "states__state_changes")
+        _drop_index(instance, "states", "states__significant_changes")
+        _drop_index(instance, "states", "ix_states_entity_id_created")

-        _create_index(connection, "states", "ix_states_entity_id_last_updated")
+        _create_index(instance, "states", "ix_states_entity_id_last_updated")
     elif new_version == 5:
         # Create supporting index for States.event_id foreign key
-        _create_index(connection, "states", "ix_states_event_id")
+        _create_index(instance, "states", "ix_states_event_id")
     elif new_version == 6:
         _add_columns(
-            session,
+            instance,
             "events",
             ["context_id CHARACTER(36)", "context_user_id CHARACTER(36)"],
         )
-        _create_index(connection, "events", "ix_events_context_id")
-        _create_index(connection, "events", "ix_events_context_user_id")
+        _create_index(instance, "events", "ix_events_context_id")
+        _create_index(instance, "events", "ix_events_context_user_id")
         _add_columns(
-            connection,
+            instance,
             "states",
             ["context_id CHARACTER(36)", "context_user_id CHARACTER(36)"],
         )
-        _create_index(connection, "states", "ix_states_context_id")
-        _create_index(connection, "states", "ix_states_context_user_id")
+        _create_index(instance, "states", "ix_states_context_id")
+        _create_index(instance, "states", "ix_states_context_user_id")
     elif new_version == 7:
-        _create_index(connection, "states", "ix_states_entity_id")
+        _create_index(instance, "states", "ix_states_entity_id")
     elif new_version == 8:
-        _add_columns(connection, "events", ["context_parent_id CHARACTER(36)"])
-        _add_columns(connection, "states", ["old_state_id INTEGER"])
-        _create_index(connection, "events", "ix_events_context_parent_id")
+        _add_columns(instance, "events", ["context_parent_id CHARACTER(36)"])
+        _add_columns(instance, "states", ["old_state_id INTEGER"])
+        _create_index(instance, "events", "ix_events_context_parent_id")
     elif new_version == 9:
         # We now get the context from events with a join
         # since its always there on state_changed events
@@ -418,36 +436,36 @@ def _apply_update(instance, session, new_version, old_version):  # noqa: C901
         # and we would have to move to something like
        # sqlalchemy alembic to make that work
         #
-        _drop_index(connection, "states", "ix_states_context_id")
-        _drop_index(connection, "states", "ix_states_context_user_id")
+        _drop_index(instance, "states", "ix_states_context_id")
+        _drop_index(instance, "states", "ix_states_context_user_id")
         # This index won't be there if they were not running
         # nightly but we don't treat that as a critical issue
-        _drop_index(connection, "states", "ix_states_context_parent_id")
+        _drop_index(instance, "states", "ix_states_context_parent_id")
         # Redundant keys on composite index:
         # We already have ix_states_entity_id_last_updated
-        _drop_index(connection, "states", "ix_states_entity_id")
-        _create_index(connection, "events", "ix_events_event_type_time_fired")
-        _drop_index(connection, "events", "ix_events_event_type")
+        _drop_index(instance, "states", "ix_states_entity_id")
+        _create_index(instance, "events", "ix_events_event_type_time_fired")
+        _drop_index(instance, "events", "ix_events_event_type")
     elif new_version == 10:
         # Now done in step 11
         pass
     elif new_version == 11:
-        _create_index(connection, "states", "ix_states_old_state_id")
-        _update_states_table_with_foreign_key_options(connection, engine)
+        _create_index(instance, "states", "ix_states_old_state_id")
+        _update_states_table_with_foreign_key_options(instance, engine)
     elif new_version == 12:
         if engine.dialect.name == "mysql":
-            _modify_columns(connection, engine, "events", ["event_data LONGTEXT"])
-            _modify_columns(connection, engine, "states", ["attributes LONGTEXT"])
+            _modify_columns(instance, engine, "events", ["event_data LONGTEXT"])
+            _modify_columns(instance, engine, "states", ["attributes LONGTEXT"])
     elif new_version == 13:
         if engine.dialect.name == "mysql":
             _modify_columns(
-                connection,
+                instance,
                 engine,
                 "events",
                 ["time_fired DATETIME(6)", "created DATETIME(6)"],
             )
             _modify_columns(
-                connection,
+                instance,
                 engine,
                 "states",
                 [
@@ -457,14 +475,12 @@ def _apply_update(instance, session, new_version, old_version):  # noqa: C901
                 ],
             )
     elif new_version == 14:
-        _modify_columns(connection, engine, "events", ["event_type VARCHAR(64)"])
+        _modify_columns(instance, engine, "events", ["event_type VARCHAR(64)"])
     elif new_version == 15:
         # This dropped the statistics table, done again in version 18.
         pass
     elif new_version == 16:
-        _drop_foreign_key_constraints(
-            connection, engine, TABLE_STATES, ["old_state_id"]
-        )
+        _drop_foreign_key_constraints(instance, engine, TABLE_STATES, ["old_state_id"])
     elif new_version == 17:
         # This dropped the statistics table, done again in version 18.
         pass
@@ -489,12 +505,13 @@ def _apply_update(instance, session, new_version, old_version):  # noqa: C901
     elif new_version == 19:
         # This adds the statistic runs table, insert a fake run to prevent duplicating
         # statistics.
-        session.add(StatisticsRuns(start=get_start_time()))
+        with session_scope(session=instance.get_session()) as session:
+            session.add(StatisticsRuns(start=get_start_time()))
     elif new_version == 20:
         # This changed the precision of statistics from float to double
         if engine.dialect.name in ["mysql", "postgresql"]:
             _modify_columns(
-                connection,
+                instance,
                 engine,
                 "statistics",
                 [
@@ -516,14 +533,16 @@ def _apply_update(instance, session, new_version, old_version):  # noqa: C901
                 table,
             )
             with contextlib.suppress(SQLAlchemyError):
-                connection.execute(
-                    # Using LOCK=EXCLUSIVE to prevent the database from corrupting
-                    # https://github.com/home-assistant/core/issues/56104
-                    text(
-                        f"ALTER TABLE {table} CONVERT TO "
-                        "CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci LOCK=EXCLUSIVE"
+                with session_scope(session=instance.get_session()) as session:
+                    connection = session.connection()
+                    connection.execute(
+                        # Using LOCK=EXCLUSIVE to prevent the database from corrupting
+                        # https://github.com/home-assistant/core/issues/56104
+                        text(
+                            f"ALTER TABLE {table} CONVERT TO "
+                            "CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci LOCK=EXCLUSIVE"
+                        )
                     )
-                )
     elif new_version == 22:
         # Recreate the all statistics tables for Oracle DB with Identity columns
         #
@@ -549,60 +568,76 @@ def _apply_update(instance, session, new_version, old_version):  # noqa: C901
         # Block 5-minute statistics for one hour from the last run, or it will overlap
         # with existing hourly statistics. Don't block on a database with no existing
         # statistics.
-        if session.query(Statistics.id).count() and (
-            last_run_string := session.query(func.max(StatisticsRuns.start)).scalar()
-        ):
-            last_run_start_time = process_timestamp(last_run_string)
-            if last_run_start_time:
-                fake_start_time = last_run_start_time + timedelta(minutes=5)
-                while fake_start_time < last_run_start_time + timedelta(hours=1):
-                    session.add(StatisticsRuns(start=fake_start_time))
-                    fake_start_time += timedelta(minutes=5)
+        with session_scope(session=instance.get_session()) as session:
+            if session.query(Statistics.id).count() and (
+                last_run_string := session.query(
+                    func.max(StatisticsRuns.start)
+                ).scalar()
+            ):
+                last_run_start_time = process_timestamp(last_run_string)
+                if last_run_start_time:
+                    fake_start_time = last_run_start_time + timedelta(minutes=5)
+                    while fake_start_time < last_run_start_time + timedelta(hours=1):
+                        session.add(StatisticsRuns(start=fake_start_time))
+                        fake_start_time += timedelta(minutes=5)

         # When querying the database, be careful to only explicitly query for columns
         # which were present in schema version 21. If querying the table, SQLAlchemy
         # will refer to future columns.
-        for sum_statistic in session.query(StatisticsMeta.id).filter_by(has_sum=true()):
-            last_statistic = (
-                session.query(
-                    Statistics.start,
-                    Statistics.last_reset,
-                    Statistics.state,
-                    Statistics.sum,
-                )
-                .filter_by(metadata_id=sum_statistic.id)
-                .order_by(Statistics.start.desc())
-                .first()
-            )
-            if last_statistic:
-                session.add(
-                    StatisticsShortTerm(
-                        metadata_id=sum_statistic.id,
-                        start=last_statistic.start,
-                        last_reset=last_statistic.last_reset,
-                        state=last_statistic.state,
-                        sum=last_statistic.sum,
-                    )
-                )
+        with session_scope(session=instance.get_session()) as session:
+            for sum_statistic in session.query(StatisticsMeta.id).filter_by(
+                has_sum=true()
+            ):
+                last_statistic = (
+                    session.query(
+                        Statistics.start,
+                        Statistics.last_reset,
+                        Statistics.state,
+                        Statistics.sum,
+                    )
+                    .filter_by(metadata_id=sum_statistic.id)
+                    .order_by(Statistics.start.desc())
+                    .first()
+                )
+                if last_statistic:
+                    session.add(
+                        StatisticsShortTerm(
+                            metadata_id=sum_statistic.id,
+                            start=last_statistic.start,
+                            last_reset=last_statistic.last_reset,
+                            state=last_statistic.state,
+                            sum=last_statistic.sum,
+                        )
+                    )
     elif new_version == 23:
         # Add name column to StatisticsMeta
-        _add_columns(session, "statistics_meta", ["name VARCHAR(255)"])
+        _add_columns(instance, "statistics_meta", ["name VARCHAR(255)"])
     elif new_version == 24:
-        # Delete duplicated statistics
-        delete_duplicates(instance, session)
         # Recreate statistics indices to block duplicated statistics
-        _drop_index(connection, "statistics", "ix_statistics_statistic_id_start")
-        _create_index(connection, "statistics", "ix_statistics_statistic_id_start")
+        _drop_index(instance, "statistics", "ix_statistics_statistic_id_start")
         _drop_index(
-            connection,
+            instance,
             "statistics_short_term",
             "ix_statistics_short_term_statistic_id_start",
         )
-        _create_index(
-            connection,
-            "statistics_short_term",
-            "ix_statistics_short_term_statistic_id_start",
-        )
+        try:
+            _create_index(instance, "statistics", "ix_statistics_statistic_id_start")
+            _create_index(
+                instance,
+                "statistics_short_term",
+                "ix_statistics_short_term_statistic_id_start",
+            )
+        except DatabaseError:
+            # There may be duplicated statistics entries, delete duplicated statistics
+            # and try again
+            with session_scope(session=instance.get_session()) as session:
+                delete_duplicates(instance, session)
+            _create_index(instance, "statistics", "ix_statistics_statistic_id_start")
+            _create_index(
+                instance,
+                "statistics_short_term",
+                "ix_statistics_short_term_statistic_id_start",
+            )

     else:
         raise ValueError(f"No schema migration defined for version {new_version}")
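The new version-24 path expresses "dedupe only when needed": it builds the blocking indexes first and runs the expensive delete_duplicates pass only if index creation fails with DatabaseError. The shape of that retry, demonstrated against sqlite3 with a unique index (the schema here is a toy, not recorder's real tables):

import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute(
    "CREATE TABLE statistics (id INTEGER PRIMARY KEY, metadata_id INT, start TEXT)"
)
conn.executemany(
    "INSERT INTO statistics (metadata_id, start) VALUES (?, ?)",
    [(1, "2022-02-01"), (1, "2022-02-01"), (2, "2022-02-01")],  # one duplicate pair
)

CREATE = (
    "CREATE UNIQUE INDEX ix_statistics_statistic_id_start "
    "ON statistics (metadata_id, start)"
)
try:
    conn.execute(CREATE)
except sqlite3.DatabaseError:
    # Duplicates block the unique index: keep only the newest row of each
    # (metadata_id, start) group, then try again.
    conn.execute(
        "DELETE FROM statistics WHERE id NOT IN "
        "(SELECT MAX(id) FROM statistics GROUP BY metadata_id, start)"
    )
    conn.execute(CREATE)

print(conn.execute("SELECT COUNT(*) FROM statistics").fetchone()[0])  # 2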
@@ -119,8 +119,6 @@ QUERY_STATISTIC_META_ID = [
     StatisticsMeta.statistic_id,
 ]

-MAX_DUPLICATES = 1000000
-
 STATISTICS_BAKERY = "recorder_statistics_bakery"
 STATISTICS_META_BAKERY = "recorder_statistics_meta_bakery"
 STATISTICS_SHORT_TERM_BAKERY = "recorder_statistics_short_term_bakery"
@@ -292,7 +290,7 @@ def _find_duplicates(
         )
         .filter(subquery.c.is_duplicate == 1)
         .order_by(table.metadata_id, table.start, table.id.desc())
-        .limit(MAX_ROWS_TO_PURGE)
+        .limit(1000 * MAX_ROWS_TO_PURGE)
     )
     duplicates = execute(query)
     original_as_dict = {}
@@ -345,14 +343,13 @@ def _delete_duplicates_from_table(
         if not duplicate_ids:
             break
         all_non_identical_duplicates.extend(non_identical_duplicates)
-        deleted_rows = (
-            session.query(table)
-            .filter(table.id.in_(duplicate_ids))
-            .delete(synchronize_session=False)
-        )
-        total_deleted_rows += deleted_rows
-        if total_deleted_rows >= MAX_DUPLICATES:
-            break
+        for i in range(0, len(duplicate_ids), MAX_ROWS_TO_PURGE):
+            deleted_rows = (
+                session.query(table)
+                .filter(table.id.in_(duplicate_ids[i : i + MAX_ROWS_TO_PURGE]))
+                .delete(synchronize_session=False)
+            )
+            total_deleted_rows += deleted_rows
     return (total_deleted_rows, all_non_identical_duplicates)
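The loop above replaces one giant DELETE ... WHERE id IN (...) with fixed-size chunks, which keeps the bound-parameter count below engine limits (SQLite historically capped it near 999) and bounds each statement's size. A self-contained sqlite3 sketch of the chunking, with the chunk size shrunk to 2 for visibility (recorder uses MAX_ROWS_TO_PURGE):

import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE statistics (id INTEGER PRIMARY KEY)")
conn.executemany("INSERT INTO statistics (id) VALUES (?)", [(i,) for i in range(1, 8)])

duplicate_ids = [1, 2, 3, 4, 5]
CHUNK = 2  # stands in for MAX_ROWS_TO_PURGE

total_deleted = 0
for i in range(0, len(duplicate_ids), CHUNK):
    chunk = duplicate_ids[i : i + CHUNK]
    placeholders = ", ".join("?" * len(chunk))
    # One bounded DELETE per chunk instead of a single huge IN clause.
    cursor = conn.execute(f"DELETE FROM statistics WHERE id IN ({placeholders})", chunk)
    total_deleted += cursor.rowcount

print(total_deleted)  # 5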
@@ -389,13 +386,6 @@ def delete_duplicates(instance: Recorder, session: scoped_session) -> None:
             backup_path,
         )

-    if deleted_statistics_rows >= MAX_DUPLICATES:
-        _LOGGER.warning(
-            "Found more than %s duplicated statistic rows, please report at "
-            'https://github.com/home-assistant/core/issues?q=is%%3Aissue+label%%3A"integration%%3A+recorder"+',
-            MAX_DUPLICATES - 1,
-        )
-
     deleted_short_term_statistics_rows, _ = _delete_duplicates_from_table(
         session, StatisticsShortTerm
     )
@@ -4,7 +4,7 @@
   "config_flow": true,
   "documentation": "https://www.home-assistant.io/integrations/renault",
   "requirements": [
-    "renault-api==0.1.7"
+    "renault-api==0.1.8"
   ],
   "codeowners": [
     "@epenet"
@@ -20,7 +20,6 @@ from homeassistant.const import (
     CONF_TYPE,
     CONF_USERNAME,
     ENERGY_KILO_WATT_HOUR,
-    EVENT_HOMEASSISTANT_START,
     EVENT_HOMEASSISTANT_STOP,
     MASS_KILOGRAMS,
     POWER_WATT,
@@ -33,6 +32,7 @@ from homeassistant.exceptions import PlatformNotReady
 import homeassistant.helpers.config_validation as cv
 from homeassistant.helpers.entity_platform import AddEntitiesCallback
 from homeassistant.helpers.event import async_call_later
+from homeassistant.helpers.start import async_at_start
 from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType

 _LOGGER = logging.getLogger(__name__)
@@ -131,17 +131,19 @@ async def async_setup_platform(

         return values

+    @callback
     def start_update_interval(event):
         """Start the update interval scheduling."""
         nonlocal remove_interval_update
         remove_interval_update = async_track_time_interval_backoff(hass, async_saj)

+    @callback
     def stop_update_interval(event):
         """Properly cancel the scheduled update."""
         remove_interval_update()  # pylint: disable=not-callable

-    hass.bus.async_listen_once(EVENT_HOMEASSISTANT_START, start_update_interval)
     hass.bus.async_listen(EVENT_HOMEASSISTANT_STOP, stop_update_interval)
+    async_at_start(hass, start_update_interval)


 @callback
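async_at_start differs from listening for EVENT_HOMEASSISTANT_START in one important way: if Home Assistant is already running when the platform is set up (for example after a configuration reload), the callback still fires immediately instead of never firing. A framework-free sketch of that "run now or on start" contract (the Hub class is a stand-in for hass, not a real API):

from typing import Callable


class Hub:
    """Minimal stand-in for hass, illustrating async_at_start semantics."""

    def __init__(self) -> None:
        self.is_running = False
        self._start_listeners: list[Callable[[], None]] = []

    def at_start(self, callback: Callable[[], None]) -> None:
        """Run the callback now if already started, else when start fires."""
        if self.is_running:
            callback()
        else:
            self._start_listeners.append(callback)

    def start(self) -> None:
        self.is_running = True
        for callback in self._start_listeners:
            callback()


hub = Hub()
hub.at_start(lambda: print("registered before start: runs on start"))
hub.start()
hub.at_start(lambda: print("registered after start: still runs"))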
 homeassistant/components/samsungtv/diagnostics.py (new file, +18)
@@ -0,0 +1,18 @@
+"""Diagnostics support for SamsungTV."""
+from __future__ import annotations
+
+from homeassistant.components.diagnostics import async_redact_data
+from homeassistant.config_entries import ConfigEntry
+from homeassistant.const import CONF_TOKEN
+from homeassistant.core import HomeAssistant
+
+TO_REDACT = {CONF_TOKEN}
+
+
+async def async_get_config_entry_diagnostics(
+    hass: HomeAssistant, entry: ConfigEntry
+) -> dict:
+    """Return diagnostics for a config entry."""
+    diag_data = {"entry": async_redact_data(entry.as_dict(), TO_REDACT)}
+
+    return diag_data
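async_redact_data walks the config entry dict and masks any key listed in TO_REDACT, so tokens never land in a diagnostics download. A plain-Python sketch of that recursive redaction (simplified relative to the real diagnostics helper):

from typing import Any

TO_REDACT = {"token"}


def redact(data: Any, to_redact: set[str]) -> Any:
    """Recursively replace values of sensitive keys with a marker."""
    if isinstance(data, dict):
        return {
            key: "**REDACTED**" if key in to_redact else redact(value, to_redact)
            for key, value in data.items()
        }
    if isinstance(data, list):
        return [redact(item, to_redact) for item in data]
    return data


entry = {"title": "Living Room TV", "data": {"host": "10.0.0.5", "token": "abc123"}}
print(redact(entry, TO_REDACT))
# {'title': 'Living Room TV', 'data': {'host': '10.0.0.5', 'token': '**REDACTED**'}}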
@@ -113,8 +113,9 @@ class Scene(RestoreEntity):
         self.async_write_ha_state()
         await self.async_activate(**kwargs)

-    async def async_added_to_hass(self) -> None:
-        """Call when the button is added to hass."""
+    async def async_internal_added_to_hass(self) -> None:
+        """Call when the scene is added to hass."""
+        await super().async_internal_added_to_hass()
         state = await self.async_get_last_state()
         if state is not None and state.state is not None:
             self.__last_activated = state.state
@@ -51,7 +51,8 @@ class HASensemeLight(SensemeEntity, LightEntity):
     def _async_update_attrs(self) -> None:
         """Update attrs from device."""
         self._attr_is_on = self._device.light_on
-        self._attr_brightness = int(min(255, self._device.light_brightness * 16))
+        if self._device.light_brightness is not None:
+            self._attr_brightness = int(min(255, self._device.light_brightness * 16))

     async def async_turn_on(self, **kwargs: Any) -> None:
         """Turn on the light."""
@@ -2,7 +2,7 @@
   "domain": "seven_segments",
   "name": "Seven Segments OCR",
   "documentation": "https://www.home-assistant.io/integrations/seven_segments",
-  "requirements": ["pillow==9.0.0"],
+  "requirements": ["pillow==9.0.1"],
   "codeowners": ["@fabaff"],
   "iot_class": "local_polling"
 }
@@ -3,7 +3,7 @@
   "name": "Shelly",
   "config_flow": true,
   "documentation": "https://www.home-assistant.io/integrations/shelly",
-  "requirements": ["aioshelly==1.0.8"],
+  "requirements": ["aioshelly==1.0.9"],
   "zeroconf": [
     {
       "type": "_http._tcp.local.",
@@ -264,7 +264,8 @@ def get_model_name(info: dict[str, Any]) -> str:

 def get_rpc_channel_name(device: RpcDevice, key: str) -> str:
     """Get name based on device and channel name."""
-    key = key.replace("input", "switch")
+    if device.config.get("switch:0"):
+        key = key.replace("input", "switch")
     device_name = get_rpc_device_name(device)
     entity_name: str | None = device.config[key].get("name", device_name)
@@ -67,7 +67,7 @@ class ShodanSensor(SensorEntity):
     def update(self) -> None:
         """Get the latest data and updates the states."""
         data = self.data.update()
-        self._attr_native_value = data.details["total"]
+        self._attr_native_value = data["total"]


 class ShodanData:
@@ -38,3 +38,4 @@ KEY_MOISTURE: Final = "moisture"
 KEY_POWER: Final = "power"

 PREVIOUS_STATE: Final = "previous_state"
+AVAILABILITY_EVENT_CODE: Final = "RP"
Some files were not shown because too many files have changed in this diff.