Mirror of https://github.com/home-assistant/core.git
Synced 2026-01-04 14:55:39 +01:00

Compare commits (145 commits)
Author and date columns were empty in this mirror view; the abbreviated SHA1 of each commit, in the order listed:

e33671db60  7d2e42d026  6aa99d1063  de96d21ea9  7bb14aae23
8b39866bb7  addc6bce63  104f56a01d  6472cb8721  5cccac7a19
96cbae5393  e5e3ab377d  f46fbcc9eb  aad9992c9a  398f60c3d0
312c31afda  d315890c61  51dd3c88e9  23a68f5d49  10c7725a90
fc2d30c993  cb7f7dff72  c254598331  646c56e0e9  f3a3ff28f2
087f443368  fcee1ff865  6084b323df  6857562e9e  aef2588f9c
27c5460feb  60b4600019  27752f7ad3  669c99474b  76872e3789
2594500452  65c8363323  dfcad3a13d  92bc780dd7  eb781060e8
a2e7897b1e  854308fec2  0199e8cc43  7cc9a4310d  5976238126
caedef5f1a  a96b91d120  2e6ee5165e  7dd7c1dadd  4c548af6ef
200e07b8d6  ae5a885387  bebdaacf47  339fc0a2af  f44ca5f9d5
a869c1bc88  d5443b8dee  6ec09320dd  550f80ddd2  23d2168952
c1cb0a0f8e  e53227be79  c8c1543b26  715fe95abd  02cb879717
9734216215  0f06ebde06  7195372616  ac63a7e01e  f08ebf5b7e
49d6048278  ceae63d457  f170aba0cc  66e076b57f  1338b347b5
9b471ab653  e90a6bbe1c  aa9965675d  ad3b2f02b4  0dbe9b7cf4
b9d346baed  7791711603  fdfffcb73e  8e6bd840a4  619a52a387
a4d59aa599  4ba494f5cd  7a7f9deb89  5786f68bb7  bccfe6646e
fc7ea6e1b3  058420bb2f  9695235920  57526bd21f  eff9690c8a
d754ea1645  5f6214ede7  0f02ae981d  51abdf9c63  1a2e9aaaed
56d1fc6dad  5a44f8eadd  609661a862  27dbf98dae  6cf2665200
5aa02b884e  84b2ec2244  35f2536d46  e6e95a1131  ea1245f308
9cd6bb7335  4e3cd1471a  67a9932c5c  0efa276fca  c6d5a0842b
b004c5deb6  06b6b176db  9eeaec4f79  3babc43fa5  a670317b80
f44f1f0c4a  b450a41d7b  88c3ab1113  f1c8fc241a  e5b9d5baa3
3c43089cc2  c8827e00b3  3a1a12b13e  2928935838  b9a37e2c3e
92f4f99d41  e32a54eecc  2bb65ecf38  afbc55b181  931c27f452
689133976a  faa8ac692e  ec0b0e41a1  6550d04313  8d33964e4d
d195e8a1b4  a8b29c4be9  f7ec373aab  1ae2bfcc89  1155d229f3
CODEOWNERS
@@ -264,6 +264,7 @@ tests/components/enphase_envoy/* @gtdiehl
 homeassistant/components/entur_public_transport/* @hfurubotten
 homeassistant/components/environment_canada/* @gwww @michaeldavie
 tests/components/environment_canada/* @gwww @michaeldavie
+homeassistant/components/envisalink/* @ufodone
 homeassistant/components/ephember/* @ttroy50
 homeassistant/components/epson/* @pszafer
 tests/components/epson/* @pszafer

homeassistant/components/accuweather/weather.py
@@ -17,7 +17,12 @@ from homeassistant.components.weather import (
     WeatherEntity,
 )
 from homeassistant.config_entries import ConfigEntry
-from homeassistant.const import CONF_NAME, TEMP_CELSIUS, TEMP_FAHRENHEIT
+from homeassistant.const import (
+    CONF_NAME,
+    SPEED_MILES_PER_HOUR,
+    TEMP_CELSIUS,
+    TEMP_FAHRENHEIT,
+)
 from homeassistant.core import HomeAssistant
 from homeassistant.helpers.device_registry import DeviceEntryType
 from homeassistant.helpers.entity import DeviceInfo
@@ -62,6 +67,13 @@ class AccuWeatherEntity(CoordinatorEntity, WeatherEntity):
         """Initialize."""
         super().__init__(coordinator)
         self._unit_system = API_METRIC if coordinator.is_metric else API_IMPERIAL
+        wind_speed_unit = self.coordinator.data["Wind"]["Speed"][self._unit_system][
+            "Unit"
+        ]
+        if wind_speed_unit == "mi/h":
+            self._attr_wind_speed_unit = SPEED_MILES_PER_HOUR
+        else:
+            self._attr_wind_speed_unit = wind_speed_unit
         self._attr_name = name
         self._attr_unique_id = coordinator.location_key
         self._attr_temperature_unit = (
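
Note: a minimal standalone sketch of the unit-normalization pattern in the hunk above; the constant's value matches Home Assistant's SPEED_MILES_PER_HOUR at the time, and the sample inputs are illustrative, not real AccuWeather payloads.

SPEED_MILES_PER_HOUR = "mph"

def normalize_wind_speed_unit(api_unit: str) -> str:
    """Map AccuWeather's "mi/h" spelling onto the HA mph constant."""
    return SPEED_MILES_PER_HOUR if api_unit == "mi/h" else api_unit

assert normalize_wind_speed_unit("mi/h") == "mph"
assert normalize_wind_speed_unit("km/h") == "km/h"
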
homeassistant/components/advantage_air/manifest.json
@@ -7,8 +7,8 @@
     "@Bre77"
   ],
   "requirements": [
-    "advantage_air==0.2.5"
+    "advantage_air==0.3.0"
   ],
   "quality_scale": "platinum",
   "iot_class": "local_polling"
 }

homeassistant/components/amcrest/camera.py
@@ -515,8 +515,8 @@ class AmcrestCam(Camera):
         max_tries = 3
         for tries in range(max_tries, 0, -1):
             try:
-                await getattr(self, f"_set_{func}")(value)
-                new_value = await getattr(self, f"_get_{func}")()
+                await getattr(self, f"_async_set_{func}")(value)
+                new_value = await getattr(self, f"_async_get_{func}")()
                 if new_value != value:
                     raise AmcrestCommandFailed
             except (AmcrestError, AmcrestCommandFailed) as error:
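
Note: a sketch of the set-then-verify retry loop the hunk renames to the _async_* helpers; all names here are illustrative stand-ins, not the amcrest integration's API.

import asyncio

class CommandFailed(Exception):
    """Raised when the device did not accept the new value."""

async def set_with_verify(setter, getter, value, max_tries=3, delay=1.0):
    """Write a value, read it back, and retry until it sticks."""
    for tries in range(max_tries, 0, -1):
        try:
            await setter(value)
            if await getter() != value:
                raise CommandFailed
            return True
        except CommandFailed:
            if tries == 1:
                return False
            await asyncio.sleep(delay)
    return False
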
homeassistant/components/amcrest/manifest.json
@@ -2,7 +2,7 @@
   "domain": "amcrest",
   "name": "Amcrest",
   "documentation": "https://www.home-assistant.io/integrations/amcrest",
-  "requirements": ["amcrest==1.9.3"],
+  "requirements": ["amcrest==1.9.4"],
   "dependencies": ["ffmpeg"],
   "codeowners": ["@flacjacket"],
   "iot_class": "local_polling"

homeassistant/components/androidtv/__init__.py
@@ -18,6 +18,7 @@ from homeassistant.const import (
 from homeassistant.core import HomeAssistant
 from homeassistant.exceptions import ConfigEntryNotReady
 from homeassistant.helpers import entity_registry as er
+from homeassistant.helpers.device_registry import format_mac
 from homeassistant.helpers.dispatcher import async_dispatcher_send
 from homeassistant.helpers.storage import STORAGE_DIR
 from homeassistant.helpers.typing import ConfigType
@@ -33,16 +34,30 @@ from .const import (
     DEVICE_ANDROIDTV,
     DEVICE_FIRETV,
     DOMAIN,
+    PROP_ETHMAC,
+    PROP_SERIALNO,
+    PROP_WIFIMAC,
     SIGNAL_CONFIG_ENTITY,
 )

 PLATFORMS = [Platform.MEDIA_PLAYER]
 RELOAD_OPTIONS = [CONF_STATE_DETECTION_RULES]

+_INVALID_MACS = {"ff:ff:ff:ff:ff:ff"}
+
 _LOGGER = logging.getLogger(__name__)


+def get_androidtv_mac(dev_props):
+    """Return formatted mac from device properties."""
+    for prop_mac in (PROP_ETHMAC, PROP_WIFIMAC):
+        if if_mac := dev_props.get(prop_mac):
+            mac = format_mac(if_mac)
+            if mac not in _INVALID_MACS:
+                return mac
+    return None
+
+
 def _setup_androidtv(hass, config):
     """Generate an ADB key (if needed) and load it."""
     adbkey = config.get(CONF_ADBKEY, hass.config.path(STORAGE_DIR, "androidtv_adbkey"))
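
Note: a self-contained sketch of why the new helper filters MAC addresses: some devices report the broadcast address for an interface that is down, and that must never become a unique_id. format_mac here is a simplified stand-in for homeassistant.helpers.device_registry.format_mac, and the property keys are illustrative.

from __future__ import annotations

_INVALID_MACS = {"ff:ff:ff:ff:ff:ff"}

def format_mac(mac: str) -> str:
    """Simplified stand-in: normalize separators and case."""
    return mac.replace("-", ":").lower()

def pick_mac(dev_props: dict) -> str | None:
    for key in ("ethmac", "wifimac"):  # illustrative property names
        if raw := dev_props.get(key):
            mac = format_mac(raw)
            if mac not in _INVALID_MACS:
                return mac
    return None

assert pick_mac({"ethmac": "FF-FF-FF-FF-FF-FF", "wifimac": "AA-BB-CC-DD-EE-FF"}) == "aa:bb:cc:dd:ee:ff"
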
homeassistant/components/androidtv/config_flow.py
@@ -11,9 +11,8 @@ from homeassistant import config_entries
 from homeassistant.const import CONF_DEVICE_CLASS, CONF_HOST, CONF_NAME, CONF_PORT
 from homeassistant.core import callback
 from homeassistant.helpers import config_validation as cv
-from homeassistant.helpers.device_registry import format_mac

-from . import async_connect_androidtv
+from . import async_connect_androidtv, get_androidtv_mac
 from .const import (
     CONF_ADB_SERVER_IP,
     CONF_ADB_SERVER_PORT,
@@ -124,9 +123,15 @@ class AndroidTVFlowHandler(config_entries.ConfigFlow, domain=DOMAIN):
             return RESULT_CONN_ERROR, None

         dev_prop = aftv.device_properties
-        unique_id = format_mac(
-            dev_prop.get(PROP_ETHMAC) or dev_prop.get(PROP_WIFIMAC, "")
-        )
+        _LOGGER.info(
+            "Android TV at %s: %s = %r, %s = %r",
+            user_input[CONF_HOST],
+            PROP_ETHMAC,
+            dev_prop.get(PROP_ETHMAC),
+            PROP_WIFIMAC,
+            dev_prop.get(PROP_WIFIMAC),
+        )
+        unique_id = get_androidtv_mac(dev_prop)
         await aftv.adb_close()
         return None, unique_id

homeassistant/components/androidtv/manifest.json
@@ -4,7 +4,7 @@
   "documentation": "https://www.home-assistant.io/integrations/androidtv",
   "requirements": [
     "adb-shell[async]==0.4.0",
-    "androidtv[async]==0.0.61",
+    "androidtv[async]==0.0.63",
     "pure-python-adb[async]==0.3.0.dev0"
   ],
   "codeowners": ["@JeffLIrion", "@ollo69"],

homeassistant/components/androidtv/media_player.py
@@ -51,12 +51,13 @@ from homeassistant.const import (
 )
 from homeassistant.core import HomeAssistant
 from homeassistant.helpers import config_validation as cv, entity_platform
-from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC, format_mac
+from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC
 from homeassistant.helpers.dispatcher import async_dispatcher_connect
 from homeassistant.helpers.entity import DeviceInfo
 from homeassistant.helpers.entity_platform import AddEntitiesCallback
 from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType

+from . import get_androidtv_mac
 from .const import (
     ANDROID_DEV,
     ANDROID_DEV_OPT,
@@ -80,8 +81,6 @@ from .const import (
     DEVICE_ANDROIDTV,
     DEVICE_CLASSES,
     DOMAIN,
-    PROP_ETHMAC,
-    PROP_WIFIMAC,
     SIGNAL_CONFIG_ENTITY,
 )
@@ -343,7 +342,7 @@ class ADBDevice(MediaPlayerEntity):
             self._attr_device_info[ATTR_MANUFACTURER] = manufacturer
         if sw_version := info.get(ATTR_SW_VERSION):
             self._attr_device_info[ATTR_SW_VERSION] = sw_version
-        if mac := format_mac(info.get(PROP_ETHMAC) or info.get(PROP_WIFIMAC, "")):
+        if mac := get_androidtv_mac(info):
            self._attr_device_info[ATTR_CONNECTIONS] = {(CONNECTION_NETWORK_MAC, mac)}

        self._app_id_to_name = {}

homeassistant/components/aseko_pool_live/manifest.json
@@ -3,7 +3,7 @@
   "name": "Aseko Pool Live",
   "config_flow": true,
   "documentation": "https://www.home-assistant.io/integrations/aseko_pool_live",
-  "requirements": ["aioaseko==0.0.1"],
+  "requirements": ["aioaseko==0.0.2"],
   "codeowners": [
     "@milanmeu"
   ],

homeassistant/components/august/__init__.py
@@ -45,7 +45,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
         raise ConfigEntryAuthFailed from err
     except asyncio.TimeoutError as err:
         raise ConfigEntryNotReady("Timed out connecting to august api") from err
-    except (ClientResponseError, CannotConnect) as err:
+    except (AugustApiAIOHTTPError, ClientResponseError, CannotConnect) as err:
         raise ConfigEntryNotReady from err

@@ -75,6 +75,7 @@ async def async_setup_august(
         hass.config_entries.async_update_entry(config_entry, data=config_data)

     await august_gateway.async_authenticate()
+    await august_gateway.async_refresh_access_token_if_needed()

     hass.data.setdefault(DOMAIN, {})
     data = hass.data[DOMAIN][config_entry.entry_id] = {
@@ -106,11 +107,10 @@ class AugustData(AugustSubscriberMixin):
     async def async_setup(self):
         """Async setup of august device data and activities."""
         token = self._august_gateway.access_token
-        user_data, locks, doorbells = await asyncio.gather(
-            self._api.async_get_user(token),
-            self._api.async_get_operable_locks(token),
-            self._api.async_get_doorbells(token),
-        )
+        # This used to be a gather but it was less reliable with august's recent api changes.
+        user_data = await self._api.async_get_user(token)
+        locks = await self._api.async_get_operable_locks(token)
+        doorbells = await self._api.async_get_doorbells(token)
         if not doorbells:
             doorbells = []
         if not locks:
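
Note: a sketch contrasting the two call shapes in the last hunk. gather fires all three requests concurrently; the sequential form issues them one at a time, which the in-diff comment says proved more reliable against this API. fetch is a stand-in for an HTTP round-trip.

import asyncio

async def fetch(name: str) -> str:
    await asyncio.sleep(0.01)  # stands in for an HTTP request
    return name

async def concurrent() -> list[str]:
    # all three requests in flight at once
    return await asyncio.gather(fetch("user"), fetch("locks"), fetch("doorbells"))

async def sequential() -> tuple[str, str, str]:
    # one request at a time, in order
    return (await fetch("user"), await fetch("locks"), await fetch("doorbells"))

asyncio.run(sequential())
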
homeassistant/components/august/manifest.json
@@ -2,7 +2,7 @@
   "domain": "august",
   "name": "August",
   "documentation": "https://www.home-assistant.io/integrations/august",
-  "requirements": ["yalexs==1.1.20"],
+  "requirements": ["yalexs==1.1.22"],
   "codeowners": ["@bdraco"],
   "dhcp": [
     {

homeassistant/components/bosch_shc/manifest.json
@@ -3,7 +3,7 @@
   "name": "Bosch SHC",
   "config_flow": true,
   "documentation": "https://www.home-assistant.io/integrations/bosch_shc",
-  "requirements": ["boschshcpy==0.2.28"],
+  "requirements": ["boschshcpy==0.2.29"],
   "zeroconf": [{ "type": "_http._tcp.local.", "name": "bosch shc*" }],
   "iot_class": "local_push",
   "codeowners": ["@tschamm"],

homeassistant/components/button/__init__.py
@@ -113,8 +113,9 @@ class ButtonEntity(RestoreEntity):
         self.async_write_ha_state()
         await self.async_press()

-    async def async_added_to_hass(self) -> None:
+    async def async_internal_added_to_hass(self) -> None:
         """Call when the button is added to hass."""
+        await super().async_internal_added_to_hass()
         state = await self.async_get_last_state()
         if state is not None and state.state is not None:
             self.__last_pressed = dt_util.parse_datetime(state.state)
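
Note: moving the restore into async_internal_added_to_hass means subclasses that override async_added_to_hass without calling super() no longer break restoration. A sketch of the parse step, assuming dt_util.parse_datetime behaves like datetime.fromisoformat for this purpose:

from __future__ import annotations
from datetime import datetime

def restore_last_pressed(stored_state: str | None) -> datetime | None:
    """Recover the last-pressed timestamp from a stored state string."""
    if stored_state in (None, "unknown", "unavailable"):
        return None
    try:
        return datetime.fromisoformat(stored_state)
    except ValueError:
        return None

assert restore_last_pressed("2022-02-03T12:00:00+00:00") is not None
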
homeassistant/components/caldav/calendar.py
@@ -232,7 +232,11 @@ class WebDavCalendarData:
                 new_events.append(new_event)
             elif _start_of_tomorrow <= start_dt:
                 break
-        vevents = [event.instance.vevent for event in results + new_events]
+        vevents = [
+            event.instance.vevent
+            for event in results + new_events
+            if hasattr(event.instance, "vevent")
+        ]

         # dtstart can be a date or datetime depending if the event lasts a
         # whole day. Convert everything to datetime to be able to sort it
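
Note: a self-contained sketch of the guard added above. Calendar items that are not VEVENTs (a VTODO, for example) have no vevent attribute, so filtering on hasattr avoids an AttributeError. The objects below are illustrative stand-ins for the caldav library's event instances.

from types import SimpleNamespace

results = [
    SimpleNamespace(instance=SimpleNamespace(vevent="meeting")),
    SimpleNamespace(instance=SimpleNamespace()),  # e.g. a VTODO with no vevent
]

vevents = [
    event.instance.vevent
    for event in results
    if hasattr(event.instance, "vevent")
]
assert vevents == ["meeting"]
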
homeassistant/components/camera/__init__.py
@@ -222,7 +222,12 @@ async def async_get_mjpeg_stream(
     """Fetch an mjpeg stream from a camera entity."""
     camera = _get_camera_from_entity_id(hass, entity_id)

-    return await camera.handle_async_mjpeg_stream(request)
+    try:
+        stream = await camera.handle_async_mjpeg_stream(request)
+    except ConnectionResetError:
+        stream = None
+        _LOGGER.debug("Error while writing MJPEG stream to transport")
+    return stream


 async def async_get_still_stream(
@@ -784,7 +789,11 @@ class CameraMjpegStream(CameraView):
     async def handle(self, request: web.Request, camera: Camera) -> web.StreamResponse:
         """Serve camera stream, possibly with interval."""
         if (interval_str := request.query.get("interval")) is None:
-            stream = await camera.handle_async_mjpeg_stream(request)
+            try:
+                stream = await camera.handle_async_mjpeg_stream(request)
+            except ConnectionResetError:
+                stream = None
+                _LOGGER.debug("Error while writing MJPEG stream to transport")
             if stream is None:
                 raise web.HTTPBadGateway()
             return stream
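
Note: a sketch of the failure mode being handled. If the viewer disconnects while the MJPEG response is still being written, the write raises ConnectionResetError; converting that to None lets the view answer 502 instead of crashing the handler. open_stream is a hypothetical stand-in for handle_async_mjpeg_stream.

import logging

_LOGGER = logging.getLogger(__name__)

async def safe_stream(open_stream, request):
    """Return the stream, or None if the client went away mid-write."""
    try:
        return await open_stream(request)
    except ConnectionResetError:
        _LOGGER.debug("Error while writing MJPEG stream to transport")
        return None
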
homeassistant/components/cast/media_player.py
@@ -50,6 +50,7 @@ from homeassistant.const import (
     CAST_APP_ID_HOMEASSISTANT_LOVELACE,
     EVENT_HOMEASSISTANT_STOP,
     STATE_IDLE,
+    STATE_OFF,
     STATE_PAUSED,
     STATE_PLAYING,
 )
@@ -636,7 +637,7 @@ class CastDevice(MediaPlayerEntity):
                 return STATE_PLAYING
             return STATE_IDLE
         if self._chromecast is not None and self._chromecast.is_idle:
-            return STATE_IDLE
+            return STATE_OFF
         return None

     @property

homeassistant/components/config/device_registry.py
@@ -62,6 +62,9 @@ async def websocket_update_device(hass, connection, msg):
     msg.pop("type")
     msg_id = msg.pop("id")

+    if msg.get("disabled_by") is not None:
+        msg["disabled_by"] = DeviceEntryDisabler(msg["disabled_by"])
+
     entry = registry.async_update_device(**msg)

     connection.send_message(websocket_api.result_message(msg_id, _entry_dict(entry)))

homeassistant/components/cpuspeed/sensor.py
@@ -81,8 +81,8 @@ class CPUSpeedSensor(SensorEntity):

         if info:
             self._attr_extra_state_attributes = {
-                ATTR_ARCH: info["arch_string_raw"],
-                ATTR_BRAND: info["brand_raw"],
+                ATTR_ARCH: info.get("arch_string_raw"),
+                ATTR_BRAND: info.get("brand_raw"),
             }
             if HZ_ADVERTISED in info:
                 self._attr_extra_state_attributes[ATTR_HZ] = round(

homeassistant/components/default_config/manifest.json
@@ -7,7 +7,6 @@
     "cloud",
     "counter",
     "dhcp",
-    "diagnostics",
     "energy",
     "frontend",
     "history",

homeassistant/components/dhcp/__init__.py
@@ -179,6 +179,7 @@ class WatcherBase:
             lowercase_hostname,
         )

+        matched_domains = set()
         for entry in self._integration_matchers:
             if MAC_ADDRESS in entry and not fnmatch.fnmatch(
                 uppercase_mac, entry[MAC_ADDRESS]
@@ -191,6 +192,11 @@ class WatcherBase:
                 continue

             _LOGGER.debug("Matched %s against %s", data, entry)
+            if entry["domain"] in matched_domains:
+                # Only match once per domain
+                continue
+
+            matched_domains.add(entry["domain"])
             discovery_flow.async_create_flow(
                 self.hass,
                 entry["domain"],
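
Note: a sketch of the dedup the hunk introduces. One DHCP packet can match several patterns for the same integration, but only one discovery flow per domain should be created. The matcher entries below are illustrative.

matchers = [
    {"domain": "august", "hostname": "connect*"},
    {"domain": "august", "macaddress": "D86162*"},
    {"domain": "flux_led", "macaddress": "18B905*"},
]

def domains_to_flow(matched: list) -> list:
    """Return each matched domain once, in first-match order."""
    matched_domains = set()
    ordered = []
    for entry in matched:
        if entry["domain"] in matched_domains:
            continue  # only match once per domain
        matched_domains.add(entry["domain"])
        ordered.append(entry["domain"])
    return ordered

assert domains_to_flow(matchers) == ["august", "flux_led"]
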
homeassistant/components/dnsip/config_flow.py
@@ -33,6 +33,13 @@ DATA_SCHEMA = vol.Schema(
         vol.Required(CONF_HOSTNAME, default=DEFAULT_HOSTNAME): cv.string,
     }
 )
+DATA_SCHEMA_ADV = vol.Schema(
+    {
+        vol.Required(CONF_HOSTNAME, default=DEFAULT_HOSTNAME): cv.string,
+        vol.Optional(CONF_RESOLVER, default=DEFAULT_RESOLVER): cv.string,
+        vol.Optional(CONF_RESOLVER_IPV6, default=DEFAULT_RESOLVER_IPV6): cv.string,
+    }
+)


 async def async_validate_hostname(
@@ -94,8 +101,8 @@ class DnsIPConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):

             hostname = user_input[CONF_HOSTNAME]
             name = DEFAULT_NAME if hostname == DEFAULT_HOSTNAME else hostname
-            resolver = DEFAULT_RESOLVER
-            resolver_ipv6 = DEFAULT_RESOLVER_IPV6
+            resolver = user_input.get(CONF_RESOLVER, DEFAULT_RESOLVER)
+            resolver_ipv6 = user_input.get(CONF_RESOLVER_IPV6, DEFAULT_RESOLVER_IPV6)

             validate = await async_validate_hostname(hostname, resolver, resolver_ipv6)
@@ -119,6 +126,12 @@ class DnsIPConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
                 },
             )

+        if self.show_advanced_options is True:
+            return self.async_show_form(
+                step_id="user",
+                data_schema=DATA_SCHEMA_ADV,
+                errors=errors,
+            )
         return self.async_show_form(
             step_id="user",
             data_schema=DATA_SCHEMA,
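
Note: a sketch of the form-selection logic, with voluptuous schemas mirroring the hunk. The default values are illustrative placeholders, not necessarily the integration's real defaults; the point is that the advanced schema extends the basic one and is only offered when advanced mode is on.

import voluptuous as vol

DEFAULT_HOSTNAME = "myip.example.org"       # illustrative
DEFAULT_RESOLVER = "208.67.222.222"         # illustrative
DEFAULT_RESOLVER_IPV6 = "2620:119:53::53"   # illustrative

DATA_SCHEMA = vol.Schema({vol.Required("hostname", default=DEFAULT_HOSTNAME): str})
DATA_SCHEMA_ADV = DATA_SCHEMA.extend(
    {
        vol.Optional("resolver", default=DEFAULT_RESOLVER): str,
        vol.Optional("resolver_ipv6", default=DEFAULT_RESOLVER_IPV6): str,
    }
)

def pick_schema(show_advanced_options: bool) -> vol.Schema:
    return DATA_SCHEMA_ADV if show_advanced_options else DATA_SCHEMA
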
homeassistant/components/dnsip/strings.json
@@ -3,7 +3,9 @@
     "step": {
       "user": {
         "data": {
-          "hostname": "The hostname for which to perform the DNS query"
+          "hostname": "The hostname for which to perform the DNS query",
+          "resolver": "Resolver for IPV4 lookup",
+          "resolver_ipv6": "Resolver for IPV6 lookup"
         }
       }
     },

homeassistant/components/dnsip/translations/en.json
@@ -6,7 +6,9 @@
     "step": {
       "user": {
         "data": {
-          "hostname": "The hostname for which to perform the DNS query"
+          "hostname": "The hostname for which to perform the DNS query",
+          "resolver": "Resolver for IPV4 lookup",
+          "resolver_ipv6": "Resolver for IPV6 lookup"
         }
       }
     }

homeassistant/components/doods/manifest.json
@@ -2,7 +2,7 @@
   "domain": "doods",
   "name": "DOODS - Dedicated Open Object Detection Service",
   "documentation": "https://www.home-assistant.io/integrations/doods",
-  "requirements": ["pydoods==1.0.2", "pillow==9.0.0"],
+  "requirements": ["pydoods==1.0.2", "pillow==9.0.1"],
   "codeowners": [],
   "iot_class": "local_polling"
 }

homeassistant/components/envisalink/__init__.py
@@ -137,6 +137,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
         keep_alive,
         hass.loop,
         connection_timeout,
+        False,
     )
     hass.data[DATA_EVL] = controller

@@ -181,12 +182,6 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
         _LOGGER.debug("The envisalink sent a partition update event")
         async_dispatcher_send(hass, SIGNAL_PARTITION_UPDATE, data)

-    @callback
-    def async_zone_bypass_update(data):
-        """Handle zone bypass status updates."""
-        _LOGGER.debug("Envisalink sent a zone bypass update event. Updating zones")
-        async_dispatcher_send(hass, SIGNAL_ZONE_BYPASS_UPDATE, data)
-
     @callback
     def stop_envisalink(event):
         """Shutdown envisalink connection and thread on exit."""
@@ -206,7 +201,6 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
     controller.callback_login_failure = async_login_fail_callback
     controller.callback_login_timeout = async_connection_fail_callback
     controller.callback_login_success = async_connection_success_callback
-    controller.callback_zone_bypass_update = async_zone_bypass_update

     _LOGGER.info("Start envisalink")
     controller.start()
@@ -240,13 +234,9 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
             hass, Platform.BINARY_SENSOR, "envisalink", {CONF_ZONES: zones}, config
         )
     )
-    # Only DSC panels support getting zone bypass status
-    if panel_type == PANEL_TYPE_DSC:
-        hass.async_create_task(
-            async_load_platform(
-                hass, "switch", "envisalink", {CONF_ZONES: zones}, config
-            )
-        )
+
+    # Zone bypass switches are not currently created due to an issue with some panels.
+    # These switches will be re-added in the future after some further refactoring of the integration.

     hass.services.async_register(
         DOMAIN, SERVICE_CUSTOM_FUNCTION, handle_custom_function, schema=SERVICE_SCHEMA

homeassistant/components/envisalink/manifest.json
@@ -2,7 +2,7 @@
   "domain": "envisalink",
   "name": "Envisalink",
   "documentation": "https://www.home-assistant.io/integrations/envisalink",
-  "requirements": ["pyenvisalink==4.3"],
-  "codeowners": [],
+  "requirements": ["pyenvisalink==4.4"],
+  "codeowners": ["@ufodone"],
   "iot_class": "local_push"
 }

homeassistant/components/esphome/manifest.json
@@ -3,7 +3,7 @@
   "name": "ESPHome",
   "config_flow": true,
   "documentation": "https://www.home-assistant.io/integrations/esphome",
-  "requirements": ["aioesphomeapi==10.8.1"],
+  "requirements": ["aioesphomeapi==10.8.2"],
   "zeroconf": ["_esphomelib._tcp.local."],
   "codeowners": ["@OttoWinter", "@jesserockz"],
   "after_dependencies": ["zeroconf", "tag"],

homeassistant/components/flunearyou/__init__.py
@@ -59,7 +59,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
         update_interval=DEFAULT_UPDATE_INTERVAL,
         update_method=partial(async_update, api_category),
     )
-    data_init_tasks.append(coordinator.async_refresh())
+    data_init_tasks.append(coordinator.async_config_entry_first_refresh())

     await asyncio.gather(*data_init_tasks)
     hass.data.setdefault(DOMAIN, {})

homeassistant/components/flux_led/manifest.json
@@ -4,7 +4,7 @@
   "config_flow": true,
   "dependencies": ["network"],
   "documentation": "https://www.home-assistant.io/integrations/flux_led",
-  "requirements": ["flux_led==0.28.17"],
+  "requirements": ["flux_led==0.28.26"],
   "quality_scale": "platinum",
   "codeowners": ["@icemanch", "@bdraco"],
   "iot_class": "local_push",

homeassistant/components/fritz/button.py
@@ -12,10 +12,9 @@ from homeassistant.components.button import (
     ButtonEntityDescription,
 )
 from homeassistant.config_entries import ConfigEntry
-from homeassistant.const import ENTITY_CATEGORY_CONFIG
 from homeassistant.core import HomeAssistant
 from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC
-from homeassistant.helpers.entity import DeviceInfo
+from homeassistant.helpers.entity import DeviceInfo, EntityCategory
 from homeassistant.helpers.entity_platform import AddEntitiesCallback

 from .common import AvmWrapper
@@ -41,28 +40,28 @@ BUTTONS: Final = [
         key="firmware_update",
         name="Firmware Update",
         device_class=ButtonDeviceClass.UPDATE,
-        entity_category=ENTITY_CATEGORY_CONFIG,
+        entity_category=EntityCategory.CONFIG,
         press_action=lambda avm_wrapper: avm_wrapper.async_trigger_firmware_update(),
     ),
     FritzButtonDescription(
         key="reboot",
         name="Reboot",
         device_class=ButtonDeviceClass.RESTART,
-        entity_category=ENTITY_CATEGORY_CONFIG,
+        entity_category=EntityCategory.CONFIG,
         press_action=lambda avm_wrapper: avm_wrapper.async_trigger_reboot(),
     ),
     FritzButtonDescription(
         key="reconnect",
         name="Reconnect",
         device_class=ButtonDeviceClass.RESTART,
-        entity_category=ENTITY_CATEGORY_CONFIG,
+        entity_category=EntityCategory.CONFIG,
         press_action=lambda avm_wrapper: avm_wrapper.async_trigger_reconnect(),
     ),
     FritzButtonDescription(
         key="cleanup",
         name="Cleanup",
         icon="mdi:broom",
-        entity_category=ENTITY_CATEGORY_CONFIG,
+        entity_category=EntityCategory.CONFIG,
         press_action=lambda avm_wrapper: avm_wrapper.async_trigger_cleanup(),
     ),
 ]

homeassistant/components/fritz/common.py
@@ -155,7 +155,8 @@ class FritzBoxTools(update_coordinator.DataUpdateCoordinator):
         self.hass = hass
         self.host = host
         self.mesh_role = MeshRoles.NONE
-        self.device_is_router: bool = True
+        self.device_conn_type: str | None = None
+        self.device_is_router: bool = False
         self.password = password
         self.port = port
         self.username = username
@@ -213,15 +214,23 @@ class FritzBoxTools(update_coordinator.DataUpdateCoordinator):
         self._current_firmware = info.get("NewSoftwareVersion")

         self._update_available, self._latest_firmware = self._update_device_info()
-        self.device_is_router = "WANIPConn1" in self.connection.services
+        if "Layer3Forwarding1" in self.connection.services:
+            if connection_type := self.connection.call_action(
+                "Layer3Forwarding1", "GetDefaultConnectionService"
+            ).get("NewDefaultConnectionService"):
+                # Return NewDefaultConnectionService sample: "1.WANPPPConnection.1"
+                self.device_conn_type = connection_type[2:][:-2]
+                self.device_is_router = self.connection.call_action(
+                    self.device_conn_type, "GetInfo"
+                ).get("NewEnable")

     @callback
     async def _async_update_data(self) -> None:
         """Update FritzboxTools data."""
         try:
             await self.async_scan_devices()
-        except (FritzSecurityError, FritzConnectionException) as ex:
-            raise update_coordinator.UpdateFailed from ex
+        except FRITZ_EXCEPTIONS as ex:
+            raise update_coordinator.UpdateFailed(ex) from ex

     @property
     def unique_id(self) -> str:
@@ -294,11 +303,19 @@ class FritzBoxTools(update_coordinator.DataUpdateCoordinator):

     def _get_wan_access(self, ip_address: str) -> bool | None:
         """Get WAN access rule for given IP address."""
-        return not self.connection.call_action(
-            "X_AVM-DE_HostFilter:1",
-            "GetWANAccessByIP",
-            NewIPv4Address=ip_address,
-        ).get("NewDisallow")
+        try:
+            return not self.connection.call_action(
+                "X_AVM-DE_HostFilter:1",
+                "GetWANAccessByIP",
+                NewIPv4Address=ip_address,
+            ).get("NewDisallow")
+        except FRITZ_EXCEPTIONS as ex:
+            _LOGGER.debug(
+                "could not get WAN access rule for client device with IP '%s', error: %s",
+                ip_address,
+                ex,
+            )
+            return None

     async def async_scan_devices(self, now: datetime | None = None) -> None:
         """Wrap up FritzboxTools class scan."""
@@ -310,11 +327,19 @@ class FritzBoxTools(update_coordinator.DataUpdateCoordinator):
         _LOGGER.debug("Checking host info for FRITZ!Box device %s", self.host)
         self._update_available, self._latest_firmware = self._update_device_info()

-        try:
-            topology = self.fritz_hosts.get_mesh_topology()
-        except FritzActionError:
-            self.mesh_role = MeshRoles.SLAVE
-            return
+        if (
+            "Hosts1" not in self.connection.services
+            or "X_AVM-DE_GetMeshListPath"
+            not in self.connection.services["Hosts1"].actions
+        ):
+            self.mesh_role = MeshRoles.NONE
+        else:
+            try:
+                topology = self.fritz_hosts.get_mesh_topology()
+            except FritzActionError:
+                self.mesh_role = MeshRoles.SLAVE
+                # Avoid duplicating device trackers
+                return

         _LOGGER.debug("Checking devices for FRITZ!Box device %s", self.host)
         _default_consider_home = DEFAULT_CONSIDER_HOME.total_seconds()
@@ -559,11 +584,11 @@ class AvmWrapper(FritzBoxTools):
             )
             return {}

-    async def async_get_wan_dsl_interface_config(self) -> dict[str, Any]:
-        """Call WANDSLInterfaceConfig service."""
+    async def async_get_wan_link_properties(self) -> dict[str, Any]:
+        """Call WANCommonInterfaceConfig service."""

         return await self.hass.async_add_executor_job(
-            partial(self.get_wan_dsl_interface_config)
+            partial(self.get_wan_link_properties)
         )

     async def async_get_port_mapping(self, con_type: str, index: int) -> dict[str, Any]:
@@ -663,10 +688,12 @@ class AvmWrapper(FritzBoxTools):

         return self._service_call_action("WLANConfiguration", str(index), "GetInfo")

-    def get_wan_dsl_interface_config(self) -> dict[str, Any]:
-        """Call WANDSLInterfaceConfig service."""
+    def get_wan_link_properties(self) -> dict[str, Any]:
+        """Call WANCommonInterfaceConfig service."""

-        return self._service_call_action("WANDSLInterfaceConfig", "1", "GetInfo")
+        return self._service_call_action(
+            "WANCommonInterfaceConfig", "1", "GetCommonLinkProperties"
+        )

     def set_wlan_configuration(self, index: int, turn_on: bool) -> dict[str, Any]:
         """Call SetEnable action from WLANConfiguration service."""
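
Note: a sketch of the slicing used above. TR-064 returns NewDefaultConnectionService as a string like "1.WANPPPConnection.1"; [2:][:-2] strips the leading "1." and the trailing ".1", leaving the bare service name that can then be passed back into call_action.

def parse_connection_service(value: str) -> str:
    """Extract the TR-064 service name from NewDefaultConnectionService."""
    return value[2:][:-2]

assert parse_connection_service("1.WANPPPConnection.1") == "WANPPPConnection"
assert parse_connection_service("1.WANIPConnection.1") == "WANIPConnection"
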
homeassistant/components/fritz/diagnostics.py
@@ -25,10 +25,24 @@ async def async_get_config_entry_diagnostics(
         "current_firmware": avm_wrapper.current_firmware,
         "latest_firmware": avm_wrapper.latest_firmware,
         "update_available": avm_wrapper.update_available,
+        "connection_type": avm_wrapper.device_conn_type,
+        "is_router": avm_wrapper.device_is_router,
         "mesh_role": avm_wrapper.mesh_role,
         "last_update success": avm_wrapper.last_update_success,
         "last_exception": avm_wrapper.last_exception,
         "discovered_services": list(avm_wrapper.connection.services),
+        "client_devices": [
+            {
+                "connected_to": device.connected_to,
+                "connection_type": device.connection_type,
+                "hostname": device.hostname,
+                "is_connected": device.is_connected,
+                "last_activity": device.last_activity,
+                "wan_access": device.wan_access,
+            }
+            for _, device in avm_wrapper.devices.items()
+        ],
+        "wan_link_properties": await avm_wrapper.async_get_wan_link_properties(),
     },
 }

homeassistant/components/fritz/sensor.py
@@ -277,10 +277,14 @@ async def async_setup_entry(
     _LOGGER.debug("Setting up FRITZ!Box sensors")
     avm_wrapper: AvmWrapper = hass.data[DOMAIN][entry.entry_id]

-    dsl: bool = False
-    dslinterface = await avm_wrapper.async_get_wan_dsl_interface_config()
-    if dslinterface:
-        dsl = dslinterface["NewEnable"]
+    link_properties = await avm_wrapper.async_get_wan_link_properties()
+    dsl: bool = link_properties.get("NewWANAccessType") == "DSL"
+
+    _LOGGER.debug(
+        "WANAccessType of FritzBox %s is '%s'",
+        avm_wrapper.host,
+        link_properties.get("NewWANAccessType"),
+    )

     entities = [
         FritzBoxSensor(avm_wrapper, entry.title, description)

homeassistant/components/fritz/switch.py
@@ -81,16 +81,12 @@ def port_entities_list(

     _LOGGER.debug("Setting up %s switches", SWITCH_TYPE_PORTFORWARD)
     entities_list: list[FritzBoxPortSwitch] = []
-    connection_type = avm_wrapper.get_default_connection()
-    if not connection_type:
+    if not avm_wrapper.device_conn_type:
         _LOGGER.debug("The FRITZ!Box has no %s options", SWITCH_TYPE_PORTFORWARD)
         return []

-    # Return NewDefaultConnectionService sample: "1.WANPPPConnection.1"
-    con_type: str = connection_type["NewDefaultConnectionService"][2:][:-2]
-
     # Query port forwardings and setup a switch for each forward for the current device
-    resp = avm_wrapper.get_num_port_mapping(con_type)
+    resp = avm_wrapper.get_num_port_mapping(avm_wrapper.device_conn_type)
     if not resp:
         _LOGGER.debug("The FRITZ!Box has no %s options", SWITCH_TYPE_DEFLECTION)
         return []
@@ -107,7 +103,7 @@ def port_entities_list(

     for i in range(port_forwards_count):

-        portmap = avm_wrapper.get_port_mapping(con_type, i)
+        portmap = avm_wrapper.get_port_mapping(avm_wrapper.device_conn_type, i)
         if not portmap:
             _LOGGER.debug("The FRITZ!Box has no %s options", SWITCH_TYPE_DEFLECTION)
             continue
@@ -133,7 +129,7 @@ def port_entities_list(
                 portmap,
                 port_name,
                 i,
-                con_type,
+                avm_wrapper.device_conn_type,
             )
         )

homeassistant/components/fritzbox/binary_sensor.py
@@ -13,8 +13,8 @@ from homeassistant.components.binary_sensor import (
     BinarySensorEntityDescription,
 )
 from homeassistant.config_entries import ConfigEntry
-from homeassistant.const import ENTITY_CATEGORY_CONFIG
 from homeassistant.core import HomeAssistant
+from homeassistant.helpers.entity import EntityCategory
 from homeassistant.helpers.entity_platform import AddEntitiesCallback

 from . import FritzBoxEntity
@@ -49,7 +49,7 @@ BINARY_SENSOR_TYPES: Final[tuple[FritzBinarySensorEntityDescription, ...]] = (
         key="lock",
         name="Button Lock on Device",
         device_class=BinarySensorDeviceClass.LOCK,
-        entity_category=ENTITY_CATEGORY_CONFIG,
+        entity_category=EntityCategory.CONFIG,
         suitable=lambda device: device.lock is not None,
         is_on=lambda device: not device.lock,
     ),
@@ -57,7 +57,7 @@ BINARY_SENSOR_TYPES: Final[tuple[FritzBinarySensorEntityDescription, ...]] = (
         key="device_lock",
         name="Button Lock via UI",
         device_class=BinarySensorDeviceClass.LOCK,
-        entity_category=ENTITY_CATEGORY_CONFIG,
+        entity_category=EntityCategory.CONFIG,
         suitable=lambda device: device.device_lock is not None,
         is_on=lambda device: not device.device_lock,
     ),

homeassistant/components/frontend/manifest.json
@@ -3,13 +3,14 @@
   "name": "Home Assistant Frontend",
   "documentation": "https://www.home-assistant.io/integrations/frontend",
   "requirements": [
-    "home-assistant-frontend==20220202.0"
+    "home-assistant-frontend==20220203.0"
   ],
   "dependencies": [
     "api",
     "auth",
     "config",
     "device_automation",
+    "diagnostics",
     "http",
     "lovelace",
     "onboarding",

homeassistant/components/generic/camera.py
@@ -96,7 +96,7 @@ class GenericCamera(Camera):
         if self._stream_source is not None:
             self._stream_source.hass = hass
         self._limit_refetch = device_info[CONF_LIMIT_REFETCH_TO_URL_CHANGE]
-        self._attr_frames_interval = 1 / device_info[CONF_FRAMERATE]
+        self._attr_frame_interval = 1 / device_info[CONF_FRAMERATE]
         self._supported_features = SUPPORT_STREAM if self._stream_source else 0
         self.content_type = device_info[CONF_CONTENT_TYPE]
         self.verify_ssl = device_info[CONF_VERIFY_SSL]

homeassistant/components/github/config_flow.py
@@ -10,7 +10,6 @@ from aiogithubapi import (
     GitHubException,
     GitHubLoginDeviceModel,
     GitHubLoginOauthModel,
-    GitHubRepositoryModel,
 )
 from aiogithubapi.const import OAUTH_USER_LOGIN
 import voluptuous as vol
@@ -34,11 +33,12 @@ from .const import (
 )


-async def starred_repositories(hass: HomeAssistant, access_token: str) -> list[str]:
-    """Return a list of repositories that the user has starred."""
+async def get_repositories(hass: HomeAssistant, access_token: str) -> list[str]:
+    """Return a list of repositories that the user owns or has starred."""
     client = GitHubAPI(token=access_token, session=async_get_clientsession(hass))
+    repositories = set()

-    async def _get_starred() -> list[GitHubRepositoryModel] | None:
+    async def _get_starred_repositories() -> None:
         response = await client.user.starred(**{"params": {"per_page": 100}})
         if not response.is_last_page:
             results = await asyncio.gather(
@@ -54,16 +54,44 @@ async def get_repositories(hass: HomeAssistant, access_token: str) -> list[str]:
             for result in results:
                 response.data.extend(result.data)

-        return response.data
+        repositories.update(response.data)
+
+    async def _get_personal_repositories() -> None:
+        response = await client.user.repos(**{"params": {"per_page": 100}})
+        if not response.is_last_page:
+            results = await asyncio.gather(
+                *(
+                    client.user.repos(
+                        **{"params": {"per_page": 100, "page": page_number}},
+                    )
+                    for page_number in range(
+                        response.next_page_number, response.last_page_number + 1
+                    )
+                )
+            )
+            for result in results:
+                response.data.extend(result.data)
+
+        repositories.update(response.data)

     try:
-        result = await _get_starred()
+        await asyncio.gather(
+            *(
+                _get_starred_repositories(),
+                _get_personal_repositories(),
+            )
+        )
+
     except GitHubException:
         return DEFAULT_REPOSITORIES

-    if not result or len(result) == 0:
+    if len(repositories) == 0:
         return DEFAULT_REPOSITORIES
-    return sorted((repo.full_name for repo in result), key=str.casefold)
+
+    return sorted(
+        (repo.full_name for repo in repositories),
+        key=str.casefold,
+    )


 class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
@@ -153,9 +181,7 @@ class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
         assert self._login is not None

         if not user_input:
-            repositories = await starred_repositories(
-                self.hass, self._login.access_token
-            )
+            repositories = await get_repositories(self.hass, self._login.access_token)
             return self.async_show_form(
                 step_id="repositories",
                 data_schema=vol.Schema(
@@ -205,7 +231,7 @@ class OptionsFlowHandler(config_entries.OptionsFlow):
             configured_repositories: list[str] = self.config_entry.options[
                 CONF_REPOSITORIES
             ]
-            repositories = await starred_repositories(
+            repositories = await get_repositories(
                 self.hass, self.config_entry.data[CONF_ACCESS_TOKEN]
             )
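
Note: a self-contained sketch of the pagination pattern both inner helpers use: fetch page 1, and if it is not the last page, fetch the remaining pages concurrently and merge. fetch_page is a hypothetical stand-in for client.user.starred / client.user.repos.

import asyncio

async def fetch_page(page: int) -> dict:
    """Pretend API: two items per page, three pages total."""
    return {"data": [f"repo-{page}-{i}" for i in range(2)], "last_page": 3}

async def fetch_all() -> list:
    first = await fetch_page(1)
    data = list(first["data"])
    if first["last_page"] > 1:
        # remaining pages in parallel, like the asyncio.gather in the hunk
        results = await asyncio.gather(
            *(fetch_page(n) for n in range(2, first["last_page"] + 1))
        )
        for result in results:
            data.extend(result["data"])
    return data

assert len(asyncio.run(fetch_all())) == 6
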
homeassistant/components/github/manifest.json
@@ -3,7 +3,7 @@
   "name": "GitHub",
   "documentation": "https://www.home-assistant.io/integrations/github",
   "requirements": [
-    "aiogithubapi==22.1.0"
+    "aiogithubapi==22.2.0"
   ],
   "codeowners": [
     "@timmo001",

homeassistant/components/goodwe/number.py
@@ -9,9 +9,9 @@ from goodwe import Inverter, InverterError

 from homeassistant.components.number import NumberEntity, NumberEntityDescription
 from homeassistant.config_entries import ConfigEntry
-from homeassistant.const import ENTITY_CATEGORY_CONFIG, PERCENTAGE, POWER_WATT
+from homeassistant.const import PERCENTAGE, POWER_WATT
 from homeassistant.core import HomeAssistant
-from homeassistant.helpers.entity import DeviceInfo
+from homeassistant.helpers.entity import DeviceInfo, EntityCategory
 from homeassistant.helpers.entity_platform import AddEntitiesCallback

 from .const import DOMAIN, KEY_DEVICE_INFO, KEY_INVERTER
@@ -39,7 +39,7 @@ NUMBERS = (
         key="grid_export_limit",
         name="Grid export limit",
         icon="mdi:transmission-tower",
-        entity_category=ENTITY_CATEGORY_CONFIG,
+        entity_category=EntityCategory.CONFIG,
         unit_of_measurement=POWER_WATT,
         getter=lambda inv: inv.get_grid_export_limit(),
         setter=lambda inv, val: inv.set_grid_export_limit(val),
@@ -51,7 +51,7 @@ NUMBERS = (
         key="battery_discharge_depth",
         name="Depth of discharge (on-grid)",
         icon="mdi:battery-arrow-down",
-        entity_category=ENTITY_CATEGORY_CONFIG,
+        entity_category=EntityCategory.CONFIG,
         unit_of_measurement=PERCENTAGE,
         getter=lambda inv: inv.get_ongrid_battery_dod(),
         setter=lambda inv, val: inv.set_ongrid_battery_dod(val),

homeassistant/components/goodwe/select.py
@@ -5,9 +5,8 @@ from goodwe import Inverter, InverterError

 from homeassistant.components.select import SelectEntity, SelectEntityDescription
 from homeassistant.config_entries import ConfigEntry
-from homeassistant.const import ENTITY_CATEGORY_CONFIG
 from homeassistant.core import HomeAssistant
-from homeassistant.helpers.entity import DeviceInfo
+from homeassistant.helpers.entity import DeviceInfo, EntityCategory
 from homeassistant.helpers.entity_platform import AddEntitiesCallback

 from .const import DOMAIN, KEY_DEVICE_INFO, KEY_INVERTER
@@ -26,7 +25,7 @@ OPERATION_MODE = SelectEntityDescription(
     key="operation_mode",
     name="Inverter operation mode",
     icon="mdi:solar-power",
-    entity_category=ENTITY_CATEGORY_CONFIG,
+    entity_category=EntityCategory.CONFIG,
 )

homeassistant/components/group/cover.py
@@ -57,6 +57,8 @@ KEY_POSITION = "position"

 DEFAULT_NAME = "Cover Group"

+# No limit on parallel updates to enable a group calling another group
+PARALLEL_UPDATES = 0
+
 PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
     {

homeassistant/components/group/fan.py
@@ -52,6 +52,8 @@ SUPPORTED_FLAGS = {SUPPORT_SET_SPEED, SUPPORT_DIRECTION, SUPPORT_OSCILLATE}

 DEFAULT_NAME = "Fan Group"

+# No limit on parallel updates to enable a group calling another group
+PARALLEL_UPDATES = 0
+
 PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
     {

homeassistant/components/group/light.py
@@ -58,6 +58,9 @@ from .util import find_state_attributes, mean_tuple, reduce_attribute

 DEFAULT_NAME = "Light Group"

+# No limit on parallel updates to enable a group calling another group
+PARALLEL_UPDATES = 0
+
 PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
     {
         vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,

homeassistant/components/hdmi_cec/__init__.py
@@ -191,6 +191,8 @@ def parse_mapping(mapping, parents=None):
 def setup(hass: HomeAssistant, base_config: ConfigType) -> bool:  # noqa: C901
     """Set up the CEC capability."""

+    hass.data[DOMAIN] = {}
+
     # Parse configuration into a dict of device name to physical address
     # represented as a list of four elements.
     device_aliases = {}

homeassistant/components/hdmi_cec/media_player.py
@@ -26,7 +26,7 @@ from pycec.const import (

 from homeassistant.components.media_player import MediaPlayerEntity
 from homeassistant.components.media_player.const import (
-    DOMAIN,
+    DOMAIN as MP_DOMAIN,
     SUPPORT_NEXT_TRACK,
     SUPPORT_PAUSE,
     SUPPORT_PLAY_MEDIA,
@@ -48,11 +48,11 @@ from homeassistant.core import HomeAssistant
 from homeassistant.helpers.entity_platform import AddEntitiesCallback
 from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType

-from . import ATTR_NEW, CecEntity
+from . import ATTR_NEW, DOMAIN, CecEntity

 _LOGGER = logging.getLogger(__name__)

-ENTITY_ID_FORMAT = DOMAIN + ".{}"
+ENTITY_ID_FORMAT = MP_DOMAIN + ".{}"


 def setup_platform(
@@ -77,7 +77,7 @@ class CecPlayerEntity(CecEntity, MediaPlayerEntity):
     def __init__(self, device, logical) -> None:
         """Initialize the HDMI device."""
         CecEntity.__init__(self, device, logical)
-        self.entity_id = f"{DOMAIN}.hdmi_{hex(self._logical_address)[2:]}"
+        self.entity_id = f"{MP_DOMAIN}.hdmi_{hex(self._logical_address)[2:]}"

     def send_keypress(self, key):
         """Send keypress to CEC adapter."""

homeassistant/components/hdmi_cec/switch.py
@@ -3,17 +3,17 @@ from __future__ import annotations

 import logging

-from homeassistant.components.switch import DOMAIN, SwitchEntity
+from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN, SwitchEntity
 from homeassistant.const import STATE_OFF, STATE_ON
 from homeassistant.core import HomeAssistant
 from homeassistant.helpers.entity_platform import AddEntitiesCallback
 from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType

-from . import ATTR_NEW, CecEntity
+from . import ATTR_NEW, DOMAIN, CecEntity

 _LOGGER = logging.getLogger(__name__)

-ENTITY_ID_FORMAT = DOMAIN + ".{}"
+ENTITY_ID_FORMAT = SWITCH_DOMAIN + ".{}"


 def setup_platform(
@@ -38,7 +38,7 @@ class CecSwitchEntity(CecEntity, SwitchEntity):
     def __init__(self, device, logical) -> None:
         """Initialize the HDMI device."""
         CecEntity.__init__(self, device, logical)
-        self.entity_id = f"{DOMAIN}.hdmi_{hex(self._logical_address)[2:]}"
+        self.entity_id = f"{SWITCH_DOMAIN}.hdmi_{hex(self._logical_address)[2:]}"

     def turn_on(self, **kwargs) -> None:
         """Turn device on."""

homeassistant/components/homekit/diagnostics.py (new file, 44 lines)
@@ -0,0 +1,44 @@
+"""Diagnostics support for HomeKit."""
+from __future__ import annotations
+
+from typing import Any
+
+from pyhap.accessory_driver import AccessoryDriver
+from pyhap.state import State
+
+from homeassistant.config_entries import ConfigEntry
+from homeassistant.core import HomeAssistant
+
+from . import HomeKit
+from .const import DOMAIN, HOMEKIT
+
+
+async def async_get_config_entry_diagnostics(
+    hass: HomeAssistant, entry: ConfigEntry
+) -> dict[str, Any]:
+    """Return diagnostics for a config entry."""
+    homekit: HomeKit = hass.data[DOMAIN][entry.entry_id][HOMEKIT]
+    driver: AccessoryDriver = homekit.driver
+    data: dict[str, Any] = {
+        "status": homekit.status,
+        "config-entry": {
+            "title": entry.title,
+            "version": entry.version,
+            "data": dict(entry.data),
+            "options": dict(entry.options),
+        },
+    }
+    if not driver:
+        return data
+    data.update(driver.get_accessories())
+    state: State = driver.state
+    data.update(
+        {
+            "client_properties": {
+                str(client): props for client, props in state.client_properties.items()
+            },
+            "config_version": state.config_version,
+            "pairing_id": state.mac,
+        }
+    )
+    return data

@@ -1,4 +1,6 @@
|
||||
"""Class to hold all light accessories."""
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
import math
|
||||
|
||||
@@ -12,12 +14,13 @@ from homeassistant.components.light import (
|
||||
ATTR_HS_COLOR,
|
||||
ATTR_MAX_MIREDS,
|
||||
ATTR_MIN_MIREDS,
|
||||
ATTR_RGB_COLOR,
|
||||
ATTR_RGBW_COLOR,
|
||||
ATTR_RGBWW_COLOR,
|
||||
ATTR_SUPPORTED_COLOR_MODES,
|
||||
ATTR_WHITE,
|
||||
COLOR_MODE_RGBW,
|
||||
COLOR_MODE_RGBWW,
|
||||
COLOR_MODE_WHITE,
|
||||
DOMAIN,
|
||||
brightness_supported,
|
||||
color_supported,
|
||||
@@ -32,9 +35,9 @@ from homeassistant.const import (
|
||||
from homeassistant.core import callback
|
||||
from homeassistant.helpers.event import async_call_later
|
||||
from homeassistant.util.color import (
|
||||
color_hsv_to_RGB,
|
||||
color_temperature_mired_to_kelvin,
|
||||
color_temperature_to_hs,
|
||||
color_temperature_to_rgbww,
|
||||
)
|
||||
|
||||
from .accessories import TYPES, HomeAccessory
|
||||
@@ -51,12 +54,13 @@ from .const import (
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
RGB_COLOR = "rgb_color"
|
||||
|
||||
CHANGE_COALESCE_TIME_WINDOW = 0.01
|
||||
|
||||
DEFAULT_MIN_MIREDS = 153
|
||||
DEFAULT_MAX_MIREDS = 500
|
||||
|
||||
COLOR_MODES_WITH_WHITES = {COLOR_MODE_RGBW, COLOR_MODE_RGBWW}
|
||||
COLOR_MODES_WITH_WHITES = {COLOR_MODE_RGBW, COLOR_MODE_RGBWW, COLOR_MODE_WHITE}
|
||||
|
||||
|
||||
@TYPES.register("Light")
|
||||
@@ -79,8 +83,12 @@ class Light(HomeAccessory):
|
||||
self.color_modes = color_modes = (
|
||||
attributes.get(ATTR_SUPPORTED_COLOR_MODES) or []
|
||||
)
|
||||
self._previous_color_mode = attributes.get(ATTR_COLOR_MODE)
|
||||
self.color_supported = color_supported(color_modes)
|
||||
self.color_temp_supported = color_temp_supported(color_modes)
|
||||
self.rgbw_supported = COLOR_MODE_RGBW in color_modes
|
||||
self.rgbww_supported = COLOR_MODE_RGBWW in color_modes
|
||||
self.white_supported = COLOR_MODE_WHITE in color_modes
|
||||
self.brightness_supported = brightness_supported(color_modes)
|
||||
|
||||
if self.brightness_supported:
|
||||
@@ -89,7 +97,9 @@ class Light(HomeAccessory):
|
||||
if self.color_supported:
|
||||
self.chars.extend([CHAR_HUE, CHAR_SATURATION])
|
||||
|
||||
if self.color_temp_supported:
|
||||
if self.color_temp_supported or COLOR_MODES_WITH_WHITES.intersection(
|
||||
self.color_modes
|
||||
):
|
||||
self.chars.append(CHAR_COLOR_TEMPERATURE)
|
||||
|
||||
serv_light = self.add_preload_service(SERV_LIGHTBULB, self.chars)
|
||||
@@ -101,13 +111,22 @@ class Light(HomeAccessory):
|
||||
# to set to the correct initial value.
|
||||
self.char_brightness = serv_light.configure_char(CHAR_BRIGHTNESS, value=100)
|
||||
|
||||
if self.color_temp_supported:
|
||||
min_mireds = math.floor(attributes.get(ATTR_MIN_MIREDS, 153))
|
||||
max_mireds = math.ceil(attributes.get(ATTR_MAX_MIREDS, 500))
|
||||
if CHAR_COLOR_TEMPERATURE in self.chars:
|
||||
self.min_mireds = math.floor(
|
||||
attributes.get(ATTR_MIN_MIREDS, DEFAULT_MIN_MIREDS)
|
||||
)
|
||||
self.max_mireds = math.ceil(
|
||||
attributes.get(ATTR_MAX_MIREDS, DEFAULT_MAX_MIREDS)
|
||||
)
|
||||
if not self.color_temp_supported and not self.rgbww_supported:
|
||||
self.max_mireds = self.min_mireds
|
||||
self.char_color_temp = serv_light.configure_char(
|
||||
CHAR_COLOR_TEMPERATURE,
|
||||
value=min_mireds,
|
||||
properties={PROP_MIN_VALUE: min_mireds, PROP_MAX_VALUE: max_mireds},
|
||||
value=self.min_mireds,
|
||||
properties={
|
||||
PROP_MIN_VALUE: self.min_mireds,
|
||||
PROP_MAX_VALUE: self.max_mireds,
|
||||
},
|
||||
)
|
||||
|
||||
if self.color_supported:
|
||||
@@ -165,33 +184,32 @@ class Light(HomeAccessory):
|
||||
)
|
||||
return
|
||||
|
||||
# Handle white channels
|
||||
if CHAR_COLOR_TEMPERATURE in char_values:
|
||||
params[ATTR_COLOR_TEMP] = char_values[CHAR_COLOR_TEMPERATURE]
|
||||
events.append(f"color temperature at {params[ATTR_COLOR_TEMP]}")
|
||||
temp = char_values[CHAR_COLOR_TEMPERATURE]
|
||||
events.append(f"color temperature at {temp}")
|
||||
bright_val = round(
|
||||
((brightness_pct or self.char_brightness.value) * 255) / 100
|
||||
)
|
||||
if self.color_temp_supported:
|
||||
params[ATTR_COLOR_TEMP] = temp
|
||||
elif self.rgbww_supported:
|
||||
params[ATTR_RGBWW_COLOR] = color_temperature_to_rgbww(
|
||||
temp, bright_val, self.min_mireds, self.max_mireds
|
||||
)
|
||||
elif self.rgbw_supported:
|
||||
params[ATTR_RGBW_COLOR] = (*(0,) * 3, bright_val)
|
||||
elif self.white_supported:
|
||||
params[ATTR_WHITE] = bright_val
|
||||
|
||||
elif (
|
||||
CHAR_HUE in char_values
|
||||
or CHAR_SATURATION in char_values
|
||||
# If we are adjusting brightness we need to send the full RGBW/RGBWW values
|
||||
# since HomeKit does not support RGBW/RGBWW
|
||||
or brightness_pct
|
||||
and COLOR_MODES_WITH_WHITES.intersection(self.color_modes)
|
||||
):
|
||||
elif CHAR_HUE in char_values or CHAR_SATURATION in char_values:
|
||||
hue_sat = (
|
||||
char_values.get(CHAR_HUE, self.char_hue.value),
|
||||
char_values.get(CHAR_SATURATION, self.char_saturation.value),
|
||||
)
|
||||
_LOGGER.debug("%s: Set hs_color to %s", self.entity_id, hue_sat)
|
||||
events.append(f"set color at {hue_sat}")
|
||||
# HomeKit doesn't support RGBW/RGBWW so we need to remove any white values
|
||||
if COLOR_MODE_RGBWW in self.color_modes:
|
||||
val = brightness_pct or self.char_brightness.value
|
||||
params[ATTR_RGBWW_COLOR] = (*color_hsv_to_RGB(*hue_sat, val), 0, 0)
|
||||
elif COLOR_MODE_RGBW in self.color_modes:
|
||||
val = brightness_pct or self.char_brightness.value
|
||||
params[ATTR_RGBW_COLOR] = (*color_hsv_to_RGB(*hue_sat, val), 0)
|
||||
else:
|
||||
params[ATTR_HS_COLOR] = hue_sat
|
||||
params[ATTR_HS_COLOR] = hue_sat
|
||||
|
||||
if (
|
||||
brightness_pct
|
||||
@@ -200,6 +218,9 @@ class Light(HomeAccessory):
|
||||
):
|
||||
params[ATTR_BRIGHTNESS_PCT] = brightness_pct
|
||||
|
||||
_LOGGER.debug(
|
||||
"Calling light service with params: %s -> %s", char_values, params
|
||||
)
|
||||
self.async_call_service(DOMAIN, service, params, ", ".join(events))
|
@callback

@@ -210,52 +231,59 @@ class Light(HomeAccessory):
        attributes = new_state.attributes
        color_mode = attributes.get(ATTR_COLOR_MODE)
        self.char_on.set_value(int(state == STATE_ON))
        color_mode_changed = self._previous_color_mode != color_mode
        self._previous_color_mode = color_mode

        # Handle Brightness
        if self.brightness_supported:
            if (
                color_mode
                and COLOR_MODES_WITH_WHITES.intersection({color_mode})
                and (rgb_color := attributes.get(ATTR_RGB_COLOR))
            ):
                # HomeKit doesn't support RGBW/RGBWW so we need to
                # give it the color brightness only
                brightness = max(rgb_color)
            else:
                brightness = attributes.get(ATTR_BRIGHTNESS)
            if isinstance(brightness, (int, float)):
                brightness = round(brightness / 255 * 100, 0)
                # The homeassistant component might report its brightness as 0 but is
                # not off. But 0 is a special value in homekit. When you turn on a
                # homekit accessory it will try to restore the last brightness state
                # which will be the last value saved by char_brightness.set_value.
                # But if it is set to 0, HomeKit will update the brightness to 100 as
                # it thinks 0 is off.
                #
                # Therefore, if the brightness is 0 and the device is still on,
                # the brightness is mapped to 1; otherwise the update is ignored in
                # order to avoid this incorrect behavior.
                if brightness == 0 and state == STATE_ON:
                    brightness = 1
                self.char_brightness.set_value(brightness)
        if (
            self.brightness_supported
            and (brightness := attributes.get(ATTR_BRIGHTNESS)) is not None
            and isinstance(brightness, (int, float))
        ):
            brightness = round(brightness / 255 * 100, 0)
            # The homeassistant component might report its brightness as 0 but is
            # not off. But 0 is a special value in homekit. When you turn on a
            # homekit accessory it will try to restore the last brightness state
            # which will be the last value saved by char_brightness.set_value.
            # But if it is set to 0, HomeKit will update the brightness to 100 as
            # it thinks 0 is off.
            #
            # Therefore, if the brightness is 0 and the device is still on,
            # the brightness is mapped to 1; otherwise the update is ignored in
            # order to avoid this incorrect behavior.
            if brightness == 0 and state == STATE_ON:
                brightness = 1
            self.char_brightness.set_value(brightness)
        if color_mode_changed:
            self.char_brightness.notify()

        # Handle Color - color must always be set before color temperature
        # or the iOS UI will not display it correctly.
        if self.color_supported:
            if ATTR_COLOR_TEMP in attributes:
                if color_temp := attributes.get(ATTR_COLOR_TEMP):
                    hue, saturation = color_temperature_to_hs(
                        color_temperature_mired_to_kelvin(
                            new_state.attributes[ATTR_COLOR_TEMP]
                        )
                        color_temperature_mired_to_kelvin(color_temp)
                    )
            elif color_mode == COLOR_MODE_WHITE:
                hue, saturation = 0, 0
            else:
                hue, saturation = attributes.get(ATTR_HS_COLOR, (None, None))
            if isinstance(hue, (int, float)) and isinstance(saturation, (int, float)):
                self.char_hue.set_value(round(hue, 0))
                self.char_saturation.set_value(round(saturation, 0))
                if color_mode_changed:
                    # If the color temp changed, be sure to force the color to update
                    self.char_hue.notify()
                    self.char_saturation.notify()

        # Handle color temperature
        if self.color_temp_supported:
            color_temp = attributes.get(ATTR_COLOR_TEMP)
        # Handle white channels
        if CHAR_COLOR_TEMPERATURE in self.chars:
            color_temp = None
            if self.color_temp_supported:
                color_temp = attributes.get(ATTR_COLOR_TEMP)
            elif color_mode == COLOR_MODE_WHITE:
                color_temp = self.min_mireds
            if isinstance(color_temp, (int, float)):
                self.char_color_temp.set_value(round(color_temp, 0))
                if color_mode_changed:
                    self.char_color_temp.notify()

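A note on the brightness mapping in the hunk above: Home Assistant reports brightness on a 0-255 scale while HomeKit expects 0-100, and 0 has a special meaning to HomeKit ("off", which makes it restore 100% on the next power-on). A minimal sketch of that conversion, with illustrative names rather than the integration's actual helpers:

    def ha_brightness_to_homekit(brightness, is_on: bool) -> int | None:
        """Map HA 0-255 brightness to HomeKit 0-100, clamping 0 to 1 while on."""
        if not isinstance(brightness, (int, float)):
            return None
        percent = round(brightness / 255 * 100)
        if percent == 0 and is_on:
            # 0 would tell HomeKit the light is off and break restore-on-power-on
            return 1
        return percent
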
@@ -3,7 +3,7 @@
  "name": "HomematicIP Cloud",
  "config_flow": true,
  "documentation": "https://www.home-assistant.io/integrations/homematicip_cloud",
  "requirements": ["homematicip==1.0.1"],
  "requirements": ["homematicip==1.0.2"],
  "codeowners": [],
  "quality_scale": "platinum",
  "iot_class": "cloud_push"

@@ -3,10 +3,11 @@ import logging

from aiohwenergy import DisabledError

from homeassistant.config_entries import ConfigEntry
from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry
from homeassistant.const import CONF_IP_ADDRESS
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.helpers import entity_registry as er
from homeassistant.helpers.update_coordinator import UpdateFailed

from .const import DOMAIN, PLATFORMS
@@ -20,6 +21,51 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:

    _LOGGER.debug("__init__ async_setup_entry")

    # Migrate `homewizard_energy` (custom_component) to `homewizard`
    if entry.source == SOURCE_IMPORT and "old_config_entry_id" in entry.data:
        # Remove the old config entry ID from the entry data so we don't try this again
        # on the next setup
        data = entry.data.copy()
        old_config_entry_id = data.pop("old_config_entry_id")

        hass.config_entries.async_update_entry(entry, data=data)
        _LOGGER.debug(
            (
                "Setting up imported homewizard_energy entry %s for the first time as "
                "homewizard entry %s"
            ),
            old_config_entry_id,
            entry.entry_id,
        )

        ent_reg = er.async_get(hass)
        for entity in er.async_entries_for_config_entry(ent_reg, old_config_entry_id):
            _LOGGER.debug("Removing %s", entity.entity_id)
            ent_reg.async_remove(entity.entity_id)

            _LOGGER.debug("Re-creating %s for the new config entry", entity.entity_id)
            # We will precreate the entity so that any customizations can be preserved
            new_entity = ent_reg.async_get_or_create(
                entity.domain,
                DOMAIN,
                entity.unique_id,
                suggested_object_id=entity.entity_id.split(".")[1],
                disabled_by=entity.disabled_by,
                config_entry=entry,
                original_name=entity.original_name,
                original_icon=entity.original_icon,
            )
            _LOGGER.debug("Re-created %s", new_entity.entity_id)

            # If there are customizations on the old entity, apply them to the new one
            if entity.name or entity.icon:
                ent_reg.async_update_entity(
                    new_entity.entity_id, name=entity.name, icon=entity.icon
                )

        # Remove the old config entry and now the entry is fully migrated
        hass.async_create_task(hass.config_entries.async_remove(old_config_entry_id))

    # Create coordinator
    coordinator = Coordinator(hass, entry.data[CONF_IP_ADDRESS])
    try:

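The migration hunk above hinges on one detail worth calling out: the entity is re-created under the new config entry with the same unique_id and a suggested_object_id derived from the old entity_id, so the entity keeps its identity (and history) across the move. A reduced sketch of that core step, assuming the same entity-registry helpers used above:

    # old_entry_id, entry, DOMAIN as in the hunk above
    for entity in er.async_entries_for_config_entry(ent_reg, old_entry_id):
        ent_reg.async_remove(entity.entity_id)
        ent_reg.async_get_or_create(
            entity.domain,                               # e.g. "sensor"
            DOMAIN,                                      # the new platform
            entity.unique_id,                            # unchanged: identity preserved
            suggested_object_id=entity.entity_id.split(".")[1],  # keep object id
            config_entry=entry,
        )
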
@@ -28,6 +28,21 @@ class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
        """Initialize the HomeWizard config flow."""
        self.config: dict[str, str | int] = {}

    async def async_step_import(self, import_config: dict) -> FlowResult:
        """Handle a flow initiated by older `homewizard_energy` component."""
        _LOGGER.debug("config_flow async_step_import")

        self.hass.components.persistent_notification.async_create(
            (
                "The custom integration of HomeWizard Energy has been migrated to core. "
                "You can safely remove the custom integration from the custom_integrations folder."
            ),
            "HomeWizard Energy",
            f"homewizard_energy_to_{DOMAIN}",
        )

        return await self.async_step_user({CONF_IP_ADDRESS: import_config["host"]})

    async def async_step_user(
        self, user_input: dict[str, Any] | None = None
    ) -> FlowResult:
@@ -59,12 +74,17 @@ class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
            }
        )

        data: dict[str, str] = {CONF_IP_ADDRESS: user_input[CONF_IP_ADDRESS]}

        if self.source == config_entries.SOURCE_IMPORT:
            old_config_entry_id = self.context["old_config_entry_id"]
            assert self.hass.config_entries.async_get_entry(old_config_entry_id)
            data["old_config_entry_id"] = old_config_entry_id

        # Add entry
        return self.async_create_entry(
            title=f"{device_info[CONF_PRODUCT_NAME]} ({device_info[CONF_SERIAL]})",
            data={
                CONF_IP_ADDRESS: user_input[CONF_IP_ADDRESS],
            },
            data=data,
        )

    async def async_step_zeroconf(

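The import step above follows a common config-flow pattern: rather than validating the imported payload separately, it normalizes it and funnels it into the regular user step so both paths share one validation path. The pattern in isolation (a sketch, not the full flow):

    async def async_step_import(self, import_config: dict) -> FlowResult:
        """Reuse the user step so imports get identical validation."""
        return await self.async_step_user({CONF_IP_ADDRESS: import_config["host"]})
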
@@ -16,13 +16,12 @@ from homeassistant.const import (
    DEVICE_CLASS_GAS,
    DEVICE_CLASS_POWER,
    ENERGY_KILO_WATT_HOUR,
    ENTITY_CATEGORY_DIAGNOSTIC,
    PERCENTAGE,
    POWER_WATT,
    VOLUME_CUBIC_METERS,
)
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity import DeviceInfo
from homeassistant.helpers.entity import DeviceInfo, EntityCategory
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.typing import StateType
from homeassistant.helpers.update_coordinator import CoordinatorEntity
@@ -37,19 +36,19 @@ SENSORS: Final[tuple[SensorEntityDescription, ...]] = (
        key="smr_version",
        name="DSMR Version",
        icon="mdi:counter",
        entity_category=ENTITY_CATEGORY_DIAGNOSTIC,
        entity_category=EntityCategory.DIAGNOSTIC,
    ),
    SensorEntityDescription(
        key="meter_model",
        name="Smart Meter Model",
        icon="mdi:gauge",
        entity_category=ENTITY_CATEGORY_DIAGNOSTIC,
        entity_category=EntityCategory.DIAGNOSTIC,
    ),
    SensorEntityDescription(
        key="wifi_ssid",
        name="Wifi SSID",
        icon="mdi:wifi",
        entity_category=ENTITY_CATEGORY_DIAGNOSTIC,
        entity_category=EntityCategory.DIAGNOSTIC,
    ),
    SensorEntityDescription(
        key="wifi_strength",
@@ -57,7 +56,7 @@ SENSORS: Final[tuple[SensorEntityDescription, ...]] = (
        icon="mdi:wifi",
        native_unit_of_measurement=PERCENTAGE,
        state_class=STATE_CLASS_MEASUREMENT,
        entity_category=ENTITY_CATEGORY_DIAGNOSTIC,
        entity_category=EntityCategory.DIAGNOSTIC,
        entity_registry_enabled_default=False,
    ),
    SensorEntityDescription(

@@ -3,7 +3,7 @@
  "name": "Philips Hue",
  "config_flow": true,
  "documentation": "https://www.home-assistant.io/integrations/hue",
  "requirements": ["aiohue==4.0.1"],
  "requirements": ["aiohue==4.1.2"],
  "ssdp": [
    {
      "manufacturer": "Royal Philips Electronics",

@@ -5,7 +5,11 @@ from typing import Any

from aiohue.v2 import HueBridgeV2
from aiohue.v2.controllers.events import EventType
from aiohue.v2.controllers.scenes import Scene as HueScene, ScenesController
from aiohue.v2.controllers.scenes import (
    Scene as HueScene,
    ScenePut as HueScenePut,
    ScenesController,
)
import voluptuous as vol

from homeassistant.components.scene import ATTR_TRANSITION, Scene as SceneEntity
@@ -131,7 +135,7 @@ class HueSceneEntity(HueBaseEntity, SceneEntity):
        await self.bridge.async_request_call(
            self.controller.update,
            self.resource.id,
            HueScene(self.resource.id, speed=speed / 100),
            HueScenePut(speed=speed / 100),
        )

        await self.bridge.async_request_call(

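The switch from HueScene to HueScenePut reflects a change in the newer aiohue API: updates take a dedicated "put" model that carries only the fields being changed, instead of a full Scene object that redundantly repeats the resource id. A hedged sketch of the call shape (bridge, scenes, and scene_id here are illustrative placeholders):

    # Partial update: only `speed` is serialized and sent to the bridge.
    await bridge.async_request_call(
        scenes.update,              # ScenesController.update
        scene_id,                   # which scene to patch
        HueScenePut(speed=0.5),     # put-model with just the changed field
    )
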
@@ -5,8 +5,12 @@ from typing import Any, Union

from aiohue.v2 import HueBridgeV2
from aiohue.v2.controllers.events import EventType
from aiohue.v2.controllers.sensors import LightLevelController, MotionController
from aiohue.v2.models.resource import SensingService
from aiohue.v2.controllers.sensors import (
    LightLevel,
    LightLevelController,
    Motion,
    MotionController,
)

from homeassistant.components.switch import SwitchDeviceClass, SwitchEntity
from homeassistant.config_entries import ConfigEntry
@@ -20,6 +24,8 @@ from .v2.entity import HueBaseEntity

ControllerType = Union[LightLevelController, MotionController]

SensingService = Union[LightLevel, Motion]


async def async_setup_entry(
    hass: HomeAssistant,

@@ -4,13 +4,13 @@ from __future__ import annotations
from typing import Any, Union

from aiohue.v2 import HueBridgeV2
from aiohue.v2.controllers.config import EntertainmentConfigurationController
from aiohue.v2.controllers.config import (
    EntertainmentConfiguration,
    EntertainmentConfigurationController,
)
from aiohue.v2.controllers.events import EventType
from aiohue.v2.controllers.sensors import MotionController
from aiohue.v2.models.entertainment import (
    EntertainmentConfiguration,
    EntertainmentStatus,
)
from aiohue.v2.models.entertainment_configuration import EntertainmentStatus
from aiohue.v2.models.motion import Motion

from homeassistant.components.binary_sensor import (
@@ -109,4 +109,4 @@ class HueEntertainmentActiveSensor(HueBinarySensorBase):
    def name(self) -> str:
        """Return sensor name."""
        type_title = self.resource.type.value.replace("_", " ").title()
        return f"{self.resource.name}: {type_title}"
        return f"{self.resource.metadata.name}: {type_title}"

@@ -1,11 +1,12 @@
"""Generic Hue Entity Model."""
from __future__ import annotations

from typing import TYPE_CHECKING, Union

from aiohue.v2.controllers.base import BaseResourcesController
from aiohue.v2.controllers.events import EventType
from aiohue.v2.models.clip import CLIPResource
from aiohue.v2.models.connectivity import ConnectivityServiceStatus
from aiohue.v2.models.resource import ResourceTypes
from aiohue.v2.models.zigbee_connectivity import ConnectivityServiceStatus

from homeassistant.core import callback
from homeassistant.helpers.entity import DeviceInfo, Entity
@@ -14,6 +15,16 @@ from homeassistant.helpers.entity_registry import async_get as async_get_entity_
from ..bridge import HueBridge
from ..const import CONF_IGNORE_AVAILABILITY, DOMAIN

if TYPE_CHECKING:
    from aiohue.v2.models.device_power import DevicePower
    from aiohue.v2.models.grouped_light import GroupedLight
    from aiohue.v2.models.light import Light
    from aiohue.v2.models.light_level import LightLevel
    from aiohue.v2.models.motion import Motion

    HueResource = Union[Light, DevicePower, GroupedLight, LightLevel, Motion]


RESOURCE_TYPE_NAMES = {
    # a simple mapping of hue resource type to Hass name
    ResourceTypes.LIGHT_LEVEL: "Illuminance",
@@ -30,7 +41,7 @@ class HueBaseEntity(Entity):
        self,
        bridge: HueBridge,
        controller: BaseResourcesController,
        resource: CLIPResource,
        resource: HueResource,
    ) -> None:
        """Initialize a generic Hue resource entity."""
        self.bridge = bridge
@@ -122,7 +133,7 @@ class HueBaseEntity(Entity):
    # used in subclasses

    @callback
    def _handle_event(self, event_type: EventType, resource: CLIPResource) -> None:
    def _handle_event(self, event_type: EventType, resource: HueResource) -> None:
        """Handle status event for this resource (or its parent)."""
        if event_type == EventType.RESOURCE_DELETED and resource.id == self.resource.id:
            self.logger.debug("Received delete for %s", self.entity_id)

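The TYPE_CHECKING block above is a general pattern worth noting: the model classes are only needed by the type checker, so importing them under TYPE_CHECKING keeps them out of the runtime import graph while a Union alias still describes every accepted resource type. A self-contained sketch of the same idea:

    from __future__ import annotations

    from typing import TYPE_CHECKING, Union

    if TYPE_CHECKING:
        # Only evaluated by mypy/pyright, never imported at runtime.
        from aiohue.v2.models.light import Light
        from aiohue.v2.models.motion import Motion

        HueResource = Union[Light, Motion]

    def handle(resource: HueResource) -> None:
        # With `from __future__ import annotations` the annotation above is a
        # string at runtime, so the alias never needs to exist outside checking.
        ...
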
@@ -7,7 +7,7 @@ from typing import Any
from aiohue.v2 import HueBridgeV2
from aiohue.v2.controllers.events import EventType
from aiohue.v2.controllers.groups import GroupedLight, Room, Zone
from aiohue.v2.models.feature import DynamicsFeatureStatus
from aiohue.v2.models.feature import DynamicStatus

from homeassistant.components.light import (
    ATTR_BRIGHTNESS,
@@ -283,7 +283,7 @@ class GroupedHueLight(HueBaseEntity, LightEntity):
                total_brightness += dimming.brightness
            if (
                light.dynamics
                and light.dynamics.status == DynamicsFeatureStatus.DYNAMIC_PALETTE
                and light.dynamics.status == DynamicStatus.DYNAMIC_PALETTE
            ):
                lights_in_dynamic_mode += 1


@@ -12,10 +12,10 @@ from aiohue.v2.controllers.sensors import (
    TemperatureController,
    ZigbeeConnectivityController,
)
from aiohue.v2.models.connectivity import ZigbeeConnectivity
from aiohue.v2.models.device_power import DevicePower
from aiohue.v2.models.light_level import LightLevel
from aiohue.v2.models.temperature import Temperature
from aiohue.v2.models.zigbee_connectivity import ZigbeeConnectivity

from homeassistant.components.binary_sensor import BinarySensorDeviceClass
from homeassistant.components.sensor import (

@@ -3,7 +3,7 @@
  "name": "Image",
  "config_flow": false,
  "documentation": "https://www.home-assistant.io/integrations/image",
  "requirements": ["pillow==9.0.0"],
  "requirements": ["pillow==9.0.1"],
  "dependencies": ["http"],
  "codeowners": ["@home-assistant/core"],
  "quality_scale": "internal"

@@ -3,7 +3,7 @@
  "name": "IntelliFire",
  "config_flow": true,
  "documentation": "https://www.home-assistant.io/integrations/intellifire",
  "requirements": ["intellifire4py==0.5"],
  "requirements": ["intellifire4py==0.6"],
  "dependencies": [],
  "codeowners": ["@jeeftor"],
  "iot_class": "local_polling"

@@ -4,7 +4,7 @@
  "config_flow": true,
  "documentation": "https://www.home-assistant.io/integrations/knx",
  "requirements": [
    "xknx==0.19.1"
    "xknx==0.19.2"
  ],
  "codeowners": [
    "@Julius2342",

@@ -35,7 +35,7 @@ from homeassistant.const import (
    Platform,
)
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import ENTITY_CATEGORIES_SCHEMA
from homeassistant.helpers.entity import validate_entity_category

from .const import (
    CONF_INVERT,
@@ -320,7 +320,7 @@ class BinarySensorSchema(KNXPlatformSchema):
            ),
            vol.Optional(CONF_DEVICE_CLASS): BINARY_SENSOR_DEVICE_CLASSES_SCHEMA,
            vol.Optional(CONF_RESET_AFTER): cv.positive_float,
            vol.Optional(CONF_ENTITY_CATEGORY): ENTITY_CATEGORIES_SCHEMA,
            vol.Optional(CONF_ENTITY_CATEGORY): validate_entity_category,
        }
    ),
)
@@ -356,7 +356,7 @@ class ButtonSchema(KNXPlatformSchema):
            vol.Exclusive(
                CONF_TYPE, "length_or_type", msg=length_or_type_msg
            ): object,
            vol.Optional(CONF_ENTITY_CATEGORY): ENTITY_CATEGORIES_SCHEMA,
            vol.Optional(CONF_ENTITY_CATEGORY): validate_entity_category,
        }
    ),
    vol.Any(
@@ -500,7 +500,7 @@ class ClimateSchema(KNXPlatformSchema):
            ): vol.In(HVAC_MODES),
            vol.Optional(CONF_MIN_TEMP): vol.Coerce(float),
            vol.Optional(CONF_MAX_TEMP): vol.Coerce(float),
            vol.Optional(CONF_ENTITY_CATEGORY): ENTITY_CATEGORIES_SCHEMA,
            vol.Optional(CONF_ENTITY_CATEGORY): validate_entity_category,
        }
    ),
)
@@ -555,7 +555,7 @@ class CoverSchema(KNXPlatformSchema):
            vol.Optional(CONF_INVERT_POSITION, default=False): cv.boolean,
            vol.Optional(CONF_INVERT_ANGLE, default=False): cv.boolean,
            vol.Optional(CONF_DEVICE_CLASS): COVER_DEVICE_CLASSES_SCHEMA,
            vol.Optional(CONF_ENTITY_CATEGORY): ENTITY_CATEGORIES_SCHEMA,
            vol.Optional(CONF_ENTITY_CATEGORY): validate_entity_category,
        }
    ),
)
@@ -618,7 +618,7 @@ class FanSchema(KNXPlatformSchema):
            vol.Optional(CONF_OSCILLATION_ADDRESS): ga_list_validator,
            vol.Optional(CONF_OSCILLATION_STATE_ADDRESS): ga_list_validator,
            vol.Optional(CONF_MAX_STEP): cv.byte,
            vol.Optional(CONF_ENTITY_CATEGORY): ENTITY_CATEGORIES_SCHEMA,
            vol.Optional(CONF_ENTITY_CATEGORY): validate_entity_category,
        }
    )

@@ -722,7 +722,7 @@ class LightSchema(KNXPlatformSchema):
            vol.Optional(CONF_MAX_KELVIN, default=DEFAULT_MAX_KELVIN): vol.All(
                vol.Coerce(int), vol.Range(min=1)
            ),
            vol.Optional(CONF_ENTITY_CATEGORY): ENTITY_CATEGORIES_SCHEMA,
            vol.Optional(CONF_ENTITY_CATEGORY): validate_entity_category,
        }
    ),
    vol.Any(
@@ -802,7 +802,7 @@ class NumberSchema(KNXPlatformSchema):
            vol.Optional(CONF_MAX): vol.Coerce(float),
            vol.Optional(CONF_MIN): vol.Coerce(float),
            vol.Optional(CONF_STEP): cv.positive_float,
            vol.Optional(CONF_ENTITY_CATEGORY): ENTITY_CATEGORIES_SCHEMA,
            vol.Optional(CONF_ENTITY_CATEGORY): validate_entity_category,
        }
    ),
    number_limit_sub_validator,
@@ -824,7 +824,7 @@ class SceneSchema(KNXPlatformSchema):
            vol.Required(CONF_SCENE_NUMBER): vol.All(
                vol.Coerce(int), vol.Range(min=1, max=64)
            ),
            vol.Optional(CONF_ENTITY_CATEGORY): ENTITY_CATEGORIES_SCHEMA,
            vol.Optional(CONF_ENTITY_CATEGORY): validate_entity_category,
        }
    )

@@ -855,7 +855,7 @@ class SelectSchema(KNXPlatformSchema):
            ],
            vol.Required(KNX_ADDRESS): ga_list_validator,
            vol.Optional(CONF_STATE_ADDRESS): ga_list_validator,
            vol.Optional(CONF_ENTITY_CATEGORY): ENTITY_CATEGORIES_SCHEMA,
            vol.Optional(CONF_ENTITY_CATEGORY): validate_entity_category,
        }
    ),
    select_options_sub_validator,
@@ -880,7 +880,7 @@ class SensorSchema(KNXPlatformSchema):
            vol.Optional(CONF_STATE_CLASS): STATE_CLASSES_SCHEMA,
            vol.Required(CONF_TYPE): sensor_type_validator,
            vol.Required(CONF_STATE_ADDRESS): ga_list_validator,
            vol.Optional(CONF_ENTITY_CATEGORY): ENTITY_CATEGORIES_SCHEMA,
            vol.Optional(CONF_ENTITY_CATEGORY): validate_entity_category,
        }
    )

@@ -901,7 +901,7 @@ class SwitchSchema(KNXPlatformSchema):
            vol.Optional(CONF_RESPOND_TO_READ, default=False): cv.boolean,
            vol.Required(KNX_ADDRESS): ga_list_validator,
            vol.Optional(CONF_STATE_ADDRESS): ga_list_validator,
            vol.Optional(CONF_ENTITY_CATEGORY): ENTITY_CATEGORIES_SCHEMA,
            vol.Optional(CONF_ENTITY_CATEGORY): validate_entity_category,
        }
    )

@@ -948,7 +948,7 @@ class WeatherSchema(KNXPlatformSchema):
            vol.Optional(CONF_KNX_DAY_NIGHT_ADDRESS): ga_list_validator,
            vol.Optional(CONF_KNX_AIR_PRESSURE_ADDRESS): ga_list_validator,
            vol.Optional(CONF_KNX_HUMIDITY_ADDRESS): ga_list_validator,
            vol.Optional(CONF_ENTITY_CATEGORY): ENTITY_CATEGORIES_SCHEMA,
            vol.Optional(CONF_ENTITY_CATEGORY): validate_entity_category,
        }
    ),
)

@@ -138,6 +138,11 @@ async def async_setup_entry(

    devices = bridge.get_devices()
    bridge_device = devices[BRIDGE_DEVICE_ID]
    if not config_entry.unique_id:
        hass.config_entries.async_update_entry(
            config_entry, unique_id=hex(bridge_device["serial"])[2:].zfill(8)
        )

    buttons = bridge.buttons
    _async_register_bridge_device(hass, entry_id, bridge_device)
    button_devices = _async_register_button_devices(
@@ -227,7 +232,7 @@ def _async_subscribe_pico_remote_events(
        action = ACTION_RELEASE

    type_ = device["type"]
    name = device["name"]
    area, name = device["name"].split("_", 1)
    button_number = device["button_number"]
    # The original implementation used LIP instead of LEAP
    # so we need to convert the button number to maintain compat
@@ -252,7 +257,7 @@ def _async_subscribe_pico_remote_events(
            ATTR_BUTTON_NUMBER: lip_button_number,
            ATTR_LEAP_BUTTON_NUMBER: button_number,
            ATTR_DEVICE_NAME: name,
            ATTR_AREA_NAME: name.split("_")[0],
            ATTR_AREA_NAME: area,
            ATTR_ACTION: action,
        },
    )

@@ -44,7 +44,7 @@ from homeassistant.helpers import (
    template,
)
from homeassistant.helpers.dispatcher import async_dispatcher_send
from homeassistant.helpers.entity import ENTITY_CATEGORIES_SCHEMA
from homeassistant.helpers.entity import validate_entity_category
from homeassistant.util.decorator import Registry

from .const import (
@@ -423,7 +423,7 @@ def _validate_state_class_sensor(value: dict):
        vol.Optional(ATTR_SENSOR_STATE, default=None): vol.Any(
            None, bool, str, int, float
        ),
        vol.Optional(ATTR_SENSOR_ENTITY_CATEGORY): ENTITY_CATEGORIES_SCHEMA,
        vol.Optional(ATTR_SENSOR_ENTITY_CATEGORY): validate_entity_category,
        vol.Optional(ATTR_SENSOR_ICON, default="mdi:cellphone"): cv.icon,
        vol.Optional(ATTR_SENSOR_STATE_CLASS): vol.In(SENSOSR_STATE_CLASSES),
    },

@@ -8,8 +8,8 @@ write_coil:
      required: true
      selector:
        number:
          min: 1
          max: 255
          min: 0
          max: 65535
    state:
      name: State
      description: State to write.
@@ -42,8 +42,8 @@ write_register:
      required: true
      selector:
        number:
          min: 1
          max: 255
          min: 0
          max: 65535
    unit:
      name: Unit
      description: Address of the modbus unit.

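The widened range is not arbitrary: Modbus data addresses are 16-bit values, so the previous 1-255 limit only exposed a small slice of the valid address space, while 0-65535 covers all of it. A minimal Python check expressing the same constraint:

    def validate_modbus_address(address: int) -> int:
        """Raise if the address does not fit in the 16-bit Modbus range."""
        if not 0 <= address <= 0xFFFF:  # 0xFFFF == 65535
            raise ValueError(f"Modbus address out of range (0-65535): {address}")
        return address
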
@@ -3,7 +3,7 @@
  "name": "Motion Blinds",
  "config_flow": true,
  "documentation": "https://www.home-assistant.io/integrations/motion_blinds",
  "requirements": ["motionblinds==0.5.10"],
  "requirements": ["motionblinds==0.5.12"],
  "dependencies": ["network"],
  "codeowners": ["@starkillerOG"],
  "iot_class": "local_push"

@@ -577,6 +577,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
    websocket_api.async_register_command(hass, websocket_subscribe)
    websocket_api.async_register_command(hass, websocket_remove_device)
    websocket_api.async_register_command(hass, websocket_mqtt_info)
    debug_info.initialize(hass)

    if conf is None:
        # If we have a config entry, setup is done by that config entry.

@@ -133,6 +133,10 @@ class MqttBinarySensor(MqttEntity, BinarySensorEntity, RestoreEntity):
            self._expired = False
            self._state = last_state.state

            if self._expiration_trigger:
                # We might have set up a trigger already after subscribing from
                # super().async_added_to_hass()
                self._expiration_trigger()
            self._expiration_trigger = async_track_point_in_utc_time(
                self.hass, self._value_is_expired, expiration_at
            )
@@ -189,7 +193,6 @@ class MqttBinarySensor(MqttEntity, BinarySensorEntity, RestoreEntity):
            # Reset old trigger
            if self._expiration_trigger:
                self._expiration_trigger()
                self._expiration_trigger = None

            # Set new trigger
            expiration_at = dt_util.utcnow() + timedelta(seconds=expire_after)

@@ -15,6 +15,11 @@ DATA_MQTT_DEBUG_INFO = "mqtt_debug_info"
STORED_MESSAGES = 10


def initialize(hass: HomeAssistant):
    """Initialize MQTT debug info."""
    hass.data[DATA_MQTT_DEBUG_INFO] = {"entities": {}, "triggers": {}}


def log_messages(
    hass: HomeAssistant, entity_id: str
) -> Callable[[MessageCallbackType], MessageCallbackType]:
@@ -45,9 +50,7 @@ def log_messages(
def add_subscription(hass, message_callback, subscription):
    """Prepare debug data for subscription."""
    if entity_id := getattr(message_callback, "__entity_id", None):
        debug_info = hass.data.setdefault(
            DATA_MQTT_DEBUG_INFO, {"entities": {}, "triggers": {}}
        )
        debug_info = hass.data[DATA_MQTT_DEBUG_INFO]
        entity_info = debug_info["entities"].setdefault(
            entity_id, {"subscriptions": {}, "discovery_data": {}}
        )
@@ -76,9 +79,7 @@ def remove_subscription(hass, message_callback, subscription):

def add_entity_discovery_data(hass, discovery_data, entity_id):
    """Add discovery data."""
    debug_info = hass.data.setdefault(
        DATA_MQTT_DEBUG_INFO, {"entities": {}, "triggers": {}}
    )
    debug_info = hass.data[DATA_MQTT_DEBUG_INFO]
    entity_info = debug_info["entities"].setdefault(
        entity_id, {"subscriptions": {}, "discovery_data": {}}
    )
@@ -93,14 +94,13 @@ def update_entity_discovery_data(hass, discovery_payload, entity_id):

def remove_entity_data(hass, entity_id):
    """Remove discovery data."""
    hass.data[DATA_MQTT_DEBUG_INFO]["entities"].pop(entity_id)
    if entity_id in hass.data[DATA_MQTT_DEBUG_INFO]["entities"]:
        hass.data[DATA_MQTT_DEBUG_INFO]["entities"].pop(entity_id)


def add_trigger_discovery_data(hass, discovery_hash, discovery_data, device_id):
    """Add discovery data."""
    debug_info = hass.data.setdefault(
        DATA_MQTT_DEBUG_INFO, {"entities": {}, "triggers": {}}
    )
    debug_info = hass.data[DATA_MQTT_DEBUG_INFO]
    debug_info["triggers"][discovery_hash] = {
        "device_id": device_id,
        "discovery_data": discovery_data,
@@ -126,9 +126,7 @@ async def info_for_device(hass, device_id):
    entries = hass.helpers.entity_registry.async_entries_for_device(
        entity_registry, device_id, include_disabled_entities=True
    )
    mqtt_debug_info = hass.data.setdefault(
        DATA_MQTT_DEBUG_INFO, {"entities": {}, "triggers": {}}
    )
    mqtt_debug_info = hass.data[DATA_MQTT_DEBUG_INFO]
    for entry in entries:
        if entry.entity_id not in mqtt_debug_info["entities"]:
            continue

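The refactor across this file is one pattern applied repeatedly: shared state is created once by initialize() during component setup, after which every call site can index hass.data directly instead of re-supplying the same default dict through setdefault(). The shape of the change in miniature (a sketch with generic names, not the component's API):

    DATA_KEY = "mqtt_debug_info"

    def initialize(data: dict) -> None:
        """Create the shared store exactly once during setup."""
        data[DATA_KEY] = {"entities": {}, "triggers": {}}

    def add_entity_record(data: dict, entity_id: str) -> dict:
        # Direct indexing is safe here because initialize() is guaranteed to
        # have run before any subscriber or discovery callback fires.
        return data[DATA_KEY]["entities"].setdefault(
            entity_id, {"subscriptions": {}, "discovery_data": {}}
        )
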
@@ -30,11 +30,11 @@ from homeassistant.helpers.dispatcher import (
    async_dispatcher_send,
)
from homeassistant.helpers.entity import (
    ENTITY_CATEGORIES_SCHEMA,
    DeviceInfo,
    Entity,
    EntityCategory,
    async_generate_entity_id,
    validate_entity_category,
)
from homeassistant.helpers.typing import ConfigType

@@ -191,7 +191,7 @@ MQTT_ENTITY_COMMON_SCHEMA = MQTT_AVAILABILITY_SCHEMA.extend(
    {
        vol.Optional(CONF_DEVICE): MQTT_ENTITY_DEVICE_INFO_SCHEMA,
        vol.Optional(CONF_ENABLED_BY_DEFAULT, default=True): cv.boolean,
        vol.Optional(CONF_ENTITY_CATEGORY): ENTITY_CATEGORIES_SCHEMA,
        vol.Optional(CONF_ENTITY_CATEGORY): validate_entity_category,
        vol.Optional(CONF_ICON): cv.icon,
        vol.Optional(CONF_JSON_ATTRS_TOPIC): valid_subscribe_topic,
        vol.Optional(CONF_JSON_ATTRS_TEMPLATE): cv.template,
@@ -549,7 +549,6 @@ class MqttDiscoveryUpdate(Entity):
    def _cleanup_discovery_on_remove(self) -> None:
        """Stop listening to signal and cleanup discovery data."""
        if self._discovery_data and not self._removed_from_hass:
            debug_info.remove_entity_data(self.hass, self.entity_id)
            clear_discovery_hash(self.hass, self._discovery_data[ATTR_DISCOVERY_HASH])
            self._removed_from_hass = True

@@ -677,6 +676,7 @@ class MqttEntity(
        await MqttAttributes.async_will_remove_from_hass(self)
        await MqttAvailability.async_will_remove_from_hass(self)
        await MqttDiscoveryUpdate.async_will_remove_from_hass(self)
        debug_info.remove_entity_data(self.hass, self.entity_id)

    @staticmethod
    @abstractmethod

@@ -180,6 +180,10 @@ class MqttSensor(MqttEntity, SensorEntity, RestoreEntity):
            self._expired = False
            self._state = last_state.state

            if self._expiration_trigger:
                # We might have set up a trigger already after subscribing from
                # super().async_added_to_hass()
                self._expiration_trigger()
            self._expiration_trigger = async_track_point_in_utc_time(
                self.hass, self._value_is_expired, expiration_at
            )
@@ -227,7 +231,6 @@ class MqttSensor(MqttEntity, SensorEntity, RestoreEntity):
            # Reset old trigger
            if self._expiration_trigger:
                self._expiration_trigger()
                self._expiration_trigger = None

            # Set new trigger
            expiration_at = dt_util.utcnow() + timedelta(seconds=expire_after)
@@ -236,7 +239,7 @@ class MqttSensor(MqttEntity, SensorEntity, RestoreEntity):
                self.hass, self._value_is_expired, expiration_at
            )

        payload = self._template(msg.payload)
        payload = self._template(msg.payload, default=self._state)

        if payload is not None and self.device_class in (
            SensorDeviceClass.DATE,

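A note on the template change in the last hunk: passing a default value means a value template that fails to render no longer wipes out the sensor reading; the previous state is used as the fallback instead. Roughly this behavior, in an illustrative sketch (not the component's actual template wrapper):

    def render_payload(template, payload, last_state):
        """Render the incoming payload, keeping the old state on failure."""
        try:
            return template.render(value=payload)
        except Exception:
            # Fall back to the previous value rather than going unknown.
            return last_state
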
@@ -127,6 +127,15 @@ class NestCamera(Camera):
            return STREAM_TYPE_WEB_RTC
        return super().frontend_stream_type

    @property
    def available(self) -> bool:
        """Return True if entity is available."""
        # Cameras are marked unavailable on stream errors in #54659 however nest streams have
        # a high error rate (#60353). Given nest streams are so flaky, marking the stream
        # unavailable has other side effects like not showing the camera image which sometimes
        # are still able to work. Until the streams are fixed, just leave the streams as available.
        return True

    async def stream_source(self) -> str | None:
        """Return the source of the stream."""
        if not self.supported_features & SUPPORT_STREAM:

@@ -12,7 +12,7 @@ from google_nest_sdm.exceptions import ApiException
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant

from .const import DATA_SUBSCRIBER, DOMAIN
from .const import DATA_SDM, DATA_SUBSCRIBER, DOMAIN

REDACT_DEVICE_TRAITS = {InfoTrait.NAME}

@@ -21,6 +21,9 @@ async def async_get_config_entry_diagnostics(
    hass: HomeAssistant, config_entry: ConfigEntry
) -> dict:
    """Return diagnostics for a config entry."""
    if DATA_SDM not in config_entry.data:
        return {}

    if DATA_SUBSCRIBER not in hass.data[DOMAIN]:
        return {"error": "No subscriber configured"}

@@ -4,7 +4,7 @@
  "config_flow": true,
  "dependencies": ["ffmpeg", "http", "media_source"],
  "documentation": "https://www.home-assistant.io/integrations/nest",
  "requirements": ["python-nest==4.1.0", "google-nest-sdm==1.6.0"],
  "requirements": ["python-nest==4.2.0", "google-nest-sdm==1.7.1"],
  "codeowners": ["@allenporter"],
  "quality_scale": "platinum",
  "dhcp": [

@@ -26,7 +26,7 @@ CONST_LIST_E_TO_H: list[str] = ["E", "F", "G", "H"]
CONST_LIST_I_TO_L: list[str] = ["I", "J", "K", "L"]
CONST_LIST_M_TO_Q: list[str] = ["M", "N", "O", "Ö", "P", "Q"]
CONST_LIST_R_TO_U: list[str] = ["R", "S", "T", "U", "Ü"]
CONST_LIST_V_TO_Z: list[str] = ["V", "W", "X", "Y"]
CONST_LIST_V_TO_Z: list[str] = ["V", "W", "X", "Y", "Z"]

CONST_REGION_A_TO_D: Final = "_a_to_d"
CONST_REGION_E_TO_H: Final = "_e_to_h"

@@ -2,8 +2,8 @@

from homeassistant.components.button import ButtonDeviceClass, ButtonEntity
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import ENTITY_CATEGORY_CONFIG
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity import EntityCategory
from homeassistant.helpers.entity_platform import AddEntitiesCallback

from .base import ONVIFBaseEntity
@@ -25,7 +25,7 @@ class RebootButton(ONVIFBaseEntity, ButtonEntity):
    """Defines a ONVIF reboot button."""

    _attr_device_class = ButtonDeviceClass.RESTART
    _attr_entity_category = ENTITY_CATEGORY_CONFIG
    _attr_entity_category = EntityCategory.CONFIG

    def __init__(self, device: ONVIFDevice) -> None:
        """Initialize the button entity."""

@@ -70,7 +70,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:

    try:
        await openuv.async_update()
    except OpenUvError as err:
    except HomeAssistantError as err:
        LOGGER.error("Config entry failed: %s", err)
        raise ConfigEntryNotReady from err


@@ -121,14 +121,22 @@ async def async_setup_entry(
    if coordinator.data:
        if coordinator.data.electricity:
            for description in SENSOR_TYPES_ELECTRICITY:
                if description.key == KEY_LAST_ELECTRICITY_COST:
                if (
                    description.key == KEY_LAST_ELECTRICITY_COST
                    and coordinator.data.electricity[-1] is not None
                    and coordinator.data.electricity[-1].cost is not None
                ):
                    description.native_unit_of_measurement = (
                        coordinator.data.electricity[-1].cost.currency_unit
                    )
                entities.append(OVOEnergySensor(coordinator, description, client))
        if coordinator.data.gas:
            for description in SENSOR_TYPES_GAS:
                if description.key == KEY_LAST_GAS_COST:
                if (
                    description.key == KEY_LAST_GAS_COST
                    and coordinator.data.gas[-1] is not None
                    and coordinator.data.gas[-1].cost is not None
                ):
                    description.native_unit_of_measurement = coordinator.data.gas[
                        -1
                    ].cost.currency_unit

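The added conditions above are a plain guard-before-dereference fix: the last reading and its cost attribute can each be missing from the API response, so every link in the chain is checked before the currency unit is read. The essence of the pattern, reduced to a sketch:

    # `readings` stands in for coordinator.data.electricity / .gas
    last = readings[-1] if readings else None
    if last is not None and last.cost is not None:
        unit = last.cost.currency_unit  # safe: every step above was checked
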
@@ -148,9 +148,12 @@ class PhilipsTVDataUpdateCoordinator(DataUpdateCoordinator[None]):
    @property
    def unique_id(self) -> str:
        """Return the system descriptor."""
        assert self.config_entry
        assert self.config_entry.unique_id
        return self.config_entry.unique_id
        entry: ConfigEntry = self.config_entry
        assert entry
        if entry.unique_id:
            return entry.unique_id
        assert entry.entry_id
        return entry.entry_id

    @property
    def _notify_wanted(self):

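The rewrite above replaces a hard assertion with a fallback chain: prefer the device's stable unique_id when it exists, and otherwise fall back to the config entry id, which is always present, instead of crashing on entries that never got a unique_id. In isolation:

    def resolve_unique_id(entry) -> str:
        """Prefer the stable unique_id; fall back to the always-present entry id."""
        if entry.unique_id:
            return entry.unique_id
        return entry.entry_id
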
@@ -122,9 +122,9 @@ class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
            LOGGER.exception("Unexpected exception")
            errors["base"] = "unknown"
        else:

            await self.async_set_unique_id(hub.system["serialnumber"])
            self._abort_if_unique_id_configured()
            if serialnumber := hub.system.get("serialnumber"):
                await self.async_set_unique_id(serialnumber)
                self._abort_if_unique_id_configured()

            self._current[CONF_SYSTEM] = hub.system
            self._current[CONF_API_VERSION] = hub.api_version

@@ -82,7 +82,7 @@ async def async_setup_entry(
class PhilipsTVMediaPlayer(CoordinatorEntity, MediaPlayerEntity):
    """Representation of a Philips TV exposing the JointSpace API."""

    _coordinator: PhilipsTVDataUpdateCoordinator
    coordinator: PhilipsTVDataUpdateCoordinator
    _attr_device_class = MediaPlayerDeviceClass.TV

    def __init__(
@@ -91,7 +91,6 @@ class PhilipsTVMediaPlayer(CoordinatorEntity, MediaPlayerEntity):
    ) -> None:
        """Initialize the Philips TV."""
        self._tv = coordinator.api
        self._coordinator = coordinator
        self._sources = {}
        self._channels = {}
        self._supports = SUPPORT_PHILIPS_JS
@@ -125,7 +124,7 @@ class PhilipsTVMediaPlayer(CoordinatorEntity, MediaPlayerEntity):
    def supported_features(self):
        """Flag media player features that are supported."""
        supports = self._supports
        if self._coordinator.turn_on or (
        if self.coordinator.turn_on or (
            self._tv.on and self._tv.powerstate is not None
        ):
            supports |= SUPPORT_TURN_ON
@@ -170,7 +169,7 @@ class PhilipsTVMediaPlayer(CoordinatorEntity, MediaPlayerEntity):
            await self._tv.setPowerState("On")
            self._state = STATE_ON
        else:
            await self._coordinator.turn_on.async_run(self.hass, self._context)
            await self.coordinator.turn_on.async_run(self.hass, self._context)
        await self._async_update_soon()

    async def async_turn_off(self):

@@ -30,7 +30,7 @@ async def async_setup_entry(
class PhilipsTVRemote(CoordinatorEntity, RemoteEntity):
    """Device that sends commands."""

    _coordinator: PhilipsTVDataUpdateCoordinator
    coordinator: PhilipsTVDataUpdateCoordinator

    def __init__(
        self,
@@ -63,7 +63,7 @@ class PhilipsTVRemote(CoordinatorEntity, RemoteEntity):
        if self._tv.on and self._tv.powerstate:
            await self._tv.setPowerState("On")
        else:
            await self._coordinator.turn_on.async_run(self.hass, self._context)
            await self.coordinator.turn_on.async_run(self.hass, self._context)
        self.async_write_ha_state()

    async def async_turn_off(self, **kwargs):

@@ -4,7 +4,7 @@
  "config_flow": true,
  "documentation": "https://www.home-assistant.io/integrations/plex",
  "requirements": [
    "plexapi==4.9.1",
    "plexapi==4.9.2",
    "plexauth==0.0.6",
    "plexwebsocket==0.0.13"
  ],

@@ -2,6 +2,6 @@
  "domain": "proxy",
  "name": "Camera Proxy",
  "documentation": "https://www.home-assistant.io/integrations/proxy",
  "requirements": ["pillow==9.0.0"],
  "requirements": ["pillow==9.0.1"],
  "codeowners": []
}

@@ -23,7 +23,7 @@ async def validate_input(hass: HomeAssistant, *, api_key: str, system_id: int) -
        api_key=api_key,
        system_id=system_id,
    )
    await pvoutput.status()
    await pvoutput.system()


class PVOutputFlowHandler(ConfigFlow, domain=DOMAIN):

@@ -1,14 +1,14 @@
"""DataUpdateCoordinator for the PVOutput integration."""
from __future__ import annotations

from pvo import PVOutput, PVOutputAuthenticationError, Status
from pvo import PVOutput, PVOutputAuthenticationError, PVOutputNoDataError, Status

from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_API_KEY
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryAuthFailed
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed

from .const import CONF_SYSTEM_ID, DOMAIN, LOGGER, SCAN_INTERVAL

@@ -33,5 +33,7 @@ class PVOutputDataUpdateCoordinator(DataUpdateCoordinator[Status]):
        """Fetch system status from PVOutput."""
        try:
            return await self.pvoutput.status()
        except PVOutputNoDataError as err:
            raise UpdateFailed("PVOutput has no data available") from err
        except PVOutputAuthenticationError as err:
            raise ConfigEntryAuthFailed from err

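The hunk above follows the standard coordinator error convention: transient problems are raised as UpdateFailed (entities go unavailable and the next scheduled refresh retries), while credential problems are raised as ConfigEntryAuthFailed (which starts a reauth flow instead of retrying forever). A generic sketch of the convention, with hypothetical client and exception names:

    from homeassistant.exceptions import ConfigEntryAuthFailed
    from homeassistant.helpers.update_coordinator import (
        DataUpdateCoordinator,
        UpdateFailed,
    )

    class ExampleCoordinator(DataUpdateCoordinator):
        async def _async_update_data(self):
            try:
                return await self.client.fetch()          # hypothetical client
            except TransientError as err:                 # hypothetical exception
                raise UpdateFailed(str(err)) from err     # retried next interval
            except AuthError as err:                      # hypothetical exception
                raise ConfigEntryAuthFailed from err      # triggers reauth flow
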
@@ -4,7 +4,7 @@
  "documentation": "https://www.home-assistant.io/integrations/pvoutput",
  "config_flow": true,
  "codeowners": ["@fabaff", "@frenck"],
  "requirements": ["pvo==0.2.0"],
  "requirements": ["pvo==0.2.2"],
  "iot_class": "cloud_polling",
  "quality_scale": "platinum"
}

@@ -2,7 +2,7 @@
  "domain": "qrcode",
  "name": "QR Code",
  "documentation": "https://www.home-assistant.io/integrations/qrcode",
  "requirements": ["pillow==9.0.0", "pyzbar==0.1.7"],
  "requirements": ["pillow==9.0.1", "pyzbar==0.1.7"],
  "codeowners": [],
  "iot_class": "calculated"
}

@@ -40,7 +40,7 @@ def setup(hass: HomeAssistant, config: ConfigType) -> bool:
        "https://github.com/home-assistant/architecture/blob/master/adr/0019-GPIO.md"
    )

    hass.data[DOMAIN][I2C_HATS_MANAGER] = I2CHatsManager()
    hass.data[DOMAIN] = {I2C_HATS_MANAGER: I2CHatsManager()}

    def start_i2c_hats_keep_alive(event):
        """Start I2C-HATs keep alive."""

@@ -2,7 +2,6 @@
from __future__ import annotations

import logging
from typing import TYPE_CHECKING

import voluptuous as vol

@@ -109,20 +108,12 @@ class I2CHatBinarySensor(BinarySensorEntity):
        self._device_class = device_class
        self._state = self.I2C_HATS_MANAGER.read_di(self._address, self._channel)

    async def async_added_to_hass(self) -> None:
        """Register callbacks."""
        if TYPE_CHECKING:
            assert self.I2C_HATS_MANAGER

        def online_callback():
            """Call fired when board is online."""
            self.schedule_update_ha_state()

        await self.hass.async_add_executor_job(
            self.I2C_HATS_MANAGER.register_online_callback,
            self._address,
            self._channel,
            online_callback,
        self.I2C_HATS_MANAGER.register_online_callback(
            self._address, self._channel, online_callback
        )

        def edge_callback(state):
@@ -130,11 +121,8 @@ class I2CHatBinarySensor(BinarySensorEntity):
            self._state = state
            self.schedule_update_ha_state()

        await self.hass.async_add_executor_job(
            self.I2C_HATS_MANAGER.register_di_callback,
            self._address,
            self._channel,
            edge_callback,
        self.I2C_HATS_MANAGER.register_di_callback(
            self._address, self._channel, edge_callback
        )

    @property

@@ -2,7 +2,6 @@
from __future__ import annotations

import logging
from typing import TYPE_CHECKING

import voluptuous as vol

@@ -101,7 +100,6 @@ class I2CHatSwitch(SwitchEntity):
        self._channel = channel
        self._name = name or DEVICE_DEFAULT_NAME
        self._invert_logic = invert_logic
        self._state = initial_state
        if initial_state is not None:
            if self._invert_logic:
                state = not initial_state
@@ -109,27 +107,14 @@ class I2CHatSwitch(SwitchEntity):
                state = initial_state
            self.I2C_HATS_MANAGER.write_dq(self._address, self._channel, state)

    async def async_added_to_hass(self) -> None:
        """Register callbacks."""
        if TYPE_CHECKING:
            assert self.I2C_HATS_MANAGER
        def online_callback():
            """Call fired when board is online."""
            self.schedule_update_ha_state()

        await self.hass.async_add_executor_job(
            self.I2C_HATS_MANAGER.register_online_callback,
            self._address,
            self._channel,
            self.online_callback,
        self.I2C_HATS_MANAGER.register_online_callback(
            self._address, self._channel, online_callback
        )

    def online_callback(self):
        """Call fired when board is online."""
        try:
            self._state = self.I2C_HATS_MANAGER.read_dq(self._address, self._channel)
        except I2CHatsException as ex:
            _LOGGER.error(self._log_message(f"Is ON check failed, {ex!s}"))
            self._state = False
        self.schedule_update_ha_state()

    def _log_message(self, message):
        """Create log message."""
        string = f"{self._name} "
@@ -150,7 +135,12 @@ class I2CHatSwitch(SwitchEntity):
    @property
    def is_on(self):
        """Return true if device is on."""
        return self._state != self._invert_logic
        try:
            state = self.I2C_HATS_MANAGER.read_dq(self._address, self._channel)
            return state != self._invert_logic
        except I2CHatsException as ex:
            _LOGGER.error(self._log_message(f"Is ON check failed, {ex!s}"))
            return False

    def turn_on(self, **kwargs):
        """Turn the device on."""

@@ -6,6 +6,7 @@ import logging
|
||||
import sqlalchemy
|
||||
from sqlalchemy import ForeignKeyConstraint, MetaData, Table, func, text
|
||||
from sqlalchemy.exc import (
|
||||
DatabaseError,
|
||||
InternalError,
|
||||
OperationalError,
|
||||
ProgrammingError,
|
||||
@@ -68,20 +69,18 @@ def schema_is_current(current_version):
|
||||
|
||||
def migrate_schema(instance, current_version):
|
||||
"""Check if the schema needs to be upgraded."""
|
||||
with session_scope(session=instance.get_session()) as session:
|
||||
_LOGGER.warning(
|
||||
"Database is about to upgrade. Schema version: %s", current_version
|
||||
)
|
||||
for version in range(current_version, SCHEMA_VERSION):
|
||||
new_version = version + 1
|
||||
_LOGGER.info("Upgrading recorder db schema to version %s", new_version)
|
||||
_apply_update(instance, session, new_version, current_version)
|
||||
_LOGGER.warning("Database is about to upgrade. Schema version: %s", current_version)
|
||||
for version in range(current_version, SCHEMA_VERSION):
|
||||
new_version = version + 1
|
||||
_LOGGER.info("Upgrading recorder db schema to version %s", new_version)
|
||||
_apply_update(instance, new_version, current_version)
|
||||
with session_scope(session=instance.get_session()) as session:
|
||||
session.add(SchemaChanges(schema_version=new_version))
|
||||
|
||||
_LOGGER.info("Upgrade to version %s done", new_version)
|
||||
_LOGGER.info("Upgrade to version %s done", new_version)
|
||||
|
||||
|
||||
def _create_index(connection, table_name, index_name):
|
||||
def _create_index(instance, table_name, index_name):
|
||||
"""Create an index for the specified table.
|
||||
|
||||
The index name should match the name given for the index
|
||||
@@ -102,18 +101,20 @@ def _create_index(connection, table_name, index_name):
|
||||
"be patient!",
|
||||
index_name,
|
||||
)
|
||||
try:
|
||||
index.create(connection)
|
||||
except (InternalError, ProgrammingError, OperationalError) as err:
|
||||
raise_if_exception_missing_str(err, ["already exists", "duplicate"])
|
||||
_LOGGER.warning(
|
||||
"Index %s already exists on %s, continuing", index_name, table_name
|
||||
)
|
||||
with session_scope(session=instance.get_session()) as session:
|
||||
try:
|
||||
connection = session.connection()
|
||||
index.create(connection)
|
||||
except (InternalError, OperationalError, ProgrammingError) as err:
|
||||
raise_if_exception_missing_str(err, ["already exists", "duplicate"])
|
||||
_LOGGER.warning(
|
||||
"Index %s already exists on %s, continuing", index_name, table_name
|
||||
)
|
||||
|
||||
_LOGGER.debug("Finished creating %s", index_name)
|
||||
|
||||
|
||||
def _drop_index(connection, table_name, index_name):
|
||||
def _drop_index(instance, table_name, index_name):
|
||||
"""Drop an index from a specified table.
|
||||
|
||||
There is no universal way to do something like `DROP INDEX IF EXISTS`
|
||||
@@ -128,42 +129,48 @@ def _drop_index(connection, table_name, index_name):
|
||||
success = False
|
||||
|
||||
# Engines like DB2/Oracle
|
||||
try:
|
||||
connection.execute(text(f"DROP INDEX {index_name}"))
|
||||
except SQLAlchemyError:
|
||||
pass
|
||||
else:
|
||||
success = True
|
||||
with session_scope(session=instance.get_session()) as session:
|
||||
try:
|
||||
connection = session.connection()
|
||||
connection.execute(text(f"DROP INDEX {index_name}"))
|
||||
except SQLAlchemyError:
|
||||
pass
|
||||
else:
|
||||
success = True
|
||||
|
||||
# Engines like SQLite, SQL Server
|
||||
if not success:
|
||||
try:
|
||||
connection.execute(
|
||||
text(
|
||||
"DROP INDEX {table}.{index}".format(
|
||||
index=index_name, table=table_name
|
||||
with session_scope(session=instance.get_session()) as session:
|
||||
try:
|
||||
connection = session.connection()
|
||||
connection.execute(
|
||||
text(
|
||||
"DROP INDEX {table}.{index}".format(
|
||||
index=index_name, table=table_name
|
||||
)
|
||||
)
|
||||
)
|
||||
)
|
||||
except SQLAlchemyError:
|
||||
pass
|
||||
else:
|
||||
success = True
|
||||
except SQLAlchemyError:
|
||||
pass
|
||||
else:
|
||||
success = True
|
||||
|
||||
if not success:
|
||||
# Engines like MySQL, MS Access
|
||||
try:
|
||||
connection.execute(
|
||||
text(
|
||||
"DROP INDEX {index} ON {table}".format(
|
||||
index=index_name, table=table_name
|
||||
with session_scope(session=instance.get_session()) as session:
|
||||
try:
|
||||
connection = session.connection()
|
||||
connection.execute(
|
||||
text(
|
||||
"DROP INDEX {index} ON {table}".format(
|
||||
index=index_name, table=table_name
|
||||
)
|
||||
)
|
||||
)
|
||||
)
|
||||
except SQLAlchemyError:
|
||||
pass
|
||||
else:
|
||||
success = True
|
||||
except SQLAlchemyError:
|
||||
pass
|
||||
else:
|
||||
success = True
|
||||
|
||||
if success:
|
||||
_LOGGER.debug(
|
||||
@@ -184,7 +191,7 @@ def _drop_index(connection, table_name, index_name):
|
||||
)
|
||||
|
||||
|
||||
def _add_columns(connection, table_name, columns_def):
|
||||
def _add_columns(instance, table_name, columns_def):
|
||||
"""Add columns to a table."""
|
||||
_LOGGER.warning(
|
||||
"Adding columns %s to table %s. Note: this can take several "
|
||||
@@ -196,39 +203,43 @@ def _add_columns(connection, table_name, columns_def):
|
||||
|
||||
columns_def = [f"ADD {col_def}" for col_def in columns_def]
|
||||
|
||||
try:
|
||||
connection.execute(
|
||||
text(
|
||||
"ALTER TABLE {table} {columns_def}".format(
|
||||
table=table_name, columns_def=", ".join(columns_def)
|
||||
)
|
||||
)
|
||||
)
|
||||
return
|
||||
except (InternalError, OperationalError):
|
||||
# Some engines support adding all columns at once,
|
||||
# this error is when they don't
|
||||
_LOGGER.info("Unable to use quick column add. Adding 1 by 1")
|
||||
|
||||
for column_def in columns_def:
|
||||
with session_scope(session=instance.get_session()) as session:
|
||||
try:
|
||||
connection = session.connection()
|
||||
connection.execute(
|
||||
text(
|
||||
"ALTER TABLE {table} {column_def}".format(
|
||||
table=table_name, column_def=column_def
|
||||
"ALTER TABLE {table} {columns_def}".format(
|
||||
table=table_name, columns_def=", ".join(columns_def)
|
||||
)
|
||||
)
|
||||
)
|
||||
except (InternalError, OperationalError) as err:
|
||||
raise_if_exception_missing_str(err, ["already exists", "duplicate"])
|
||||
_LOGGER.warning(
|
||||
"Column %s already exists on %s, continuing",
|
||||
column_def.split(" ")[1],
|
||||
table_name,
|
||||
)
|
||||
return
|
||||
except (InternalError, OperationalError, ProgrammingError):
|
||||
# Some engines support adding all columns at once,
|
||||
# this error is when they don't
|
||||
_LOGGER.info("Unable to use quick column add. Adding 1 by 1")
|
||||
|
||||
for column_def in columns_def:
|
||||
with session_scope(session=instance.get_session()) as session:
|
||||
try:
|
||||
connection = session.connection()
|
||||
connection.execute(
|
||||
text(
|
||||
"ALTER TABLE {table} {column_def}".format(
|
||||
table=table_name, column_def=column_def
|
||||
)
|
||||
)
|
||||
)
|
||||
except (InternalError, OperationalError, ProgrammingError) as err:
|
||||
raise_if_exception_missing_str(err, ["already exists", "duplicate"])
|
||||
_LOGGER.warning(
|
||||
"Column %s already exists on %s, continuing",
|
||||
column_def.split(" ")[1],
|
||||
table_name,
|
||||
)
|
||||
|
||||
|
||||
def _modify_columns(connection, engine, table_name, columns_def):
|
||||
def _modify_columns(instance, engine, table_name, columns_def):
|
||||
"""Modify columns in a table."""
|
||||
if engine.dialect.name == "sqlite":
|
||||
_LOGGER.debug(
|
||||
@@ -260,34 +271,38 @@ def _modify_columns(connection, engine, table_name, columns_def):
|
||||
else:
|
||||
columns_def = [f"MODIFY {col_def}" for col_def in columns_def]
|
||||
|
||||
try:
|
||||
connection.execute(
|
||||
text(
|
||||
"ALTER TABLE {table} {columns_def}".format(
|
||||
table=table_name, columns_def=", ".join(columns_def)
|
||||
)
|
||||
)
|
||||
)
|
||||
return
|
||||
except (InternalError, OperationalError):
|
||||
_LOGGER.info("Unable to use quick column modify. Modifying 1 by 1")
|
||||
|
||||
for column_def in columns_def:
|
||||
with session_scope(session=instance.get_session()) as session:
|
||||
try:
|
||||
connection = session.connection()
|
||||
connection.execute(
|
||||
text(
|
||||
"ALTER TABLE {table} {column_def}".format(
|
||||
table=table_name, column_def=column_def
|
||||
"ALTER TABLE {table} {columns_def}".format(
|
||||
table=table_name, columns_def=", ".join(columns_def)
|
||||
)
|
||||
)
|
||||
)
|
||||
return
|
||||
except (InternalError, OperationalError):
|
||||
_LOGGER.exception(
|
||||
"Could not modify column %s in table %s", column_def, table_name
|
||||
)
|
||||
_LOGGER.info("Unable to use quick column modify. Modifying 1 by 1")
|
||||
|
||||
for column_def in columns_def:
|
||||
with session_scope(session=instance.get_session()) as session:
|
||||
try:
|
||||
connection = session.connection()
|
||||
connection.execute(
|
||||
text(
|
||||
"ALTER TABLE {table} {column_def}".format(
|
||||
table=table_name, column_def=column_def
|
||||
)
|
||||
)
|
||||
)
|
||||
except (InternalError, OperationalError):
|
||||
_LOGGER.exception(
|
||||
"Could not modify column %s in table %s", column_def, table_name
|
||||
)
|
||||
|
||||
|
||||
def _update_states_table_with_foreign_key_options(connection, engine):
|
||||
def _update_states_table_with_foreign_key_options(instance, engine):
|
||||
"""Add the options to foreign key constraints."""
|
||||
inspector = sqlalchemy.inspect(engine)
|
||||
alters = []
|
||||
@@ -315,18 +330,20 @@ def _update_states_table_with_foreign_key_options(connection, engine):
|
||||
)
|
||||
|
||||
for alter in alters:
|
||||
try:
|
||||
connection.execute(DropConstraint(alter["old_fk"]))
|
||||
for fkc in states_key_constraints:
|
||||
if fkc.column_keys == alter["columns"]:
|
||||
connection.execute(AddConstraint(fkc))
|
||||
except (InternalError, OperationalError):
|
||||
_LOGGER.exception(
|
||||
"Could not update foreign options in %s table", TABLE_STATES
|
||||
)
|
||||
with session_scope(session=instance.get_session()) as session:
|
||||
try:
|
||||
connection = session.connection()
|
||||
connection.execute(DropConstraint(alter["old_fk"]))
|
||||
for fkc in states_key_constraints:
|
||||
if fkc.column_keys == alter["columns"]:
|
||||
connection.execute(AddConstraint(fkc))
|
||||
except (InternalError, OperationalError):
|
||||
_LOGGER.exception(
|
||||
"Could not update foreign options in %s table", TABLE_STATES
|
||||
)
|
||||
|
||||
|
||||
def _drop_foreign_key_constraints(connection, engine, table, columns):
|
||||
def _drop_foreign_key_constraints(instance, engine, table, columns):
|
||||
"""Drop foreign key constraints for a table on specific columns."""
|
||||
inspector = sqlalchemy.inspect(engine)
|
||||
drops = []
|
||||
@@ -344,27 +361,28 @@ def _drop_foreign_key_constraints(connection, engine, table, columns):
|
||||
)
|
||||
|
||||
for drop in drops:
|
||||
try:
|
||||
connection.execute(DropConstraint(drop))
|
||||
except (InternalError, OperationalError):
|
||||
_LOGGER.exception(
|
||||
"Could not drop foreign constraints in %s table on %s",
|
||||
TABLE_STATES,
|
||||
columns,
|
||||
)
|
||||
with session_scope(session=instance.get_session()) as session:
|
||||
try:
|
||||
connection = session.connection()
|
||||
connection.execute(DropConstraint(drop))
|
||||
except (InternalError, OperationalError):
|
||||
_LOGGER.exception(
|
||||
"Could not drop foreign constraints in %s table on %s",
|
||||
TABLE_STATES,
|
||||
columns,
|
||||
)
|
||||
|
||||
|
-def _apply_update(instance, session, new_version, old_version):  # noqa: C901
+def _apply_update(instance, new_version, old_version):  # noqa: C901
    """Perform operations to bring schema up to date."""
    engine = instance.engine
-    connection = session.connection()
    if new_version == 1:
-        _create_index(connection, "events", "ix_events_time_fired")
+        _create_index(instance, "events", "ix_events_time_fired")
    elif new_version == 2:
        # Create compound start/end index for recorder_runs
-        _create_index(connection, "recorder_runs", "ix_recorder_runs_start_end")
+        _create_index(instance, "recorder_runs", "ix_recorder_runs_start_end")
        # Create indexes for states
-        _create_index(connection, "states", "ix_states_last_updated")
+        _create_index(instance, "states", "ix_states_last_updated")
    elif new_version == 3:
        # There used to be a new index here, but it was removed in version 4.
        pass
@@ -374,41 +392,41 @@ def _apply_update(instance, session, new_version, old_version):  # noqa: C901

        if old_version == 3:
            # Remove index that was added in version 3
-            _drop_index(connection, "states", "ix_states_created_domain")
+            _drop_index(instance, "states", "ix_states_created_domain")
        if old_version == 2:
            # Remove index that was added in version 2
-            _drop_index(connection, "states", "ix_states_entity_id_created")
+            _drop_index(instance, "states", "ix_states_entity_id_created")

        # Remove indexes that were added in version 0
-        _drop_index(connection, "states", "states__state_changes")
-        _drop_index(connection, "states", "states__significant_changes")
-        _drop_index(connection, "states", "ix_states_entity_id_created")
+        _drop_index(instance, "states", "states__state_changes")
+        _drop_index(instance, "states", "states__significant_changes")
+        _drop_index(instance, "states", "ix_states_entity_id_created")

-        _create_index(connection, "states", "ix_states_entity_id_last_updated")
+        _create_index(instance, "states", "ix_states_entity_id_last_updated")
    elif new_version == 5:
        # Create supporting index for States.event_id foreign key
-        _create_index(connection, "states", "ix_states_event_id")
+        _create_index(instance, "states", "ix_states_event_id")
    elif new_version == 6:
        _add_columns(
-            session,
+            instance,
            "events",
            ["context_id CHARACTER(36)", "context_user_id CHARACTER(36)"],
        )
-        _create_index(connection, "events", "ix_events_context_id")
-        _create_index(connection, "events", "ix_events_context_user_id")
+        _create_index(instance, "events", "ix_events_context_id")
+        _create_index(instance, "events", "ix_events_context_user_id")
        _add_columns(
-            connection,
+            instance,
            "states",
            ["context_id CHARACTER(36)", "context_user_id CHARACTER(36)"],
        )
-        _create_index(connection, "states", "ix_states_context_id")
-        _create_index(connection, "states", "ix_states_context_user_id")
+        _create_index(instance, "states", "ix_states_context_id")
+        _create_index(instance, "states", "ix_states_context_user_id")
    elif new_version == 7:
-        _create_index(connection, "states", "ix_states_entity_id")
+        _create_index(instance, "states", "ix_states_entity_id")
    elif new_version == 8:
-        _add_columns(connection, "events", ["context_parent_id CHARACTER(36)"])
-        _add_columns(connection, "states", ["old_state_id INTEGER"])
-        _create_index(connection, "events", "ix_events_context_parent_id")
+        _add_columns(instance, "events", ["context_parent_id CHARACTER(36)"])
+        _add_columns(instance, "states", ["old_state_id INTEGER"])
+        _create_index(instance, "events", "ix_events_context_parent_id")
    elif new_version == 9:
        # We now get the context from events with a join
        # since its always there on state_changed events
@@ -418,36 +436,36 @@ def _apply_update(instance, session, new_version, old_version):  # noqa: C901
        # and we would have to move to something like
        # sqlalchemy alembic to make that work
        #
-        _drop_index(connection, "states", "ix_states_context_id")
-        _drop_index(connection, "states", "ix_states_context_user_id")
+        _drop_index(instance, "states", "ix_states_context_id")
+        _drop_index(instance, "states", "ix_states_context_user_id")
        # This index won't be there if they were not running
        # nightly but we don't treat that as a critical issue
-        _drop_index(connection, "states", "ix_states_context_parent_id")
+        _drop_index(instance, "states", "ix_states_context_parent_id")
        # Redundant keys on composite index:
        # We already have ix_states_entity_id_last_updated
-        _drop_index(connection, "states", "ix_states_entity_id")
-        _create_index(connection, "events", "ix_events_event_type_time_fired")
-        _drop_index(connection, "events", "ix_events_event_type")
+        _drop_index(instance, "states", "ix_states_entity_id")
+        _create_index(instance, "events", "ix_events_event_type_time_fired")
+        _drop_index(instance, "events", "ix_events_event_type")
    elif new_version == 10:
        # Now done in step 11
        pass
    elif new_version == 11:
-        _create_index(connection, "states", "ix_states_old_state_id")
-        _update_states_table_with_foreign_key_options(connection, engine)
+        _create_index(instance, "states", "ix_states_old_state_id")
+        _update_states_table_with_foreign_key_options(instance, engine)
    elif new_version == 12:
        if engine.dialect.name == "mysql":
-            _modify_columns(connection, engine, "events", ["event_data LONGTEXT"])
-            _modify_columns(connection, engine, "states", ["attributes LONGTEXT"])
+            _modify_columns(instance, engine, "events", ["event_data LONGTEXT"])
+            _modify_columns(instance, engine, "states", ["attributes LONGTEXT"])
    elif new_version == 13:
        if engine.dialect.name == "mysql":
            _modify_columns(
-                connection,
+                instance,
                engine,
                "events",
                ["time_fired DATETIME(6)", "created DATETIME(6)"],
            )
            _modify_columns(
-                connection,
+                instance,
                engine,
                "states",
                [
@@ -457,14 +475,12 @@ def _apply_update(instance, session, new_version, old_version):  # noqa: C901
                ],
            )
    elif new_version == 14:
-        _modify_columns(connection, engine, "events", ["event_type VARCHAR(64)"])
+        _modify_columns(instance, engine, "events", ["event_type VARCHAR(64)"])
    elif new_version == 15:
        # This dropped the statistics table, done again in version 18.
        pass
    elif new_version == 16:
-        _drop_foreign_key_constraints(
-            connection, engine, TABLE_STATES, ["old_state_id"]
-        )
+        _drop_foreign_key_constraints(instance, engine, TABLE_STATES, ["old_state_id"])
    elif new_version == 17:
        # This dropped the statistics table, done again in version 18.
        pass
@@ -489,12 +505,13 @@ def _apply_update(instance, session, new_version, old_version):  # noqa: C901
    elif new_version == 19:
        # This adds the statistic runs table, insert a fake run to prevent duplicating
        # statistics.
-        session.add(StatisticsRuns(start=get_start_time()))
+        with session_scope(session=instance.get_session()) as session:
+            session.add(StatisticsRuns(start=get_start_time()))
    elif new_version == 20:
        # This changed the precision of statistics from float to double
        if engine.dialect.name in ["mysql", "postgresql"]:
            _modify_columns(
-                connection,
+                instance,
                engine,
                "statistics",
                [
@@ -516,14 +533,16 @@ def _apply_update(instance, session, new_version, old_version):  # noqa: C901
                table,
            )
            with contextlib.suppress(SQLAlchemyError):
-                connection.execute(
-                    # Using LOCK=EXCLUSIVE to prevent the database from corrupting
-                    # https://github.com/home-assistant/core/issues/56104
-                    text(
-                        f"ALTER TABLE {table} CONVERT TO "
-                        "CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci LOCK=EXCLUSIVE"
-                    )
-                )
+                with session_scope(session=instance.get_session()) as session:
+                    connection = session.connection()
+                    connection.execute(
+                        # Using LOCK=EXCLUSIVE to prevent the database from corrupting
+                        # https://github.com/home-assistant/core/issues/56104
+                        text(
+                            f"ALTER TABLE {table} CONVERT TO "
+                            "CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci LOCK=EXCLUSIVE"
+                        )
+                    )
    elif new_version == 22:
        # Recreate the all statistics tables for Oracle DB with Identity columns
        #
@@ -549,60 +568,76 @@ def _apply_update(instance, session, new_version, old_version):  # noqa: C901
        # Block 5-minute statistics for one hour from the last run, or it will overlap
        # with existing hourly statistics. Don't block on a database with no existing
        # statistics.
-        if session.query(Statistics.id).count() and (
-            last_run_string := session.query(func.max(StatisticsRuns.start)).scalar()
-        ):
-            last_run_start_time = process_timestamp(last_run_string)
-            if last_run_start_time:
-                fake_start_time = last_run_start_time + timedelta(minutes=5)
-                while fake_start_time < last_run_start_time + timedelta(hours=1):
-                    session.add(StatisticsRuns(start=fake_start_time))
-                    fake_start_time += timedelta(minutes=5)
+        with session_scope(session=instance.get_session()) as session:
+            if session.query(Statistics.id).count() and (
+                last_run_string := session.query(
+                    func.max(StatisticsRuns.start)
+                ).scalar()
+            ):
+                last_run_start_time = process_timestamp(last_run_string)
+                if last_run_start_time:
+                    fake_start_time = last_run_start_time + timedelta(minutes=5)
+                    while fake_start_time < last_run_start_time + timedelta(hours=1):
+                        session.add(StatisticsRuns(start=fake_start_time))
+                        fake_start_time += timedelta(minutes=5)

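The loop above seeds synthetic run markers at 5-minute steps strictly between the last run and one hour after it, so the short-term generator skips those slots. A standalone sketch of the same arithmetic with plain datetimes, handy for checking the fencepost behaviour:

from datetime import datetime, timedelta

last_run = datetime(2022, 2, 1, 12, 0)
fake_start = last_run + timedelta(minutes=5)
blocked = []
while fake_start < last_run + timedelta(hours=1):
    blocked.append(fake_start)
    fake_start += timedelta(minutes=5)

# Eleven slots are blocked, 12:05 through 12:55; 13:00 itself stays free,
# so normal 5-minute runs resume exactly one hour after the last run.
print(len(blocked), blocked[0], blocked[-1])
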

        # When querying the database, be careful to only explicitly query for columns
        # which were present in schema version 21. If querying the table, SQLAlchemy
        # will refer to future columns.
-        for sum_statistic in session.query(StatisticsMeta.id).filter_by(has_sum=true()):
-            last_statistic = (
-                session.query(
-                    Statistics.start,
-                    Statistics.last_reset,
-                    Statistics.state,
-                    Statistics.sum,
-                )
-                .filter_by(metadata_id=sum_statistic.id)
-                .order_by(Statistics.start.desc())
-                .first()
-            )
-            if last_statistic:
-                session.add(
-                    StatisticsShortTerm(
-                        metadata_id=sum_statistic.id,
-                        start=last_statistic.start,
-                        last_reset=last_statistic.last_reset,
-                        state=last_statistic.state,
-                        sum=last_statistic.sum,
-                    )
-                )
+        with session_scope(session=instance.get_session()) as session:
+            for sum_statistic in session.query(StatisticsMeta.id).filter_by(
+                has_sum=true()
+            ):
+                last_statistic = (
+                    session.query(
+                        Statistics.start,
+                        Statistics.last_reset,
+                        Statistics.state,
+                        Statistics.sum,
+                    )
+                    .filter_by(metadata_id=sum_statistic.id)
+                    .order_by(Statistics.start.desc())
+                    .first()
+                )
+                if last_statistic:
+                    session.add(
+                        StatisticsShortTerm(
+                            metadata_id=sum_statistic.id,
+                            start=last_statistic.start,
+                            last_reset=last_statistic.last_reset,
+                            state=last_statistic.state,
+                            sum=last_statistic.sum,
+                        )
+                    )
    elif new_version == 23:
        # Add name column to StatisticsMeta
-        _add_columns(session, "statistics_meta", ["name VARCHAR(255)"])
+        _add_columns(instance, "statistics_meta", ["name VARCHAR(255)"])
    elif new_version == 24:
-        # Delete duplicated statistics
-        delete_duplicates(instance, session)
        # Recreate statistics indices to block duplicated statistics
-        _drop_index(connection, "statistics", "ix_statistics_statistic_id_start")
-        _create_index(connection, "statistics", "ix_statistics_statistic_id_start")
+        _drop_index(instance, "statistics", "ix_statistics_statistic_id_start")
        _drop_index(
-            connection,
+            instance,
            "statistics_short_term",
            "ix_statistics_short_term_statistic_id_start",
        )
-        _create_index(
-            connection,
-            "statistics_short_term",
-            "ix_statistics_short_term_statistic_id_start",
-        )
+        try:
+            _create_index(instance, "statistics", "ix_statistics_statistic_id_start")
+            _create_index(
+                instance,
+                "statistics_short_term",
+                "ix_statistics_short_term_statistic_id_start",
+            )
+        except DatabaseError:
+            # There may be duplicated statistics entries, delete duplicated statistics
+            # and try again
+            with session_scope(session=instance.get_session()) as session:
+                delete_duplicates(instance, session)
+            _create_index(instance, "statistics", "ix_statistics_statistic_id_start")
+            _create_index(
+                instance,
+                "statistics_short_term",
+                "ix_statistics_short_term_statistic_id_start",
+            )

    else:
        raise ValueError(f"No schema migration defined for version {new_version}")

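Taken as a whole, _apply_update is the classic versioned-migration dispatcher: run each step at most once, in order, and persist the version reached. A generic sketch of that pattern (table and statements invented for illustration; the recorder's real driver adds locking, retries, and progress logging):

import sqlite3

MIGRATIONS = {
    1: ["CREATE TABLE events (id INTEGER PRIMARY KEY, data TEXT)"],
    2: ["CREATE INDEX ix_events_data ON events (data)"],
}

def apply_migrations(conn: sqlite3.Connection) -> None:
    """Bring the schema up to the newest version, one step at a time."""
    conn.execute("CREATE TABLE IF NOT EXISTS schema_version (version INTEGER)")
    row = conn.execute("SELECT MAX(version) FROM schema_version").fetchone()
    current = row[0] or 0
    for version in sorted(MIGRATIONS):
        if version <= current:
            continue  # step already applied on an earlier run
        for statement in MIGRATIONS[version]:
            conn.execute(statement)
        # Record each step separately so a crash resumes at the right place.
        conn.execute("INSERT INTO schema_version VALUES (?)", (version,))
        conn.commit()

apply_migrations(sqlite3.connect(":memory:"))
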
@@ -119,8 +119,6 @@ QUERY_STATISTIC_META_ID = [
    StatisticsMeta.statistic_id,
]

-MAX_DUPLICATES = 1000000
-
STATISTICS_BAKERY = "recorder_statistics_bakery"
STATISTICS_META_BAKERY = "recorder_statistics_meta_bakery"
STATISTICS_SHORT_TERM_BAKERY = "recorder_statistics_short_term_bakery"
@@ -292,7 +290,7 @@ def _find_duplicates(
        )
        .filter(subquery.c.is_duplicate == 1)
        .order_by(table.metadata_id, table.start, table.id.desc())
-        .limit(MAX_ROWS_TO_PURGE)
+        .limit(1000 * MAX_ROWS_TO_PURGE)
    )
    duplicates = execute(query)
    original_as_dict = {}
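_find_duplicates flags rows that share (metadata_id, start) by numbering them within each group and treating everything past the first as a duplicate; the raised LIMIT only changes how many are fetched per pass. A hedged sketch of that windowing idea in plain SQLAlchemy Core, with an invented mini schema:

from sqlalchemy import Column, Integer, MetaData, Table, create_engine, func, select

engine = create_engine("sqlite://")
metadata = MetaData()
stats = Table(
    "statistics",
    metadata,
    Column("id", Integer, primary_key=True),
    Column("metadata_id", Integer),
    Column("start", Integer),
)
metadata.create_all(engine)

# Number rows inside each (metadata_id, start) group, newest id first;
# row_number > 1 marks a duplicate of the row that will be kept.
row_number = (
    func.row_number()
    .over(
        partition_by=(stats.c.metadata_id, stats.c.start),
        order_by=stats.c.id.desc(),
    )
    .label("row_number")
)
subquery = select(stats.c.id, row_number).subquery()
duplicate_ids = select(subquery.c.id).where(subquery.c.row_number > 1)

with engine.connect() as connection:
    print(connection.execute(duplicate_ids).fetchall())  # [] on an empty table
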
@@ -345,14 +343,13 @@ def _delete_duplicates_from_table(
|
||||
if not duplicate_ids:
|
||||
break
|
||||
all_non_identical_duplicates.extend(non_identical_duplicates)
|
||||
deleted_rows = (
|
||||
session.query(table)
|
||||
.filter(table.id.in_(duplicate_ids))
|
||||
.delete(synchronize_session=False)
|
||||
)
|
||||
total_deleted_rows += deleted_rows
|
||||
if total_deleted_rows >= MAX_DUPLICATES:
|
||||
break
|
||||
for i in range(0, len(duplicate_ids), MAX_ROWS_TO_PURGE):
|
||||
deleted_rows = (
|
||||
session.query(table)
|
||||
.filter(table.id.in_(duplicate_ids[i : i + MAX_ROWS_TO_PURGE]))
|
||||
.delete(synchronize_session=False)
|
||||
)
|
||||
total_deleted_rows += deleted_rows
|
||||
return (total_deleted_rows, all_non_identical_duplicates)
|
||||
|
||||
|
||||
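Replacing the single unbounded DELETE with fixed-size batches keeps the IN (...) list, and with it the statement itself, at a predictable size for every database backend. A self-contained sketch of the batching pattern using sqlite3 and a toy table:

import sqlite3

MAX_ROWS_TO_PURGE = 2  # deliberately tiny here; the recorder uses a far larger batch

conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE statistics (id INTEGER PRIMARY KEY)")
conn.executemany("INSERT INTO statistics VALUES (?)", [(i,) for i in range(10)])

duplicate_ids = [1, 3, 4, 7, 8]
total_deleted_rows = 0
for i in range(0, len(duplicate_ids), MAX_ROWS_TO_PURGE):
    chunk = duplicate_ids[i : i + MAX_ROWS_TO_PURGE]
    placeholders = ",".join("?" * len(chunk))
    cursor = conn.execute(
        f"DELETE FROM statistics WHERE id IN ({placeholders})", chunk
    )
    total_deleted_rows += cursor.rowcount  # rows removed by this batch

print(total_deleted_rows)  # 5
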
@@ -389,13 +386,6 @@ def delete_duplicates(instance: Recorder, session: scoped_session) -> None:
            backup_path,
        )

-    if deleted_statistics_rows >= MAX_DUPLICATES:
-        _LOGGER.warning(
-            "Found more than %s duplicated statistic rows, please report at "
-            'https://github.com/home-assistant/core/issues?q=is%%3Aissue+label%%3A"integration%%3A+recorder"+',
-            MAX_DUPLICATES - 1,
-        )
-
    deleted_short_term_statistics_rows, _ = _delete_duplicates_from_table(
        session, StatisticsShortTerm
    )

@@ -4,7 +4,7 @@
    "config_flow": true,
    "documentation": "https://www.home-assistant.io/integrations/renault",
    "requirements": [
-        "renault-api==0.1.7"
+        "renault-api==0.1.8"
    ],
    "codeowners": [
        "@epenet"

@@ -20,7 +20,6 @@ from homeassistant.const import (
    CONF_TYPE,
    CONF_USERNAME,
    ENERGY_KILO_WATT_HOUR,
-    EVENT_HOMEASSISTANT_START,
    EVENT_HOMEASSISTANT_STOP,
    MASS_KILOGRAMS,
    POWER_WATT,
@@ -33,6 +32,7 @@ from homeassistant.exceptions import PlatformNotReady
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.event import async_call_later
+from homeassistant.helpers.start import async_at_start
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType

_LOGGER = logging.getLogger(__name__)
@@ -131,17 +131,19 @@ async def async_setup_platform(

        return values

    @callback
    def start_update_interval(event):
        """Start the update interval scheduling."""
        nonlocal remove_interval_update
        remove_interval_update = async_track_time_interval_backoff(hass, async_saj)

    @callback
    def stop_update_interval(event):
        """Properly cancel the scheduled update."""
        remove_interval_update()  # pylint: disable=not-callable

-    hass.bus.async_listen_once(EVENT_HOMEASSISTANT_START, start_update_interval)
    hass.bus.async_listen(EVENT_HOMEASSISTANT_STOP, stop_update_interval)
+    async_at_start(hass, start_update_interval)


@callback

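async_at_start is preferred over a raw EVENT_HOMEASSISTANT_START listener because it also fires when the platform is set up after startup has already finished. Conceptually it is the guard below; a hedged sketch of the idea, not the helper's actual implementation:

from homeassistant.const import EVENT_HOMEASSISTANT_START
from homeassistant.core import HomeAssistant, callback

@callback
def conceptual_at_start(hass: HomeAssistant, at_start_cb) -> None:
    """Run at_start_cb now if HA is already up, otherwise once it starts."""
    if hass.is_running:
        at_start_cb(hass)  # late setup: fire immediately
        return
    hass.bus.async_listen_once(
        EVENT_HOMEASSISTANT_START, lambda event: at_start_cb(hass)
    )
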
@@ -113,8 +113,9 @@ class Scene(RestoreEntity):
        self.async_write_ha_state()
        await self.async_activate(**kwargs)

-    async def async_added_to_hass(self) -> None:
-        """Call when the button is added to hass."""
+    async def async_internal_added_to_hass(self) -> None:
+        """Call when the scene is added to hass."""
+        await super().async_internal_added_to_hass()
        state = await self.async_get_last_state()
        if state is not None and state.state is not None:
            self.__last_activated = state.state

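Moving the restore logic into async_internal_added_to_hass (with the required super() call) keeps it out of the public async_added_to_hass hook that scene platforms override. The underlying RestoreEntity pattern, sketched for a hypothetical entity with one restorable attribute:

from __future__ import annotations

from homeassistant.helpers.restore_state import RestoreEntity

class LastActivatedScene(RestoreEntity):
    """Illustrative entity that restores its previous state after a restart."""

    _last_activated: str | None = None

    async def async_added_to_hass(self) -> None:
        """Fetch the last recorded state once the entity is registered."""
        await super().async_added_to_hass()
        if (state := await self.async_get_last_state()) is not None:
            self._last_activated = state.state
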
@@ -2,7 +2,7 @@
    "domain": "seven_segments",
    "name": "Seven Segments OCR",
    "documentation": "https://www.home-assistant.io/integrations/seven_segments",
-    "requirements": ["pillow==9.0.0"],
+    "requirements": ["pillow==9.0.1"],
    "codeowners": ["@fabaff"],
    "iot_class": "local_polling"
}