forked from home-assistant/core
Compare commits
177 Commits
2022.9.0b2...2022.9.5
| Author | SHA1 | Date |
|---|---|---|
|  | a411cd9c20 |  |
|  | da81dbe6ac |  |
|  | f5c30ab10a |  |
|  | 454675d86b |  |
|  | cce4496ad6 |  |
|  | ebeebeaec1 |  |
|  | c8d16175da |  |
|  | a2aa0e608d |  |
|  | 7eb98ffbd1 |  |
|  | 6e62080cd9 |  |
|  | 39dee6d426 |  |
|  | 3a89a49d4a |  |
|  | ef66d8e705 |  |
|  | c1809681b6 |  |
|  | 050c09df62 |  |
|  | e0b63ac488 |  |
|  | ed6575fefb |  |
|  | 318ae7750a |  |
|  | 0525a1cd97 |  |
|  | d31d4e2916 |  |
|  | 40c5689507 |  |
|  | a4749178f1 |  |
|  | 8229e241f1 |  |
|  | 2b40f3f1e5 |  |
|  | e839849456 |  |
|  | e711758cfd |  |
|  | 896955e4df |  |
|  | 7b83807baa |  |
|  | 6a197332c7 |  |
|  | 1955ff9e0d |  |
|  | 29caf06439 |  |
|  | 0b5953038e |  |
|  | f07e1bc500 |  |
|  | 843d5f101a |  |
|  | d98ed5c6f6 |  |
|  | 8599472880 |  |
|  | 04d6bb085b |  |
|  | 6f9a311cec |  |
|  | 336179df6d |  |
|  | 9459af30b0 |  |
|  | ee07ca8caa |  |
|  | 3beed13586 |  |
|  | f0753f7a97 |  |
|  | dd007cd765 |  |
|  | 7cdac3ee8c |  |
|  | cd7f65bb6a |  |
|  | b21a37cad5 |  |
|  | bfcb9402ef |  |
|  | ad396f0538 |  |
|  | 12edfb3929 |  |
|  | 47f6be77cc |  |
|  | 9acf74d783 |  |
|  | 0aa2685e0c |  |
|  | a90b6d37bf |  |
|  | d6bf1a8caa |  |
|  | 95a89448e0 |  |
|  | f6d26476b5 |  |
|  | 9640553b52 |  |
|  | 3129114d07 |  |
|  | 184a1c95f0 |  |
|  | f18ab504a5 |  |
|  | 2bd71f62ea |  |
|  | 296db8b2af |  |
|  | a277664187 |  |
|  | 1b7a06912a |  |
|  | e7986a54a5 |  |
|  | de8b066a1d |  |
|  | 4d4a87ba05 |  |
|  | 4b79e82e31 |  |
|  | 1e8f461270 |  |
|  | 6e88b8d3d5 |  |
|  | a626ab4f1a |  |
|  | c7cb0d1a07 |  |
|  | 183c61b6ca |  |
|  | 95c20df367 |  |
|  | a969ce273a |  |
|  | 5f90760176 |  |
|  | 795be361b4 |  |
|  | cdd5c809bb |  |
|  | c731e2f125 |  |
|  | 1789a8a385 |  |
|  | 57717f13fc |  |
|  | e4aab6a818 |  |
|  | 258791626e |  |
|  | 78802c8480 |  |
|  | b24f3725d6 |  |
|  | 06116f76fa |  |
|  | 27c0a37053 |  |
|  | 2b961fd327 |  |
|  | 125afb39f0 |  |
|  | 3ee62d619f |  |
|  | dc7c860c6a |  |
|  | f042cc5d7b |  |
|  | 4c0872b4e4 |  |
|  | 21f6b50f7c |  |
|  | d670df74cb |  |
|  | 0a7f3f6ced |  |
|  | fee9a303ff |  |
|  | a4f398a750 |  |
|  | c873eae79c |  |
|  | d559b6482a |  |
|  | 760853f615 |  |
|  | cfe8ebdad4 |  |
|  | 2ddd1b516c |  |
|  | 3b025b211e |  |
|  | 4009a32fb5 |  |
|  | 6f3b49601e |  |
|  | 31858ad779 |  |
|  | ab9d9d599e |  |
|  | ce6d337bd5 |  |
|  | 3fd887b1f2 |  |
|  | 996a3477b0 |  |
|  | 910f27f3a2 |  |
|  | 4ab5cdcb79 |  |
|  | e69fde6875 |  |
|  | 10f7e2ff8a |  |
|  | 3acc3af38c |  |
|  | a3edbfc601 |  |
|  | 941a5e3820 |  |
|  | 2eeab820b7 |  |
|  | 8d0ebdd1f9 |  |
|  | 9901b31316 |  |
|  | a4f528e908 |  |
|  | 9aa87761cf |  |
|  | d1b637ea7a |  |
|  | c8ad8a6d86 |  |
|  | 9155f669e9 |  |
|  | e1e153f391 |  |
|  | 1dbcf88e15 |  |
|  | a13438c5b0 |  |
|  | d98687b789 |  |
|  | 319b0b8902 |  |
|  | 62dcbc4d4a |  |
|  | 6989b16274 |  |
|  | 31d085cdf8 |  |
|  | 61ee621c90 |  |
|  | f5e61ecdec |  |
|  | 2bfcdc66b6 |  |
|  | 3240f8f938 |  |
|  | 74ddc336ca |  |
|  | 6c36d5acaa |  |
|  | e8c4711d88 |  |
|  | bca9dc1f61 |  |
|  | 4f8421617e |  |
|  | 40421b41f7 |  |
|  | b0ff4fc057 |  |
|  | 605e350159 |  |
|  | ad8cd9c957 |  |
|  | e8ab4eef44 |  |
|  | b1241bf0f2 |  |
|  | f3e811417f |  |
|  | 1231ba4d03 |  |
|  | e07554dc25 |  |
|  | 2fa517b81b |  |
|  | 0d042d496d |  |
|  | c8156d5de6 |  |
|  | 9f06baa778 |  |
|  | 52abf0851b |  |
|  | da83ceca5b |  |
|  | f9b95cc4a4 |  |
|  | f60ae40661 |  |
|  | ea0b406692 |  |
|  | 9387449abf |  |
|  | 5f4013164c |  |
|  | 3856178dc0 |  |
|  | 32a9fba58e |  |
|  | 9733887b6a |  |
|  | b215514c90 |  |
|  | 0e930fd626 |  |
|  | cd4c31bc79 |  |
|  | bc04755d05 |  |
|  | 041eaf27a9 |  |
|  | d6a99da461 |  |
|  | 1d2439a6e5 |  |
|  | 6fff633325 |  |
|  | 9652c0c326 |  |
|  | 36c1b9a419 |  |
@@ -587,7 +587,7 @@ omit =
homeassistant/components/iqvia/sensor.py
homeassistant/components/irish_rail_transport/sensor.py
homeassistant/components/iss/__init__.py
homeassistant/components/iss/binary_sensor.py
homeassistant/components/iss/sensor.py
homeassistant/components/isy994/__init__.py
homeassistant/components/isy994/binary_sensor.py
homeassistant/components/isy994/climate.py

@@ -1216,7 +1216,7 @@ omit =
homeassistant/components/switchbot/const.py
homeassistant/components/switchbot/entity.py
homeassistant/components/switchbot/cover.py
homeassistant/components/switchbot/light.py
homeassistant/components/switchbot/light.py
homeassistant/components/switchbot/sensor.py
homeassistant/components/switchbot/coordinator.py
homeassistant/components/switchmate/switch.py
19  .github/workflows/ci.yaml  vendored

@@ -169,7 +169,6 @@ jobs:
uses: actions/setup-python@v4.1.0
with:
python-version: ${{ env.DEFAULT_PYTHON }}
cache: "pip"
- name: Restore base Python virtual environment
id: cache-venv
uses: actions/cache@v3.0.8

@@ -484,7 +483,7 @@ jobs:
with:
path: venv
key: >-
${{ runner.os }}-${{ steps.python.outputs.python-version }}-${{
${{ runner.os }}-${{ matrix.python-version }}-${{
needs.info.outputs.python_cache_key }}
- name: Restore pip wheel cache
if: steps.cache-venv.outputs.cache-hit != 'true'

@@ -492,10 +491,10 @@ jobs:
with:
path: ${{ env.PIP_CACHE }}
key: >-
${{ runner.os }}-${{ steps.python.outputs.python-version }}-${{
${{ runner.os }}-${{ matrix.python-version }}-${{
steps.generate-pip-key.outputs.key }}
restore-keys: |
${{ runner.os }}-${{ steps.python.outputs.python-version }}-pip-${{ env.PIP_CACHE_VERSION }}-${{ env.HA_SHORT_VERSION }}-
${{ runner.os }}-${{ matrix.python-version }}-pip-${{ env.PIP_CACHE_VERSION }}-${{ env.HA_SHORT_VERSION }}-
- name: Install additional OS dependencies
if: steps.cache-venv.outputs.cache-hit != 'true'
run: |

@@ -542,7 +541,7 @@ jobs:
with:
path: venv
key: >-
${{ runner.os }}-${{ steps.python.outputs.python-version }}-${{
${{ runner.os }}-${{ env.DEFAULT_PYTHON }}-${{
needs.info.outputs.python_cache_key }}
- name: Fail job if Python cache restore failed
if: steps.cache-venv.outputs.cache-hit != 'true'

@@ -574,7 +573,7 @@ jobs:
with:
path: venv
key: >-
${{ runner.os }}-${{ steps.python.outputs.python-version }}-${{
${{ runner.os }}-${{ env.DEFAULT_PYTHON }}-${{
needs.info.outputs.python_cache_key }}
- name: Fail job if Python cache restore failed
if: steps.cache-venv.outputs.cache-hit != 'true'

@@ -607,7 +606,7 @@ jobs:
with:
path: venv
key: >-
${{ runner.os }}-${{ steps.python.outputs.python-version }}-${{
${{ runner.os }}-${{ env.DEFAULT_PYTHON }}-${{
needs.info.outputs.python_cache_key }}
- name: Fail job if Python cache restore failed
if: steps.cache-venv.outputs.cache-hit != 'true'

@@ -651,7 +650,7 @@ jobs:
with:
path: venv
key: >-
${{ runner.os }}-${{ steps.python.outputs.python-version }}-${{
${{ runner.os }}-${{ env.DEFAULT_PYTHON }}-${{
needs.info.outputs.python_cache_key }}
- name: Fail job if Python cache restore failed
if: steps.cache-venv.outputs.cache-hit != 'true'

@@ -699,7 +698,7 @@ jobs:
with:
path: venv
key: >-
${{ runner.os }}-${{ steps.python.outputs.python-version }}-${{
${{ runner.os }}-${{ matrix.python-version }}-${{
needs.info.outputs.python_cache_key }}
- name: Fail job if Python cache restore failed
if: steps.cache-venv.outputs.cache-hit != 'true'

@@ -752,7 +751,7 @@ jobs:
uses: actions/cache@v3.0.8
with:
path: venv
key: ${{ runner.os }}-${{ steps.python.outputs.python-version }}-${{
key: ${{ runner.os }}-${{ matrix.python-version }}-${{
needs.info.outputs.python_cache_key }}
- name: Fail job if Python cache restore failed
if: steps.cache-venv.outputs.cache-hit != 'true'
@@ -137,6 +137,8 @@ build.json @home-assistant/supervisor
/tests/components/blebox/ @bbx-a @riokuu
/homeassistant/components/blink/ @fronzbot
/tests/components/blink/ @fronzbot
/homeassistant/components/bluemaestro/ @bdraco
/tests/components/bluemaestro/ @bdraco
/homeassistant/components/blueprint/ @home-assistant/core
/tests/components/blueprint/ @home-assistant/core
/homeassistant/components/bluesound/ @thrawnarn

@@ -275,7 +277,7 @@ build.json @home-assistant/supervisor
/tests/components/ecobee/ @marthoc
/homeassistant/components/econet/ @vangorra @w1ll1am23
/tests/components/econet/ @vangorra @w1ll1am23
/homeassistant/components/ecovacs/ @OverloadUT
/homeassistant/components/ecovacs/ @OverloadUT @mib1185
/homeassistant/components/ecowitt/ @pvizeli
/tests/components/ecowitt/ @pvizeli
/homeassistant/components/edl21/ @mtdcr

@@ -865,8 +867,8 @@ build.json @home-assistant/supervisor
/homeassistant/components/pvpc_hourly_pricing/ @azogue
/tests/components/pvpc_hourly_pricing/ @azogue
/homeassistant/components/qbittorrent/ @geoffreylagaisse
/homeassistant/components/qingping/ @bdraco
/tests/components/qingping/ @bdraco
/homeassistant/components/qingping/ @bdraco @skgsergio
/tests/components/qingping/ @bdraco @skgsergio
/homeassistant/components/qld_bushfire/ @exxamalte
/tests/components/qld_bushfire/ @exxamalte
/homeassistant/components/qnap_qsw/ @Noltari
@@ -15,6 +15,11 @@
},
"description": "Select the NMI of the site you would like to add"
}
},
"error": {
"invalid_api_token": "[%key:common::config_flow::error::invalid_api_key%]",
"no_site": "No site provided",
"unknown_error": "[%key:common::config_flow::error::unknown%]"
}
}
}

@@ -15,6 +15,11 @@
},
"description": "Go to {api_url} to generate an API key"
}
},
"error": {
"invalid_api_token": "Invalid API key",
"no_site": "No site provided",
"unknown_error": "Unexpected error"
}
}
}
@@ -4,7 +4,7 @@ from datetime import timedelta
from homeassistant.const import Platform
DEFAULT_TIMEOUT = 15
DEFAULT_TIMEOUT = 25
CONF_ACCESS_TOKEN_CACHE_FILE = "access_token_cache_file"
CONF_LOGIN_METHOD = "login_method"
@@ -9,6 +9,7 @@ import voluptuous as vol
from voluptuous.humanize import humanize_error
from homeassistant.components import blueprint
from homeassistant.components.blueprint import CONF_USE_BLUEPRINT
from homeassistant.const import (
ATTR_ENTITY_ID,
ATTR_MODE,

@@ -20,6 +21,7 @@ from homeassistant.const import (
CONF_EVENT_DATA,
CONF_ID,
CONF_MODE,
CONF_PATH,
CONF_PLATFORM,
CONF_VARIABLES,
CONF_ZONE,

@@ -224,6 +226,21 @@ def areas_in_automation(hass: HomeAssistant, entity_id: str) -> list[str]:
return list(automation_entity.referenced_areas)
@callback
def automations_with_blueprint(hass: HomeAssistant, blueprint_path: str) -> list[str]:
"""Return all automations that reference the blueprint."""
if DOMAIN not in hass.data:
return []
component = hass.data[DOMAIN]
return [
automation_entity.entity_id
for automation_entity in component.entities
if automation_entity.referenced_blueprint == blueprint_path
]
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
"""Set up all automations."""
hass.data[DOMAIN] = component = EntityComponent(LOGGER, DOMAIN, hass)

@@ -346,7 +363,14 @@ class AutomationEntity(ToggleEntity, RestoreEntity):
return self.action_script.referenced_areas
@property
def referenced_devices(self):
def referenced_blueprint(self) -> str | None:
"""Return referenced blueprint or None."""
if self._blueprint_inputs is None:
return None
return cast(str, self._blueprint_inputs[CONF_USE_BLUEPRINT][CONF_PATH])
@property
def referenced_devices(self) -> set[str]:
"""Return a set of referenced devices."""
if self._referenced_devices is not None:
return self._referenced_devices
@@ -8,8 +8,15 @@ from .const import DOMAIN, LOGGER
DATA_BLUEPRINTS = "automation_blueprints"
def _blueprint_in_use(hass: HomeAssistant, blueprint_path: str) -> bool:
"""Return True if any automation references the blueprint."""
from . import automations_with_blueprint  # pylint: disable=import-outside-toplevel
return len(automations_with_blueprint(hass, blueprint_path)) > 0
@singleton(DATA_BLUEPRINTS)
@callback
def async_get_blueprints(hass: HomeAssistant) -> blueprint.DomainBlueprints:
"""Get automation blueprints."""
return blueprint.DomainBlueprints(hass, DOMAIN, LOGGER)
return blueprint.DomainBlueprints(hass, DOMAIN, LOGGER, _blueprint_in_use)
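The new `automations_with_blueprint` helper introduced above is what the `_blueprint_in_use` callable builds on. A minimal sketch of calling it from elsewhere (the blueprint path below is a hypothetical example, not taken from this diff):

```python
# Illustrative sketch only; mirrors the _blueprint_in_use helper shown above.
from homeassistant.components.automation import automations_with_blueprint
from homeassistant.core import HomeAssistant


def blueprint_is_referenced(hass: HomeAssistant, path: str = "motion_light.yaml") -> bool:
    """Return True if any currently loaded automation uses the given blueprint path."""
    return len(automations_with_blueprint(hass, path)) > 0
```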
@@ -2,7 +2,7 @@
"domain": "blink",
"name": "Blink",
"documentation": "https://www.home-assistant.io/integrations/blink",
"requirements": ["blinkpy==0.19.0"],
"requirements": ["blinkpy==0.19.2"],
"codeowners": ["@fronzbot"],
"dhcp": [
{
49  homeassistant/components/bluemaestro/__init__.py  Normal file
@@ -0,0 +1,49 @@
"""The BlueMaestro integration."""
from __future__ import annotations

import logging

from bluemaestro_ble import BlueMaestroBluetoothDeviceData

from homeassistant.components.bluetooth import BluetoothScanningMode
from homeassistant.components.bluetooth.passive_update_processor import (
    PassiveBluetoothProcessorCoordinator,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import Platform
from homeassistant.core import HomeAssistant

from .const import DOMAIN

PLATFORMS: list[Platform] = [Platform.SENSOR]

_LOGGER = logging.getLogger(__name__)


async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    """Set up BlueMaestro BLE device from a config entry."""
    address = entry.unique_id
    assert address is not None
    data = BlueMaestroBluetoothDeviceData()
    coordinator = hass.data.setdefault(DOMAIN, {})[
        entry.entry_id
    ] = PassiveBluetoothProcessorCoordinator(
        hass,
        _LOGGER,
        address=address,
        mode=BluetoothScanningMode.PASSIVE,
        update_method=data.update,
    )
    await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
    entry.async_on_unload(
        coordinator.async_start()
    )  # only start after all platforms have had a chance to subscribe
    return True


async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    """Unload a config entry."""
    if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS):
        hass.data[DOMAIN].pop(entry.entry_id)

    return unload_ok
94  homeassistant/components/bluemaestro/config_flow.py  Normal file
@@ -0,0 +1,94 @@
"""Config flow for bluemaestro ble integration."""
from __future__ import annotations

from typing import Any

from bluemaestro_ble import BlueMaestroBluetoothDeviceData as DeviceData
import voluptuous as vol

from homeassistant.components.bluetooth import (
    BluetoothServiceInfoBleak,
    async_discovered_service_info,
)
from homeassistant.config_entries import ConfigFlow
from homeassistant.const import CONF_ADDRESS
from homeassistant.data_entry_flow import FlowResult

from .const import DOMAIN


class BlueMaestroConfigFlow(ConfigFlow, domain=DOMAIN):
    """Handle a config flow for bluemaestro."""

    VERSION = 1

    def __init__(self) -> None:
        """Initialize the config flow."""
        self._discovery_info: BluetoothServiceInfoBleak | None = None
        self._discovered_device: DeviceData | None = None
        self._discovered_devices: dict[str, str] = {}

    async def async_step_bluetooth(
        self, discovery_info: BluetoothServiceInfoBleak
    ) -> FlowResult:
        """Handle the bluetooth discovery step."""
        await self.async_set_unique_id(discovery_info.address)
        self._abort_if_unique_id_configured()
        device = DeviceData()
        if not device.supported(discovery_info):
            return self.async_abort(reason="not_supported")
        self._discovery_info = discovery_info
        self._discovered_device = device
        return await self.async_step_bluetooth_confirm()

    async def async_step_bluetooth_confirm(
        self, user_input: dict[str, Any] | None = None
    ) -> FlowResult:
        """Confirm discovery."""
        assert self._discovered_device is not None
        device = self._discovered_device
        assert self._discovery_info is not None
        discovery_info = self._discovery_info
        title = device.title or device.get_device_name() or discovery_info.name
        if user_input is not None:
            return self.async_create_entry(title=title, data={})

        self._set_confirm_only()
        placeholders = {"name": title}
        self.context["title_placeholders"] = placeholders
        return self.async_show_form(
            step_id="bluetooth_confirm", description_placeholders=placeholders
        )

    async def async_step_user(
        self, user_input: dict[str, Any] | None = None
    ) -> FlowResult:
        """Handle the user step to pick discovered device."""
        if user_input is not None:
            address = user_input[CONF_ADDRESS]
            await self.async_set_unique_id(address, raise_on_progress=False)
            self._abort_if_unique_id_configured()
            return self.async_create_entry(
                title=self._discovered_devices[address], data={}
            )

        current_addresses = self._async_current_ids()
        for discovery_info in async_discovered_service_info(self.hass, False):
            address = discovery_info.address
            if address in current_addresses or address in self._discovered_devices:
                continue
            device = DeviceData()
            if device.supported(discovery_info):
                self._discovered_devices[address] = (
                    device.title or device.get_device_name() or discovery_info.name
                )

        if not self._discovered_devices:
            return self.async_abort(reason="no_devices_found")

        return self.async_show_form(
            step_id="user",
            data_schema=vol.Schema(
                {vol.Required(CONF_ADDRESS): vol.In(self._discovered_devices)}
            ),
        )
3  homeassistant/components/bluemaestro/const.py  Normal file
@@ -0,0 +1,3 @@
"""Constants for the BlueMaestro integration."""

DOMAIN = "bluemaestro"
31  homeassistant/components/bluemaestro/device.py  Normal file
@@ -0,0 +1,31 @@
"""Support for BlueMaestro devices."""
from __future__ import annotations

from bluemaestro_ble import DeviceKey, SensorDeviceInfo

from homeassistant.components.bluetooth.passive_update_processor import (
    PassiveBluetoothEntityKey,
)
from homeassistant.const import ATTR_MANUFACTURER, ATTR_MODEL, ATTR_NAME
from homeassistant.helpers.entity import DeviceInfo


def device_key_to_bluetooth_entity_key(
    device_key: DeviceKey,
) -> PassiveBluetoothEntityKey:
    """Convert a device key to an entity key."""
    return PassiveBluetoothEntityKey(device_key.key, device_key.device_id)


def sensor_device_info_to_hass(
    sensor_device_info: SensorDeviceInfo,
) -> DeviceInfo:
    """Convert a bluemaestro device info to a sensor device info."""
    hass_device_info = DeviceInfo({})
    if sensor_device_info.name is not None:
        hass_device_info[ATTR_NAME] = sensor_device_info.name
    if sensor_device_info.manufacturer is not None:
        hass_device_info[ATTR_MANUFACTURER] = sensor_device_info.manufacturer
    if sensor_device_info.model is not None:
        hass_device_info[ATTR_MODEL] = sensor_device_info.model
    return hass_device_info
16  homeassistant/components/bluemaestro/manifest.json  Normal file
@@ -0,0 +1,16 @@
{
  "domain": "bluemaestro",
  "name": "BlueMaestro",
  "config_flow": true,
  "documentation": "https://www.home-assistant.io/integrations/bluemaestro",
  "bluetooth": [
    {
      "manufacturer_id": 307,
      "connectable": false
    }
  ],
  "requirements": ["bluemaestro-ble==0.2.0"],
  "dependencies": ["bluetooth"],
  "codeowners": ["@bdraco"],
  "iot_class": "local_push"
}
149  homeassistant/components/bluemaestro/sensor.py  Normal file
@@ -0,0 +1,149 @@
"""Support for BlueMaestro sensors."""
from __future__ import annotations

from typing import Optional, Union

from bluemaestro_ble import (
    SensorDeviceClass as BlueMaestroSensorDeviceClass,
    SensorUpdate,
    Units,
)

from homeassistant import config_entries
from homeassistant.components.bluetooth.passive_update_processor import (
    PassiveBluetoothDataProcessor,
    PassiveBluetoothDataUpdate,
    PassiveBluetoothProcessorCoordinator,
    PassiveBluetoothProcessorEntity,
)
from homeassistant.components.sensor import (
    SensorDeviceClass,
    SensorEntity,
    SensorEntityDescription,
    SensorStateClass,
)
from homeassistant.const import (
    PERCENTAGE,
    PRESSURE_MBAR,
    SIGNAL_STRENGTH_DECIBELS_MILLIWATT,
    TEMP_CELSIUS,
)
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity import EntityCategory
from homeassistant.helpers.entity_platform import AddEntitiesCallback

from .const import DOMAIN
from .device import device_key_to_bluetooth_entity_key, sensor_device_info_to_hass

SENSOR_DESCRIPTIONS = {
    (BlueMaestroSensorDeviceClass.BATTERY, Units.PERCENTAGE): SensorEntityDescription(
        key=f"{BlueMaestroSensorDeviceClass.BATTERY}_{Units.PERCENTAGE}",
        device_class=SensorDeviceClass.BATTERY,
        native_unit_of_measurement=PERCENTAGE,
        state_class=SensorStateClass.MEASUREMENT,
        entity_category=EntityCategory.DIAGNOSTIC,
    ),
    (BlueMaestroSensorDeviceClass.HUMIDITY, Units.PERCENTAGE): SensorEntityDescription(
        key=f"{BlueMaestroSensorDeviceClass.HUMIDITY}_{Units.PERCENTAGE}",
        device_class=SensorDeviceClass.HUMIDITY,
        native_unit_of_measurement=PERCENTAGE,
        state_class=SensorStateClass.MEASUREMENT,
    ),
    (
        BlueMaestroSensorDeviceClass.SIGNAL_STRENGTH,
        Units.SIGNAL_STRENGTH_DECIBELS_MILLIWATT,
    ): SensorEntityDescription(
        key=f"{BlueMaestroSensorDeviceClass.SIGNAL_STRENGTH}_{Units.SIGNAL_STRENGTH_DECIBELS_MILLIWATT}",
        device_class=SensorDeviceClass.SIGNAL_STRENGTH,
        native_unit_of_measurement=SIGNAL_STRENGTH_DECIBELS_MILLIWATT,
        state_class=SensorStateClass.MEASUREMENT,
        entity_category=EntityCategory.DIAGNOSTIC,
        entity_registry_enabled_default=False,
    ),
    (
        BlueMaestroSensorDeviceClass.TEMPERATURE,
        Units.TEMP_CELSIUS,
    ): SensorEntityDescription(
        key=f"{BlueMaestroSensorDeviceClass.TEMPERATURE}_{Units.TEMP_CELSIUS}",
        device_class=SensorDeviceClass.TEMPERATURE,
        native_unit_of_measurement=TEMP_CELSIUS,
        state_class=SensorStateClass.MEASUREMENT,
    ),
    (
        BlueMaestroSensorDeviceClass.DEW_POINT,
        Units.TEMP_CELSIUS,
    ): SensorEntityDescription(
        key=f"{BlueMaestroSensorDeviceClass.DEW_POINT}_{Units.TEMP_CELSIUS}",
        device_class=SensorDeviceClass.TEMPERATURE,
        native_unit_of_measurement=TEMP_CELSIUS,
        state_class=SensorStateClass.MEASUREMENT,
    ),
    (
        BlueMaestroSensorDeviceClass.PRESSURE,
        Units.PRESSURE_MBAR,
    ): SensorEntityDescription(
        key=f"{BlueMaestroSensorDeviceClass.PRESSURE}_{Units.PRESSURE_MBAR}",
        device_class=SensorDeviceClass.PRESSURE,
        native_unit_of_measurement=PRESSURE_MBAR,
        state_class=SensorStateClass.MEASUREMENT,
    ),
}


def sensor_update_to_bluetooth_data_update(
    sensor_update: SensorUpdate,
) -> PassiveBluetoothDataUpdate:
    """Convert a sensor update to a bluetooth data update."""
    return PassiveBluetoothDataUpdate(
        devices={
            device_id: sensor_device_info_to_hass(device_info)
            for device_id, device_info in sensor_update.devices.items()
        },
        entity_descriptions={
            device_key_to_bluetooth_entity_key(device_key): SENSOR_DESCRIPTIONS[
                (description.device_class, description.native_unit_of_measurement)
            ]
            for device_key, description in sensor_update.entity_descriptions.items()
            if description.device_class and description.native_unit_of_measurement
        },
        entity_data={
            device_key_to_bluetooth_entity_key(device_key): sensor_values.native_value
            for device_key, sensor_values in sensor_update.entity_values.items()
        },
        entity_names={
            device_key_to_bluetooth_entity_key(device_key): sensor_values.name
            for device_key, sensor_values in sensor_update.entity_values.items()
        },
    )


async def async_setup_entry(
    hass: HomeAssistant,
    entry: config_entries.ConfigEntry,
    async_add_entities: AddEntitiesCallback,
) -> None:
    """Set up the BlueMaestro BLE sensors."""
    coordinator: PassiveBluetoothProcessorCoordinator = hass.data[DOMAIN][
        entry.entry_id
    ]
    processor = PassiveBluetoothDataProcessor(sensor_update_to_bluetooth_data_update)
    entry.async_on_unload(
        processor.async_add_entities_listener(
            BlueMaestroBluetoothSensorEntity, async_add_entities
        )
    )
    entry.async_on_unload(coordinator.async_register_processor(processor))


class BlueMaestroBluetoothSensorEntity(
    PassiveBluetoothProcessorEntity[
        PassiveBluetoothDataProcessor[Optional[Union[float, int]]]
    ],
    SensorEntity,
):
    """Representation of a BlueMaestro sensor."""

    @property
    def native_value(self) -> int | float | None:
        """Return the native value."""
        return self.processor.entity_data.get(self.entity_key)
22  homeassistant/components/bluemaestro/strings.json  Normal file
@@ -0,0 +1,22 @@
{
  "config": {
    "flow_title": "[%key:component::bluetooth::config::flow_title%]",
    "step": {
      "user": {
        "description": "[%key:component::bluetooth::config::step::user::description%]",
        "data": {
          "address": "[%key:component::bluetooth::config::step::user::data::address%]"
        }
      },
      "bluetooth_confirm": {
        "description": "[%key:component::bluetooth::config::step::bluetooth_confirm::description%]"
      }
    },
    "abort": {
      "not_supported": "Device not supported",
      "no_devices_found": "[%key:common::config_flow::abort::no_devices_found%]",
      "already_in_progress": "[%key:common::config_flow::abort::already_in_progress%]",
      "already_configured": "[%key:common::config_flow::abort::already_configured_device%]"
    }
  }
}
22  homeassistant/components/bluemaestro/translations/en.json  Normal file
@@ -0,0 +1,22 @@
{
  "config": {
    "abort": {
      "already_configured": "Device is already configured",
      "already_in_progress": "Configuration flow is already in progress",
      "no_devices_found": "No devices found on the network",
      "not_supported": "Device not supported"
    },
    "flow_title": "{name}",
    "step": {
      "bluetooth_confirm": {
        "description": "Do you want to setup {name}?"
      },
      "user": {
        "data": {
          "address": "Device"
        },
        "description": "Choose a device to setup"
      }
    }
  }
}
@@ -3,7 +3,7 @@ from homeassistant.core import HomeAssistant
from homeassistant.helpers.typing import ConfigType
from . import websocket_api
from .const import DOMAIN  # noqa: F401
from .const import CONF_USE_BLUEPRINT, DOMAIN  # noqa: F401
from .errors import (  # noqa: F401
BlueprintException,
BlueprintWithNameException,
@@ -91,3 +91,11 @@ class FileAlreadyExists(BlueprintWithNameException):
def __init__(self, domain: str, blueprint_name: str) -> None:
"""Initialize blueprint exception."""
super().__init__(domain, blueprint_name, "Blueprint already exists")
class BlueprintInUse(BlueprintWithNameException):
"""Error when a blueprint is in use."""
def __init__(self, domain: str, blueprint_name: str) -> None:
"""Initialize blueprint exception."""
super().__init__(domain, blueprint_name, "Blueprint in use")
@@ -2,6 +2,7 @@
from __future__ import annotations
import asyncio
from collections.abc import Callable
import logging
import pathlib
import shutil

@@ -35,6 +36,7 @@ from .const import (
)
from .errors import (
BlueprintException,
BlueprintInUse,
FailedToLoad,
FileAlreadyExists,
InvalidBlueprint,

@@ -183,11 +185,13 @@ class DomainBlueprints:
hass: HomeAssistant,
domain: str,
logger: logging.Logger,
blueprint_in_use: Callable[[HomeAssistant, str], bool],
) -> None:
"""Initialize a domain blueprints instance."""
self.hass = hass
self.domain = domain
self.logger = logger
self._blueprint_in_use = blueprint_in_use
self._blueprints: dict[str, Blueprint | None] = {}
self._load_lock = asyncio.Lock()

@@ -302,6 +306,8 @@ class DomainBlueprints:
async def async_remove_blueprint(self, blueprint_path: str) -> None:
"""Remove a blueprint file."""
if self._blueprint_in_use(self.hass, blueprint_path):
raise BlueprintInUse(self.domain, blueprint_path)
path = self.blueprint_folder / blueprint_path
await self.hass.async_add_executor_job(path.unlink)
self._blueprints[blueprint_path] = None
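With the changes above, deleting a blueprint that a loaded automation still references now raises `BlueprintInUse`. A hedged sketch of how a caller might handle that (the `domain_blueprints` object is an existing `DomainBlueprints` instance and the path is a hypothetical example):

```python
# Illustrative sketch: handling the new BlueprintInUse error on removal.
from homeassistant.components.blueprint.errors import BlueprintInUse


async def remove_blueprint_if_unused(domain_blueprints, path: str = "motion_light.yaml") -> bool:
    """Try to delete a blueprint; return False if it is still in use."""
    try:
        await domain_blueprints.async_remove_blueprint(path)
    except BlueprintInUse:
        return False
    return True
```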
@@ -6,6 +6,8 @@ import logging
import time
from typing import Any, Generic, TypeVar
from bleak import BleakError
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.debounce import Debouncer

@@ -109,6 +111,13 @@ class ActiveBluetoothProcessorCoordinator(
try:
update = await self._async_poll_data(self._last_service_info)
except BleakError as exc:
if self.last_poll_successful:
self.logger.error(
"%s: Bluetooth error whilst polling: %s", self.address, str(exc)
)
self.last_poll_successful = False
return
except Exception:  # pylint: disable=broad-except
if self.last_poll_successful:
self.logger.exception("%s: Failure while polling", self.address)
@@ -54,6 +54,10 @@ if TYPE_CHECKING:
FILTER_UUIDS: Final = "UUIDs"
APPLE_MFR_ID: Final = 76
APPLE_HOMEKIT_START_BYTE: Final = 0x06  # homekit_controller
APPLE_DEVICE_ID_START_BYTE: Final = 0x10  # bluetooth_le_tracker
APPLE_START_BYTES_WANTED: Final = {APPLE_DEVICE_ID_START_BYTE, APPLE_HOMEKIT_START_BYTE}
RSSI_SWITCH_THRESHOLD = 6

@@ -290,6 +294,19 @@ class BluetoothManager:
than the source from the history or the timestamp
in the history is older than 180s
"""
# Pre-filter noisy apple devices as they can account for 20-35% of the
# traffic on a typical network.
advertisement_data = service_info.advertisement
manufacturer_data = advertisement_data.manufacturer_data
if (
len(manufacturer_data) == 1
and (apple_data := manufacturer_data.get(APPLE_MFR_ID))
and apple_data[0] not in APPLE_START_BYTES_WANTED
and not advertisement_data.service_data
):
return
device = service_info.device
connectable = service_info.connectable
address = device.address

@@ -299,7 +316,6 @@ class BluetoothManager:
return
self._history[address] = service_info
advertisement_data = service_info.advertisement
source = service_info.source
if connectable:

@@ -311,12 +327,13 @@ class BluetoothManager:
matched_domains = self._integration_matcher.match_domains(service_info)
_LOGGER.debug(
"%s: %s %s connectable: %s match: %s",
"%s: %s %s connectable: %s match: %s rssi: %s",
source,
address,
advertisement_data,
connectable,
matched_domains,
device.rssi,
)
for match in self._callback_index.match_callbacks(service_info):

@@ -367,11 +384,11 @@ class BluetoothManager:
callback_matcher[CONNECTABLE] = matcher.get(CONNECTABLE, True)
connectable = callback_matcher[CONNECTABLE]
self._callback_index.add_with_address(callback_matcher)
self._callback_index.add_callback_matcher(callback_matcher)
@hass_callback
def _async_remove_callback() -> None:
self._callback_index.remove_with_address(callback_matcher)
self._callback_index.remove_callback_matcher(callback_matcher)
# If we have history for the subscriber, we can trigger the callback
# immediately with the last packet so the subscriber can see the
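The Apple pre-filter added above can be read in isolation; the following self-contained restatement uses the constants and conditions taken from the diff (the function name is ours, for illustration only):

```python
# Standalone restatement of the pre-filter logic shown above, for illustration.
APPLE_MFR_ID = 76
APPLE_HOMEKIT_START_BYTE = 0x06  # homekit_controller
APPLE_DEVICE_ID_START_BYTE = 0x10  # bluetooth_le_tracker
APPLE_START_BYTES_WANTED = {APPLE_DEVICE_ID_START_BYTE, APPLE_HOMEKIT_START_BYTE}


def is_noisy_apple_advertisement(
    manufacturer_data: dict[int, bytes], service_data: dict[str, bytes]
) -> bool:
    """Return True for Apple advertisements that no integration needs to see."""
    if len(manufacturer_data) != 1 or service_data:
        return False
    apple_data = manufacturer_data.get(APPLE_MFR_ID)
    return bool(apple_data) and apple_data[0] not in APPLE_START_BYTES_WANTED
```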
@@ -5,9 +5,11 @@
"dependencies": ["usb"],
"quality_scale": "internal",
"requirements": [
"bleak==0.16.0",
"bluetooth-adapters==0.3.3",
"bluetooth-auto-recovery==0.3.0"
"bleak==0.17.0",
"bleak-retry-connector==1.17.1",
"bluetooth-adapters==0.4.1",
"bluetooth-auto-recovery==0.3.3",
"dbus-fast==1.4.0"
],
"codeowners": ["@bdraco"],
"config_flow": true,
@@ -173,36 +173,40 @@ class BluetoothMatcherIndexBase(Generic[_T]):
self.service_data_uuid_set: set[str] = set()
self.manufacturer_id_set: set[int] = set()
def add(self, matcher: _T) -> None:
def add(self, matcher: _T) -> bool:
"""Add a matcher to the index.
Matchers must end up only in one bucket.
We put them in the bucket that they are most likely to match.
"""
# Local name is the cheapest to match since its just a dict lookup
if LOCAL_NAME in matcher:
self.local_name.setdefault(
_local_name_to_index_key(matcher[LOCAL_NAME]), []
).append(matcher)
return
return True
# Manufacturer data is 2nd cheapest since its all ints
if MANUFACTURER_ID in matcher:
self.manufacturer_id.setdefault(matcher[MANUFACTURER_ID], []).append(
matcher
)
return True
if SERVICE_UUID in matcher:
self.service_uuid.setdefault(matcher[SERVICE_UUID], []).append(matcher)
return
return True
if SERVICE_DATA_UUID in matcher:
self.service_data_uuid.setdefault(matcher[SERVICE_DATA_UUID], []).append(
matcher
)
return
return True
if MANUFACTURER_ID in matcher:
self.manufacturer_id.setdefault(matcher[MANUFACTURER_ID], []).append(
matcher
)
return
return False
def remove(self, matcher: _T) -> None:
def remove(self, matcher: _T) -> bool:
"""Remove a matcher from the index.
Matchers only end up in one bucket, so once we have

@@ -212,19 +216,21 @@ class BluetoothMatcherIndexBase(Generic[_T]):
self.local_name[_local_name_to_index_key(matcher[LOCAL_NAME])].remove(
matcher
)
return
if SERVICE_UUID in matcher:
self.service_uuid[matcher[SERVICE_UUID]].remove(matcher)
return
if SERVICE_DATA_UUID in matcher:
self.service_data_uuid[matcher[SERVICE_DATA_UUID]].remove(matcher)
return
return True
if MANUFACTURER_ID in matcher:
self.manufacturer_id[matcher[MANUFACTURER_ID]].remove(matcher)
return
return True
if SERVICE_UUID in matcher:
self.service_uuid[matcher[SERVICE_UUID]].remove(matcher)
return True
if SERVICE_DATA_UUID in matcher:
self.service_data_uuid[matcher[SERVICE_DATA_UUID]].remove(matcher)
return True
return False
def build(self) -> None:
"""Rebuild the index sets."""

@@ -235,33 +241,36 @@ class BluetoothMatcherIndexBase(Generic[_T]):
def match(self, service_info: BluetoothServiceInfoBleak) -> list[_T]:
"""Check for a match."""
matches = []
if len(service_info.name) >= LOCAL_NAME_MIN_MATCH_LENGTH:
if service_info.name and len(service_info.name) >= LOCAL_NAME_MIN_MATCH_LENGTH:
for matcher in self.local_name.get(
service_info.name[:LOCAL_NAME_MIN_MATCH_LENGTH], []
):
if ble_device_matches(matcher, service_info):
matches.append(matcher)
for service_data_uuid in self.service_data_uuid_set.intersection(
service_info.service_data
):
for matcher in self.service_data_uuid[service_data_uuid]:
if ble_device_matches(matcher, service_info):
matches.append(matcher)
if self.service_data_uuid_set and service_info.service_data:
for service_data_uuid in self.service_data_uuid_set.intersection(
service_info.service_data
):
for matcher in self.service_data_uuid[service_data_uuid]:
if ble_device_matches(matcher, service_info):
matches.append(matcher)
for manufacturer_id in self.manufacturer_id_set.intersection(
service_info.manufacturer_data
):
for matcher in self.manufacturer_id[manufacturer_id]:
if ble_device_matches(matcher, service_info):
matches.append(matcher)
if self.manufacturer_id_set and service_info.manufacturer_data:
for manufacturer_id in self.manufacturer_id_set.intersection(
service_info.manufacturer_data
):
for matcher in self.manufacturer_id[manufacturer_id]:
if ble_device_matches(matcher, service_info):
matches.append(matcher)
for service_uuid in self.service_uuid_set.intersection(
service_info.service_uuids
):
for matcher in self.service_uuid[service_uuid]:
if ble_device_matches(matcher, service_info):
matches.append(matcher)
if self.service_uuid_set and service_info.service_uuids:
for service_uuid in self.service_uuid_set.intersection(
service_info.service_uuids
):
for matcher in self.service_uuid[service_uuid]:
if ble_device_matches(matcher, service_info):
matches.append(matcher)
return matches

@@ -279,8 +288,11 @@ class BluetoothCallbackMatcherIndex(
"""Initialize the matcher index."""
super().__init__()
self.address: dict[str, list[BluetoothCallbackMatcherWithCallback]] = {}
self.connectable: list[BluetoothCallbackMatcherWithCallback] = []
def add_with_address(self, matcher: BluetoothCallbackMatcherWithCallback) -> None:
def add_callback_matcher(
self, matcher: BluetoothCallbackMatcherWithCallback
) -> None:
"""Add a matcher to the index.
Matchers must end up only in one bucket.

@@ -291,10 +303,15 @@ class BluetoothCallbackMatcherIndex(
self.address.setdefault(matcher[ADDRESS], []).append(matcher)
return
super().add(matcher)
self.build()
if super().add(matcher):
self.build()
return
def remove_with_address(
if CONNECTABLE in matcher:
self.connectable.append(matcher)
return
def remove_callback_matcher(
self, matcher: BluetoothCallbackMatcherWithCallback
) -> None:
"""Remove a matcher from the index.

@@ -306,8 +323,13 @@ class BluetoothCallbackMatcherIndex(
self.address[matcher[ADDRESS]].remove(matcher)
return
super().remove(matcher)
self.build()
if super().remove(matcher):
self.build()
return
if CONNECTABLE in matcher:
self.connectable.remove(matcher)
return
def match_callbacks(
self, service_info: BluetoothServiceInfoBleak

@@ -317,6 +339,9 @@ class BluetoothCallbackMatcherIndex(
for matcher in self.address.get(service_info.address, []):
if ble_device_matches(matcher, service_info):
matches.append(matcher)
for matcher in self.connectable:
if ble_device_matches(matcher, service_info):
matches.append(matcher)
return matches

@@ -347,12 +372,9 @@ def ble_device_matches(
service_info: BluetoothServiceInfoBleak,
) -> bool:
"""Check if a ble device and advertisement_data matches the matcher."""
device = service_info.device
# Don't check address here since all callers already
# check the address and we don't want to double check
# since it would result in an unreachable reject case.
if matcher.get(CONNECTABLE, True) and not service_info.connectable:
return False

@@ -379,7 +401,8 @@ def ble_device_matches(
return False
if (local_name := matcher.get(LOCAL_NAME)) and (
(device_name := advertisement_data.local_name or device.name) is None
(device_name := advertisement_data.local_name or service_info.device.name)
is None
or not _memorized_fnmatch(
device_name,
local_name,
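With `add()` and `remove()` now returning a bool, callers such as `BluetoothCallbackMatcherIndex` only rebuild the intersection sets when a matcher actually landed in (or left) one of the bucketed categories. A minimal sketch of that pattern, assuming the index class lives in `homeassistant.components.bluetooth.match`; the matcher dict below is a hypothetical example:

```python
# Illustrative sketch of the conditional-rebuild pattern shown above.
from homeassistant.components.bluetooth.match import BluetoothMatcherIndexBase

index: BluetoothMatcherIndexBase = BluetoothMatcherIndexBase()
matcher = {"manufacturer_id": 307, "connectable": False}  # hypothetical matcher

if index.add(matcher):  # True: the matcher was placed in a bucket
    index.build()       # rebuild the lookup sets only when needed

if index.remove(matcher):  # True: it was actually removed from a bucket
    index.build()
```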
@@ -17,7 +17,7 @@ from bleak.backends.bluezdbus.advertisement_monitor import OrPattern
from bleak.backends.bluezdbus.scanner import BlueZScannerArgs
from bleak.backends.device import BLEDevice
from bleak.backends.scanner import AdvertisementData
from dbus_next import InvalidMessageError
from dbus_fast import InvalidMessageError
from homeassistant.const import EVENT_HOMEASSISTANT_STOP
from homeassistant.core import (
@@ -7,7 +7,13 @@ from functools import wraps
import logging
from typing import Any, Final, TypeVar
from pybravia import BraviaTV, BraviaTVError, BraviaTVNotFound
from pybravia import (
BraviaTV,
BraviaTVConnectionError,
BraviaTVConnectionTimeout,
BraviaTVError,
BraviaTVNotFound,
)
from typing_extensions import Concatenate, ParamSpec
from homeassistant.components.media_player.const import (

@@ -130,6 +136,10 @@ class BraviaTVCoordinator(DataUpdateCoordinator[None]):
_LOGGER.debug("Update skipped, Bravia API service is reloading")
return
raise UpdateFailed("Error communicating with device") from err
except (BraviaTVConnectionError, BraviaTVConnectionTimeout):
self.is_on = False
self.connected = False
_LOGGER.debug("Update skipped, Bravia TV is off")
except BraviaTVError as err:
self.is_on = False
self.connected = False
@@ -2,7 +2,7 @@
"domain": "braviatv",
"name": "Sony Bravia TV",
"documentation": "https://www.home-assistant.io/integrations/braviatv",
"requirements": ["pybravia==0.2.0"],
"requirements": ["pybravia==0.2.2"],
"codeowners": ["@bieniu", "@Drafteed"],
"config_flow": true,
"iot_class": "local_polling",
@@ -2,7 +2,7 @@
"domain": "bt_smarthub",
"name": "BT Smart Hub",
"documentation": "https://www.home-assistant.io/integrations/bt_smarthub",
"requirements": ["btsmarthub_devicelist==0.2.0"],
"requirements": ["btsmarthub_devicelist==0.2.2"],
"codeowners": ["@jxwolstenholme"],
"iot_class": "local_polling",
"loggers": ["btsmarthub_devicelist"]
@@ -1,9 +1,9 @@
"""The BThome Bluetooth integration."""
"""The BTHome Bluetooth integration."""
from __future__ import annotations
import logging
from bthome_ble import BThomeBluetoothDeviceData, SensorUpdate
from bthome_ble import BTHomeBluetoothDeviceData, SensorUpdate
from bthome_ble.parser import EncryptionScheme
from homeassistant.components.bluetooth import (

@@ -27,7 +27,7 @@ _LOGGER = logging.getLogger(__name__)
def process_service_info(
hass: HomeAssistant,
entry: ConfigEntry,
data: BThomeBluetoothDeviceData,
data: BTHomeBluetoothDeviceData,
service_info: BluetoothServiceInfoBleak,
) -> SensorUpdate:
"""Process a BluetoothServiceInfoBleak, running side effects and returning sensor data."""

@@ -40,14 +40,14 @@ def process_service_info(
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Set up BThome Bluetooth from a config entry."""
"""Set up BTHome Bluetooth from a config entry."""
address = entry.unique_id
assert address is not None
kwargs = {}
if bindkey := entry.data.get("bindkey"):
kwargs["bindkey"] = bytes.fromhex(bindkey)
data = BThomeBluetoothDeviceData(**kwargs)
data = BTHomeBluetoothDeviceData(**kwargs)
coordinator = hass.data.setdefault(DOMAIN, {})[
entry.entry_id
@@ -1,11 +1,11 @@
"""Config flow for BThome Bluetooth integration."""
"""Config flow for BTHome Bluetooth integration."""
from __future__ import annotations
from collections.abc import Mapping
import dataclasses
from typing import Any
from bthome_ble import BThomeBluetoothDeviceData as DeviceData
from bthome_ble import BTHomeBluetoothDeviceData as DeviceData
from bthome_ble.parser import EncryptionScheme
import voluptuous as vol

@@ -34,8 +34,8 @@ def _title(discovery_info: BluetoothServiceInfo, device: DeviceData) -> str:
return device.title or device.get_device_name() or discovery_info.name
class BThomeConfigFlow(ConfigFlow, domain=DOMAIN):
"""Handle a config flow for BThome Bluetooth."""
class BTHomeConfigFlow(ConfigFlow, domain=DOMAIN):
"""Handle a config flow for BTHome Bluetooth."""
VERSION = 1

@@ -68,7 +68,7 @@ class BThomeConfigFlow(ConfigFlow, domain=DOMAIN):
async def async_step_get_encryption_key(
self, user_input: dict[str, Any] | None = None
) -> FlowResult:
"""Enter a bindkey for an encrypted BThome device."""
"""Enter a bindkey for an encrypted BTHome device."""
assert self._discovery_info
assert self._discovered_device
@@ -1,3 +1,3 @@
"""Constants for the BThome Bluetooth integration."""
"""Constants for the BTHome Bluetooth integration."""
DOMAIN = "bthome"
@@ -1,4 +1,4 @@
"""Support for BThome Bluetooth devices."""
"""Support for BTHome Bluetooth devices."""
from __future__ import annotations
from bthome_ble import DeviceKey, SensorDeviceInfo
@@ -1,6 +1,6 @@
{
"domain": "bthome",
"name": "BThome",
"name": "BTHome",
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/bthome",
"bluetooth": [

@@ -13,7 +13,7 @@
"service_data_uuid": "0000181e-0000-1000-8000-00805f9b34fb"
}
],
"requirements": ["bthome-ble==0.5.2"],
"requirements": ["bthome-ble==1.0.0"],
"dependencies": ["bluetooth"],
"codeowners": ["@Ernst79"],
"iot_class": "local_push"
@@ -1,4 +1,4 @@
"""Support for BThome sensors."""
"""Support for BTHome sensors."""
from __future__ import annotations
from typing import Optional, Union

@@ -202,26 +202,26 @@ async def async_setup_entry(
entry: config_entries.ConfigEntry,
async_add_entities: AddEntitiesCallback,
) -> None:
"""Set up the BThome BLE sensors."""
"""Set up the BTHome BLE sensors."""
coordinator: PassiveBluetoothProcessorCoordinator = hass.data[DOMAIN][
entry.entry_id
]
processor = PassiveBluetoothDataProcessor(sensor_update_to_bluetooth_data_update)
entry.async_on_unload(
processor.async_add_entities_listener(
BThomeBluetoothSensorEntity, async_add_entities
BTHomeBluetoothSensorEntity, async_add_entities
)
)
entry.async_on_unload(coordinator.async_register_processor(processor))
class BThomeBluetoothSensorEntity(
class BTHomeBluetoothSensorEntity(
PassiveBluetoothProcessorEntity[
PassiveBluetoothDataProcessor[Optional[Union[float, int]]]
],
SensorEntity,
):
"""Representation of a BThome BLE sensor."""
"""Representation of a BTHome BLE sensor."""
@property
def native_value(self) -> int | float | None:
@@ -47,7 +47,7 @@ SERVICE_CONFIGURE = "configure"
STORAGE_KEY = DOMAIN
STORAGE_VERSION = 1
CREATE_FIELDS = {
STORAGE_FIELDS = {
vol.Optional(CONF_ICON): cv.icon,
vol.Optional(CONF_INITIAL, default=DEFAULT_INITIAL): cv.positive_int,
vol.Required(CONF_NAME): vol.All(cv.string, vol.Length(min=1)),

@@ -57,16 +57,6 @@ CREATE_FIELDS = {
vol.Optional(CONF_STEP, default=DEFAULT_STEP): cv.positive_int,
}
UPDATE_FIELDS = {
vol.Optional(CONF_ICON): cv.icon,
vol.Optional(CONF_INITIAL): cv.positive_int,
vol.Optional(CONF_NAME): cv.string,
vol.Optional(CONF_MAXIMUM): vol.Any(None, vol.Coerce(int)),
vol.Optional(CONF_MINIMUM): vol.Any(None, vol.Coerce(int)),
vol.Optional(CONF_RESTORE): cv.boolean,
vol.Optional(CONF_STEP): cv.positive_int,
}
def _none_to_empty_dict(value):
if value is None:

@@ -128,7 +118,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
await storage_collection.async_load()
collection.StorageCollectionWebsocket(
storage_collection, DOMAIN, DOMAIN, CREATE_FIELDS, UPDATE_FIELDS
storage_collection, DOMAIN, DOMAIN, STORAGE_FIELDS, STORAGE_FIELDS
).async_setup(hass)
component.async_register_entity_service(SERVICE_INCREMENT, {}, "async_increment")

@@ -152,12 +142,11 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
class CounterStorageCollection(collection.StorageCollection):
"""Input storage based collection."""
CREATE_SCHEMA = vol.Schema(CREATE_FIELDS)
UPDATE_SCHEMA = vol.Schema(UPDATE_FIELDS)
CREATE_UPDATE_SCHEMA = vol.Schema(STORAGE_FIELDS)
async def _process_create_data(self, data: dict) -> dict:
"""Validate the config is valid."""
return self.CREATE_SCHEMA(data)
return self.CREATE_UPDATE_SCHEMA(data)
@callback
def _get_suggested_id(self, info: dict) -> str:

@@ -166,8 +155,8 @@ class CounterStorageCollection(collection.StorageCollection):
async def _update_data(self, data: dict, update_data: dict) -> dict:
"""Return a new updated data object."""
update_data = self.UPDATE_SCHEMA(update_data)
return {**data, **update_data}
update_data = self.CREATE_UPDATE_SCHEMA(update_data)
return {CONF_ID: data[CONF_ID]} | update_data
class Counter(RestoreEntity):
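The `_update_data` change above switches counter updates from merging over the stored item to rebuilding it from the id plus the validated payload. A small self-contained illustration of the difference (the dicts are hypothetical examples, not values from this diff):

```python
# Hypothetical payloads illustrating the update-semantics change shown above.
old = {"id": "my_counter", "name": "My counter", "maximum": 10, "step": 2}
update = {"name": "My counter", "step": 1}

merged_previously = {**old, **update}      # old behaviour: "maximum" survives the update
replaced_now = {"id": old["id"]} | update  # new behaviour: keys left out of the update are dropped

assert "maximum" in merged_previously
assert "maximum" not in replaced_now
```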
@@ -43,6 +43,9 @@ async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> b
except AuthenticationRequired as err:
raise ConfigEntryAuthFailed from err
if not hass.data[DOMAIN]:
async_setup_services(hass)
gateway = hass.data[DOMAIN][config_entry.entry_id] = DeconzGateway(
hass, config_entry, api
)

@@ -53,9 +56,6 @@ async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> b
await async_setup_events(gateway)
await hass.config_entries.async_forward_entry_setups(config_entry, PLATFORMS)
if len(hass.data[DOMAIN]) == 1:
async_setup_services(hass)
api.start()
config_entry.async_on_unload(
@@ -11,8 +11,9 @@
"dhcp",
"energy",
"frontend",
"homeassistant_alerts",
"hardware",
"history",
"homeassistant_alerts",
"input_boolean",
"input_button",
"input_datetime",
@@ -2,7 +2,7 @@
"domain": "dhcp",
"name": "DHCP Discovery",
"documentation": "https://www.home-assistant.io/integrations/dhcp",
"requirements": ["scapy==2.4.5", "aiodiscover==1.4.11"],
"requirements": ["scapy==2.4.5", "aiodiscover==1.4.13"],
"codeowners": ["@bdraco"],
"quality_scale": "internal",
"iot_class": "local_push",
@@ -29,7 +29,7 @@ from .const import DOMAIN, ECOBEE_MODEL_TO_NAME, MANUFACTURER
class EcobeeSensorEntityDescriptionMixin:
"""Represent the required ecobee entity description attributes."""
runtime_key: str
runtime_key: str | None
@dataclass

@@ -46,7 +46,7 @@ SENSOR_TYPES: tuple[EcobeeSensorEntityDescription, ...] = (
native_unit_of_measurement=TEMP_FAHRENHEIT,
device_class=SensorDeviceClass.TEMPERATURE,
state_class=SensorStateClass.MEASUREMENT,
runtime_key="actualTemperature",
runtime_key=None,
),
EcobeeSensorEntityDescription(
key="humidity",

@@ -54,7 +54,7 @@ SENSOR_TYPES: tuple[EcobeeSensorEntityDescription, ...] = (
native_unit_of_measurement=PERCENTAGE,
device_class=SensorDeviceClass.HUMIDITY,
state_class=SensorStateClass.MEASUREMENT,
runtime_key="actualHumidity",
runtime_key=None,
),
EcobeeSensorEntityDescription(
key="co2PPM",

@@ -194,6 +194,11 @@ class EcobeeSensor(SensorEntity):
for item in sensor["capability"]:
if item["type"] != self.entity_description.key:
continue
thermostat = self.data.ecobee.get_thermostat(self.index)
self._state = thermostat["runtime"][self.entity_description.runtime_key]
if self.entity_description.runtime_key is None:
self._state = item["value"]
else:
thermostat = self.data.ecobee.get_thermostat(self.index)
self._state = thermostat["runtime"][
self.entity_description.runtime_key
]
break
@@ -2,8 +2,8 @@
"domain": "ecovacs",
"name": "Ecovacs",
"documentation": "https://www.home-assistant.io/integrations/ecovacs",
"requirements": ["sucks==0.9.4"],
"codeowners": ["@OverloadUT"],
"requirements": ["py-sucks==0.9.8"],
"codeowners": ["@OverloadUT", "@mib1185"],
"iot_class": "cloud_push",
"loggers": ["sleekxmppfs", "sucks"]
}
@@ -44,6 +44,8 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:

async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Unload a config entry."""
webhook.async_unregister(hass, entry.data[CONF_WEBHOOK_ID])

if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS):
hass.data[DOMAIN].pop(entry.entry_id)


@@ -68,4 +68,4 @@ class EcowittBinarySensorEntity(EcowittEntity, BinarySensorEntity):
@property
def is_on(self) -> bool:
"""Return true if the binary sensor is on."""
return self.ecowitt.value > 0
return bool(self.ecowitt.value)

@@ -25,13 +25,13 @@ async def async_get_device_diagnostics(
"device": {
"name": station.station,
"model": station.model,
"frequency": station.frequency,
"frequency": station.frequence,
"version": station.version,
},
"raw": ecowitt.last_values[station_id],
"sensors": {
sensor.key: sensor.value
for sensor in station.sensors
for sensor in ecowitt.sensors.values()
if sensor.station.key == station_id
},
}

@@ -3,7 +3,8 @@
"name": "Ecowitt",
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/ecowitt",
"requirements": ["aioecowitt==2022.08.3"],
"dependencies": ["webhook"],
"requirements": ["aioecowitt==2022.09.1"],
"codeowners": ["@pvizeli"],
"iot_class": "local_push"
}

@@ -1,5 +1,8 @@
"""Support for Ecowitt Weather Stations."""
from __future__ import annotations

import dataclasses
from datetime import datetime
from typing import Final

from aioecowitt import EcoWittListener, EcoWittSensor, EcoWittSensorTypes
@@ -242,6 +245,6 @@ class EcowittSensorEntity(EcowittEntity, SensorEntity):
self.entity_description = description

@property
def native_value(self) -> StateType:
def native_value(self) -> StateType | datetime:
"""Return the state of the sensor."""
return self.ecowitt.value

@@ -20,6 +20,7 @@ from homeassistant.const import (
ENERGY_KILO_WATT_HOUR,
ENERGY_MEGA_WATT_HOUR,
ENERGY_WATT_HOUR,
VOLUME_CUBIC_FEET,
VOLUME_CUBIC_METERS,
)
from homeassistant.core import (
@@ -44,7 +45,7 @@ SUPPORTED_STATE_CLASSES = [
SensorStateClass.TOTAL_INCREASING,
]
VALID_ENERGY_UNITS = [ENERGY_WATT_HOUR, ENERGY_KILO_WATT_HOUR, ENERGY_MEGA_WATT_HOUR]
VALID_ENERGY_UNITS_GAS = [VOLUME_CUBIC_METERS] + VALID_ENERGY_UNITS
VALID_ENERGY_UNITS_GAS = [VOLUME_CUBIC_FEET, VOLUME_CUBIC_METERS] + VALID_ENERGY_UNITS
_LOGGER = logging.getLogger(__name__)

@@ -3,7 +3,7 @@
"name": "Epson",
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/epson",
"requirements": ["epson-projector==0.4.6"],
"requirements": ["epson-projector==0.5.0"],
"codeowners": ["@pszafer"],
"iot_class": "local_polling",
"loggers": ["epson_projector"]

@@ -3,7 +3,7 @@ from __future__ import annotations

import logging

from epson_projector import Projector
from epson_projector import Projector, ProjectorUnavailableError
from epson_projector.const import (
BACK,
BUSY,
@@ -20,7 +20,6 @@ from epson_projector.const import (
POWER,
SOURCE,
SOURCE_LIST,
STATE_UNAVAILABLE as EPSON_STATE_UNAVAILABLE,
TURN_OFF,
TURN_ON,
VOL_DOWN,
@@ -123,11 +122,16 @@ class EpsonProjectorMediaPlayer(MediaPlayerEntity):

async def async_update(self) -> None:
"""Update state of device."""
power_state = await self._projector.get_power()
_LOGGER.debug("Projector status: %s", power_state)
if not power_state or power_state == EPSON_STATE_UNAVAILABLE:
try:
power_state = await self._projector.get_power()
except ProjectorUnavailableError as ex:
_LOGGER.debug("Projector is unavailable: %s", ex)
self._attr_available = False
return
if not power_state:
self._attr_available = False
return
_LOGGER.debug("Projector status: %s", power_state)
self._attr_available = True
if power_state == EPSON_CODES[POWER]:
self._attr_state = STATE_ON

@@ -4,7 +4,7 @@
"config_flow": true,
"dependencies": ["network"],
"documentation": "https://www.home-assistant.io/integrations/flux_led",
"requirements": ["flux_led==0.28.31"],
"requirements": ["flux_led==0.28.32"],
"quality_scale": "platinum",
"codeowners": ["@icemanch", "@bdraco"],
"iot_class": "local_push",

@@ -2,7 +2,7 @@
"domain": "frontend",
"name": "Home Assistant Frontend",
"documentation": "https://www.home-assistant.io/integrations/frontend",
"requirements": ["home-assistant-frontend==20220901.0"],
"requirements": ["home-assistant-frontend==20220907.2"],
"dependencies": [
"api",
"auth",

@@ -51,6 +51,7 @@ SETTINGS_TO_REDACT = {
"sebExamKey",
"sebConfigKey",
"kioskPinEnc",
"remoteAdminPasswordEnc",
}

@@ -17,6 +17,11 @@
"service_uuid": "00008351-0000-1000-8000-00805f9b34fb",
"connectable": false
},
{
"manufacturer_id": 57391,
"service_uuid": "00008351-0000-1000-8000-00805f9b34fb",
"connectable": false
},
{
"manufacturer_id": 18994,
"service_uuid": "00008551-0000-1000-8000-00805f9b34fb",
@@ -53,7 +58,7 @@
"connectable": false
}
],
"requirements": ["govee-ble==0.17.1"],
"requirements": ["govee-ble==0.17.3"],
"dependencies": ["bluetooth"],
"codeowners": ["@bdraco"],
"iot_class": "local_push"

@@ -143,7 +143,6 @@ class HistoryStatsSensorBase(
|
||||
class HistoryStatsSensor(HistoryStatsSensorBase):
|
||||
"""A HistoryStats sensor."""
|
||||
|
||||
_attr_device_class = SensorDeviceClass.DURATION
|
||||
_attr_state_class = SensorStateClass.MEASUREMENT
|
||||
|
||||
def __init__(
|
||||
@@ -157,6 +156,8 @@ class HistoryStatsSensor(HistoryStatsSensorBase):
|
||||
self._attr_native_unit_of_measurement = UNITS[sensor_type]
|
||||
self._type = sensor_type
|
||||
self._process_update()
|
||||
if self._type == CONF_TYPE_TIME:
|
||||
self._attr_device_class = SensorDeviceClass.DURATION
|
||||
|
||||
@callback
|
||||
def _process_update(self) -> None:
|
||||
|
||||
@@ -423,7 +423,7 @@ class HKDevice:
|
||||
if self._polling_interval_remover:
|
||||
self._polling_interval_remover()
|
||||
|
||||
await self.pairing.close()
|
||||
await self.pairing.shutdown()
|
||||
|
||||
await self.hass.config_entries.async_unload_platforms(
|
||||
self.config_entry, self.platforms
|
||||
|
||||
@@ -3,7 +3,7 @@
|
||||
"name": "HomeKit Controller",
|
||||
"config_flow": true,
|
||||
"documentation": "https://www.home-assistant.io/integrations/homekit_controller",
|
||||
"requirements": ["aiohomekit==1.5.1"],
|
||||
"requirements": ["aiohomekit==1.5.9"],
|
||||
"zeroconf": ["_hap._tcp.local.", "_hap._udp.local."],
|
||||
"bluetooth": [{ "manufacturer_id": 76, "manufacturer_data_start": [6] }],
|
||||
"dependencies": ["bluetooth", "zeroconf"],
|
||||
|
||||
@@ -37,20 +37,25 @@ _LOGGER = logging.getLogger(__name__)
|
||||
|
||||
CONF_INITIAL = "initial"
|
||||
|
||||
CREATE_FIELDS = {
|
||||
STORAGE_FIELDS = {
|
||||
vol.Required(CONF_NAME): vol.All(str, vol.Length(min=1)),
|
||||
vol.Optional(CONF_INITIAL): cv.boolean,
|
||||
vol.Optional(CONF_ICON): cv.icon,
|
||||
}
|
||||
|
||||
UPDATE_FIELDS = {
|
||||
vol.Optional(CONF_NAME): cv.string,
|
||||
vol.Optional(CONF_INITIAL): cv.boolean,
|
||||
vol.Optional(CONF_ICON): cv.icon,
|
||||
}
|
||||
|
||||
CONFIG_SCHEMA = vol.Schema(
|
||||
{DOMAIN: cv.schema_with_slug_keys(vol.Any(UPDATE_FIELDS, None))},
|
||||
{
|
||||
DOMAIN: cv.schema_with_slug_keys(
|
||||
vol.Any(
|
||||
{
|
||||
vol.Optional(CONF_NAME): cv.string,
|
||||
vol.Optional(CONF_INITIAL): cv.boolean,
|
||||
vol.Optional(CONF_ICON): cv.icon,
|
||||
},
|
||||
None,
|
||||
)
|
||||
)
|
||||
},
|
||||
extra=vol.ALLOW_EXTRA,
|
||||
)
|
||||
|
||||
@@ -62,12 +67,11 @@ STORAGE_VERSION = 1
|
||||
class InputBooleanStorageCollection(collection.StorageCollection):
|
||||
"""Input boolean collection stored in storage."""
|
||||
|
||||
CREATE_SCHEMA = vol.Schema(CREATE_FIELDS)
|
||||
UPDATE_SCHEMA = vol.Schema(UPDATE_FIELDS)
|
||||
CREATE_UPDATE_SCHEMA = vol.Schema(STORAGE_FIELDS)
|
||||
|
||||
async def _process_create_data(self, data: dict) -> dict:
|
||||
"""Validate the config is valid."""
|
||||
return self.CREATE_SCHEMA(data)
|
||||
return self.CREATE_UPDATE_SCHEMA(data)
|
||||
|
||||
@callback
|
||||
def _get_suggested_id(self, info: dict) -> str:
|
||||
@@ -76,8 +80,8 @@ class InputBooleanStorageCollection(collection.StorageCollection):
|
||||
|
||||
async def _update_data(self, data: dict, update_data: dict) -> dict:
|
||||
"""Return a new updated data object."""
|
||||
update_data = self.UPDATE_SCHEMA(update_data)
|
||||
return {**data, **update_data}
|
||||
update_data = self.CREATE_UPDATE_SCHEMA(update_data)
|
||||
return {CONF_ID: data[CONF_ID]} | update_data
|
||||
|
||||
|
||||
@bind_hass
|
||||
@@ -118,7 +122,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
await storage_collection.async_load()
|
||||
|
||||
collection.StorageCollectionWebsocket(
|
||||
storage_collection, DOMAIN, DOMAIN, CREATE_FIELDS, UPDATE_FIELDS
|
||||
storage_collection, DOMAIN, DOMAIN, STORAGE_FIELDS, STORAGE_FIELDS
|
||||
).async_setup(hass)
|
||||
|
||||
async def reload_service_handler(service_call: ServiceCall) -> None:
|
||||
|
||||
@@ -30,18 +30,23 @@ DOMAIN = "input_button"
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
CREATE_FIELDS = {
|
||||
STORAGE_FIELDS = {
|
||||
vol.Required(CONF_NAME): vol.All(str, vol.Length(min=1)),
|
||||
vol.Optional(CONF_ICON): cv.icon,
|
||||
}
|
||||
|
||||
UPDATE_FIELDS = {
|
||||
vol.Optional(CONF_NAME): cv.string,
|
||||
vol.Optional(CONF_ICON): cv.icon,
|
||||
}
|
||||
|
||||
CONFIG_SCHEMA = vol.Schema(
|
||||
{DOMAIN: cv.schema_with_slug_keys(vol.Any(UPDATE_FIELDS, None))},
|
||||
{
|
||||
DOMAIN: cv.schema_with_slug_keys(
|
||||
vol.Any(
|
||||
{
|
||||
vol.Optional(CONF_NAME): cv.string,
|
||||
vol.Optional(CONF_ICON): cv.icon,
|
||||
},
|
||||
None,
|
||||
)
|
||||
)
|
||||
},
|
||||
extra=vol.ALLOW_EXTRA,
|
||||
)
|
||||
|
||||
@@ -53,12 +58,11 @@ STORAGE_VERSION = 1
|
||||
class InputButtonStorageCollection(collection.StorageCollection):
|
||||
"""Input button collection stored in storage."""
|
||||
|
||||
CREATE_SCHEMA = vol.Schema(CREATE_FIELDS)
|
||||
UPDATE_SCHEMA = vol.Schema(UPDATE_FIELDS)
|
||||
CREATE_UPDATE_SCHEMA = vol.Schema(STORAGE_FIELDS)
|
||||
|
||||
async def _process_create_data(self, data: dict) -> vol.Schema:
|
||||
"""Validate the config is valid."""
|
||||
return self.CREATE_SCHEMA(data)
|
||||
return self.CREATE_UPDATE_SCHEMA(data)
|
||||
|
||||
@callback
|
||||
def _get_suggested_id(self, info: dict) -> str:
|
||||
@@ -67,8 +71,8 @@ class InputButtonStorageCollection(collection.StorageCollection):
|
||||
|
||||
async def _update_data(self, data: dict, update_data: dict) -> dict:
|
||||
"""Return a new updated data object."""
|
||||
update_data = self.UPDATE_SCHEMA(update_data)
|
||||
return {**data, **update_data}
|
||||
update_data = self.CREATE_UPDATE_SCHEMA(update_data)
|
||||
return {CONF_ID: data[CONF_ID]} | update_data
|
||||
|
||||
|
||||
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
@@ -103,7 +107,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
await storage_collection.async_load()
|
||||
|
||||
collection.StorageCollectionWebsocket(
|
||||
storage_collection, DOMAIN, DOMAIN, CREATE_FIELDS, UPDATE_FIELDS
|
||||
storage_collection, DOMAIN, DOMAIN, STORAGE_FIELDS, STORAGE_FIELDS
|
||||
).async_setup(hass)
|
||||
|
||||
async def reload_service_handler(service_call: ServiceCall) -> None:
|
||||
|
||||
@@ -61,20 +61,13 @@ def validate_set_datetime_attrs(config):
|
||||
STORAGE_KEY = DOMAIN
|
||||
STORAGE_VERSION = 1
|
||||
|
||||
CREATE_FIELDS = {
|
||||
STORAGE_FIELDS = {
|
||||
vol.Required(CONF_NAME): vol.All(str, vol.Length(min=1)),
|
||||
vol.Optional(CONF_HAS_DATE, default=False): cv.boolean,
|
||||
vol.Optional(CONF_HAS_TIME, default=False): cv.boolean,
|
||||
vol.Optional(CONF_ICON): cv.icon,
|
||||
vol.Optional(CONF_INITIAL): cv.string,
|
||||
}
|
||||
UPDATE_FIELDS = {
|
||||
vol.Optional(CONF_NAME): cv.string,
|
||||
vol.Optional(CONF_HAS_DATE): cv.boolean,
|
||||
vol.Optional(CONF_HAS_TIME): cv.boolean,
|
||||
vol.Optional(CONF_ICON): cv.icon,
|
||||
vol.Optional(CONF_INITIAL): cv.string,
|
||||
}
|
||||
|
||||
|
||||
def has_date_or_time(conf):
|
||||
@@ -167,7 +160,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
await storage_collection.async_load()
|
||||
|
||||
collection.StorageCollectionWebsocket(
|
||||
storage_collection, DOMAIN, DOMAIN, CREATE_FIELDS, UPDATE_FIELDS
|
||||
storage_collection, DOMAIN, DOMAIN, STORAGE_FIELDS, STORAGE_FIELDS
|
||||
).async_setup(hass)
|
||||
|
||||
async def reload_service_handler(service_call: ServiceCall) -> None:
|
||||
@@ -213,12 +206,11 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
class DateTimeStorageCollection(collection.StorageCollection):
|
||||
"""Input storage based collection."""
|
||||
|
||||
CREATE_SCHEMA = vol.Schema(vol.All(CREATE_FIELDS, has_date_or_time))
|
||||
UPDATE_SCHEMA = vol.Schema(UPDATE_FIELDS)
|
||||
CREATE_UPDATE_SCHEMA = vol.Schema(vol.All(STORAGE_FIELDS, has_date_or_time))
|
||||
|
||||
async def _process_create_data(self, data: dict) -> dict:
|
||||
"""Validate the config is valid."""
|
||||
return self.CREATE_SCHEMA(data)
|
||||
return self.CREATE_UPDATE_SCHEMA(data)
|
||||
|
||||
@callback
|
||||
def _get_suggested_id(self, info: dict) -> str:
|
||||
@@ -227,8 +219,8 @@ class DateTimeStorageCollection(collection.StorageCollection):
|
||||
|
||||
async def _update_data(self, data: dict, update_data: dict) -> dict:
|
||||
"""Return a new updated data object."""
|
||||
update_data = self.UPDATE_SCHEMA(update_data)
|
||||
return has_date_or_time({**data, **update_data})
|
||||
update_data = self.CREATE_UPDATE_SCHEMA(update_data)
|
||||
return {CONF_ID: data[CONF_ID]} | update_data
|
||||
|
||||
|
||||
class InputDatetime(RestoreEntity):
|
||||
|
||||
@@ -65,7 +65,7 @@ def _cv_input_number(cfg):
|
||||
return cfg
|
||||
|
||||
|
||||
CREATE_FIELDS = {
|
||||
STORAGE_FIELDS = {
|
||||
vol.Required(CONF_NAME): vol.All(str, vol.Length(min=1)),
|
||||
vol.Required(CONF_MIN): vol.Coerce(float),
|
||||
vol.Required(CONF_MAX): vol.Coerce(float),
|
||||
@@ -76,17 +76,6 @@ CREATE_FIELDS = {
|
||||
vol.Optional(CONF_MODE, default=MODE_SLIDER): vol.In([MODE_BOX, MODE_SLIDER]),
|
||||
}
|
||||
|
||||
UPDATE_FIELDS = {
|
||||
vol.Optional(CONF_NAME): cv.string,
|
||||
vol.Optional(CONF_MIN): vol.Coerce(float),
|
||||
vol.Optional(CONF_MAX): vol.Coerce(float),
|
||||
vol.Optional(CONF_INITIAL): vol.Coerce(float),
|
||||
vol.Optional(CONF_STEP): vol.All(vol.Coerce(float), vol.Range(min=1e-9)),
|
||||
vol.Optional(CONF_ICON): cv.icon,
|
||||
vol.Optional(CONF_UNIT_OF_MEASUREMENT): cv.string,
|
||||
vol.Optional(CONF_MODE): vol.In([MODE_BOX, MODE_SLIDER]),
|
||||
}
|
||||
|
||||
CONFIG_SCHEMA = vol.Schema(
|
||||
{
|
||||
DOMAIN: cv.schema_with_slug_keys(
|
||||
@@ -148,7 +137,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
await storage_collection.async_load()
|
||||
|
||||
collection.StorageCollectionWebsocket(
|
||||
storage_collection, DOMAIN, DOMAIN, CREATE_FIELDS, UPDATE_FIELDS
|
||||
storage_collection, DOMAIN, DOMAIN, STORAGE_FIELDS, STORAGE_FIELDS
|
||||
).async_setup(hass)
|
||||
|
||||
async def reload_service_handler(service_call: ServiceCall) -> None:
|
||||
@@ -184,22 +173,37 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
class NumberStorageCollection(collection.StorageCollection):
|
||||
"""Input storage based collection."""
|
||||
|
||||
CREATE_SCHEMA = vol.Schema(vol.All(CREATE_FIELDS, _cv_input_number))
|
||||
UPDATE_SCHEMA = vol.Schema(UPDATE_FIELDS)
|
||||
SCHEMA = vol.Schema(vol.All(STORAGE_FIELDS, _cv_input_number))
|
||||
|
||||
async def _process_create_data(self, data: dict) -> dict:
|
||||
"""Validate the config is valid."""
|
||||
return self.CREATE_SCHEMA(data)
|
||||
return self.SCHEMA(data)
|
||||
|
||||
@callback
|
||||
def _get_suggested_id(self, info: dict) -> str:
|
||||
"""Suggest an ID based on the config."""
|
||||
return info[CONF_NAME]
|
||||
|
||||
async def _async_load_data(self) -> dict | None:
|
||||
"""Load the data.
|
||||
|
||||
A past bug caused frontend to add initial value to all input numbers.
|
||||
This drops that.
|
||||
"""
|
||||
data = await super()._async_load_data()
|
||||
|
||||
if data is None:
|
||||
return data
|
||||
|
||||
for number in data["items"]:
|
||||
number.pop(CONF_INITIAL, None)
|
||||
|
||||
return data
|
||||
|
||||
async def _update_data(self, data: dict, update_data: dict) -> dict:
|
||||
"""Return a new updated data object."""
|
||||
update_data = self.UPDATE_SCHEMA(update_data)
|
||||
return _cv_input_number({**data, **update_data})
|
||||
update_data = self.SCHEMA(update_data)
|
||||
return {CONF_ID: data[CONF_ID]} | update_data
|
||||
|
||||
|
||||
class InputNumber(RestoreEntity):
|
||||
|
||||
@@ -56,7 +56,7 @@ def _unique(options: Any) -> Any:
|
||||
raise HomeAssistantError("Duplicate options are not allowed") from exc
|
||||
|
||||
|
||||
CREATE_FIELDS = {
|
||||
STORAGE_FIELDS = {
|
||||
vol.Required(CONF_NAME): vol.All(str, vol.Length(min=1)),
|
||||
vol.Required(CONF_OPTIONS): vol.All(
|
||||
cv.ensure_list, vol.Length(min=1), _unique, [cv.string]
|
||||
@@ -64,14 +64,6 @@ CREATE_FIELDS = {
|
||||
vol.Optional(CONF_INITIAL): cv.string,
|
||||
vol.Optional(CONF_ICON): cv.icon,
|
||||
}
|
||||
UPDATE_FIELDS = {
|
||||
vol.Optional(CONF_NAME): cv.string,
|
||||
vol.Optional(CONF_OPTIONS): vol.All(
|
||||
cv.ensure_list, vol.Length(min=1), _unique, [cv.string]
|
||||
),
|
||||
vol.Optional(CONF_INITIAL): cv.string,
|
||||
vol.Optional(CONF_ICON): cv.icon,
|
||||
}
|
||||
|
||||
|
||||
def _remove_duplicates(options: list[str], name: str | None) -> list[str]:
|
||||
@@ -172,7 +164,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
await storage_collection.async_load()
|
||||
|
||||
collection.StorageCollectionWebsocket(
|
||||
storage_collection, DOMAIN, DOMAIN, CREATE_FIELDS, UPDATE_FIELDS
|
||||
storage_collection, DOMAIN, DOMAIN, STORAGE_FIELDS, STORAGE_FIELDS
|
||||
).async_setup(hass)
|
||||
|
||||
async def reload_service_handler(service_call: ServiceCall) -> None:
|
||||
@@ -238,12 +230,11 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
class InputSelectStorageCollection(collection.StorageCollection):
|
||||
"""Input storage based collection."""
|
||||
|
||||
CREATE_SCHEMA = vol.Schema(vol.All(CREATE_FIELDS, _cv_input_select))
|
||||
UPDATE_SCHEMA = vol.Schema(UPDATE_FIELDS)
|
||||
CREATE_UPDATE_SCHEMA = vol.Schema(vol.All(STORAGE_FIELDS, _cv_input_select))
|
||||
|
||||
async def _process_create_data(self, data: dict[str, Any]) -> dict[str, Any]:
|
||||
"""Validate the config is valid."""
|
||||
return cast(dict[str, Any], self.CREATE_SCHEMA(data))
|
||||
return cast(dict[str, Any], self.CREATE_UPDATE_SCHEMA(data))
|
||||
|
||||
@callback
|
||||
def _get_suggested_id(self, info: dict[str, Any]) -> str:
|
||||
@@ -254,8 +245,8 @@ class InputSelectStorageCollection(collection.StorageCollection):
|
||||
self, data: dict[str, Any], update_data: dict[str, Any]
|
||||
) -> dict[str, Any]:
|
||||
"""Return a new updated data object."""
|
||||
update_data = self.UPDATE_SCHEMA(update_data)
|
||||
return _cv_input_select({**data, **update_data})
|
||||
update_data = self.CREATE_UPDATE_SCHEMA(update_data)
|
||||
return {CONF_ID: data[CONF_ID]} | update_data
|
||||
|
||||
|
||||
class InputSelect(SelectEntity, RestoreEntity):
|
||||
|
||||
@@ -51,7 +51,7 @@ SERVICE_SET_VALUE = "set_value"
|
||||
STORAGE_KEY = DOMAIN
|
||||
STORAGE_VERSION = 1
|
||||
|
||||
CREATE_FIELDS = {
|
||||
STORAGE_FIELDS = {
|
||||
vol.Required(CONF_NAME): vol.All(str, vol.Length(min=1)),
|
||||
vol.Optional(CONF_MIN, default=CONF_MIN_VALUE): vol.Coerce(int),
|
||||
vol.Optional(CONF_MAX, default=CONF_MAX_VALUE): vol.Coerce(int),
|
||||
@@ -61,16 +61,6 @@ CREATE_FIELDS = {
|
||||
vol.Optional(CONF_PATTERN): cv.string,
|
||||
vol.Optional(CONF_MODE, default=MODE_TEXT): vol.In([MODE_TEXT, MODE_PASSWORD]),
|
||||
}
|
||||
UPDATE_FIELDS = {
|
||||
vol.Optional(CONF_NAME): cv.string,
|
||||
vol.Optional(CONF_MIN): vol.Coerce(int),
|
||||
vol.Optional(CONF_MAX): vol.Coerce(int),
|
||||
vol.Optional(CONF_INITIAL): cv.string,
|
||||
vol.Optional(CONF_ICON): cv.icon,
|
||||
vol.Optional(CONF_UNIT_OF_MEASUREMENT): cv.string,
|
||||
vol.Optional(CONF_PATTERN): cv.string,
|
||||
vol.Optional(CONF_MODE): vol.In([MODE_TEXT, MODE_PASSWORD]),
|
||||
}
|
||||
|
||||
|
||||
def _cv_input_text(cfg):
|
||||
@@ -147,7 +137,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
await storage_collection.async_load()
|
||||
|
||||
collection.StorageCollectionWebsocket(
|
||||
storage_collection, DOMAIN, DOMAIN, CREATE_FIELDS, UPDATE_FIELDS
|
||||
storage_collection, DOMAIN, DOMAIN, STORAGE_FIELDS, STORAGE_FIELDS
|
||||
).async_setup(hass)
|
||||
|
||||
async def reload_service_handler(service_call: ServiceCall) -> None:
|
||||
@@ -177,12 +167,11 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
class InputTextStorageCollection(collection.StorageCollection):
|
||||
"""Input storage based collection."""
|
||||
|
||||
CREATE_SCHEMA = vol.Schema(vol.All(CREATE_FIELDS, _cv_input_text))
|
||||
UPDATE_SCHEMA = vol.Schema(UPDATE_FIELDS)
|
||||
CREATE_UPDATE_SCHEMA = vol.Schema(vol.All(STORAGE_FIELDS, _cv_input_text))
|
||||
|
||||
async def _process_create_data(self, data: dict) -> dict:
|
||||
"""Validate the config is valid."""
|
||||
return self.CREATE_SCHEMA(data)
|
||||
return self.CREATE_UPDATE_SCHEMA(data)
|
||||
|
||||
@callback
|
||||
def _get_suggested_id(self, info: dict) -> str:
|
||||
@@ -191,8 +180,8 @@ class InputTextStorageCollection(collection.StorageCollection):
|
||||
|
||||
async def _update_data(self, data: dict, update_data: dict) -> dict:
|
||||
"""Return a new updated data object."""
|
||||
update_data = self.UPDATE_SCHEMA(update_data)
|
||||
return _cv_input_text({**data, **update_data})
|
||||
update_data = self.CREATE_UPDATE_SCHEMA(update_data)
|
||||
return {CONF_ID: data[CONF_ID]} | update_data
|
||||
|
||||
|
||||
class InputText(RestoreEntity):
|
||||
|
||||
@@ -1,19 +1,61 @@
|
||||
"""Component for the Portuguese weather service - IPMA."""
|
||||
import logging
|
||||
|
||||
import async_timeout
|
||||
from pyipma import IPMAException
|
||||
from pyipma.api import IPMA_API
|
||||
from pyipma.location import Location
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import Platform
|
||||
from homeassistant.const import CONF_LATITUDE, CONF_LONGITUDE, Platform
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import ConfigEntryNotReady
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
|
||||
from .config_flow import IpmaFlowHandler # noqa: F401
|
||||
from .const import DOMAIN # noqa: F401
|
||||
from .const import DATA_API, DATA_LOCATION, DOMAIN
|
||||
|
||||
DEFAULT_NAME = "ipma"
|
||||
|
||||
PLATFORMS = [Platform.WEATHER]
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
|
||||
async def async_get_api(hass):
|
||||
"""Get the pyipma api object."""
|
||||
websession = async_get_clientsession(hass)
|
||||
return IPMA_API(websession)
|
||||
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool:
|
||||
"""Set up IPMA station as config entry."""
|
||||
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
|
||||
|
||||
latitude = config_entry.data[CONF_LATITUDE]
|
||||
longitude = config_entry.data[CONF_LONGITUDE]
|
||||
|
||||
api = await async_get_api(hass)
|
||||
try:
|
||||
async with async_timeout.timeout(30):
|
||||
location = await Location.get(api, float(latitude), float(longitude))
|
||||
|
||||
_LOGGER.debug(
|
||||
"Initializing for coordinates %s, %s -> station %s (%d, %d)",
|
||||
latitude,
|
||||
longitude,
|
||||
location.station,
|
||||
location.id_station,
|
||||
location.global_id_local,
|
||||
)
|
||||
except IPMAException as err:
|
||||
raise ConfigEntryNotReady(
|
||||
f"Could not get location for ({latitude},{longitude})"
|
||||
) from err
|
||||
|
||||
hass.data.setdefault(DOMAIN, {})
|
||||
hass.data[DOMAIN][config_entry.entry_id] = {DATA_API: api, DATA_LOCATION: location}
|
||||
|
||||
await hass.config_entries.async_forward_entry_setups(config_entry, PLATFORMS)
|
||||
return True
|
||||
|
||||
|
||||
|
||||
@@ -6,3 +6,6 @@ DOMAIN = "ipma"
|
||||
HOME_LOCATION_NAME = "Home"
|
||||
|
||||
ENTITY_ID_SENSOR_FORMAT_HOME = f"{WEATHER_DOMAIN}.ipma_{HOME_LOCATION_NAME}"
|
||||
|
||||
DATA_LOCATION = "location"
|
||||
DATA_API = "api"
|
||||
|
||||
@@ -3,7 +3,7 @@
|
||||
"name": "Instituto Portugu\u00eas do Mar e Atmosfera (IPMA)",
|
||||
"config_flow": true,
|
||||
"documentation": "https://www.home-assistant.io/integrations/ipma",
|
||||
"requirements": ["pyipma==3.0.2"],
|
||||
"requirements": ["pyipma==3.0.4"],
|
||||
"codeowners": ["@dgomes", "@abmantis"],
|
||||
"iot_class": "cloud_polling",
|
||||
"loggers": ["geopy", "pyipma"]
|
||||
|
||||
@@ -48,11 +48,12 @@ from homeassistant.const import (
|
||||
)
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers import config_validation as cv, entity_registry
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
from homeassistant.helpers.sun import is_up
|
||||
from homeassistant.util import Throttle
|
||||
|
||||
from .const import DATA_API, DATA_LOCATION, DOMAIN
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
ATTRIBUTION = "Instituto Português do Mar e Atmosfera"
|
||||
@@ -95,13 +96,10 @@ async def async_setup_entry(
|
||||
async_add_entities: AddEntitiesCallback,
|
||||
) -> None:
|
||||
"""Add a weather entity from a config_entry."""
|
||||
latitude = config_entry.data[CONF_LATITUDE]
|
||||
longitude = config_entry.data[CONF_LONGITUDE]
|
||||
api = hass.data[DOMAIN][config_entry.entry_id][DATA_API]
|
||||
location = hass.data[DOMAIN][config_entry.entry_id][DATA_LOCATION]
|
||||
mode = config_entry.data[CONF_MODE]
|
||||
|
||||
api = await async_get_api(hass)
|
||||
location = await async_get_location(hass, api, latitude, longitude)
|
||||
|
||||
# Migrate old unique_id
|
||||
@callback
|
||||
def _async_migrator(entity_entry: entity_registry.RegistryEntry):
|
||||
@@ -127,29 +125,6 @@ async def async_setup_entry(
|
||||
async_add_entities([IPMAWeather(location, api, config_entry.data)], True)
|
||||
|
||||
|
||||
async def async_get_api(hass):
|
||||
"""Get the pyipma api object."""
|
||||
websession = async_get_clientsession(hass)
|
||||
return IPMA_API(websession)
|
||||
|
||||
|
||||
async def async_get_location(hass, api, latitude, longitude):
|
||||
"""Retrieve pyipma location, location name to be used as the entity name."""
|
||||
async with async_timeout.timeout(30):
|
||||
location = await Location.get(api, float(latitude), float(longitude))
|
||||
|
||||
_LOGGER.debug(
|
||||
"Initializing for coordinates %s, %s -> station %s (%d, %d)",
|
||||
latitude,
|
||||
longitude,
|
||||
location.station,
|
||||
location.id_station,
|
||||
location.global_id_local,
|
||||
)
|
||||
|
||||
return location
|
||||
|
||||
|
||||
class IPMAWeather(WeatherEntity):
|
||||
"""Representation of a weather condition."""
|
||||
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from dataclasses import dataclass
|
||||
from datetime import datetime, timedelta
|
||||
from datetime import timedelta
|
||||
import logging
|
||||
|
||||
import pyiss
|
||||
@@ -18,7 +18,7 @@ from .const import DOMAIN
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
PLATFORMS = [Platform.BINARY_SENSOR]
|
||||
PLATFORMS = [Platform.SENSOR]
|
||||
|
||||
|
||||
@dataclass
|
||||
@@ -27,31 +27,25 @@ class IssData:
|
||||
|
||||
number_of_people_in_space: int
|
||||
current_location: dict[str, str]
|
||||
is_above: bool
|
||||
next_rise: datetime
|
||||
|
||||
|
||||
def update(iss: pyiss.ISS, latitude: float, longitude: float) -> IssData:
|
||||
def update(iss: pyiss.ISS) -> IssData:
|
||||
"""Retrieve data from the pyiss API."""
|
||||
return IssData(
|
||||
number_of_people_in_space=iss.number_of_people_in_space(),
|
||||
current_location=iss.current_location(),
|
||||
is_above=iss.is_ISS_above(latitude, longitude),
|
||||
next_rise=iss.next_rise(latitude, longitude),
|
||||
)
|
||||
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
"""Set up this integration using UI."""
|
||||
hass.data.setdefault(DOMAIN, {})
|
||||
latitude = hass.config.latitude
|
||||
longitude = hass.config.longitude
|
||||
|
||||
iss = pyiss.ISS()
|
||||
|
||||
async def async_update() -> IssData:
|
||||
try:
|
||||
return await hass.async_add_executor_job(update, iss, latitude, longitude)
|
||||
return await hass.async_add_executor_job(update, iss)
|
||||
except (HTTPError, requests.exceptions.ConnectionError) as ex:
|
||||
raise UpdateFailed("Unable to retrieve data") from ex
|
||||
|
||||
|
||||
@@ -7,9 +7,10 @@ from homeassistant.const import CONF_NAME, CONF_SHOW_ON_MAP
|
||||
from homeassistant.core import callback
|
||||
from homeassistant.data_entry_flow import FlowResult
|
||||
|
||||
from .binary_sensor import DEFAULT_NAME
|
||||
from .const import DOMAIN
|
||||
|
||||
DEFAULT_NAME = "ISS"
|
||||
|
||||
|
||||
class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
|
||||
"""Config flow for iss component."""
|
||||
@@ -30,10 +31,6 @@ class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
|
||||
if self._async_current_entries():
|
||||
return self.async_abort(reason="single_instance_allowed")
|
||||
|
||||
# Check if location have been defined.
|
||||
if not self.hass.config.latitude and not self.hass.config.longitude:
|
||||
return self.async_abort(reason="latitude_longitude_not_defined")
|
||||
|
||||
if user_input is not None:
|
||||
return self.async_create_entry(
|
||||
title=user_input.get(CONF_NAME, DEFAULT_NAME),
|
||||
|
||||
@@ -1,10 +1,10 @@
|
||||
"""Support for iss binary sensor."""
|
||||
"""Support for iss sensor."""
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
from homeassistant.components.binary_sensor import BinarySensorEntity
|
||||
from homeassistant.components.sensor import SensorEntity
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import ATTR_LATITUDE, ATTR_LONGITUDE, CONF_SHOW_ON_MAP
|
||||
from homeassistant.core import HomeAssistant
|
||||
@@ -19,12 +19,6 @@ from .const import DOMAIN
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
ATTR_ISS_NEXT_RISE = "next_rise"
|
||||
ATTR_ISS_NUMBER_PEOPLE_SPACE = "number_of_people_in_space"
|
||||
|
||||
DEFAULT_NAME = "ISS"
|
||||
DEFAULT_DEVICE_CLASS = "visible"
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
@@ -37,15 +31,11 @@ async def async_setup_entry(
|
||||
name = entry.title
|
||||
show_on_map = entry.options.get(CONF_SHOW_ON_MAP, False)
|
||||
|
||||
async_add_entities([IssBinarySensor(coordinator, name, show_on_map)])
|
||||
async_add_entities([IssSensor(coordinator, name, show_on_map)])
|
||||
|
||||
|
||||
class IssBinarySensor(
|
||||
CoordinatorEntity[DataUpdateCoordinator[IssData]], BinarySensorEntity
|
||||
):
|
||||
"""Implementation of the ISS binary sensor."""
|
||||
|
||||
_attr_device_class = DEFAULT_DEVICE_CLASS
|
||||
class IssSensor(CoordinatorEntity[DataUpdateCoordinator[IssData]], SensorEntity):
|
||||
"""Implementation of the ISS sensor."""
|
||||
|
||||
def __init__(
|
||||
self, coordinator: DataUpdateCoordinator[IssData], name: str, show: bool
|
||||
@@ -57,17 +47,14 @@ class IssBinarySensor(
|
||||
self._show_on_map = show
|
||||
|
||||
@property
|
||||
def is_on(self) -> bool:
|
||||
"""Return true if the binary sensor is on."""
|
||||
return self.coordinator.data.is_above is True
|
||||
def native_value(self) -> int:
|
||||
"""Return number of people in space."""
|
||||
return self.coordinator.data.number_of_people_in_space
|
||||
|
||||
@property
|
||||
def extra_state_attributes(self) -> dict[str, Any]:
|
||||
"""Return the state attributes."""
|
||||
attrs = {
|
||||
ATTR_ISS_NUMBER_PEOPLE_SPACE: self.coordinator.data.number_of_people_in_space,
|
||||
ATTR_ISS_NEXT_RISE: self.coordinator.data.next_rise,
|
||||
}
|
||||
attrs = {}
|
||||
if self._show_on_map:
|
||||
attrs[ATTR_LONGITUDE] = self.coordinator.data.current_location.get(
|
||||
"longitude"
|
||||
@@ -75,7 +75,7 @@ class ISYEntity(Entity):
|
||||
# New state attributes may be available, update the state.
|
||||
self.async_write_ha_state()
|
||||
|
||||
self.hass.bus.fire("isy994_control", event_data)
|
||||
self.hass.bus.async_fire("isy994_control", event_data)
|
||||
|
||||
@property
|
||||
def device_info(self) -> DeviceInfo | None:
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
"domain": "lametric",
|
||||
"name": "LaMetric",
|
||||
"documentation": "https://www.home-assistant.io/integrations/lametric",
|
||||
"requirements": ["demetriek==0.2.2"],
|
||||
"requirements": ["demetriek==0.2.4"],
|
||||
"codeowners": ["@robbiet480", "@frenck"],
|
||||
"iot_class": "local_polling",
|
||||
"dependencies": ["application_credentials"],
|
||||
|
||||
@@ -21,6 +21,7 @@ from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
|
||||
|
||||
from .const import CONF_CYCLES, CONF_ICON_TYPE, CONF_PRIORITY, CONF_SOUND, DOMAIN
|
||||
from .coordinator import LaMetricDataUpdateCoordinator
|
||||
|
||||
|
||||
async def async_get_service(
|
||||
@@ -31,8 +32,10 @@ async def async_get_service(
|
||||
"""Get the LaMetric notification service."""
|
||||
if discovery_info is None:
|
||||
return None
|
||||
lametric: LaMetricDevice = hass.data[DOMAIN][discovery_info["entry_id"]]
|
||||
return LaMetricNotificationService(lametric)
|
||||
coordinator: LaMetricDataUpdateCoordinator = hass.data[DOMAIN][
|
||||
discovery_info["entry_id"]
|
||||
]
|
||||
return LaMetricNotificationService(coordinator.lametric)
|
||||
|
||||
|
||||
class LaMetricNotificationService(BaseNotificationService):
|
||||
|
||||
@@ -31,9 +31,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
_LOGGER.info("Polling on %s", entry.data[CONF_DEVICE])
|
||||
return await hass.async_add_executor_job(api.read)
|
||||
|
||||
# No automatic polling and no initial refresh of data is being done at this point,
|
||||
# to prevent battery drain. The user will have to do it manually.
|
||||
|
||||
# Polling is only daily to prevent battery drain.
|
||||
coordinator = DataUpdateCoordinator(
|
||||
hass,
|
||||
_LOGGER,
|
||||
|
||||
@@ -14,7 +14,7 @@ from homeassistant import config_entries
|
||||
from homeassistant.const import CONF_DEVICE
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
|
||||
from .const import DOMAIN
|
||||
from .const import DOMAIN, ULTRAHEAT_TIMEOUT
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
@@ -43,6 +43,7 @@ class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
|
||||
dev_path = await self.hass.async_add_executor_job(
|
||||
get_serial_by_id, user_input[CONF_DEVICE]
|
||||
)
|
||||
_LOGGER.debug("Using this path : %s", dev_path)
|
||||
|
||||
try:
|
||||
return await self.validate_and_create_entry(dev_path)
|
||||
@@ -76,6 +77,7 @@ class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
|
||||
"""Try to connect to the device path and return an entry."""
|
||||
model, device_number = await self.validate_ultraheat(dev_path)
|
||||
|
||||
_LOGGER.debug("Got model %s and device_number %s", model, device_number)
|
||||
await self.async_set_unique_id(device_number)
|
||||
self._abort_if_unique_id_configured()
|
||||
data = {
|
||||
@@ -94,7 +96,7 @@ class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
|
||||
reader = UltraheatReader(port)
|
||||
heat_meter = HeatMeterService(reader)
|
||||
try:
|
||||
async with async_timeout.timeout(10):
|
||||
async with async_timeout.timeout(ULTRAHEAT_TIMEOUT):
|
||||
# validate and retrieve the model and device number for a unique id
|
||||
data = await self.hass.async_add_executor_job(heat_meter.read)
|
||||
_LOGGER.debug("Got data from Ultraheat API: %s", data)
|
||||
|
||||
@@ -11,6 +11,7 @@ from homeassistant.helpers.entity import EntityCategory
|
||||
DOMAIN = "landisgyr_heat_meter"
|
||||
|
||||
GJ_TO_MWH = 0.277778 # conversion factor
|
||||
ULTRAHEAT_TIMEOUT = 30 # reading the IR port can take some time
|
||||
|
||||
HEAT_METER_SENSOR_TYPES = (
|
||||
SensorEntityDescription(
|
||||
|
||||
@@ -6,7 +6,7 @@ from datetime import timedelta
|
||||
import logging
|
||||
|
||||
import async_timeout
|
||||
from led_ble import BLEAK_EXCEPTIONS, LEDBLE
|
||||
from led_ble import BLEAK_EXCEPTIONS, LEDBLE, get_device
|
||||
|
||||
from homeassistant.components import bluetooth
|
||||
from homeassistant.components.bluetooth.match import ADDRESS, BluetoothCallbackMatcher
|
||||
@@ -27,7 +27,9 @@ _LOGGER = logging.getLogger(__name__)
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
"""Set up LED BLE from a config entry."""
|
||||
address: str = entry.data[CONF_ADDRESS]
|
||||
ble_device = bluetooth.async_ble_device_from_address(hass, address.upper(), True)
|
||||
ble_device = bluetooth.async_ble_device_from_address(
|
||||
hass, address.upper(), True
|
||||
) or await get_device(address)
|
||||
if not ble_device:
|
||||
raise ConfigEntryNotReady(
|
||||
f"Could not find LED BLE device with address {address}"
|
||||
|
||||
@@ -48,12 +48,12 @@ class LEDBLEEntity(CoordinatorEntity, LightEntity):
|
||||
"""Initialize an ledble light."""
|
||||
super().__init__(coordinator)
|
||||
self._device = device
|
||||
self._attr_unique_id = device._address
|
||||
self._attr_unique_id = device.address
|
||||
self._attr_device_info = DeviceInfo(
|
||||
name=name,
|
||||
model=hex(device.model_num),
|
||||
sw_version=hex(device.version_num),
|
||||
connections={(dr.CONNECTION_BLUETOOTH, device._address)},
|
||||
connections={(dr.CONNECTION_BLUETOOTH, device.address)},
|
||||
)
|
||||
self._async_update_attrs()
|
||||
|
||||
|
||||
@@ -3,7 +3,7 @@
|
||||
"name": "LED BLE",
|
||||
"config_flow": true,
|
||||
"documentation": "https://www.home-assistant.io/integrations/ble_ble",
|
||||
"requirements": ["led-ble==0.5.4"],
|
||||
"requirements": ["led-ble==0.10.1"],
|
||||
"dependencies": ["bluetooth"],
|
||||
"codeowners": ["@bdraco"],
|
||||
"bluetooth": [
|
||||
@@ -11,7 +11,10 @@
|
||||
{ "local_name": "BLE-LED*" },
|
||||
{ "local_name": "LEDBLE*" },
|
||||
{ "local_name": "Triones*" },
|
||||
{ "local_name": "LEDBlue*" }
|
||||
{ "local_name": "LEDBlue*" },
|
||||
{ "local_name": "Dream~*" },
|
||||
{ "local_name": "QHM-*" },
|
||||
{ "local_name": "AP-*" }
|
||||
],
|
||||
"iot_class": "local_polling"
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
"""Config flow to configure the LG Soundbar integration."""
|
||||
from queue import Queue
|
||||
from queue import Full, Queue
|
||||
import socket
|
||||
|
||||
import temescal
|
||||
@@ -20,18 +20,29 @@ def test_connect(host, port):
|
||||
uuid_q = Queue(maxsize=1)
|
||||
name_q = Queue(maxsize=1)
|
||||
|
||||
def queue_add(attr_q, data):
|
||||
try:
|
||||
attr_q.put_nowait(data)
|
||||
except Full:
|
||||
pass
|
||||
|
||||
def msg_callback(response):
|
||||
if response["msg"] == "MAC_INFO_DEV" and "s_uuid" in response["data"]:
|
||||
uuid_q.put_nowait(response["data"]["s_uuid"])
|
||||
if (
|
||||
response["msg"] in ["MAC_INFO_DEV", "PRODUCT_INFO"]
|
||||
and "s_uuid" in response["data"]
|
||||
):
|
||||
queue_add(uuid_q, response["data"]["s_uuid"])
|
||||
if (
|
||||
response["msg"] == "SPK_LIST_VIEW_INFO"
|
||||
and "s_user_name" in response["data"]
|
||||
):
|
||||
name_q.put_nowait(response["data"]["s_user_name"])
|
||||
queue_add(name_q, response["data"]["s_user_name"])
|
||||
|
||||
try:
|
||||
connection = temescal.temescal(host, port=port, callback=msg_callback)
|
||||
connection.get_mac_info()
|
||||
if uuid_q.empty():
|
||||
connection.get_product_info()
|
||||
connection.get_info()
|
||||
details = {"name": name_q.get(timeout=10), "uuid": uuid_q.get(timeout=10)}
|
||||
return details
|
||||
|
||||
@@ -57,7 +57,7 @@ CONFIG_SCHEMA = vol.All(
|
||||
)
|
||||
|
||||
|
||||
PLATFORMS = [Platform.BUTTON, Platform.LIGHT]
|
||||
PLATFORMS = [Platform.BINARY_SENSOR, Platform.BUTTON, Platform.LIGHT]
|
||||
DISCOVERY_INTERVAL = timedelta(minutes=15)
|
||||
MIGRATION_INTERVAL = timedelta(minutes=5)

homeassistant/components/lifx/binary_sensor.py (new file, 70 lines)
@@ -0,0 +1,70 @@
|
||||
"""Binary sensor entities for LIFX integration."""
|
||||
from __future__ import annotations
|
||||
|
||||
from homeassistant.components.binary_sensor import (
|
||||
BinarySensorDeviceClass,
|
||||
BinarySensorEntity,
|
||||
BinarySensorEntityDescription,
|
||||
)
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers.entity import EntityCategory
|
||||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
|
||||
from .const import DOMAIN, HEV_CYCLE_STATE
|
||||
from .coordinator import LIFXUpdateCoordinator
|
||||
from .entity import LIFXEntity
|
||||
from .util import lifx_features
|
||||
|
||||
HEV_CYCLE_STATE_SENSOR = BinarySensorEntityDescription(
|
||||
key=HEV_CYCLE_STATE,
|
||||
name="Clean Cycle",
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
device_class=BinarySensorDeviceClass.RUNNING,
|
||||
)
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback
|
||||
) -> None:
|
||||
"""Set up LIFX from a config entry."""
|
||||
coordinator: LIFXUpdateCoordinator = hass.data[DOMAIN][entry.entry_id]
|
||||
|
||||
if lifx_features(coordinator.device)["hev"]:
|
||||
async_add_entities(
|
||||
[
|
||||
LIFXHevCycleBinarySensorEntity(
|
||||
coordinator=coordinator, description=HEV_CYCLE_STATE_SENSOR
|
||||
)
|
||||
]
|
||||
)
|
||||
|
||||
|
||||
class LIFXHevCycleBinarySensorEntity(LIFXEntity, BinarySensorEntity):
|
||||
"""LIFX HEV cycle state binary sensor."""
|
||||
|
||||
_attr_has_entity_name = True
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: LIFXUpdateCoordinator,
|
||||
description: BinarySensorEntityDescription,
|
||||
) -> None:
|
||||
"""Initialise the sensor."""
|
||||
super().__init__(coordinator)
|
||||
|
||||
self.entity_description = description
|
||||
self._attr_name = description.name
|
||||
self._attr_unique_id = f"{coordinator.serial_number}_{description.key}"
|
||||
self._async_update_attrs()
|
||||
|
||||
@callback
|
||||
def _handle_coordinator_update(self) -> None:
|
||||
"""Handle updated data from the coordinator."""
|
||||
self._async_update_attrs()
|
||||
super()._handle_coordinator_update()
|
||||
|
||||
@callback
|
||||
def _async_update_attrs(self) -> None:
|
||||
"""Handle coordinator updates."""
|
||||
self._attr_is_on = self.coordinator.async_get_hev_cycle_state()
|
||||
@@ -29,6 +29,15 @@ IDENTIFY_WAVEFORM = {
|
||||
IDENTIFY = "identify"
|
||||
RESTART = "restart"
|
||||
|
||||
ATTR_DURATION = "duration"
|
||||
ATTR_INDICATION = "indication"
|
||||
ATTR_INFRARED = "infrared"
|
||||
ATTR_POWER = "power"
|
||||
ATTR_REMAINING = "remaining"
|
||||
ATTR_ZONES = "zones"
|
||||
|
||||
HEV_CYCLE_STATE = "hev_cycle_state"
|
||||
|
||||
DATA_LIFX_MANAGER = "lifx_manager"
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
_LOGGER = logging.getLogger(__package__)
|
||||
|
||||
@@ -15,6 +15,7 @@ from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, Upda
|
||||
|
||||
from .const import (
|
||||
_LOGGER,
|
||||
ATTR_REMAINING,
|
||||
IDENTIFY_WAVEFORM,
|
||||
MESSAGE_RETRIES,
|
||||
MESSAGE_TIMEOUT,
|
||||
@@ -24,6 +25,7 @@ from .const import (
|
||||
from .util import async_execute_lifx, get_real_mac_addr, lifx_features
|
||||
|
||||
REQUEST_REFRESH_DELAY = 0.35
|
||||
LIFX_IDENTIFY_DELAY = 3.0
|
||||
|
||||
|
||||
class LIFXUpdateCoordinator(DataUpdateCoordinator):
|
||||
@@ -91,7 +93,7 @@ class LIFXUpdateCoordinator(DataUpdateCoordinator):
|
||||
# Turn the bulb on first, flash for 3 seconds, then turn off
|
||||
await self.async_set_power(state=True, duration=1)
|
||||
await self.async_set_waveform_optional(value=IDENTIFY_WAVEFORM)
|
||||
await asyncio.sleep(3)
|
||||
await asyncio.sleep(LIFX_IDENTIFY_DELAY)
|
||||
await self.async_set_power(state=False, duration=1)
|
||||
|
||||
async def _async_update_data(self) -> None:
|
||||
@@ -101,26 +103,25 @@ class LIFXUpdateCoordinator(DataUpdateCoordinator):
|
||||
self.device.get_hostfirmware()
|
||||
if self.device.product is None:
|
||||
self.device.get_version()
|
||||
try:
|
||||
response = await async_execute_lifx(self.device.get_color)
|
||||
except asyncio.TimeoutError as ex:
|
||||
raise UpdateFailed(
|
||||
f"Failed to fetch state from device: {self.device.ip_addr}"
|
||||
) from ex
|
||||
response = await async_execute_lifx(self.device.get_color)
|
||||
|
||||
if self.device.product is None:
|
||||
raise UpdateFailed(
|
||||
f"Failed to fetch get version from device: {self.device.ip_addr}"
|
||||
)
|
||||
|
||||
# device.mac_addr is not the mac_address, its the serial number
|
||||
if self.device.mac_addr == TARGET_ANY:
|
||||
self.device.mac_addr = response.target_addr
|
||||
|
||||
if lifx_features(self.device)["multizone"]:
|
||||
try:
|
||||
await self.async_update_color_zones()
|
||||
except asyncio.TimeoutError as ex:
|
||||
raise UpdateFailed(
|
||||
f"Failed to fetch zones from device: {self.device.ip_addr}"
|
||||
) from ex
|
||||
await self.async_update_color_zones()
|
||||
|
||||
if lifx_features(self.device)["hev"]:
|
||||
if self.device.hev_cycle_configuration is None:
|
||||
self.device.get_hev_configuration()
|
||||
|
||||
await self.async_get_hev_cycle()
|
||||
|
||||
async def async_update_color_zones(self) -> None:
|
||||
"""Get updated color information for each zone."""
|
||||
@@ -138,6 +139,17 @@ class LIFXUpdateCoordinator(DataUpdateCoordinator):
|
||||
if zone == top - 1:
|
||||
zone -= 1
|
||||
|
||||
def async_get_hev_cycle_state(self) -> bool | None:
|
||||
"""Return the current HEV cycle state."""
|
||||
if self.device.hev_cycle is None:
|
||||
return None
|
||||
return bool(self.device.hev_cycle.get(ATTR_REMAINING, 0) > 0)
|
||||
|
||||
async def async_get_hev_cycle(self) -> None:
|
||||
"""Update the HEV cycle status from a LIFX Clean bulb."""
|
||||
if lifx_features(self.device)["hev"]:
|
||||
await async_execute_lifx(self.device.get_hev_cycle)
|
||||
|
||||
async def async_set_waveform_optional(
|
||||
self, value: dict[str, Any], rapid: bool = False
|
||||
) -> None:
|
||||
|
||||
@@ -28,7 +28,7 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
from homeassistant.helpers.event import async_track_point_in_utc_time
|
||||
import homeassistant.util.color as color_util
|
||||
|
||||
from .const import DATA_LIFX_MANAGER, DOMAIN
|
||||
from .const import ATTR_INFRARED, ATTR_POWER, ATTR_ZONES, DATA_LIFX_MANAGER, DOMAIN
|
||||
from .coordinator import LIFXUpdateCoordinator
|
||||
from .entity import LIFXEntity
|
||||
from .manager import (
|
||||
@@ -39,13 +39,7 @@ from .manager import (
|
||||
)
|
||||
from .util import convert_8_to_16, convert_16_to_8, find_hsbk, lifx_features, merge_hsbk
|
||||
|
||||
SERVICE_LIFX_SET_STATE = "set_state"
|
||||
|
||||
COLOR_ZONE_POPULATE_DELAY = 0.3
|
||||
|
||||
ATTR_INFRARED = "infrared"
|
||||
ATTR_ZONES = "zones"
|
||||
ATTR_POWER = "power"
|
||||
LIFX_STATE_SETTLE_DELAY = 0.3
|
||||
|
||||
SERVICE_LIFX_SET_STATE = "set_state"
|
||||
|
||||
@@ -225,18 +219,16 @@ class LIFXLight(LIFXEntity, LightEntity):
|
||||
elif power_on:
|
||||
await self.set_power(True, duration=fade)
|
||||
else:
|
||||
if power_on:
|
||||
await self.set_power(True)
|
||||
if hsbk:
|
||||
await self.set_color(hsbk, kwargs, duration=fade)
|
||||
# The response from set_color will tell us if the
|
||||
# bulb is actually on or not, so we don't need to
|
||||
# call power_on if its already on
|
||||
if power_on and self.bulb.power_level == 0:
|
||||
await self.set_power(True)
|
||||
elif power_on:
|
||||
await self.set_power(True)
|
||||
if power_off:
|
||||
await self.set_power(False, duration=fade)
|
||||
|
||||
# Avoid state ping-pong by holding off updates as the state settles
|
||||
await asyncio.sleep(LIFX_STATE_SETTLE_DELAY)
|
||||
|
||||
# Update when the transition starts and ends
|
||||
await self.update_during_transition(fade)
|
||||
|
||||
@@ -344,7 +336,7 @@ class LIFXStrip(LIFXColor):
|
||||
# Zone brightness is not reported when powered off
|
||||
if not self.is_on and hsbk[HSBK_BRIGHTNESS] is None:
|
||||
await self.set_power(True)
|
||||
await asyncio.sleep(COLOR_ZONE_POPULATE_DELAY)
|
||||
await asyncio.sleep(LIFX_STATE_SETTLE_DELAY)
|
||||
await self.update_color_zones()
|
||||
await self.set_power(False)
|
||||
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
"""The Litter-Robot integration."""
|
||||
from __future__ import annotations
|
||||
|
||||
from pylitterbot import FeederRobot, LitterRobot, LitterRobot3, LitterRobot4
|
||||
from pylitterbot import FeederRobot, LitterRobot, LitterRobot3, Robot
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import Platform
|
||||
@@ -10,65 +10,48 @@ from homeassistant.core import HomeAssistant
|
||||
from .const import DOMAIN
|
||||
from .hub import LitterRobotHub
|
||||
|
||||
PLATFORMS = [
|
||||
Platform.BUTTON,
|
||||
Platform.SELECT,
|
||||
Platform.SENSOR,
|
||||
Platform.SWITCH,
|
||||
Platform.VACUUM,
|
||||
]
|
||||
|
||||
PLATFORMS_BY_TYPE = {
|
||||
LitterRobot: (
|
||||
Platform.SELECT,
|
||||
Platform.SENSOR,
|
||||
Platform.SWITCH,
|
||||
Platform.VACUUM,
|
||||
),
|
||||
LitterRobot3: (
|
||||
Platform.BUTTON,
|
||||
Platform.SELECT,
|
||||
Platform.SENSOR,
|
||||
Platform.SWITCH,
|
||||
Platform.VACUUM,
|
||||
),
|
||||
LitterRobot4: (
|
||||
Platform.SELECT,
|
||||
Platform.SENSOR,
|
||||
Platform.SWITCH,
|
||||
Platform.VACUUM,
|
||||
),
|
||||
FeederRobot: (
|
||||
Platform.BUTTON,
|
||||
Robot: (
|
||||
Platform.SELECT,
|
||||
Platform.SENSOR,
|
||||
Platform.SWITCH,
|
||||
),
|
||||
LitterRobot: (Platform.VACUUM,),
|
||||
LitterRobot3: (Platform.BUTTON,),
|
||||
FeederRobot: (Platform.BUTTON,),
|
||||
}
|
||||
|
||||
|
||||
def get_platforms_for_robots(robots: list[Robot]) -> set[Platform]:
|
||||
"""Get platforms for robots."""
|
||||
return {
|
||||
platform
|
||||
for robot in robots
|
||||
for robot_type, platforms in PLATFORMS_BY_TYPE.items()
|
||||
if isinstance(robot, robot_type)
|
||||
for platform in platforms
|
||||
}
|
||||
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
"""Set up Litter-Robot from a config entry."""
|
||||
hass.data.setdefault(DOMAIN, {})
|
||||
hub = hass.data[DOMAIN][entry.entry_id] = LitterRobotHub(hass, entry.data)
|
||||
await hub.login(load_robots=True)
|
||||
|
||||
platforms: set[str] = set()
|
||||
for robot in hub.account.robots:
|
||||
platforms.update(PLATFORMS_BY_TYPE[type(robot)])
|
||||
if platforms:
|
||||
if platforms := get_platforms_for_robots(hub.account.robots):
|
||||
await hass.config_entries.async_forward_entry_setups(entry, platforms)
|
||||
|
||||
return True
|
||||
|
||||
|
||||
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
"""Unload a config entry."""
|
||||
unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
|
||||
|
||||
hub: LitterRobotHub = hass.data[DOMAIN][entry.entry_id]
|
||||
await hub.account.disconnect()
|
||||
|
||||
platforms = get_platforms_for_robots(hub.account.robots)
|
||||
unload_ok = await hass.config_entries.async_unload_platforms(entry, platforms)
|
||||
|
||||
if unload_ok:
|
||||
hass.data[DOMAIN].pop(entry.entry_id)
|
||||
|
||||
|
||||
@@ -3,7 +3,7 @@
|
||||
"name": "Litter-Robot",
|
||||
"config_flow": true,
|
||||
"documentation": "https://www.home-assistant.io/integrations/litterrobot",
|
||||
"requirements": ["pylitterbot==2022.8.2"],
|
||||
"requirements": ["pylitterbot==2022.9.1"],
|
||||
"codeowners": ["@natekspencer", "@tkdrob"],
|
||||
"dhcp": [{ "hostname": "litter-robot4" }],
|
||||
"iot_class": "cloud_polling",
|
||||
|
||||
@@ -48,10 +48,7 @@ class MelnorSwitch(MelnorBluetoothBaseEntity, SwitchEntity):
|
||||
super().__init__(coordinator)
|
||||
self._valve_index = valve_index
|
||||
|
||||
self._attr_unique_id = (
|
||||
f"switch-{self._attr_unique_id}-zone{self._valve().id}-manual"
|
||||
)
|
||||
|
||||
self._attr_unique_id = f"{self._attr_unique_id}-zone{self._valve().id}-manual"
|
||||
self._attr_name = f"{self._device.name} Zone {self._valve().id+1}"
|
||||
|
||||
@property
|
||||
|
||||
@@ -20,7 +20,13 @@ from homeassistant.const import (
|
||||
CONF_USERNAME,
|
||||
SERVICE_RELOAD,
|
||||
)
|
||||
from homeassistant.core import HassJob, HomeAssistant, ServiceCall, callback
|
||||
from homeassistant.core import (
|
||||
CALLBACK_TYPE,
|
||||
HassJob,
|
||||
HomeAssistant,
|
||||
ServiceCall,
|
||||
callback,
|
||||
)
|
||||
from homeassistant.exceptions import TemplateError, Unauthorized
|
||||
from homeassistant.helpers import (
|
||||
config_validation as cv,
|
||||
@@ -68,9 +74,11 @@ from .const import ( # noqa: F401
|
||||
CONFIG_ENTRY_IS_SETUP,
|
||||
DATA_MQTT,
|
||||
DATA_MQTT_CONFIG,
|
||||
DATA_MQTT_DISCOVERY_REGISTRY_HOOKS,
|
||||
DATA_MQTT_RELOAD_DISPATCHERS,
|
||||
DATA_MQTT_RELOAD_ENTRY,
|
||||
DATA_MQTT_RELOAD_NEEDED,
|
||||
DATA_MQTT_SUBSCRIPTIONS_TO_RESTORE,
|
||||
DATA_MQTT_UPDATED_CONFIG,
|
||||
DEFAULT_ENCODING,
|
||||
DEFAULT_QOS,
|
||||
@@ -314,7 +322,13 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
# Bail out
|
||||
return False
|
||||
|
||||
hass.data[DATA_MQTT_DISCOVERY_REGISTRY_HOOKS] = {}
|
||||
hass.data[DATA_MQTT] = MQTT(hass, entry, conf)
|
||||
# Restore saved subscriptions
|
||||
if DATA_MQTT_SUBSCRIPTIONS_TO_RESTORE in hass.data:
|
||||
hass.data[DATA_MQTT].subscriptions = hass.data.pop(
|
||||
DATA_MQTT_SUBSCRIPTIONS_TO_RESTORE
|
||||
)
|
||||
entry.add_update_listener(_async_config_entry_updated)
|
||||
|
||||
await hass.data[DATA_MQTT].async_connect()
|
||||
@@ -438,6 +452,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
|
||||
async def async_forward_entry_setup_and_setup_discovery(config_entry):
|
||||
"""Forward the config entry setup to the platforms and set up discovery."""
|
||||
reload_manual_setup: bool = False
|
||||
# Local import to avoid circular dependencies
|
||||
# pylint: disable-next=import-outside-toplevel
|
||||
from . import device_automation, tag
|
||||
@@ -460,8 +475,17 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
            await _async_setup_discovery(hass, conf, entry)
        # Setup reload service after all platforms have loaded
        await async_setup_reload_service()
        # When the entry is reloaded, also reload manual set up items to enable MQTT
        if DATA_MQTT_RELOAD_ENTRY in hass.data:
            hass.data.pop(DATA_MQTT_RELOAD_ENTRY)
            reload_manual_setup = True

        # When the entry was disabled before, reload manual set up items to enable MQTT again
        if DATA_MQTT_RELOAD_NEEDED in hass.data:
            hass.data.pop(DATA_MQTT_RELOAD_NEEDED)
            reload_manual_setup = True

        if reload_manual_setup:
            await async_reload_manual_mqtt_items(hass)

    await async_forward_entry_setup_and_setup_discovery(entry)

@@ -613,8 +637,6 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    mqtt_client.cleanup()

    # Trigger reload manual MQTT items at entry setup
    # Reload the legacy yaml platform
    await async_reload_integration_platforms(hass, DOMAIN, RELOADABLE_PLATFORMS)
    if (mqtt_entry_status := mqtt_config_entry_enabled(hass)) is False:
        # The entry is disabled reload legacy manual items when the entry is enabled again
        hass.data[DATA_MQTT_RELOAD_NEEDED] = True

@@ -622,7 +644,19 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
        # The entry is reloaded:
        # Trigger re-fetching the yaml config at entry setup
        hass.data[DATA_MQTT_RELOAD_ENTRY] = True
    # Stop the loop
    # Reload the legacy yaml platform to make entities unavailable
    await async_reload_integration_platforms(hass, DOMAIN, RELOADABLE_PLATFORMS)
    # Cleanup entity registry hooks
    registry_hooks: dict[tuple, CALLBACK_TYPE] = hass.data[
        DATA_MQTT_DISCOVERY_REGISTRY_HOOKS
    ]
    while registry_hooks:
        registry_hooks.popitem()[1]()
    # Wait for all ACKs and stop the loop
    await mqtt_client.async_disconnect()
    # Store remaining subscriptions to be able to restore or reload them
    # when the entry is set up again
    if mqtt_client.subscriptions:
        hass.data[DATA_MQTT_SUBSCRIPTIONS_TO_RESTORE] = mqtt_client.subscriptions

    return True

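The setup and unload hunks above share a simple hand-off: on unload, whatever is still subscribed is stashed under a key in hass.data, and the next setup pops it back onto the fresh client. A minimal standalone sketch of that pattern, using a plain dict in place of hass.data and hypothetical helper names:

# Sketch only: plain dict stands in for hass.data; function names are illustrative.
DATA_MQTT_SUBSCRIPTIONS_TO_RESTORE = "mqtt_client_subscriptions"
data: dict = {}

def unload(active_subscriptions: list[str]) -> None:
    # Keep whatever is still subscribed so a later setup can re-subscribe.
    if active_subscriptions:
        data[DATA_MQTT_SUBSCRIPTIONS_TO_RESTORE] = active_subscriptions

def setup(client_subscriptions: list[str]) -> None:
    # Pop the stored subscriptions (if any) back onto the new client.
    if DATA_MQTT_SUBSCRIPTIONS_TO_RESTORE in data:
        client_subscriptions.extend(data.pop(DATA_MQTT_SUBSCRIPTIONS_TO_RESTORE))

unload(["home/+/state"])
subs: list[str] = []
setup(subs)
print(subs)  # ['home/+/state']
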
@@ -309,7 +309,7 @@ class MQTT:

    def __init__(
        self,
        hass: HomeAssistant,
        hass,
        config_entry,
        conf,
    ) -> None:

@@ -435,12 +435,13 @@ class MQTT:
            """Return False if there are unprocessed ACKs."""
            return not bool(self._pending_operations)

        # wait for ACK-s to be processesed (unsubscribe only)
        # wait for ACKs to be processed
        async with self._pending_operations_condition:
            await self._pending_operations_condition.wait_for(no_more_acks)

        # stop the MQTT loop
        await self.hass.async_add_executor_job(stop)
        async with self._paho_lock:
            await self.hass.async_add_executor_job(stop)

    async def async_subscribe(
        self,

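The disconnect hunk above waits on an asyncio.Condition until every pending ACK has been processed before the client loop is stopped. A self-contained sketch of that wait-for-predicate pattern (names are stand-ins, not the integration's own):

# Sketch only: demonstrates Condition.wait_for() with a predicate.
import asyncio

async def main() -> None:
    pending_operations: dict[int, asyncio.Event] = {}
    condition = asyncio.Condition()

    def no_more_acks() -> bool:
        # True once every pending operation has been acknowledged.
        return not pending_operations

    async def wait_for_all_acks() -> None:
        async with condition:
            await condition.wait_for(no_more_acks)

    async def ack(mid: int) -> None:
        async with condition:
            pending_operations.pop(mid, None)
            condition.notify_all()

    pending_operations[1] = asyncio.Event()  # one outstanding operation
    waiter = asyncio.create_task(wait_for_all_acks())
    await ack(1)
    await waiter
    print("all ACKs processed")

asyncio.run(main())
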
@@ -501,7 +502,8 @@ class MQTT:
        async with self._paho_lock:
            mid = await self.hass.async_add_executor_job(_client_unsubscribe, topic)
            await self._register_mid(mid)
            self.hass.async_create_task(self._wait_for_mid(mid))

        self.hass.async_create_task(self._wait_for_mid(mid))

    async def _async_perform_subscriptions(
        self, subscriptions: Iterable[tuple[str, int]]

@@ -32,6 +32,8 @@ CONF_TLS_VERSION = "tls_version"

CONFIG_ENTRY_IS_SETUP = "mqtt_config_entry_is_setup"
DATA_MQTT = "mqtt"
DATA_MQTT_SUBSCRIPTIONS_TO_RESTORE = "mqtt_client_subscriptions"
DATA_MQTT_DISCOVERY_REGISTRY_HOOKS = "mqtt_discovery_registry_hooks"
DATA_MQTT_CONFIG = "mqtt_config"
MQTT_DATA_DEVICE_TRACKER_LEGACY = "mqtt_device_tracker_legacy"
DATA_MQTT_RELOAD_DISPATCHERS = "mqtt_reload_dispatchers"

@@ -249,7 +249,9 @@ class MqttLightJson(MqttEntity, LightEntity, RestoreEntity):
            except KeyError:
                pass
            except ValueError:
                _LOGGER.warning("Invalid RGB color value received")
                _LOGGER.warning(
                    "Invalid RGB color value received for entity %s", self.entity_id
                )
                return

            try:

@@ -259,7 +261,9 @@ class MqttLightJson(MqttEntity, LightEntity, RestoreEntity):
            except KeyError:
                pass
            except ValueError:
                _LOGGER.warning("Invalid XY color value received")
                _LOGGER.warning(
                    "Invalid XY color value received for entity %s", self.entity_id
                )
                return

            try:

@@ -269,12 +273,16 @@ class MqttLightJson(MqttEntity, LightEntity, RestoreEntity):
            except KeyError:
                pass
            except ValueError:
                _LOGGER.warning("Invalid HS color value received")
                _LOGGER.warning(
                    "Invalid HS color value received for entity %s", self.entity_id
                )
                return
        else:
            color_mode = values["color_mode"]
            if not self._supports_color_mode(color_mode):
                _LOGGER.warning("Invalid color mode received")
                _LOGGER.warning(
                    "Invalid color mode received for entity %s", self.entity_id
                )
                return
            try:
                if color_mode == ColorMode.COLOR_TEMP:

@@ -314,7 +322,10 @@ class MqttLightJson(MqttEntity, LightEntity, RestoreEntity):
                    self._color_mode = ColorMode.XY
                    self._xy = (x, y)
            except (KeyError, ValueError):
                _LOGGER.warning("Invalid or incomplete color value received")
                _LOGGER.warning(
                    "Invalid or incomplete color value received for entity %s",
                    self.entity_id,
                )

    def _prepare_subscribe_topics(self):
        """(Re)Subscribe to topics."""

@@ -351,7 +362,10 @@ class MqttLightJson(MqttEntity, LightEntity, RestoreEntity):
            except KeyError:
                pass
            except (TypeError, ValueError):
                _LOGGER.warning("Invalid brightness value received")
                _LOGGER.warning(
                    "Invalid brightness value received for entity %s",
                    self.entity_id,
                )

            if (
                self._supported_features

@@ -366,7 +380,10 @@ class MqttLightJson(MqttEntity, LightEntity, RestoreEntity):
            except KeyError:
                pass
            except ValueError:
                _LOGGER.warning("Invalid color temp value received")
                _LOGGER.warning(
                    "Invalid color temp value received for entity %s",
                    self.entity_id,
                )

            if self._supported_features and LightEntityFeature.EFFECT:
                with suppress(KeyError):

@@ -28,7 +28,13 @@ from homeassistant.const import (
    CONF_UNIQUE_ID,
    CONF_VALUE_TEMPLATE,
)
from homeassistant.core import Event, HomeAssistant, async_get_hass, callback
from homeassistant.core import (
    CALLBACK_TYPE,
    Event,
    HomeAssistant,
    async_get_hass,
    callback,
)
from homeassistant.helpers import (
    config_validation as cv,
    device_registry as dr,

@@ -48,6 +54,7 @@ from homeassistant.helpers.entity import (
    async_generate_entity_id,
)
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.event import async_track_entity_registry_updated_event
from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue
from homeassistant.helpers.json import json_loads
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType

@@ -64,7 +71,9 @@ from .const import (
    CONF_TOPIC,
    DATA_MQTT,
    DATA_MQTT_CONFIG,
    DATA_MQTT_DISCOVERY_REGISTRY_HOOKS,
    DATA_MQTT_RELOAD_DISPATCHERS,
    DATA_MQTT_RELOAD_ENTRY,
    DATA_MQTT_UPDATED_CONFIG,
    DEFAULT_ENCODING,
    DEFAULT_PAYLOAD_AVAILABLE,

@@ -363,6 +372,12 @@ async def async_setup_platform_helper(
    async_setup_entities: SetupEntity,
) -> None:
    """Help to set up the platform for manual configured MQTT entities."""
    if DATA_MQTT_RELOAD_ENTRY in hass.data:
        _LOGGER.debug(
            "MQTT integration is %s, skipping setup of manually configured MQTT items while unloading the config entry",
            platform_domain,
        )
        return
    if not (entry_status := mqtt_config_entry_enabled(hass)):
        _LOGGER.warning(
            "MQTT integration is %s, skipping setup of manually configured MQTT %s",

@@ -647,6 +662,17 @@ async def async_remove_discovery_payload(hass: HomeAssistant, discovery_data: di
    await async_publish(hass, discovery_topic, "", retain=True)


async def async_clear_discovery_topic_if_entity_removed(
    hass: HomeAssistant,
    discovery_data: dict[str, Any],
    event: Event,
) -> None:
    """Clear the discovery topic if the entity is removed."""
    if event.data["action"] == "remove":
        # publish empty payload to config topic to avoid re-adding
        await async_remove_discovery_payload(hass, discovery_data)


class MqttDiscoveryDeviceUpdate:
    """Add support for auto discovery for platforms without an entity."""

@@ -780,7 +806,8 @@ class MqttDiscoveryUpdate(Entity):

    def __init__(
        self,
        discovery_data: dict,
        hass: HomeAssistant,
        discovery_data: dict | None,
        discovery_update: Callable | None = None,
    ) -> None:
        """Initialize the discovery update mixin."""

@@ -788,6 +815,14 @@ class MqttDiscoveryUpdate(Entity):
        self._discovery_update = discovery_update
        self._remove_discovery_updated: Callable | None = None
        self._removed_from_hass = False
        if discovery_data is None:
            return
        self._registry_hooks: dict[tuple, CALLBACK_TYPE] = hass.data[
            DATA_MQTT_DISCOVERY_REGISTRY_HOOKS
        ]
        discovery_hash: tuple[str, str] = discovery_data[ATTR_DISCOVERY_HASH]
        if discovery_hash in self._registry_hooks:
            self._registry_hooks.pop(discovery_hash)()

    async def async_added_to_hass(self) -> None:
        """Subscribe to discovery updates."""

@@ -850,7 +885,7 @@ class MqttDiscoveryUpdate(Entity):

    async def async_removed_from_registry(self) -> None:
        """Clear retained discovery topic in broker."""
        if not self._removed_from_hass:
        if not self._removed_from_hass and self._discovery_data is not None:
            # Stop subscribing to discovery updates to not trigger when we clear the
            # discovery topic
            self._cleanup_discovery_on_remove()

@@ -861,7 +896,20 @@ class MqttDiscoveryUpdate(Entity):
    @callback
    def add_to_platform_abort(self) -> None:
        """Abort adding an entity to a platform."""
        if self._discovery_data:
        if self._discovery_data is not None:
            discovery_hash: tuple = self._discovery_data[ATTR_DISCOVERY_HASH]
            if self.registry_entry is not None:
                self._registry_hooks[
                    discovery_hash
                ] = async_track_entity_registry_updated_event(
                    self.hass,
                    self.entity_id,
                    partial(
                        async_clear_discovery_topic_if_entity_removed,
                        self.hass,
                        self._discovery_data,
                    ),
                )
            stop_discovery_updates(self.hass, self._discovery_data)
            send_discovery_done(self.hass, self._discovery_data)
        super().add_to_platform_abort()

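The registry hooks added here and drained in the unload hunk earlier are just cleanup callbacks keyed by a discovery hash. A small sketch of that bookkeeping, with hypothetical names rather than the integration's API:

# Sketch only: cleanup callbacks keyed by (component, discovery_id).
from typing import Callable

registry_hooks: dict[tuple[str, str], Callable[[], None]] = {}

def track(discovery_hash: tuple[str, str]) -> None:
    # In MQTT this callback would cancel an entity-registry event listener.
    registry_hooks[discovery_hash] = lambda: print(f"released hook for {discovery_hash}")

def cleanup_all() -> None:
    # Same shape as: while registry_hooks: registry_hooks.popitem()[1]()
    while registry_hooks:
        registry_hooks.popitem()[1]()

track(("sensor", "abc123"))
cleanup_all()  # prints: released hook for ('sensor', 'abc123')
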
@@ -969,7 +1017,7 @@ class MqttEntity(
        # Initialize mixin classes
        MqttAttributes.__init__(self, config)
        MqttAvailability.__init__(self, config)
        MqttDiscoveryUpdate.__init__(self, discovery_data, self.discovery_update)
        MqttDiscoveryUpdate.__init__(self, hass, discovery_data, self.discovery_update)
        MqttEntityDeviceInfo.__init__(self, config.get(CONF_DEVICE), config_entry)

    def _init_entity_id(self):

@@ -130,4 +130,4 @@ class OpenWeatherMapOptionsFlow(config_entries.OptionsFlow):

async def _is_owm_api_online(hass, api_key, lat, lon):
    owm = OWM(api_key).weather_manager()
    return await hass.async_add_executor_job(owm.one_call, lat, lon)
    return await hass.async_add_executor_job(owm.weather_at_coords, lat, lon)

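The OpenWeatherMap hunk above swaps the connectivity probe from pyowm's One Call endpoint to a plain current-weather lookup, which does not need a One Call subscription. A hedged sketch of such a probe; the error handling and function name are assumptions, not the integration's code:

# Sketch only: validate an API key/location with pyowm's current-weather endpoint.
from pyowm import OWM

def is_owm_api_online(api_key: str, lat: float, lon: float) -> bool:
    """Return True if the API key can fetch current weather for the location."""
    weather_manager = OWM(api_key).weather_manager()
    try:
        weather_manager.weather_at_coords(lat, lon)
    except Exception:  # pyowm raises its own exception types on auth/API errors
        return False
    return True
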
@@ -373,7 +373,7 @@ class Luminary(LightEntity):
        self._max_mireds = color_util.color_temperature_kelvin_to_mired(
            self._luminary.min_temp() or DEFAULT_KELVIN
        )
        if len(self._attr_supported_color_modes == 1):
        if len(self._attr_supported_color_modes) == 1:
            # The light supports only a single color mode
            self._attr_color_mode = list(self._attr_supported_color_modes)[0]

@@ -392,7 +392,7 @@ class Luminary(LightEntity):
        if ColorMode.HS in self._attr_supported_color_modes:
            self._rgb_color = self._luminary.rgb()

        if len(self._attr_supported_color_modes > 1):
        if len(self._attr_supported_color_modes) > 1:
            # The light supports hs + color temp, determine which one it is
            if self._rgb_color == (0, 0, 0):
                self._attr_color_mode = ColorMode.COLOR_TEMP

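Both Luminary hunks fix the same slip: the comparison was placed inside the len() call, so len() received a bool and raised TypeError. A tiny standalone illustration, not integration code:

# Sketch only: why len(modes == 1) fails while len(modes) == 1 works.
supported_color_modes = {"color_temp"}

try:
    # Buggy form: compares the set to 1 (False), then calls len(False) -> TypeError.
    if len(supported_color_modes == 1):
        pass
except TypeError as err:
    print(f"buggy form raises: {err}")

# Fixed form: take the length first, then compare.
if len(supported_color_modes) == 1:
    print("single color mode:", next(iter(supported_color_modes)))
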
@@ -91,7 +91,7 @@ class PlugwiseConfigFlow(ConfigFlow, domain=DOMAIN):
        self.discovery_info = discovery_info
        _properties = discovery_info.properties

        unique_id = discovery_info.hostname.split(".")[0]
        unique_id = discovery_info.hostname.split(".")[0].split("-")[0]
        if config_entry := await self.async_set_unique_id(unique_id):
            try:
                await validate_gw_input(

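The Plugwise hunk trims the discovered hostname further: after dropping the domain part it also drops anything after a "-", such as a numeric suffix that mDNS may append, so the unique_id stays stable. A small illustration with a hypothetical hostname:

# Sketch only: hostname is a made-up example of a zeroconf-discovered name.
hostname = "smile000123-2.local."

old_unique_id = hostname.split(".")[0]                 # "smile000123-2"
new_unique_id = hostname.split(".")[0].split("-")[0]   # "smile000123"
print(old_unique_id, new_unique_id)
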
@@ -128,7 +128,7 @@ class PushoverNotificationService(BaseNotificationService):
        self.pushover.send_message(
            self._user_key,
            message,
            kwargs.get(ATTR_TARGET),
            ",".join(kwargs.get(ATTR_TARGET, [])),
            title,
            url,
            url_title,

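The Pushover hunk normalizes the optional target list into the comma-separated device string the Pushover API expects, and the added default keeps join() safe when no target is given. A minimal sketch (ATTR_TARGET here is a stand-in for the real constant):

# Sketch only: joining an optional list of targets into one device string.
ATTR_TARGET = "target"

def device_param(kwargs: dict) -> str:
    # kwargs.get(ATTR_TARGET) may be absent; defaulting to [] keeps join() safe.
    return ",".join(kwargs.get(ATTR_TARGET, []))

print(device_param({}))                                  # ""
print(device_param({ATTR_TARGET: ["phone", "tablet"]}))  # "phone,tablet"
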
@@ -11,8 +11,8 @@
      "connectable": false
    }
  ],
  "requirements": ["qingping-ble==0.6.0"],
  "requirements": ["qingping-ble==0.7.0"],
  "dependencies": ["bluetooth"],
  "codeowners": ["@bdraco"],
  "codeowners": ["@bdraco", "@skgsergio"],
  "iot_class": "local_push"
}

@@ -9,7 +9,7 @@ from typing import Any

from regenmaschine import Client
from regenmaschine.controller import Controller
from regenmaschine.errors import RainMachineError
from regenmaschine.errors import RainMachineError, UnknownAPICallError
import voluptuous as vol

from homeassistant.config_entries import ConfigEntry, ConfigEntryState

@@ -190,7 +190,9 @@ async def async_update_programs_and_zones(
    )


async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
async def async_setup_entry(  # noqa: C901
    hass: HomeAssistant, entry: ConfigEntry
) -> bool:
    """Set up RainMachine as config entry."""
    websession = aiohttp_client.async_get_clientsession(hass)
    client = Client(session=websession)

@@ -244,6 +246,12 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
                data = await controller.restrictions.universal()
            else:
                data = await controller.zones.all(details=True, include_inactive=True)
        except UnknownAPICallError:
            LOGGER.info(
                "Skipping unsupported API call for controller %s: %s",
                controller.name,
                api_category,
            )
        except RainMachineError as err:
            raise UpdateFailed(err) from err

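The RainMachine hunks add a dedicated branch for API calls that the controller's firmware does not support: those are logged and skipped, while other errors still fail the coordinator update. A self-contained sketch of that pattern using stand-in classes (the real ones come from regenmaschine and Home Assistant):

# Sketch only: FakeController and the exception classes are stand-ins.
import asyncio

class UnknownAPICallError(Exception): ...
class RainMachineError(Exception): ...
class UpdateFailed(Exception): ...

class FakeController:
    """Stand-in for a regenmaschine controller."""
    name = "demo"
    async def fetch(self, api_category: str) -> dict:
        if api_category == "restrictions":
            raise UnknownAPICallError("not supported on this firmware")
        return {"category": api_category}

async def fetch_category(controller, api_category: str) -> dict:
    try:
        return await controller.fetch(api_category)
    except UnknownAPICallError:
        # Older firmware: log and keep an empty payload instead of failing the update.
        print(f"Skipping unsupported API call: {api_category}")
        return {}
    except RainMachineError as err:
        # Real errors still surface as a failed coordinator update.
        raise UpdateFailed(err) from err

print(asyncio.run(fetch_category(FakeController(), "restrictions")))  # {}
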
@@ -175,7 +175,9 @@ class ProvisionSettingsBinarySensor(RainMachineEntity, BinarySensorEntity):
    def update_from_latest_data(self) -> None:
        """Update the state."""
        if self.entity_description.key == TYPE_FLOW_SENSOR:
            self._attr_is_on = self.coordinator.data["system"].get("useFlowSensor")
            self._attr_is_on = self.coordinator.data.get("system", {}).get(
                "useFlowSensor"
            )


class UniversalRestrictionsBinarySensor(RainMachineEntity, BinarySensorEntity):

@@ -3,7 +3,7 @@
  "name": "RainMachine",
  "config_flow": true,
  "documentation": "https://www.home-assistant.io/integrations/rainmachine",
  "requirements": ["regenmaschine==2022.08.0"],
  "requirements": ["regenmaschine==2022.09.1"],
  "codeowners": ["@bachya"],
  "iot_class": "local_polling",
  "homekit": {

@@ -273,12 +273,14 @@ class ProvisionSettingsSensor(RainMachineEntity, SensorEntity):
    def update_from_latest_data(self) -> None:
        """Update the state."""
        if self.entity_description.key == TYPE_FLOW_SENSOR_CLICK_M3:
            self._attr_native_value = self.coordinator.data["system"].get(
            self._attr_native_value = self.coordinator.data.get("system", {}).get(
                "flowSensorClicksPerCubicMeter"
            )
        elif self.entity_description.key == TYPE_FLOW_SENSOR_CONSUMED_LITERS:
            clicks = self.coordinator.data["system"].get("flowSensorWateringClicks")
            clicks_per_m3 = self.coordinator.data["system"].get(
            clicks = self.coordinator.data.get("system", {}).get(
                "flowSensorWateringClicks"
            )
            clicks_per_m3 = self.coordinator.data.get("system", {}).get(
                "flowSensorClicksPerCubicMeter"
            )

@@ -287,11 +289,11 @@ class ProvisionSettingsSensor(RainMachineEntity, SensorEntity):
            else:
                self._attr_native_value = None
        elif self.entity_description.key == TYPE_FLOW_SENSOR_START_INDEX:
            self._attr_native_value = self.coordinator.data["system"].get(
            self._attr_native_value = self.coordinator.data.get("system", {}).get(
                "flowSensorStartIndex"
            )
        elif self.entity_description.key == TYPE_FLOW_SENSOR_WATERING_CLICKS:
            self._attr_native_value = self.coordinator.data["system"].get(
            self._attr_native_value = self.coordinator.data.get("system", {}).get(
                "flowSensorWateringClicks"
            )

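The sensor and binary-sensor hunks above all make the same defensive change: look up the "system" section with .get() so an empty coordinator payload yields None instead of raising KeyError. In isolation:

# Sketch only: chained .get() lookups tolerate a missing "system" section.
coordinator_data: dict = {}  # e.g. an empty update from the controller

value = coordinator_data.get("system", {}).get("flowSensorWateringClicks")
print(value)  # None rather than raising KeyError: 'system'
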
Some files were not shown because too many files have changed in this diff