forked from home-assistant/core
Compare commits
209 Commits
2022.9.0b3...2022.9.7
| Author | SHA1 | Date |
|---|---|---|
|  | 77a933d6f0 |  |
|  | 04d4483998 |  |
|  | 217ea5e676 |  |
|  | 943cca3d23 |  |
|  | a0f2571872 |  |
|  | f2860fd6b3 |  |
|  | 44109a6922 |  |
|  | b55ce96a56 |  |
|  | f2fe91dac1 |  |
|  | eb80062b26 |  |
|  | 3b8f08270e |  |
|  | 60c78fd33f |  |
|  | 12903a14c5 |  |
|  | 32cea6b95d |  |
|  | b4f1683c40 |  |
|  | 7be5fde8d6 |  |
|  | d44ff16f9d |  |
|  | b51dc0884e |  |
|  | f3451858ef |  |
|  | 103f490519 |  |
|  | 68fa40c0fa |  |
|  | 5c294550e8 |  |
|  | 72769130f9 |  |
|  | 8f21e7775b |  |
|  | 6704efd1ef |  |
|  | 829777a211 |  |
|  | 48c6fbf22e |  |
|  | fac2a46781 |  |
|  | a688b4c581 |  |
|  | 8c9e0a8239 |  |
|  | 91398b6a75 |  |
|  | dea221b155 |  |
|  | bfcb333227 |  |
|  | 3dd0dbf38f |  |
|  | 4894e2e5a4 |  |
|  | e7a616e8e4 |  |
|  | b3a4838978 |  |
|  | 933dde1d1e |  |
|  | a411cd9c20 |  |
|  | da81dbe6ac |  |
|  | f5c30ab10a |  |
|  | 454675d86b |  |
|  | cce4496ad6 |  |
|  | ebeebeaec1 |  |
|  | c8d16175da |  |
|  | a2aa0e608d |  |
|  | 7eb98ffbd1 |  |
|  | 6e62080cd9 |  |
|  | 39dee6d426 |  |
|  | 3a89a49d4a |  |
|  | ef66d8e705 |  |
|  | c1809681b6 |  |
|  | 050c09df62 |  |
|  | e0b63ac488 |  |
|  | ed6575fefb |  |
|  | 318ae7750a |  |
|  | 0525a1cd97 |  |
|  | d31d4e2916 |  |
|  | 40c5689507 |  |
|  | a4749178f1 |  |
|  | 8229e241f1 |  |
|  | 2b40f3f1e5 |  |
|  | e839849456 |  |
|  | e711758cfd |  |
|  | 896955e4df |  |
|  | 7b83807baa |  |
|  | 6a197332c7 |  |
|  | 1955ff9e0d |  |
|  | 29caf06439 |  |
|  | 0b5953038e |  |
|  | f07e1bc500 |  |
|  | 843d5f101a |  |
|  | d98ed5c6f6 |  |
|  | 8599472880 |  |
|  | 04d6bb085b |  |
|  | 6f9a311cec |  |
|  | 336179df6d |  |
|  | 9459af30b0 |  |
|  | ee07ca8caa |  |
|  | 3beed13586 |  |
|  | f0753f7a97 |  |
|  | dd007cd765 |  |
|  | 7cdac3ee8c |  |
|  | cd7f65bb6a |  |
|  | b21a37cad5 |  |
|  | bfcb9402ef |  |
|  | ad396f0538 |  |
|  | 12edfb3929 |  |
|  | 47f6be77cc |  |
|  | 9acf74d783 |  |
|  | 0aa2685e0c |  |
|  | a90b6d37bf |  |
|  | d6bf1a8caa |  |
|  | 95a89448e0 |  |
|  | f6d26476b5 |  |
|  | 9640553b52 |  |
|  | 3129114d07 |  |
|  | 184a1c95f0 |  |
|  | f18ab504a5 |  |
|  | 2bd71f62ea |  |
|  | 296db8b2af |  |
|  | a277664187 |  |
|  | 1b7a06912a |  |
|  | e7986a54a5 |  |
|  | de8b066a1d |  |
|  | 4d4a87ba05 |  |
|  | 4b79e82e31 |  |
|  | 1e8f461270 |  |
|  | 6e88b8d3d5 |  |
|  | a626ab4f1a |  |
|  | c7cb0d1a07 |  |
|  | 183c61b6ca |  |
|  | 95c20df367 |  |
|  | a969ce273a |  |
|  | 5f90760176 |  |
|  | 795be361b4 |  |
|  | cdd5c809bb |  |
|  | c731e2f125 |  |
|  | 1789a8a385 |  |
|  | 57717f13fc |  |
|  | e4aab6a818 |  |
|  | 258791626e |  |
|  | 78802c8480 |  |
|  | b24f3725d6 |  |
|  | 06116f76fa |  |
|  | 27c0a37053 |  |
|  | 2b961fd327 |  |
|  | 125afb39f0 |  |
|  | 3ee62d619f |  |
|  | dc7c860c6a |  |
|  | f042cc5d7b |  |
|  | 4c0872b4e4 |  |
|  | 21f6b50f7c |  |
|  | d670df74cb |  |
|  | 0a7f3f6ced |  |
|  | fee9a303ff |  |
|  | a4f398a750 |  |
|  | c873eae79c |  |
|  | d559b6482a |  |
|  | 760853f615 |  |
|  | cfe8ebdad4 |  |
|  | 2ddd1b516c |  |
|  | 3b025b211e |  |
|  | 4009a32fb5 |  |
|  | 6f3b49601e |  |
|  | 31858ad779 |  |
|  | ab9d9d599e |  |
|  | ce6d337bd5 |  |
|  | 3fd887b1f2 |  |
|  | 996a3477b0 |  |
|  | 910f27f3a2 |  |
|  | 4ab5cdcb79 |  |
|  | e69fde6875 |  |
|  | 10f7e2ff8a |  |
|  | 3acc3af38c |  |
|  | a3edbfc601 |  |
|  | 941a5e3820 |  |
|  | 2eeab820b7 |  |
|  | 8d0ebdd1f9 |  |
|  | 9901b31316 |  |
|  | a4f528e908 |  |
|  | 9aa87761cf |  |
|  | d1b637ea7a |  |
|  | c8ad8a6d86 |  |
|  | 9155f669e9 |  |
|  | e1e153f391 |  |
|  | 1dbcf88e15 |  |
|  | a13438c5b0 |  |
|  | d98687b789 |  |
|  | 319b0b8902 |  |
|  | 62dcbc4d4a |  |
|  | 6989b16274 |  |
|  | 31d085cdf8 |  |
|  | 61ee621c90 |  |
|  | f5e61ecdec |  |
|  | 2bfcdc66b6 |  |
|  | 3240f8f938 |  |
|  | 74ddc336ca |  |
|  | 6c36d5acaa |  |
|  | e8c4711d88 |  |
|  | bca9dc1f61 |  |
|  | 4f8421617e |  |
|  | 40421b41f7 |  |
|  | b0ff4fc057 |  |
|  | 605e350159 |  |
|  | ad8cd9c957 |  |
|  | e8ab4eef44 |  |
|  | b1241bf0f2 |  |
|  | f3e811417f |  |
|  | 1231ba4d03 |  |
|  | e07554dc25 |  |
|  | 2fa517b81b |  |
|  | 0d042d496d |  |
|  | c8156d5de6 |  |
|  | 9f06baa778 |  |
|  | 52abf0851b |  |
|  | da83ceca5b |  |
|  | f9b95cc4a4 |  |
|  | f60ae40661 |  |
|  | ea0b406692 |  |
|  | 9387449abf |  |
|  | 5f4013164c |  |
|  | 3856178dc0 |  |
|  | 32a9fba58e |  |
|  | 9733887b6a |  |
|  | b215514c90 |  |
|  | 0e930fd626 |  |
|  | cd4c31bc79 |  |
|  | bc04755d05 |  |
@@ -587,7 +587,7 @@ omit =
homeassistant/components/iqvia/sensor.py
homeassistant/components/irish_rail_transport/sensor.py
homeassistant/components/iss/__init__.py
homeassistant/components/iss/binary_sensor.py
homeassistant/components/iss/sensor.py
homeassistant/components/isy994/__init__.py
homeassistant/components/isy994/binary_sensor.py
homeassistant/components/isy994/climate.py
@@ -1216,7 +1216,7 @@ omit =
homeassistant/components/switchbot/const.py
homeassistant/components/switchbot/entity.py
homeassistant/components/switchbot/cover.py
homeassistant/components/switchbot/light.py
homeassistant/components/switchbot/light.py
homeassistant/components/switchbot/sensor.py
homeassistant/components/switchbot/coordinator.py
homeassistant/components/switchmate/switch.py

13 .github/workflows/ci.yaml vendored
@@ -23,7 +23,8 @@ env:
CACHE_VERSION: 1
PIP_CACHE_VERSION: 1
HA_SHORT_VERSION: 2022.9
DEFAULT_PYTHON: 3.9
DEFAULT_PYTHON: 3.9.14
ALL_PYTHON_VERSIONS: "['3.9.14', '3.10.7']"
PRE_COMMIT_CACHE: ~/.cache/pre-commit
PIP_CACHE: /tmp/pip-cache
SQLALCHEMY_WARN_20: 1
@@ -46,6 +47,7 @@ jobs:
pre-commit_cache_key: ${{ steps.generate_pre-commit_cache_key.outputs.key }}
python_cache_key: ${{ steps.generate_python_cache_key.outputs.key }}
requirements: ${{ steps.core.outputs.requirements }}
python_versions: ${{ steps.info.outputs.python_versions }}
test_full_suite: ${{ steps.info.outputs.test_full_suite }}
test_group_count: ${{ steps.info.outputs.test_group_count }}
test_groups: ${{ steps.info.outputs.test_groups }}
@@ -143,6 +145,8 @@ jobs:
fi

# Output & sent to GitHub Actions
echo "python_versions: ${ALL_PYTHON_VERSIONS}"
echo "::set-output name=python_versions::${ALL_PYTHON_VERSIONS}"
echo "test_full_suite: ${test_full_suite}"
echo "::set-output name=test_full_suite::${test_full_suite}"
echo "integrations_glob: ${integrations_glob}"
@@ -169,7 +173,6 @@ jobs:
uses: actions/setup-python@v4.1.0
with:
python-version: ${{ env.DEFAULT_PYTHON }}
cache: "pip"
- name: Restore base Python virtual environment
id: cache-venv
uses: actions/cache@v3.0.8
@@ -464,7 +467,7 @@ jobs:
timeout-minutes: 60
strategy:
matrix:
python-version: ["3.9", "3.10"]
python-version: ${{ fromJSON(needs.info.outputs.python_versions) }}
steps:
- name: Check out code from GitHub
uses: actions/checkout@v3.0.2
@@ -683,7 +686,7 @@ jobs:
strategy:
fail-fast: false
matrix:
python-version: ["3.9", "3.10"]
python-version: ${{ fromJson(needs.info.outputs.python_versions) }}
name: Run pip check ${{ matrix.python-version }}
steps:
- name: Check out code from GitHub
@@ -730,7 +733,7 @@ jobs:
fail-fast: false
matrix:
group: ${{ fromJson(needs.info.outputs.test_groups) }}
python-version: ["3.9", "3.10"]
python-version: ${{ fromJson(needs.info.outputs.python_versions) }}
name: >-
Run tests Python ${{ matrix.python-version }} (${{ matrix.group }})
steps:

@@ -137,6 +137,8 @@ build.json @home-assistant/supervisor
/tests/components/blebox/ @bbx-a @riokuu
/homeassistant/components/blink/ @fronzbot
/tests/components/blink/ @fronzbot
/homeassistant/components/bluemaestro/ @bdraco
/tests/components/bluemaestro/ @bdraco
/homeassistant/components/blueprint/ @home-assistant/core
/tests/components/blueprint/ @home-assistant/core
/homeassistant/components/bluesound/ @thrawnarn
@@ -275,7 +277,7 @@ build.json @home-assistant/supervisor
/tests/components/ecobee/ @marthoc
/homeassistant/components/econet/ @vangorra @w1ll1am23
/tests/components/econet/ @vangorra @w1ll1am23
/homeassistant/components/ecovacs/ @OverloadUT
/homeassistant/components/ecovacs/ @OverloadUT @mib1185
/homeassistant/components/ecowitt/ @pvizeli
/tests/components/ecowitt/ @pvizeli
/homeassistant/components/edl21/ @mtdcr
@@ -865,8 +867,8 @@ build.json @home-assistant/supervisor
/homeassistant/components/pvpc_hourly_pricing/ @azogue
/tests/components/pvpc_hourly_pricing/ @azogue
/homeassistant/components/qbittorrent/ @geoffreylagaisse
/homeassistant/components/qingping/ @bdraco
/tests/components/qingping/ @bdraco
/homeassistant/components/qingping/ @bdraco @skgsergio
/tests/components/qingping/ @bdraco @skgsergio
/homeassistant/components/qld_bushfire/ @exxamalte
/tests/components/qld_bushfire/ @exxamalte
/homeassistant/components/qnap_qsw/ @Noltari

@@ -15,6 +15,11 @@
},
"description": "Select the NMI of the site you would like to add"
}
},
"error": {
"invalid_api_token": "[%key:common::config_flow::error::invalid_api_key%]",
"no_site": "No site provided",
"unknown_error": "[%key:common::config_flow::error::unknown%]"
}
}
}

@@ -15,6 +15,11 @@
},
"description": "Go to {api_url} to generate an API key"
}
},
"error": {
"invalid_api_token": "Invalid API key",
"no_site": "No site provided",
"unknown_error": "Unexpected error"
}
}
}

@@ -4,7 +4,7 @@ from datetime import timedelta

from homeassistant.const import Platform

DEFAULT_TIMEOUT = 15
DEFAULT_TIMEOUT = 25

CONF_ACCESS_TOKEN_CACHE_FILE = "access_token_cache_file"
CONF_LOGIN_METHOD = "login_method"

@@ -2,7 +2,7 @@
"domain": "august",
"name": "August",
"documentation": "https://www.home-assistant.io/integrations/august",
"requirements": ["yalexs==1.2.1"],
"requirements": ["yalexs==1.2.2"],
"codeowners": ["@bdraco"],
"dhcp": [
{

@@ -9,6 +9,7 @@ import voluptuous as vol
from voluptuous.humanize import humanize_error

from homeassistant.components import blueprint
from homeassistant.components.blueprint import CONF_USE_BLUEPRINT
from homeassistant.const import (
ATTR_ENTITY_ID,
ATTR_MODE,
@@ -20,6 +21,7 @@ from homeassistant.const import (
CONF_EVENT_DATA,
CONF_ID,
CONF_MODE,
CONF_PATH,
CONF_PLATFORM,
CONF_VARIABLES,
CONF_ZONE,
@@ -224,6 +226,21 @@ def areas_in_automation(hass: HomeAssistant, entity_id: str) -> list[str]:
return list(automation_entity.referenced_areas)

@callback
def automations_with_blueprint(hass: HomeAssistant, blueprint_path: str) -> list[str]:
"""Return all automations that reference the blueprint."""
if DOMAIN not in hass.data:
return []

component = hass.data[DOMAIN]

return [
automation_entity.entity_id
for automation_entity in component.entities
if automation_entity.referenced_blueprint == blueprint_path
]

async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
"""Set up all automations."""
hass.data[DOMAIN] = component = EntityComponent(LOGGER, DOMAIN, hass)
@@ -346,7 +363,14 @@ class AutomationEntity(ToggleEntity, RestoreEntity):
return self.action_script.referenced_areas

@property
def referenced_devices(self):
def referenced_blueprint(self) -> str | None:
"""Return referenced blueprint or None."""
if self._blueprint_inputs is None:
return None
return cast(str, self._blueprint_inputs[CONF_USE_BLUEPRINT][CONF_PATH])

@property
def referenced_devices(self) -> set[str]:
"""Return a set of referenced devices."""
if self._referenced_devices is not None:
return self._referenced_devices

@@ -8,8 +8,15 @@ from .const import DOMAIN, LOGGER
DATA_BLUEPRINTS = "automation_blueprints"

def _blueprint_in_use(hass: HomeAssistant, blueprint_path: str) -> bool:
"""Return True if any automation references the blueprint."""
from . import automations_with_blueprint  # pylint: disable=import-outside-toplevel

return len(automations_with_blueprint(hass, blueprint_path)) > 0

@singleton(DATA_BLUEPRINTS)
@callback
def async_get_blueprints(hass: HomeAssistant) -> blueprint.DomainBlueprints:
"""Get automation blueprints."""
return blueprint.DomainBlueprints(hass, DOMAIN, LOGGER)
return blueprint.DomainBlueprints(hass, DOMAIN, LOGGER, _blueprint_in_use)

@@ -2,7 +2,7 @@
"domain": "blink",
"name": "Blink",
"documentation": "https://www.home-assistant.io/integrations/blink",
"requirements": ["blinkpy==0.19.0"],
"requirements": ["blinkpy==0.19.2"],
"codeowners": ["@fronzbot"],
"dhcp": [
{

49 homeassistant/components/bluemaestro/__init__.py Normal file
@@ -0,0 +1,49 @@
"""The BlueMaestro integration."""
from __future__ import annotations

import logging

from bluemaestro_ble import BlueMaestroBluetoothDeviceData

from homeassistant.components.bluetooth import BluetoothScanningMode
from homeassistant.components.bluetooth.passive_update_processor import (
PassiveBluetoothProcessorCoordinator,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import Platform
from homeassistant.core import HomeAssistant

from .const import DOMAIN

PLATFORMS: list[Platform] = [Platform.SENSOR]

_LOGGER = logging.getLogger(__name__)

async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Set up BlueMaestro BLE device from a config entry."""
address = entry.unique_id
assert address is not None
data = BlueMaestroBluetoothDeviceData()
coordinator = hass.data.setdefault(DOMAIN, {})[
entry.entry_id
] = PassiveBluetoothProcessorCoordinator(
hass,
_LOGGER,
address=address,
mode=BluetoothScanningMode.PASSIVE,
update_method=data.update,
)
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
entry.async_on_unload(
coordinator.async_start()
)  # only start after all platforms have had a chance to subscribe
return True

async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Unload a config entry."""
if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS):
hass.data[DOMAIN].pop(entry.entry_id)

return unload_ok

94 homeassistant/components/bluemaestro/config_flow.py Normal file
@@ -0,0 +1,94 @@
"""Config flow for bluemaestro ble integration."""
from __future__ import annotations

from typing import Any

from bluemaestro_ble import BlueMaestroBluetoothDeviceData as DeviceData
import voluptuous as vol

from homeassistant.components.bluetooth import (
BluetoothServiceInfoBleak,
async_discovered_service_info,
)
from homeassistant.config_entries import ConfigFlow
from homeassistant.const import CONF_ADDRESS
from homeassistant.data_entry_flow import FlowResult

from .const import DOMAIN

class BlueMaestroConfigFlow(ConfigFlow, domain=DOMAIN):
"""Handle a config flow for bluemaestro."""

VERSION = 1

def __init__(self) -> None:
"""Initialize the config flow."""
self._discovery_info: BluetoothServiceInfoBleak | None = None
self._discovered_device: DeviceData | None = None
self._discovered_devices: dict[str, str] = {}

async def async_step_bluetooth(
self, discovery_info: BluetoothServiceInfoBleak
) -> FlowResult:
"""Handle the bluetooth discovery step."""
await self.async_set_unique_id(discovery_info.address)
self._abort_if_unique_id_configured()
device = DeviceData()
if not device.supported(discovery_info):
return self.async_abort(reason="not_supported")
self._discovery_info = discovery_info
self._discovered_device = device
return await self.async_step_bluetooth_confirm()

async def async_step_bluetooth_confirm(
self, user_input: dict[str, Any] | None = None
) -> FlowResult:
"""Confirm discovery."""
assert self._discovered_device is not None
device = self._discovered_device
assert self._discovery_info is not None
discovery_info = self._discovery_info
title = device.title or device.get_device_name() or discovery_info.name
if user_input is not None:
return self.async_create_entry(title=title, data={})

self._set_confirm_only()
placeholders = {"name": title}
self.context["title_placeholders"] = placeholders
return self.async_show_form(
step_id="bluetooth_confirm", description_placeholders=placeholders
)

async def async_step_user(
self, user_input: dict[str, Any] | None = None
) -> FlowResult:
"""Handle the user step to pick discovered device."""
if user_input is not None:
address = user_input[CONF_ADDRESS]
await self.async_set_unique_id(address, raise_on_progress=False)
self._abort_if_unique_id_configured()
return self.async_create_entry(
title=self._discovered_devices[address], data={}
)

current_addresses = self._async_current_ids()
for discovery_info in async_discovered_service_info(self.hass, False):
address = discovery_info.address
if address in current_addresses or address in self._discovered_devices:
continue
device = DeviceData()
if device.supported(discovery_info):
self._discovered_devices[address] = (
device.title or device.get_device_name() or discovery_info.name
)

if not self._discovered_devices:
return self.async_abort(reason="no_devices_found")

return self.async_show_form(
step_id="user",
data_schema=vol.Schema(
{vol.Required(CONF_ADDRESS): vol.In(self._discovered_devices)}
),
)

3 homeassistant/components/bluemaestro/const.py Normal file
@@ -0,0 +1,3 @@
"""Constants for the BlueMaestro integration."""

DOMAIN = "bluemaestro"

31 homeassistant/components/bluemaestro/device.py Normal file
@@ -0,0 +1,31 @@
"""Support for BlueMaestro devices."""
from __future__ import annotations

from bluemaestro_ble import DeviceKey, SensorDeviceInfo

from homeassistant.components.bluetooth.passive_update_processor import (
PassiveBluetoothEntityKey,
)
from homeassistant.const import ATTR_MANUFACTURER, ATTR_MODEL, ATTR_NAME
from homeassistant.helpers.entity import DeviceInfo

def device_key_to_bluetooth_entity_key(
device_key: DeviceKey,
) -> PassiveBluetoothEntityKey:
"""Convert a device key to an entity key."""
return PassiveBluetoothEntityKey(device_key.key, device_key.device_id)

def sensor_device_info_to_hass(
sensor_device_info: SensorDeviceInfo,
) -> DeviceInfo:
"""Convert a bluemaestro device info to a sensor device info."""
hass_device_info = DeviceInfo({})
if sensor_device_info.name is not None:
hass_device_info[ATTR_NAME] = sensor_device_info.name
if sensor_device_info.manufacturer is not None:
hass_device_info[ATTR_MANUFACTURER] = sensor_device_info.manufacturer
if sensor_device_info.model is not None:
hass_device_info[ATTR_MODEL] = sensor_device_info.model
return hass_device_info

16 homeassistant/components/bluemaestro/manifest.json Normal file
@@ -0,0 +1,16 @@
{
"domain": "bluemaestro",
"name": "BlueMaestro",
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/bluemaestro",
"bluetooth": [
{
"manufacturer_id": 307,
"connectable": false
}
],
"requirements": ["bluemaestro-ble==0.2.0"],
"dependencies": ["bluetooth"],
"codeowners": ["@bdraco"],
"iot_class": "local_push"
}

149 homeassistant/components/bluemaestro/sensor.py Normal file
@@ -0,0 +1,149 @@
"""Support for BlueMaestro sensors."""
from __future__ import annotations

from typing import Optional, Union

from bluemaestro_ble import (
SensorDeviceClass as BlueMaestroSensorDeviceClass,
SensorUpdate,
Units,
)

from homeassistant import config_entries
from homeassistant.components.bluetooth.passive_update_processor import (
PassiveBluetoothDataProcessor,
PassiveBluetoothDataUpdate,
PassiveBluetoothProcessorCoordinator,
PassiveBluetoothProcessorEntity,
)
from homeassistant.components.sensor import (
SensorDeviceClass,
SensorEntity,
SensorEntityDescription,
SensorStateClass,
)
from homeassistant.const import (
PERCENTAGE,
PRESSURE_MBAR,
SIGNAL_STRENGTH_DECIBELS_MILLIWATT,
TEMP_CELSIUS,
)
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity import EntityCategory
from homeassistant.helpers.entity_platform import AddEntitiesCallback

from .const import DOMAIN
from .device import device_key_to_bluetooth_entity_key, sensor_device_info_to_hass

SENSOR_DESCRIPTIONS = {
(BlueMaestroSensorDeviceClass.BATTERY, Units.PERCENTAGE): SensorEntityDescription(
key=f"{BlueMaestroSensorDeviceClass.BATTERY}_{Units.PERCENTAGE}",
device_class=SensorDeviceClass.BATTERY,
native_unit_of_measurement=PERCENTAGE,
state_class=SensorStateClass.MEASUREMENT,
entity_category=EntityCategory.DIAGNOSTIC,
),
(BlueMaestroSensorDeviceClass.HUMIDITY, Units.PERCENTAGE): SensorEntityDescription(
key=f"{BlueMaestroSensorDeviceClass.HUMIDITY}_{Units.PERCENTAGE}",
device_class=SensorDeviceClass.HUMIDITY,
native_unit_of_measurement=PERCENTAGE,
state_class=SensorStateClass.MEASUREMENT,
),
(
BlueMaestroSensorDeviceClass.SIGNAL_STRENGTH,
Units.SIGNAL_STRENGTH_DECIBELS_MILLIWATT,
): SensorEntityDescription(
key=f"{BlueMaestroSensorDeviceClass.SIGNAL_STRENGTH}_{Units.SIGNAL_STRENGTH_DECIBELS_MILLIWATT}",
device_class=SensorDeviceClass.SIGNAL_STRENGTH,
native_unit_of_measurement=SIGNAL_STRENGTH_DECIBELS_MILLIWATT,
state_class=SensorStateClass.MEASUREMENT,
entity_category=EntityCategory.DIAGNOSTIC,
entity_registry_enabled_default=False,
),
(
BlueMaestroSensorDeviceClass.TEMPERATURE,
Units.TEMP_CELSIUS,
): SensorEntityDescription(
key=f"{BlueMaestroSensorDeviceClass.TEMPERATURE}_{Units.TEMP_CELSIUS}",
device_class=SensorDeviceClass.TEMPERATURE,
native_unit_of_measurement=TEMP_CELSIUS,
state_class=SensorStateClass.MEASUREMENT,
),
(
BlueMaestroSensorDeviceClass.DEW_POINT,
Units.TEMP_CELSIUS,
): SensorEntityDescription(
key=f"{BlueMaestroSensorDeviceClass.DEW_POINT}_{Units.TEMP_CELSIUS}",
device_class=SensorDeviceClass.TEMPERATURE,
native_unit_of_measurement=TEMP_CELSIUS,
state_class=SensorStateClass.MEASUREMENT,
),
(
BlueMaestroSensorDeviceClass.PRESSURE,
Units.PRESSURE_MBAR,
): SensorEntityDescription(
key=f"{BlueMaestroSensorDeviceClass.PRESSURE}_{Units.PRESSURE_MBAR}",
device_class=SensorDeviceClass.PRESSURE,
native_unit_of_measurement=PRESSURE_MBAR,
state_class=SensorStateClass.MEASUREMENT,
),
}

def sensor_update_to_bluetooth_data_update(
sensor_update: SensorUpdate,
) -> PassiveBluetoothDataUpdate:
"""Convert a sensor update to a bluetooth data update."""
return PassiveBluetoothDataUpdate(
devices={
device_id: sensor_device_info_to_hass(device_info)
for device_id, device_info in sensor_update.devices.items()
},
entity_descriptions={
device_key_to_bluetooth_entity_key(device_key): SENSOR_DESCRIPTIONS[
(description.device_class, description.native_unit_of_measurement)
]
for device_key, description in sensor_update.entity_descriptions.items()
if description.device_class and description.native_unit_of_measurement
},
entity_data={
device_key_to_bluetooth_entity_key(device_key): sensor_values.native_value
for device_key, sensor_values in sensor_update.entity_values.items()
},
entity_names={
device_key_to_bluetooth_entity_key(device_key): sensor_values.name
for device_key, sensor_values in sensor_update.entity_values.items()
},
)

async def async_setup_entry(
hass: HomeAssistant,
entry: config_entries.ConfigEntry,
async_add_entities: AddEntitiesCallback,
) -> None:
"""Set up the BlueMaestro BLE sensors."""
coordinator: PassiveBluetoothProcessorCoordinator = hass.data[DOMAIN][
entry.entry_id
]
processor = PassiveBluetoothDataProcessor(sensor_update_to_bluetooth_data_update)
entry.async_on_unload(
processor.async_add_entities_listener(
BlueMaestroBluetoothSensorEntity, async_add_entities
)
)
entry.async_on_unload(coordinator.async_register_processor(processor))

class BlueMaestroBluetoothSensorEntity(
PassiveBluetoothProcessorEntity[
PassiveBluetoothDataProcessor[Optional[Union[float, int]]]
],
SensorEntity,
):
"""Representation of a BlueMaestro sensor."""

@property
def native_value(self) -> int | float | None:
"""Return the native value."""
return self.processor.entity_data.get(self.entity_key)

22 homeassistant/components/bluemaestro/strings.json Normal file
@@ -0,0 +1,22 @@
{
"config": {
"flow_title": "[%key:component::bluetooth::config::flow_title%]",
"step": {
"user": {
"description": "[%key:component::bluetooth::config::step::user::description%]",
"data": {
"address": "[%key:component::bluetooth::config::step::user::data::address%]"
}
},
"bluetooth_confirm": {
"description": "[%key:component::bluetooth::config::step::bluetooth_confirm::description%]"
}
},
"abort": {
"not_supported": "Device not supported",
"no_devices_found": "[%key:common::config_flow::abort::no_devices_found%]",
"already_in_progress": "[%key:common::config_flow::abort::already_in_progress%]",
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]"
}
}
}

22 homeassistant/components/bluemaestro/translations/en.json Normal file
@@ -0,0 +1,22 @@
{
"config": {
"abort": {
"already_configured": "Device is already configured",
"already_in_progress": "Configuration flow is already in progress",
"no_devices_found": "No devices found on the network",
"not_supported": "Device not supported"
},
"flow_title": "{name}",
"step": {
"bluetooth_confirm": {
"description": "Do you want to setup {name}?"
},
"user": {
"data": {
"address": "Device"
},
"description": "Choose a device to setup"
}
}
}
}

@@ -3,7 +3,7 @@ from homeassistant.core import HomeAssistant
from homeassistant.helpers.typing import ConfigType

from . import websocket_api
from .const import DOMAIN  # noqa: F401
from .const import CONF_USE_BLUEPRINT, DOMAIN  # noqa: F401
from .errors import (  # noqa: F401
BlueprintException,
BlueprintWithNameException,

@@ -91,3 +91,11 @@ class FileAlreadyExists(BlueprintWithNameException):
def __init__(self, domain: str, blueprint_name: str) -> None:
"""Initialize blueprint exception."""
super().__init__(domain, blueprint_name, "Blueprint already exists")

class BlueprintInUse(BlueprintWithNameException):
"""Error when a blueprint is in use."""

def __init__(self, domain: str, blueprint_name: str) -> None:
"""Initialize blueprint exception."""
super().__init__(domain, blueprint_name, "Blueprint in use")

@@ -2,6 +2,7 @@
from __future__ import annotations

import asyncio
from collections.abc import Callable
import logging
import pathlib
import shutil
@@ -35,6 +36,7 @@ from .const import (
)
from .errors import (
BlueprintException,
BlueprintInUse,
FailedToLoad,
FileAlreadyExists,
InvalidBlueprint,
@@ -183,11 +185,13 @@ class DomainBlueprints:
hass: HomeAssistant,
domain: str,
logger: logging.Logger,
blueprint_in_use: Callable[[HomeAssistant, str], bool],
) -> None:
"""Initialize a domain blueprints instance."""
self.hass = hass
self.domain = domain
self.logger = logger
self._blueprint_in_use = blueprint_in_use
self._blueprints: dict[str, Blueprint | None] = {}
self._load_lock = asyncio.Lock()

@@ -302,6 +306,8 @@ class DomainBlueprints:

async def async_remove_blueprint(self, blueprint_path: str) -> None:
"""Remove a blueprint file."""
if self._blueprint_in_use(self.hass, blueprint_path):
raise BlueprintInUse(self.domain, blueprint_path)
path = self.blueprint_folder / blueprint_path
await self.hass.async_add_executor_job(path.unlink)
self._blueprints[blueprint_path] = None

@@ -6,6 +6,8 @@ import logging
import time
from typing import Any, Generic, TypeVar

from bleak import BleakError

from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.debounce import Debouncer

@@ -109,6 +111,13 @@ class ActiveBluetoothProcessorCoordinator(

try:
update = await self._async_poll_data(self._last_service_info)
except BleakError as exc:
if self.last_poll_successful:
self.logger.error(
"%s: Bluetooth error whilst polling: %s", self.address, str(exc)
)
self.last_poll_successful = False
return
except Exception:  # pylint: disable=broad-except
if self.last_poll_successful:
self.logger.exception("%s: Failure while polling", self.address)

@@ -58,7 +58,7 @@ class AdapterDetails(TypedDict, total=False):

address: str
sw_version: str
hw_version: str
hw_version: str | None
passive_scan: bool

@@ -54,8 +54,13 @@ if TYPE_CHECKING:

FILTER_UUIDS: Final = "UUIDs"

APPLE_MFR_ID: Final = 76
APPLE_HOMEKIT_START_BYTE: Final = 0x06  # homekit_controller
APPLE_DEVICE_ID_START_BYTE: Final = 0x10  # bluetooth_le_tracker
APPLE_START_BYTES_WANTED: Final = {APPLE_DEVICE_ID_START_BYTE, APPLE_HOMEKIT_START_BYTE}

RSSI_SWITCH_THRESHOLD = 6
NO_RSSI_VALUE = -1000

_LOGGER = logging.getLogger(__name__)

@@ -79,7 +84,7 @@ def _prefer_previous_adv(
STALE_ADVERTISEMENT_SECONDS,
)
return False
if new.device.rssi - RSSI_SWITCH_THRESHOLD > old.device.rssi:
if new.device.rssi - RSSI_SWITCH_THRESHOLD > (old.device.rssi or NO_RSSI_VALUE):
# If new advertisement is RSSI_SWITCH_THRESHOLD more, the new one is preferred
if new.source != old.source:
_LOGGER.debug(
@@ -290,6 +295,19 @@ class BluetoothManager:
than the source from the history or the timestamp
in the history is older than 180s
"""

# Pre-filter noisy apple devices as they can account for 20-35% of the
# traffic on a typical network.
advertisement_data = service_info.advertisement
manufacturer_data = advertisement_data.manufacturer_data
if (
len(manufacturer_data) == 1
and (apple_data := manufacturer_data.get(APPLE_MFR_ID))
and apple_data[0] not in APPLE_START_BYTES_WANTED
and not advertisement_data.service_data
):
return

device = service_info.device
connectable = service_info.connectable
address = device.address
@@ -299,7 +317,6 @@ class BluetoothManager:
return

self._history[address] = service_info
advertisement_data = service_info.advertisement
source = service_info.source

if connectable:
@@ -311,12 +328,13 @@ class BluetoothManager:

matched_domains = self._integration_matcher.match_domains(service_info)
_LOGGER.debug(
"%s: %s %s connectable: %s match: %s",
"%s: %s %s connectable: %s match: %s rssi: %s",
source,
address,
advertisement_data,
connectable,
matched_domains,
device.rssi,
)

for match in self._callback_index.match_callbacks(service_info):
@@ -367,11 +385,11 @@ class BluetoothManager:
callback_matcher[CONNECTABLE] = matcher.get(CONNECTABLE, True)

connectable = callback_matcher[CONNECTABLE]
self._callback_index.add_with_address(callback_matcher)
self._callback_index.add_callback_matcher(callback_matcher)

@hass_callback
def _async_remove_callback() -> None:
self._callback_index.remove_with_address(callback_matcher)
self._callback_index.remove_callback_matcher(callback_matcher)

# If we have history for the subscriber, we can trigger the callback
# immediately with the last packet so the subscriber can see the

@@ -5,9 +5,11 @@
"dependencies": ["usb"],
"quality_scale": "internal",
"requirements": [
"bleak==0.16.0",
"bluetooth-adapters==0.3.4",
"bluetooth-auto-recovery==0.3.0"
"bleak==0.17.0",
"bleak-retry-connector==1.17.1",
"bluetooth-adapters==0.4.1",
"bluetooth-auto-recovery==0.3.3",
"dbus-fast==1.5.1"
],
"codeowners": ["@bdraco"],
"config_flow": true,

@@ -173,36 +173,40 @@ class BluetoothMatcherIndexBase(Generic[_T]):
self.service_data_uuid_set: set[str] = set()
self.manufacturer_id_set: set[int] = set()

def add(self, matcher: _T) -> None:
def add(self, matcher: _T) -> bool:
"""Add a matcher to the index.

Matchers must end up only in one bucket.

We put them in the bucket that they are most likely to match.
"""
# Local name is the cheapest to match since its just a dict lookup
if LOCAL_NAME in matcher:
self.local_name.setdefault(
_local_name_to_index_key(matcher[LOCAL_NAME]), []
).append(matcher)
return
return True

# Manufacturer data is 2nd cheapest since its all ints
if MANUFACTURER_ID in matcher:
self.manufacturer_id.setdefault(matcher[MANUFACTURER_ID], []).append(
matcher
)
return True

if SERVICE_UUID in matcher:
self.service_uuid.setdefault(matcher[SERVICE_UUID], []).append(matcher)
return
return True

if SERVICE_DATA_UUID in matcher:
self.service_data_uuid.setdefault(matcher[SERVICE_DATA_UUID], []).append(
matcher
)
return
return True

if MANUFACTURER_ID in matcher:
self.manufacturer_id.setdefault(matcher[MANUFACTURER_ID], []).append(
matcher
)
return
return False

def remove(self, matcher: _T) -> None:
def remove(self, matcher: _T) -> bool:
"""Remove a matcher from the index.

Matchers only end up in one bucket, so once we have
@@ -212,19 +216,21 @@ class BluetoothMatcherIndexBase(Generic[_T]):
self.local_name[_local_name_to_index_key(matcher[LOCAL_NAME])].remove(
matcher
)
return

if SERVICE_UUID in matcher:
self.service_uuid[matcher[SERVICE_UUID]].remove(matcher)
return

if SERVICE_DATA_UUID in matcher:
self.service_data_uuid[matcher[SERVICE_DATA_UUID]].remove(matcher)
return
return True

if MANUFACTURER_ID in matcher:
self.manufacturer_id[matcher[MANUFACTURER_ID]].remove(matcher)
return
return True

if SERVICE_UUID in matcher:
self.service_uuid[matcher[SERVICE_UUID]].remove(matcher)
return True

if SERVICE_DATA_UUID in matcher:
self.service_data_uuid[matcher[SERVICE_DATA_UUID]].remove(matcher)
return True

return False

def build(self) -> None:
"""Rebuild the index sets."""
@@ -235,33 +241,36 @@ class BluetoothMatcherIndexBase(Generic[_T]):
def match(self, service_info: BluetoothServiceInfoBleak) -> list[_T]:
"""Check for a match."""
matches = []
if len(service_info.name) >= LOCAL_NAME_MIN_MATCH_LENGTH:
if service_info.name and len(service_info.name) >= LOCAL_NAME_MIN_MATCH_LENGTH:
for matcher in self.local_name.get(
service_info.name[:LOCAL_NAME_MIN_MATCH_LENGTH], []
):
if ble_device_matches(matcher, service_info):
matches.append(matcher)

for service_data_uuid in self.service_data_uuid_set.intersection(
service_info.service_data
):
for matcher in self.service_data_uuid[service_data_uuid]:
if ble_device_matches(matcher, service_info):
matches.append(matcher)
if self.service_data_uuid_set and service_info.service_data:
for service_data_uuid in self.service_data_uuid_set.intersection(
service_info.service_data
):
for matcher in self.service_data_uuid[service_data_uuid]:
if ble_device_matches(matcher, service_info):
matches.append(matcher)

for manufacturer_id in self.manufacturer_id_set.intersection(
service_info.manufacturer_data
):
for matcher in self.manufacturer_id[manufacturer_id]:
if ble_device_matches(matcher, service_info):
matches.append(matcher)
if self.manufacturer_id_set and service_info.manufacturer_data:
for manufacturer_id in self.manufacturer_id_set.intersection(
service_info.manufacturer_data
):
for matcher in self.manufacturer_id[manufacturer_id]:
if ble_device_matches(matcher, service_info):
matches.append(matcher)

for service_uuid in self.service_uuid_set.intersection(
service_info.service_uuids
):
for matcher in self.service_uuid[service_uuid]:
if ble_device_matches(matcher, service_info):
matches.append(matcher)
if self.service_uuid_set and service_info.service_uuids:
for service_uuid in self.service_uuid_set.intersection(
service_info.service_uuids
):
for matcher in self.service_uuid[service_uuid]:
if ble_device_matches(matcher, service_info):
matches.append(matcher)

return matches

@@ -279,8 +288,11 @@ class BluetoothCallbackMatcherIndex(
"""Initialize the matcher index."""
super().__init__()
self.address: dict[str, list[BluetoothCallbackMatcherWithCallback]] = {}
self.connectable: list[BluetoothCallbackMatcherWithCallback] = []

def add_with_address(self, matcher: BluetoothCallbackMatcherWithCallback) -> None:
def add_callback_matcher(
self, matcher: BluetoothCallbackMatcherWithCallback
) -> None:
"""Add a matcher to the index.

Matchers must end up only in one bucket.
@@ -291,10 +303,15 @@ class BluetoothCallbackMatcherIndex(
self.address.setdefault(matcher[ADDRESS], []).append(matcher)
return

super().add(matcher)
self.build()
if super().add(matcher):
self.build()
return

def remove_with_address(
if CONNECTABLE in matcher:
self.connectable.append(matcher)
return

def remove_callback_matcher(
self, matcher: BluetoothCallbackMatcherWithCallback
) -> None:
"""Remove a matcher from the index.
@@ -306,8 +323,13 @@ class BluetoothCallbackMatcherIndex(
self.address[matcher[ADDRESS]].remove(matcher)
return

super().remove(matcher)
self.build()
if super().remove(matcher):
self.build()
return

if CONNECTABLE in matcher:
self.connectable.remove(matcher)
return

def match_callbacks(
self, service_info: BluetoothServiceInfoBleak
@@ -317,6 +339,9 @@ class BluetoothCallbackMatcherIndex(
for matcher in self.address.get(service_info.address, []):
if ble_device_matches(matcher, service_info):
matches.append(matcher)
for matcher in self.connectable:
if ble_device_matches(matcher, service_info):
matches.append(matcher)
return matches

@@ -347,12 +372,9 @@ def ble_device_matches(
service_info: BluetoothServiceInfoBleak,
) -> bool:
"""Check if a ble device and advertisement_data matches the matcher."""
device = service_info.device

# Don't check address here since all callers already
# check the address and we don't want to double check
# since it would result in an unreachable reject case.

if matcher.get(CONNECTABLE, True) and not service_info.connectable:
return False

@@ -379,7 +401,8 @@ def ble_device_matches(
return False

if (local_name := matcher.get(LOCAL_NAME)) and (
(device_name := advertisement_data.local_name or device.name) is None
(device_name := advertisement_data.local_name or service_info.device.name)
is None
or not _memorized_fnmatch(
device_name,
local_name,

@@ -17,7 +17,7 @@ from bleak.backends.bluezdbus.advertisement_monitor import OrPattern
from bleak.backends.bluezdbus.scanner import BlueZScannerArgs
from bleak.backends.device import BLEDevice
from bleak.backends.scanner import AdvertisementData
from dbus_next import InvalidMessageError
from dbus_fast import InvalidMessageError

from homeassistant.const import EVENT_HOMEASSISTANT_STOP
from homeassistant.core import (

@@ -46,7 +46,7 @@ async def async_get_bluetooth_adapters() -> dict[str, AdapterDetails]:
adapters[adapter] = AdapterDetails(
address=adapter1["Address"],
sw_version=adapter1["Name"],  # This is actually the BlueZ version
hw_version=adapter1["Modalias"],
hw_version=adapter1.get("Modalias"),
passive_scan="org.bluez.AdvertisementMonitorManager1" in details,
)
return adapters

@@ -2,7 +2,7 @@
"domain": "bmw_connected_drive",
"name": "BMW Connected Drive",
"documentation": "https://www.home-assistant.io/integrations/bmw_connected_drive",
"requirements": ["bimmer_connected==0.10.2"],
"requirements": ["bimmer_connected==0.10.4"],
"codeowners": ["@gerard33", "@rikroe"],
"config_flow": true,
"iot_class": "cloud_polling",

@@ -1,6 +1,7 @@
"""Config flow for Bond integration."""
from __future__ import annotations

import asyncio
import contextlib
from http import HTTPStatus
import logging
@@ -83,7 +84,10 @@ class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
instead ask them to manually enter the token.
"""
host = self._discovered[CONF_HOST]
if not (token := await async_get_token(self.hass, host)):
try:
if not (token := await async_get_token(self.hass, host)):
return
except asyncio.TimeoutError:
return

self._discovered[CONF_ACCESS_TOKEN] = token

@@ -3,7 +3,7 @@
"name": "Bosch SHC",
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/bosch_shc",
"requirements": ["boschshcpy==0.2.30"],
"requirements": ["boschshcpy==0.2.35"],
"zeroconf": [{ "type": "_http._tcp.local.", "name": "bosch shc*" }],
"iot_class": "local_push",
"codeowners": ["@tschamm"],

@@ -7,7 +7,13 @@ from functools import wraps
import logging
from typing import Any, Final, TypeVar

from pybravia import BraviaTV, BraviaTVError, BraviaTVNotFound
from pybravia import (
BraviaTV,
BraviaTVConnectionError,
BraviaTVConnectionTimeout,
BraviaTVError,
BraviaTVNotFound,
)
from typing_extensions import Concatenate, ParamSpec

from homeassistant.components.media_player.const import (
@@ -130,6 +136,10 @@ class BraviaTVCoordinator(DataUpdateCoordinator[None]):
_LOGGER.debug("Update skipped, Bravia API service is reloading")
return
raise UpdateFailed("Error communicating with device") from err
except (BraviaTVConnectionError, BraviaTVConnectionTimeout):
self.is_on = False
self.connected = False
_LOGGER.debug("Update skipped, Bravia TV is off")
except BraviaTVError as err:
self.is_on = False
self.connected = False

@@ -2,7 +2,7 @@
"domain": "braviatv",
"name": "Sony Bravia TV",
"documentation": "https://www.home-assistant.io/integrations/braviatv",
"requirements": ["pybravia==0.2.0"],
"requirements": ["pybravia==0.2.2"],
"codeowners": ["@bieniu", "@Drafteed"],
"config_flow": true,
"iot_class": "local_polling",

@@ -2,7 +2,7 @@
"domain": "bt_smarthub",
"name": "BT Smart Hub",
"documentation": "https://www.home-assistant.io/integrations/bt_smarthub",
"requirements": ["btsmarthub_devicelist==0.2.0"],
"requirements": ["btsmarthub_devicelist==0.2.2"],
"codeowners": ["@jxwolstenholme"],
"iot_class": "local_polling",
"loggers": ["btsmarthub_devicelist"]

@@ -1,9 +1,9 @@
"""The BThome Bluetooth integration."""
"""The BTHome Bluetooth integration."""
from __future__ import annotations

import logging

from bthome_ble import BThomeBluetoothDeviceData, SensorUpdate
from bthome_ble import BTHomeBluetoothDeviceData, SensorUpdate
from bthome_ble.parser import EncryptionScheme

from homeassistant.components.bluetooth import (
@@ -27,7 +27,7 @@ _LOGGER = logging.getLogger(__name__)
def process_service_info(
hass: HomeAssistant,
entry: ConfigEntry,
data: BThomeBluetoothDeviceData,
data: BTHomeBluetoothDeviceData,
service_info: BluetoothServiceInfoBleak,
) -> SensorUpdate:
"""Process a BluetoothServiceInfoBleak, running side effects and returning sensor data."""
@@ -40,14 +40,14 @@ def process_service_info(

async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Set up BThome Bluetooth from a config entry."""
"""Set up BTHome Bluetooth from a config entry."""
address = entry.unique_id
assert address is not None

kwargs = {}
if bindkey := entry.data.get("bindkey"):
kwargs["bindkey"] = bytes.fromhex(bindkey)
data = BThomeBluetoothDeviceData(**kwargs)
data = BTHomeBluetoothDeviceData(**kwargs)

coordinator = hass.data.setdefault(DOMAIN, {})[
entry.entry_id

@@ -1,11 +1,11 @@
"""Config flow for BThome Bluetooth integration."""
"""Config flow for BTHome Bluetooth integration."""
from __future__ import annotations

from collections.abc import Mapping
import dataclasses
from typing import Any

from bthome_ble import BThomeBluetoothDeviceData as DeviceData
from bthome_ble import BTHomeBluetoothDeviceData as DeviceData
from bthome_ble.parser import EncryptionScheme
import voluptuous as vol

@@ -34,8 +34,8 @@ def _title(discovery_info: BluetoothServiceInfo, device: DeviceData) -> str:
return device.title or device.get_device_name() or discovery_info.name

class BThomeConfigFlow(ConfigFlow, domain=DOMAIN):
"""Handle a config flow for BThome Bluetooth."""
class BTHomeConfigFlow(ConfigFlow, domain=DOMAIN):
"""Handle a config flow for BTHome Bluetooth."""

VERSION = 1

@@ -68,7 +68,7 @@ class BThomeConfigFlow(ConfigFlow, domain=DOMAIN):
async def async_step_get_encryption_key(
self, user_input: dict[str, Any] | None = None
) -> FlowResult:
"""Enter a bindkey for an encrypted BThome device."""
"""Enter a bindkey for an encrypted BTHome device."""
assert self._discovery_info
assert self._discovered_device

@@ -1,3 +1,3 @@
"""Constants for the BThome Bluetooth integration."""
"""Constants for the BTHome Bluetooth integration."""

DOMAIN = "bthome"

@@ -1,4 +1,4 @@
"""Support for BThome Bluetooth devices."""
"""Support for BTHome Bluetooth devices."""
from __future__ import annotations

from bthome_ble import DeviceKey, SensorDeviceInfo

@@ -1,6 +1,6 @@
{
"domain": "bthome",
"name": "BThome",
"name": "BTHome",
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/bthome",
"bluetooth": [
@@ -13,7 +13,7 @@
"service_data_uuid": "0000181e-0000-1000-8000-00805f9b34fb"
}
],
"requirements": ["bthome-ble==0.5.2"],
"requirements": ["bthome-ble==1.0.0"],
"dependencies": ["bluetooth"],
"codeowners": ["@Ernst79"],
"iot_class": "local_push"

@@ -1,4 +1,4 @@
"""Support for BThome sensors."""
"""Support for BTHome sensors."""
from __future__ import annotations

from typing import Optional, Union
@@ -202,26 +202,26 @@ async def async_setup_entry(
entry: config_entries.ConfigEntry,
async_add_entities: AddEntitiesCallback,
) -> None:
"""Set up the BThome BLE sensors."""
"""Set up the BTHome BLE sensors."""
coordinator: PassiveBluetoothProcessorCoordinator = hass.data[DOMAIN][
entry.entry_id
]
processor = PassiveBluetoothDataProcessor(sensor_update_to_bluetooth_data_update)
entry.async_on_unload(
processor.async_add_entities_listener(
BThomeBluetoothSensorEntity, async_add_entities
BTHomeBluetoothSensorEntity, async_add_entities
)
)
entry.async_on_unload(coordinator.async_register_processor(processor))

class BThomeBluetoothSensorEntity(
class BTHomeBluetoothSensorEntity(
PassiveBluetoothProcessorEntity[
PassiveBluetoothDataProcessor[Optional[Union[float, int]]]
],
SensorEntity,
):
"""Representation of a BThome BLE sensor."""
"""Representation of a BTHome BLE sensor."""

@property
def native_value(self) -> int | float | None:

@@ -47,7 +47,7 @@ SERVICE_CONFIGURE = "configure"
STORAGE_KEY = DOMAIN
STORAGE_VERSION = 1

CREATE_FIELDS = {
STORAGE_FIELDS = {
vol.Optional(CONF_ICON): cv.icon,
vol.Optional(CONF_INITIAL, default=DEFAULT_INITIAL): cv.positive_int,
vol.Required(CONF_NAME): vol.All(cv.string, vol.Length(min=1)),
@@ -57,16 +57,6 @@ CREATE_FIELDS = {
vol.Optional(CONF_STEP, default=DEFAULT_STEP): cv.positive_int,
}

UPDATE_FIELDS = {
vol.Optional(CONF_ICON): cv.icon,
vol.Optional(CONF_INITIAL): cv.positive_int,
vol.Optional(CONF_NAME): cv.string,
vol.Optional(CONF_MAXIMUM): vol.Any(None, vol.Coerce(int)),
vol.Optional(CONF_MINIMUM): vol.Any(None, vol.Coerce(int)),
vol.Optional(CONF_RESTORE): cv.boolean,
vol.Optional(CONF_STEP): cv.positive_int,
}

def _none_to_empty_dict(value):
if value is None:
@@ -128,7 +118,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
await storage_collection.async_load()

collection.StorageCollectionWebsocket(
storage_collection, DOMAIN, DOMAIN, CREATE_FIELDS, UPDATE_FIELDS
storage_collection, DOMAIN, DOMAIN, STORAGE_FIELDS, STORAGE_FIELDS
).async_setup(hass)

component.async_register_entity_service(SERVICE_INCREMENT, {}, "async_increment")
@@ -152,12 +142,11 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
class CounterStorageCollection(collection.StorageCollection):
"""Input storage based collection."""

CREATE_SCHEMA = vol.Schema(CREATE_FIELDS)
UPDATE_SCHEMA = vol.Schema(UPDATE_FIELDS)
CREATE_UPDATE_SCHEMA = vol.Schema(STORAGE_FIELDS)

async def _process_create_data(self, data: dict) -> dict:
"""Validate the config is valid."""
return self.CREATE_SCHEMA(data)
return self.CREATE_UPDATE_SCHEMA(data)

@callback
def _get_suggested_id(self, info: dict) -> str:
@@ -166,8 +155,8 @@ class CounterStorageCollection(collection.StorageCollection):

async def _update_data(self, data: dict, update_data: dict) -> dict:
"""Return a new updated data object."""
update_data = self.UPDATE_SCHEMA(update_data)
return {**data, **update_data}
update_data = self.CREATE_UPDATE_SCHEMA(update_data)
return {CONF_ID: data[CONF_ID]} | update_data

class Counter(RestoreEntity):

@@ -43,6 +43,9 @@ async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> b
except AuthenticationRequired as err:
raise ConfigEntryAuthFailed from err

if not hass.data[DOMAIN]:
async_setup_services(hass)

gateway = hass.data[DOMAIN][config_entry.entry_id] = DeconzGateway(
hass, config_entry, api
)
@@ -53,9 +56,6 @@ async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> b
await async_setup_events(gateway)
await hass.config_entries.async_forward_entry_setups(config_entry, PLATFORMS)

if len(hass.data[DOMAIN]) == 1:
async_setup_services(hass)

api.start()

config_entry.async_on_unload(

@@ -11,8 +11,9 @@
"dhcp",
"energy",
"frontend",
"homeassistant_alerts",
"hardware",
"history",
"homeassistant_alerts",
"input_boolean",
"input_button",
"input_datetime",
@@ -2,7 +2,7 @@
"domain": "dhcp",
"name": "DHCP Discovery",
"documentation": "https://www.home-assistant.io/integrations/dhcp",
"requirements": ["scapy==2.4.5", "aiodiscover==1.4.11"],
"requirements": ["scapy==2.4.5", "aiodiscover==1.4.13"],
"codeowners": ["@bdraco"],
"quality_scale": "internal",
"iot_class": "local_push",

@@ -29,7 +29,7 @@ from .const import DOMAIN, ECOBEE_MODEL_TO_NAME, MANUFACTURER
class EcobeeSensorEntityDescriptionMixin:
"""Represent the required ecobee entity description attributes."""

runtime_key: str
runtime_key: str | None


@dataclass
@@ -46,7 +46,7 @@ SENSOR_TYPES: tuple[EcobeeSensorEntityDescription, ...] = (
native_unit_of_measurement=TEMP_FAHRENHEIT,
device_class=SensorDeviceClass.TEMPERATURE,
state_class=SensorStateClass.MEASUREMENT,
runtime_key="actualTemperature",
runtime_key=None,
),
EcobeeSensorEntityDescription(
key="humidity",
@@ -54,7 +54,7 @@ SENSOR_TYPES: tuple[EcobeeSensorEntityDescription, ...] = (
native_unit_of_measurement=PERCENTAGE,
device_class=SensorDeviceClass.HUMIDITY,
state_class=SensorStateClass.MEASUREMENT,
runtime_key="actualHumidity",
runtime_key=None,
),
EcobeeSensorEntityDescription(
key="co2PPM",
@@ -194,6 +194,11 @@ class EcobeeSensor(SensorEntity):
for item in sensor["capability"]:
if item["type"] != self.entity_description.key:
continue
thermostat = self.data.ecobee.get_thermostat(self.index)
self._state = thermostat["runtime"][self.entity_description.runtime_key]
if self.entity_description.runtime_key is None:
self._state = item["value"]
else:
thermostat = self.data.ecobee.get_thermostat(self.index)
self._state = thermostat["runtime"][
self.entity_description.runtime_key
]
break

@@ -2,8 +2,8 @@
"domain": "ecovacs",
"name": "Ecovacs",
"documentation": "https://www.home-assistant.io/integrations/ecovacs",
"requirements": ["sucks==0.9.4"],
"codeowners": ["@OverloadUT"],
"requirements": ["py-sucks==0.9.8"],
"codeowners": ["@OverloadUT", "@mib1185"],
"iot_class": "cloud_push",
"loggers": ["sleekxmppfs", "sucks"]
}
@@ -44,6 +44,8 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:

async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Unload a config entry."""
webhook.async_unregister(hass, entry.data[CONF_WEBHOOK_ID])

if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS):
hass.data[DOMAIN].pop(entry.entry_id)


@@ -68,4 +68,4 @@ class EcowittBinarySensorEntity(EcowittEntity, BinarySensorEntity):
@property
def is_on(self) -> bool:
"""Return true if the binary sensor is on."""
return self.ecowitt.value > 0
return bool(self.ecowitt.value)

@@ -25,13 +25,13 @@ async def async_get_device_diagnostics(
"device": {
"name": station.station,
"model": station.model,
"frequency": station.frequency,
"frequency": station.frequence,
"version": station.version,
},
"raw": ecowitt.last_values[station_id],
"sensors": {
sensor.key: sensor.value
for sensor in station.sensors
for sensor in ecowitt.sensors.values()
if sensor.station.key == station_id
},
}

@@ -3,7 +3,8 @@
"name": "Ecowitt",
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/ecowitt",
"requirements": ["aioecowitt==2022.08.3"],
"dependencies": ["webhook"],
"requirements": ["aioecowitt==2022.09.1"],
"codeowners": ["@pvizeli"],
"iot_class": "local_push"
}
@@ -1,5 +1,8 @@
"""Support for Ecowitt Weather Stations."""
from __future__ import annotations

import dataclasses
from datetime import datetime
from typing import Final

from aioecowitt import EcoWittListener, EcoWittSensor, EcoWittSensorTypes
@@ -242,6 +245,6 @@ class EcowittSensorEntity(EcowittEntity, SensorEntity):
self.entity_description = description

@property
def native_value(self) -> StateType:
def native_value(self) -> StateType | datetime:
"""Return the state of the sensor."""
return self.ecowitt.value

@@ -20,6 +20,7 @@ from homeassistant.const import (
ENERGY_KILO_WATT_HOUR,
ENERGY_MEGA_WATT_HOUR,
ENERGY_WATT_HOUR,
VOLUME_CUBIC_FEET,
VOLUME_CUBIC_METERS,
)
from homeassistant.core import (
@@ -44,7 +45,7 @@ SUPPORTED_STATE_CLASSES = [
SensorStateClass.TOTAL_INCREASING,
]
VALID_ENERGY_UNITS = [ENERGY_WATT_HOUR, ENERGY_KILO_WATT_HOUR, ENERGY_MEGA_WATT_HOUR]
VALID_ENERGY_UNITS_GAS = [VOLUME_CUBIC_METERS] + VALID_ENERGY_UNITS
VALID_ENERGY_UNITS_GAS = [VOLUME_CUBIC_FEET, VOLUME_CUBIC_METERS] + VALID_ENERGY_UNITS
_LOGGER = logging.getLogger(__name__)
@@ -3,7 +3,7 @@
"name": "Epson",
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/epson",
"requirements": ["epson-projector==0.4.6"],
"requirements": ["epson-projector==0.5.0"],
"codeowners": ["@pszafer"],
"iot_class": "local_polling",
"loggers": ["epson_projector"]

@@ -3,7 +3,7 @@ from __future__ import annotations

import logging

from epson_projector import Projector
from epson_projector import Projector, ProjectorUnavailableError
from epson_projector.const import (
BACK,
BUSY,
@@ -20,7 +20,6 @@ from epson_projector.const import (
POWER,
SOURCE,
SOURCE_LIST,
STATE_UNAVAILABLE as EPSON_STATE_UNAVAILABLE,
TURN_OFF,
TURN_ON,
VOL_DOWN,
@@ -123,11 +122,16 @@ class EpsonProjectorMediaPlayer(MediaPlayerEntity):

async def async_update(self) -> None:
"""Update state of device."""
power_state = await self._projector.get_power()
_LOGGER.debug("Projector status: %s", power_state)
if not power_state or power_state == EPSON_STATE_UNAVAILABLE:
try:
power_state = await self._projector.get_power()
except ProjectorUnavailableError as ex:
_LOGGER.debug("Projector is unavailable: %s", ex)
self._attr_available = False
return
if not power_state:
self._attr_available = False
return
_LOGGER.debug("Projector status: %s", power_state)
self._attr_available = True
if power_state == EPSON_CODES[POWER]:
self._attr_state = STATE_ON

@@ -4,7 +4,7 @@
"config_flow": true,
"dependencies": ["network"],
"documentation": "https://www.home-assistant.io/integrations/flux_led",
"requirements": ["flux_led==0.28.31"],
"requirements": ["flux_led==0.28.32"],
"quality_scale": "platinum",
"codeowners": ["@icemanch", "@bdraco"],
"iot_class": "local_push",
@@ -2,7 +2,7 @@
"domain": "frontend",
"name": "Home Assistant Frontend",
"documentation": "https://www.home-assistant.io/integrations/frontend",
"requirements": ["home-assistant-frontend==20220902.0"],
"requirements": ["home-assistant-frontend==20220907.2"],
"dependencies": [
"api",
"auth",

@@ -51,6 +51,7 @@ SETTINGS_TO_REDACT = {
"sebExamKey",
"sebConfigKey",
"kioskPinEnc",
"remoteAdminPasswordEnc",
}


@@ -260,8 +260,6 @@ class BrightnessTrait(_Trait):
brightness = self.state.attributes.get(light.ATTR_BRIGHTNESS)
if brightness is not None:
response["brightness"] = round(100 * (brightness / 255))
else:
response["brightness"] = 0

return response
@@ -12,11 +12,21 @@
"service_uuid": "00008451-0000-1000-8000-00805f9b34fb",
"connectable": false
},
{
"manufacturer_id": 63391,
"service_uuid": "00008351-0000-1000-8000-00805f9b34fb",
"connectable": false
},
{
"manufacturer_id": 26589,
"service_uuid": "00008351-0000-1000-8000-00805f9b34fb",
"connectable": false
},
{
"manufacturer_id": 57391,
"service_uuid": "00008351-0000-1000-8000-00805f9b34fb",
"connectable": false
},
{
"manufacturer_id": 18994,
"service_uuid": "00008551-0000-1000-8000-00805f9b34fb",
@@ -27,6 +37,11 @@
"service_uuid": "00008551-0000-1000-8000-00805f9b34fb",
"connectable": false
},
{
"manufacturer_id": 43682,
"service_uuid": "00008151-0000-1000-8000-00805f9b34fb",
"connectable": false
},
{
"manufacturer_id": 59970,
"service_uuid": "00008151-0000-1000-8000-00805f9b34fb",
@@ -53,7 +68,7 @@
"connectable": false
}
],
"requirements": ["govee-ble==0.17.1"],
"requirements": ["govee-ble==0.19.0"],
"dependencies": ["bluetooth"],
"codeowners": ["@bdraco"],
"iot_class": "local_push"
@@ -92,7 +92,10 @@ class ValveControllerSwitch(ValveControllerEntity, SwitchEntity):
)

async def async_turn_off(self, **kwargs: Any) -> None:
"""Turn the valve off (closed)."""
"""Turn the switch off."""
if not self._attr_is_on:
return

try:
async with self._client:
await self._client.valve.close()
@@ -103,7 +106,10 @@ class ValveControllerSwitch(ValveControllerEntity, SwitchEntity):
self.async_write_ha_state()

async def async_turn_on(self, **kwargs: Any) -> None:
"""Turn the valve on (open)."""
"""Turn the switch on."""
if self._attr_is_on:
return

try:
async with self._client:
await self._client.valve.open()
@@ -143,7 +143,6 @@ class HistoryStatsSensorBase(
class HistoryStatsSensor(HistoryStatsSensorBase):
"""A HistoryStats sensor."""

_attr_device_class = SensorDeviceClass.DURATION
_attr_state_class = SensorStateClass.MEASUREMENT

def __init__(
@@ -157,6 +156,8 @@ class HistoryStatsSensor(HistoryStatsSensorBase):
self._attr_native_unit_of_measurement = UNITS[sensor_type]
self._type = sensor_type
self._process_update()
if self._type == CONF_TYPE_TIME:
self._attr_device_class = SensorDeviceClass.DURATION

@callback
def _process_update(self) -> None:
@@ -423,7 +423,7 @@ class HKDevice:
if self._polling_interval_remover:
self._polling_interval_remover()

await self.pairing.close()
await self.pairing.shutdown()

await self.hass.config_entries.async_unload_platforms(
self.config_entry, self.platforms

@@ -3,7 +3,7 @@
"name": "HomeKit Controller",
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/homekit_controller",
"requirements": ["aiohomekit==1.5.1"],
"requirements": ["aiohomekit==1.5.12"],
"zeroconf": ["_hap._tcp.local.", "_hap._udp.local."],
"bluetooth": [{ "manufacturer_id": 76, "manufacturer_data_start": [6] }],
"dependencies": ["bluetooth", "zeroconf"],

@@ -2,7 +2,7 @@
"domain": "imap",
"name": "IMAP",
"documentation": "https://www.home-assistant.io/integrations/imap",
"requirements": ["aioimaplib==1.0.0"],
"requirements": ["aioimaplib==1.0.1"],
"codeowners": [],
"iot_class": "cloud_push",
"loggers": ["aioimaplib"]
@@ -37,20 +37,25 @@ _LOGGER = logging.getLogger(__name__)

CONF_INITIAL = "initial"

CREATE_FIELDS = {
STORAGE_FIELDS = {
vol.Required(CONF_NAME): vol.All(str, vol.Length(min=1)),
vol.Optional(CONF_INITIAL): cv.boolean,
vol.Optional(CONF_ICON): cv.icon,
}

UPDATE_FIELDS = {
vol.Optional(CONF_NAME): cv.string,
vol.Optional(CONF_INITIAL): cv.boolean,
vol.Optional(CONF_ICON): cv.icon,
}

CONFIG_SCHEMA = vol.Schema(
{DOMAIN: cv.schema_with_slug_keys(vol.Any(UPDATE_FIELDS, None))},
{
DOMAIN: cv.schema_with_slug_keys(
vol.Any(
{
vol.Optional(CONF_NAME): cv.string,
vol.Optional(CONF_INITIAL): cv.boolean,
vol.Optional(CONF_ICON): cv.icon,
},
None,
)
)
},
extra=vol.ALLOW_EXTRA,
)
@@ -62,12 +67,11 @@ STORAGE_VERSION = 1
class InputBooleanStorageCollection(collection.StorageCollection):
"""Input boolean collection stored in storage."""

CREATE_SCHEMA = vol.Schema(CREATE_FIELDS)
UPDATE_SCHEMA = vol.Schema(UPDATE_FIELDS)
CREATE_UPDATE_SCHEMA = vol.Schema(STORAGE_FIELDS)

async def _process_create_data(self, data: dict) -> dict:
"""Validate the config is valid."""
return self.CREATE_SCHEMA(data)
return self.CREATE_UPDATE_SCHEMA(data)

@callback
def _get_suggested_id(self, info: dict) -> str:
@@ -76,8 +80,8 @@ class InputBooleanStorageCollection(collection.StorageCollection):

async def _update_data(self, data: dict, update_data: dict) -> dict:
"""Return a new updated data object."""
update_data = self.UPDATE_SCHEMA(update_data)
return {**data, **update_data}
update_data = self.CREATE_UPDATE_SCHEMA(update_data)
return {CONF_ID: data[CONF_ID]} | update_data


@bind_hass
@@ -118,7 +122,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
await storage_collection.async_load()

collection.StorageCollectionWebsocket(
storage_collection, DOMAIN, DOMAIN, CREATE_FIELDS, UPDATE_FIELDS
storage_collection, DOMAIN, DOMAIN, STORAGE_FIELDS, STORAGE_FIELDS
).async_setup(hass)

async def reload_service_handler(service_call: ServiceCall) -> None:
@@ -30,18 +30,23 @@ DOMAIN = "input_button"

_LOGGER = logging.getLogger(__name__)

CREATE_FIELDS = {
STORAGE_FIELDS = {
vol.Required(CONF_NAME): vol.All(str, vol.Length(min=1)),
vol.Optional(CONF_ICON): cv.icon,
}

UPDATE_FIELDS = {
vol.Optional(CONF_NAME): cv.string,
vol.Optional(CONF_ICON): cv.icon,
}

CONFIG_SCHEMA = vol.Schema(
{DOMAIN: cv.schema_with_slug_keys(vol.Any(UPDATE_FIELDS, None))},
{
DOMAIN: cv.schema_with_slug_keys(
vol.Any(
{
vol.Optional(CONF_NAME): cv.string,
vol.Optional(CONF_ICON): cv.icon,
},
None,
)
)
},
extra=vol.ALLOW_EXTRA,
)
@@ -53,12 +58,11 @@ STORAGE_VERSION = 1
class InputButtonStorageCollection(collection.StorageCollection):
"""Input button collection stored in storage."""

CREATE_SCHEMA = vol.Schema(CREATE_FIELDS)
UPDATE_SCHEMA = vol.Schema(UPDATE_FIELDS)
CREATE_UPDATE_SCHEMA = vol.Schema(STORAGE_FIELDS)

async def _process_create_data(self, data: dict) -> vol.Schema:
"""Validate the config is valid."""
return self.CREATE_SCHEMA(data)
return self.CREATE_UPDATE_SCHEMA(data)

@callback
def _get_suggested_id(self, info: dict) -> str:
@@ -67,8 +71,8 @@ class InputButtonStorageCollection(collection.StorageCollection):

async def _update_data(self, data: dict, update_data: dict) -> dict:
"""Return a new updated data object."""
update_data = self.UPDATE_SCHEMA(update_data)
return {**data, **update_data}
update_data = self.CREATE_UPDATE_SCHEMA(update_data)
return {CONF_ID: data[CONF_ID]} | update_data


async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
@@ -103,7 +107,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
await storage_collection.async_load()

collection.StorageCollectionWebsocket(
storage_collection, DOMAIN, DOMAIN, CREATE_FIELDS, UPDATE_FIELDS
storage_collection, DOMAIN, DOMAIN, STORAGE_FIELDS, STORAGE_FIELDS
).async_setup(hass)

async def reload_service_handler(service_call: ServiceCall) -> None:
@@ -61,20 +61,13 @@ def validate_set_datetime_attrs(config):
STORAGE_KEY = DOMAIN
STORAGE_VERSION = 1

CREATE_FIELDS = {
STORAGE_FIELDS = {
vol.Required(CONF_NAME): vol.All(str, vol.Length(min=1)),
vol.Optional(CONF_HAS_DATE, default=False): cv.boolean,
vol.Optional(CONF_HAS_TIME, default=False): cv.boolean,
vol.Optional(CONF_ICON): cv.icon,
vol.Optional(CONF_INITIAL): cv.string,
}
UPDATE_FIELDS = {
vol.Optional(CONF_NAME): cv.string,
vol.Optional(CONF_HAS_DATE): cv.boolean,
vol.Optional(CONF_HAS_TIME): cv.boolean,
vol.Optional(CONF_ICON): cv.icon,
vol.Optional(CONF_INITIAL): cv.string,
}


def has_date_or_time(conf):
@@ -167,7 +160,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
await storage_collection.async_load()

collection.StorageCollectionWebsocket(
storage_collection, DOMAIN, DOMAIN, CREATE_FIELDS, UPDATE_FIELDS
storage_collection, DOMAIN, DOMAIN, STORAGE_FIELDS, STORAGE_FIELDS
).async_setup(hass)

async def reload_service_handler(service_call: ServiceCall) -> None:
@@ -213,12 +206,11 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
class DateTimeStorageCollection(collection.StorageCollection):
"""Input storage based collection."""

CREATE_SCHEMA = vol.Schema(vol.All(CREATE_FIELDS, has_date_or_time))
UPDATE_SCHEMA = vol.Schema(UPDATE_FIELDS)
CREATE_UPDATE_SCHEMA = vol.Schema(vol.All(STORAGE_FIELDS, has_date_or_time))

async def _process_create_data(self, data: dict) -> dict:
"""Validate the config is valid."""
return self.CREATE_SCHEMA(data)
return self.CREATE_UPDATE_SCHEMA(data)

@callback
def _get_suggested_id(self, info: dict) -> str:
@@ -227,8 +219,8 @@ class DateTimeStorageCollection(collection.StorageCollection):

async def _update_data(self, data: dict, update_data: dict) -> dict:
"""Return a new updated data object."""
update_data = self.UPDATE_SCHEMA(update_data)
return has_date_or_time({**data, **update_data})
update_data = self.CREATE_UPDATE_SCHEMA(update_data)
return {CONF_ID: data[CONF_ID]} | update_data


class InputDatetime(RestoreEntity):
@@ -65,7 +65,7 @@ def _cv_input_number(cfg):
return cfg


CREATE_FIELDS = {
STORAGE_FIELDS = {
vol.Required(CONF_NAME): vol.All(str, vol.Length(min=1)),
vol.Required(CONF_MIN): vol.Coerce(float),
vol.Required(CONF_MAX): vol.Coerce(float),
@@ -76,17 +76,6 @@ CREATE_FIELDS = {
vol.Optional(CONF_MODE, default=MODE_SLIDER): vol.In([MODE_BOX, MODE_SLIDER]),
}

UPDATE_FIELDS = {
vol.Optional(CONF_NAME): cv.string,
vol.Optional(CONF_MIN): vol.Coerce(float),
vol.Optional(CONF_MAX): vol.Coerce(float),
vol.Optional(CONF_INITIAL): vol.Coerce(float),
vol.Optional(CONF_STEP): vol.All(vol.Coerce(float), vol.Range(min=1e-9)),
vol.Optional(CONF_ICON): cv.icon,
vol.Optional(CONF_UNIT_OF_MEASUREMENT): cv.string,
vol.Optional(CONF_MODE): vol.In([MODE_BOX, MODE_SLIDER]),
}

CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: cv.schema_with_slug_keys(
@@ -148,7 +137,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
await storage_collection.async_load()

collection.StorageCollectionWebsocket(
storage_collection, DOMAIN, DOMAIN, CREATE_FIELDS, UPDATE_FIELDS
storage_collection, DOMAIN, DOMAIN, STORAGE_FIELDS, STORAGE_FIELDS
).async_setup(hass)

async def reload_service_handler(service_call: ServiceCall) -> None:
@@ -184,22 +173,37 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
class NumberStorageCollection(collection.StorageCollection):
"""Input storage based collection."""

CREATE_SCHEMA = vol.Schema(vol.All(CREATE_FIELDS, _cv_input_number))
UPDATE_SCHEMA = vol.Schema(UPDATE_FIELDS)
SCHEMA = vol.Schema(vol.All(STORAGE_FIELDS, _cv_input_number))

async def _process_create_data(self, data: dict) -> dict:
"""Validate the config is valid."""
return self.CREATE_SCHEMA(data)
return self.SCHEMA(data)

@callback
def _get_suggested_id(self, info: dict) -> str:
"""Suggest an ID based on the config."""
return info[CONF_NAME]

async def _async_load_data(self) -> dict | None:
"""Load the data.

A past bug caused frontend to add initial value to all input numbers.
This drops that.
"""
data = await super()._async_load_data()

if data is None:
return data

for number in data["items"]:
number.pop(CONF_INITIAL, None)

return data

async def _update_data(self, data: dict, update_data: dict) -> dict:
"""Return a new updated data object."""
update_data = self.UPDATE_SCHEMA(update_data)
return _cv_input_number({**data, **update_data})
update_data = self.SCHEMA(update_data)
return {CONF_ID: data[CONF_ID]} | update_data


class InputNumber(RestoreEntity):
@@ -56,7 +56,7 @@ def _unique(options: Any) -> Any:
raise HomeAssistantError("Duplicate options are not allowed") from exc


CREATE_FIELDS = {
STORAGE_FIELDS = {
vol.Required(CONF_NAME): vol.All(str, vol.Length(min=1)),
vol.Required(CONF_OPTIONS): vol.All(
cv.ensure_list, vol.Length(min=1), _unique, [cv.string]
@@ -64,14 +64,6 @@ CREATE_FIELDS = {
vol.Optional(CONF_INITIAL): cv.string,
vol.Optional(CONF_ICON): cv.icon,
}
UPDATE_FIELDS = {
vol.Optional(CONF_NAME): cv.string,
vol.Optional(CONF_OPTIONS): vol.All(
cv.ensure_list, vol.Length(min=1), _unique, [cv.string]
),
vol.Optional(CONF_INITIAL): cv.string,
vol.Optional(CONF_ICON): cv.icon,
}


def _remove_duplicates(options: list[str], name: str | None) -> list[str]:
@@ -172,7 +164,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
await storage_collection.async_load()

collection.StorageCollectionWebsocket(
storage_collection, DOMAIN, DOMAIN, CREATE_FIELDS, UPDATE_FIELDS
storage_collection, DOMAIN, DOMAIN, STORAGE_FIELDS, STORAGE_FIELDS
).async_setup(hass)

async def reload_service_handler(service_call: ServiceCall) -> None:
@@ -238,12 +230,11 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
class InputSelectStorageCollection(collection.StorageCollection):
"""Input storage based collection."""

CREATE_SCHEMA = vol.Schema(vol.All(CREATE_FIELDS, _cv_input_select))
UPDATE_SCHEMA = vol.Schema(UPDATE_FIELDS)
CREATE_UPDATE_SCHEMA = vol.Schema(vol.All(STORAGE_FIELDS, _cv_input_select))

async def _process_create_data(self, data: dict[str, Any]) -> dict[str, Any]:
"""Validate the config is valid."""
return cast(dict[str, Any], self.CREATE_SCHEMA(data))
return cast(dict[str, Any], self.CREATE_UPDATE_SCHEMA(data))

@callback
def _get_suggested_id(self, info: dict[str, Any]) -> str:
@@ -254,8 +245,8 @@ class InputSelectStorageCollection(collection.StorageCollection):
self, data: dict[str, Any], update_data: dict[str, Any]
) -> dict[str, Any]:
"""Return a new updated data object."""
update_data = self.UPDATE_SCHEMA(update_data)
return _cv_input_select({**data, **update_data})
update_data = self.CREATE_UPDATE_SCHEMA(update_data)
return {CONF_ID: data[CONF_ID]} | update_data


class InputSelect(SelectEntity, RestoreEntity):
@@ -51,7 +51,7 @@ SERVICE_SET_VALUE = "set_value"
STORAGE_KEY = DOMAIN
STORAGE_VERSION = 1

CREATE_FIELDS = {
STORAGE_FIELDS = {
vol.Required(CONF_NAME): vol.All(str, vol.Length(min=1)),
vol.Optional(CONF_MIN, default=CONF_MIN_VALUE): vol.Coerce(int),
vol.Optional(CONF_MAX, default=CONF_MAX_VALUE): vol.Coerce(int),
@@ -61,16 +61,6 @@ CREATE_FIELDS = {
vol.Optional(CONF_PATTERN): cv.string,
vol.Optional(CONF_MODE, default=MODE_TEXT): vol.In([MODE_TEXT, MODE_PASSWORD]),
}
UPDATE_FIELDS = {
vol.Optional(CONF_NAME): cv.string,
vol.Optional(CONF_MIN): vol.Coerce(int),
vol.Optional(CONF_MAX): vol.Coerce(int),
vol.Optional(CONF_INITIAL): cv.string,
vol.Optional(CONF_ICON): cv.icon,
vol.Optional(CONF_UNIT_OF_MEASUREMENT): cv.string,
vol.Optional(CONF_PATTERN): cv.string,
vol.Optional(CONF_MODE): vol.In([MODE_TEXT, MODE_PASSWORD]),
}


def _cv_input_text(cfg):
@@ -147,7 +137,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
await storage_collection.async_load()

collection.StorageCollectionWebsocket(
storage_collection, DOMAIN, DOMAIN, CREATE_FIELDS, UPDATE_FIELDS
storage_collection, DOMAIN, DOMAIN, STORAGE_FIELDS, STORAGE_FIELDS
).async_setup(hass)

async def reload_service_handler(service_call: ServiceCall) -> None:
@@ -177,12 +167,11 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
class InputTextStorageCollection(collection.StorageCollection):
"""Input storage based collection."""

CREATE_SCHEMA = vol.Schema(vol.All(CREATE_FIELDS, _cv_input_text))
UPDATE_SCHEMA = vol.Schema(UPDATE_FIELDS)
CREATE_UPDATE_SCHEMA = vol.Schema(vol.All(STORAGE_FIELDS, _cv_input_text))

async def _process_create_data(self, data: dict) -> dict:
"""Validate the config is valid."""
return self.CREATE_SCHEMA(data)
return self.CREATE_UPDATE_SCHEMA(data)

@callback
def _get_suggested_id(self, info: dict) -> str:
@@ -191,8 +180,8 @@ class InputTextStorageCollection(collection.StorageCollection):

async def _update_data(self, data: dict, update_data: dict) -> dict:
"""Return a new updated data object."""
update_data = self.UPDATE_SCHEMA(update_data)
return _cv_input_text({**data, **update_data})
update_data = self.CREATE_UPDATE_SCHEMA(update_data)
return {CONF_ID: data[CONF_ID]} | update_data


class InputText(RestoreEntity):
@@ -1,19 +1,57 @@
"""Component for the Portuguese weather service - IPMA."""
import asyncio
import logging

import async_timeout
from pyipma import IPMAException
from pyipma.api import IPMA_API
from pyipma.location import Location

from homeassistant.config_entries import ConfigEntry
from homeassistant.const import Platform
from homeassistant.const import CONF_LATITUDE, CONF_LONGITUDE, Platform
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.helpers.aiohttp_client import async_get_clientsession

from .config_flow import IpmaFlowHandler # noqa: F401
from .const import DOMAIN # noqa: F401
from .const import DATA_API, DATA_LOCATION, DOMAIN

DEFAULT_NAME = "ipma"

PLATFORMS = [Platform.WEATHER]

_LOGGER = logging.getLogger(__name__)

async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:

async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool:
"""Set up IPMA station as config entry."""
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)

latitude = config_entry.data[CONF_LATITUDE]
longitude = config_entry.data[CONF_LONGITUDE]

api = IPMA_API(async_get_clientsession(hass))

try:
async with async_timeout.timeout(30):
location = await Location.get(api, float(latitude), float(longitude))
except (IPMAException, asyncio.TimeoutError) as err:
raise ConfigEntryNotReady(
f"Could not get location for ({latitude},{longitude})"
) from err

_LOGGER.debug(
"Initializing for coordinates %s, %s -> station %s (%d, %d)",
latitude,
longitude,
location.station,
location.id_station,
location.global_id_local,
)

hass.data.setdefault(DOMAIN, {})
hass.data[DOMAIN][config_entry.entry_id] = {DATA_API: api, DATA_LOCATION: location}

await hass.config_entries.async_forward_entry_setups(config_entry, PLATFORMS)
return True
@@ -6,3 +6,6 @@ DOMAIN = "ipma"
HOME_LOCATION_NAME = "Home"

ENTITY_ID_SENSOR_FORMAT_HOME = f"{WEATHER_DOMAIN}.ipma_{HOME_LOCATION_NAME}"

DATA_LOCATION = "location"
DATA_API = "api"

@@ -3,7 +3,7 @@
"name": "Instituto Portugu\u00eas do Mar e Atmosfera (IPMA)",
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/ipma",
"requirements": ["pyipma==3.0.2"],
"requirements": ["pyipma==3.0.5"],
"codeowners": ["@dgomes", "@abmantis"],
"iot_class": "cloud_polling",
"loggers": ["geopy", "pyipma"]

@@ -48,11 +48,12 @@ from homeassistant.const import (
)
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers import config_validation as cv, entity_registry
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.sun import is_up
from homeassistant.util import Throttle

from .const import DATA_API, DATA_LOCATION, DOMAIN

_LOGGER = logging.getLogger(__name__)

ATTRIBUTION = "Instituto Português do Mar e Atmosfera"
@@ -95,13 +96,10 @@ async def async_setup_entry(
async_add_entities: AddEntitiesCallback,
) -> None:
"""Add a weather entity from a config_entry."""
latitude = config_entry.data[CONF_LATITUDE]
longitude = config_entry.data[CONF_LONGITUDE]
api = hass.data[DOMAIN][config_entry.entry_id][DATA_API]
location = hass.data[DOMAIN][config_entry.entry_id][DATA_LOCATION]
mode = config_entry.data[CONF_MODE]

api = await async_get_api(hass)
location = await async_get_location(hass, api, latitude, longitude)

# Migrate old unique_id
@callback
def _async_migrator(entity_entry: entity_registry.RegistryEntry):
@@ -127,29 +125,6 @@ async def async_setup_entry(
async_add_entities([IPMAWeather(location, api, config_entry.data)], True)


async def async_get_api(hass):
"""Get the pyipma api object."""
websession = async_get_clientsession(hass)
return IPMA_API(websession)


async def async_get_location(hass, api, latitude, longitude):
"""Retrieve pyipma location, location name to be used as the entity name."""
async with async_timeout.timeout(30):
location = await Location.get(api, float(latitude), float(longitude))

_LOGGER.debug(
"Initializing for coordinates %s, %s -> station %s (%d, %d)",
latitude,
longitude,
location.station,
location.id_station,
location.global_id_local,
)

return location


class IPMAWeather(WeatherEntity):
"""Representation of a weather condition."""
@@ -2,7 +2,7 @@
from __future__ import annotations

from dataclasses import dataclass
from datetime import datetime, timedelta
from datetime import timedelta
import logging

import pyiss
@@ -18,7 +18,7 @@ from .const import DOMAIN

_LOGGER = logging.getLogger(__name__)

PLATFORMS = [Platform.BINARY_SENSOR]
PLATFORMS = [Platform.SENSOR]


@dataclass
@@ -27,31 +27,25 @@ class IssData:

number_of_people_in_space: int
current_location: dict[str, str]
is_above: bool
next_rise: datetime


def update(iss: pyiss.ISS, latitude: float, longitude: float) -> IssData:
def update(iss: pyiss.ISS) -> IssData:
"""Retrieve data from the pyiss API."""
return IssData(
number_of_people_in_space=iss.number_of_people_in_space(),
current_location=iss.current_location(),
is_above=iss.is_ISS_above(latitude, longitude),
next_rise=iss.next_rise(latitude, longitude),
)


async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Set up this integration using UI."""
hass.data.setdefault(DOMAIN, {})
latitude = hass.config.latitude
longitude = hass.config.longitude

iss = pyiss.ISS()

async def async_update() -> IssData:
try:
return await hass.async_add_executor_job(update, iss, latitude, longitude)
return await hass.async_add_executor_job(update, iss)
except (HTTPError, requests.exceptions.ConnectionError) as ex:
raise UpdateFailed("Unable to retrieve data") from ex
@@ -7,9 +7,10 @@ from homeassistant.const import CONF_NAME, CONF_SHOW_ON_MAP
from homeassistant.core import callback
from homeassistant.data_entry_flow import FlowResult

from .binary_sensor import DEFAULT_NAME
from .const import DOMAIN

DEFAULT_NAME = "ISS"


class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
"""Config flow for iss component."""
@@ -30,10 +31,6 @@ class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
if self._async_current_entries():
return self.async_abort(reason="single_instance_allowed")

# Check if location have been defined.
if not self.hass.config.latitude and not self.hass.config.longitude:
return self.async_abort(reason="latitude_longitude_not_defined")

if user_input is not None:
return self.async_create_entry(
title=user_input.get(CONF_NAME, DEFAULT_NAME),
@@ -1,10 +1,10 @@
"""Support for iss binary sensor."""
"""Support for iss sensor."""
from __future__ import annotations

import logging
from typing import Any

from homeassistant.components.binary_sensor import BinarySensorEntity
from homeassistant.components.sensor import SensorEntity
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import ATTR_LATITUDE, ATTR_LONGITUDE, CONF_SHOW_ON_MAP
from homeassistant.core import HomeAssistant
@@ -19,12 +19,6 @@ from .const import DOMAIN

_LOGGER = logging.getLogger(__name__)

ATTR_ISS_NEXT_RISE = "next_rise"
ATTR_ISS_NUMBER_PEOPLE_SPACE = "number_of_people_in_space"

DEFAULT_NAME = "ISS"
DEFAULT_DEVICE_CLASS = "visible"


async def async_setup_entry(
hass: HomeAssistant,
@@ -37,15 +31,11 @@ async def async_setup_entry(
name = entry.title
show_on_map = entry.options.get(CONF_SHOW_ON_MAP, False)

async_add_entities([IssBinarySensor(coordinator, name, show_on_map)])
async_add_entities([IssSensor(coordinator, name, show_on_map)])


class IssBinarySensor(
CoordinatorEntity[DataUpdateCoordinator[IssData]], BinarySensorEntity
):
"""Implementation of the ISS binary sensor."""

_attr_device_class = DEFAULT_DEVICE_CLASS
class IssSensor(CoordinatorEntity[DataUpdateCoordinator[IssData]], SensorEntity):
"""Implementation of the ISS sensor."""

def __init__(
self, coordinator: DataUpdateCoordinator[IssData], name: str, show: bool
@@ -57,17 +47,14 @@ class IssBinarySensor(
self._show_on_map = show

@property
def is_on(self) -> bool:
"""Return true if the binary sensor is on."""
return self.coordinator.data.is_above is True
def native_value(self) -> int:
"""Return number of people in space."""
return self.coordinator.data.number_of_people_in_space

@property
def extra_state_attributes(self) -> dict[str, Any]:
"""Return the state attributes."""
attrs = {
ATTR_ISS_NUMBER_PEOPLE_SPACE: self.coordinator.data.number_of_people_in_space,
ATTR_ISS_NEXT_RISE: self.coordinator.data.next_rise,
}
attrs = {}
if self._show_on_map:
attrs[ATTR_LONGITUDE] = self.coordinator.data.current_location.get(
"longitude"
@@ -75,7 +75,7 @@ class ISYEntity(Entity):
# New state attributes may be available, update the state.
self.async_write_ha_state()

self.hass.bus.fire("isy994_control", event_data)
self.hass.bus.async_fire("isy994_control", event_data)

@property
def device_info(self) -> DeviceInfo | None:

@@ -13,6 +13,7 @@ from demetriek import (
Model,
Notification,
NotificationIconType,
NotificationPriority,
NotificationSound,
Simple,
Sound,
@@ -227,6 +228,7 @@ class LaMetricFlowHandler(AbstractOAuth2FlowHandler, domain=DOMAIN):

await lametric.notify(
notification=Notification(
priority=NotificationPriority.CRITICAL,
icon_type=NotificationIconType.INFO,
model=Model(
cycles=2,
@@ -2,7 +2,7 @@
"domain": "lametric",
"name": "LaMetric",
"documentation": "https://www.home-assistant.io/integrations/lametric",
"requirements": ["demetriek==0.2.2"],
"requirements": ["demetriek==0.2.4"],
"codeowners": ["@robbiet480", "@frenck"],
"iot_class": "local_polling",
"dependencies": ["application_credentials"],

@@ -21,6 +21,7 @@ from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType

from .const import CONF_CYCLES, CONF_ICON_TYPE, CONF_PRIORITY, CONF_SOUND, DOMAIN
from .coordinator import LaMetricDataUpdateCoordinator


async def async_get_service(
@@ -31,8 +32,10 @@ async def async_get_service(
"""Get the LaMetric notification service."""
if discovery_info is None:
return None
lametric: LaMetricDevice = hass.data[DOMAIN][discovery_info["entry_id"]]
return LaMetricNotificationService(lametric)
coordinator: LaMetricDataUpdateCoordinator = hass.data[DOMAIN][
discovery_info["entry_id"]
]
return LaMetricNotificationService(coordinator.lametric)


class LaMetricNotificationService(BaseNotificationService):
@@ -31,9 +31,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
_LOGGER.info("Polling on %s", entry.data[CONF_DEVICE])
return await hass.async_add_executor_job(api.read)

# No automatic polling and no initial refresh of data is being done at this point,
# to prevent battery drain. The user will have to do it manually.

# Polling is only daily to prevent battery drain.
coordinator = DataUpdateCoordinator(
hass,
_LOGGER,

@@ -14,7 +14,7 @@ from homeassistant import config_entries
from homeassistant.const import CONF_DEVICE
from homeassistant.exceptions import HomeAssistantError

from .const import DOMAIN
from .const import DOMAIN, ULTRAHEAT_TIMEOUT

_LOGGER = logging.getLogger(__name__)

@@ -43,6 +43,7 @@ class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
dev_path = await self.hass.async_add_executor_job(
get_serial_by_id, user_input[CONF_DEVICE]
)
_LOGGER.debug("Using this path : %s", dev_path)

try:
return await self.validate_and_create_entry(dev_path)
@@ -76,6 +77,7 @@ class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
"""Try to connect to the device path and return an entry."""
model, device_number = await self.validate_ultraheat(dev_path)

_LOGGER.debug("Got model %s and device_number %s", model, device_number)
await self.async_set_unique_id(device_number)
self._abort_if_unique_id_configured()
data = {
@@ -94,7 +96,7 @@ class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
reader = UltraheatReader(port)
heat_meter = HeatMeterService(reader)
try:
async with async_timeout.timeout(10):
async with async_timeout.timeout(ULTRAHEAT_TIMEOUT):
# validate and retrieve the model and device number for a unique id
data = await self.hass.async_add_executor_job(heat_meter.read)
_LOGGER.debug("Got data from Ultraheat API: %s", data)

@@ -11,6 +11,7 @@ from homeassistant.helpers.entity import EntityCategory
DOMAIN = "landisgyr_heat_meter"

GJ_TO_MWH = 0.277778 # conversion factor
ULTRAHEAT_TIMEOUT = 30 # reading the IR port can take some time

HEAT_METER_SENSOR_TYPES = (
SensorEntityDescription(
@@ -6,7 +6,7 @@ from datetime import timedelta
import logging

import async_timeout
from led_ble import BLEAK_EXCEPTIONS, LEDBLE
from led_ble import BLEAK_EXCEPTIONS, LEDBLE, get_device

from homeassistant.components import bluetooth
from homeassistant.components.bluetooth.match import ADDRESS, BluetoothCallbackMatcher
@@ -27,7 +27,9 @@ _LOGGER = logging.getLogger(__name__)
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Set up LED BLE from a config entry."""
address: str = entry.data[CONF_ADDRESS]
ble_device = bluetooth.async_ble_device_from_address(hass, address.upper(), True)
ble_device = bluetooth.async_ble_device_from_address(
hass, address.upper(), True
) or await get_device(address)
if not ble_device:
raise ConfigEntryNotReady(
f"Could not find LED BLE device with address {address}"

@@ -48,12 +48,12 @@ class LEDBLEEntity(CoordinatorEntity, LightEntity):
"""Initialize an ledble light."""
super().__init__(coordinator)
self._device = device
self._attr_unique_id = device._address
self._attr_unique_id = device.address
self._attr_device_info = DeviceInfo(
name=name,
model=hex(device.model_num),
sw_version=hex(device.version_num),
connections={(dr.CONNECTION_BLUETOOTH, device._address)},
connections={(dr.CONNECTION_BLUETOOTH, device.address)},
)
self._async_update_attrs()
@@ -3,7 +3,7 @@
"name": "LED BLE",
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/ble_ble",
"requirements": ["led-ble==0.5.4"],
"requirements": ["led-ble==0.10.1"],
"dependencies": ["bluetooth"],
"codeowners": ["@bdraco"],
"bluetooth": [
@@ -11,7 +11,10 @@
{ "local_name": "BLE-LED*" },
{ "local_name": "LEDBLE*" },
{ "local_name": "Triones*" },
{ "local_name": "LEDBlue*" }
{ "local_name": "LEDBlue*" },
{ "local_name": "Dream~*" },
{ "local_name": "QHM-*" },
{ "local_name": "AP-*" }
],
"iot_class": "local_polling"
}
@@ -1,5 +1,5 @@
"""Config flow to configure the LG Soundbar integration."""
from queue import Queue
from queue import Full, Queue
import socket

import temescal
@@ -20,18 +20,29 @@ def test_connect(host, port):
uuid_q = Queue(maxsize=1)
name_q = Queue(maxsize=1)

def queue_add(attr_q, data):
try:
attr_q.put_nowait(data)
except Full:
pass

def msg_callback(response):
if response["msg"] == "MAC_INFO_DEV" and "s_uuid" in response["data"]:
uuid_q.put_nowait(response["data"]["s_uuid"])
if (
response["msg"] in ["MAC_INFO_DEV", "PRODUCT_INFO"]
and "s_uuid" in response["data"]
):
queue_add(uuid_q, response["data"]["s_uuid"])
if (
response["msg"] == "SPK_LIST_VIEW_INFO"
and "s_user_name" in response["data"]
):
name_q.put_nowait(response["data"]["s_user_name"])
queue_add(name_q, response["data"]["s_user_name"])

try:
connection = temescal.temescal(host, port=port, callback=msg_callback)
connection.get_mac_info()
if uuid_q.empty():
connection.get_product_info()
connection.get_info()
details = {"name": name_q.get(timeout=10), "uuid": uuid_q.get(timeout=10)}
return details
@@ -57,7 +57,7 @@ CONFIG_SCHEMA = vol.All(
)


PLATFORMS = [Platform.BUTTON, Platform.LIGHT]
PLATFORMS = [Platform.BINARY_SENSOR, Platform.BUTTON, Platform.LIGHT]
DISCOVERY_INTERVAL = timedelta(minutes=15)
MIGRATION_INTERVAL = timedelta(minutes=5)
homeassistant/components/lifx/binary_sensor.py (new file, 70 lines)
@@ -0,0 +1,70 @@
"""Binary sensor entities for LIFX integration."""
from __future__ import annotations

from homeassistant.components.binary_sensor import (
BinarySensorDeviceClass,
BinarySensorEntity,
BinarySensorEntityDescription,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.entity import EntityCategory
from homeassistant.helpers.entity_platform import AddEntitiesCallback

from .const import DOMAIN, HEV_CYCLE_STATE
from .coordinator import LIFXUpdateCoordinator
from .entity import LIFXEntity
from .util import lifx_features

HEV_CYCLE_STATE_SENSOR = BinarySensorEntityDescription(
key=HEV_CYCLE_STATE,
name="Clean Cycle",
entity_category=EntityCategory.DIAGNOSTIC,
device_class=BinarySensorDeviceClass.RUNNING,
)


async def async_setup_entry(
hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback
) -> None:
"""Set up LIFX from a config entry."""
coordinator: LIFXUpdateCoordinator = hass.data[DOMAIN][entry.entry_id]

if lifx_features(coordinator.device)["hev"]:
async_add_entities(
[
LIFXHevCycleBinarySensorEntity(
coordinator=coordinator, description=HEV_CYCLE_STATE_SENSOR
)
]
)


class LIFXHevCycleBinarySensorEntity(LIFXEntity, BinarySensorEntity):
"""LIFX HEV cycle state binary sensor."""

_attr_has_entity_name = True

def __init__(
self,
coordinator: LIFXUpdateCoordinator,
description: BinarySensorEntityDescription,
) -> None:
"""Initialise the sensor."""
super().__init__(coordinator)

self.entity_description = description
self._attr_name = description.name
self._attr_unique_id = f"{coordinator.serial_number}_{description.key}"
self._async_update_attrs()

@callback
def _handle_coordinator_update(self) -> None:
"""Handle updated data from the coordinator."""
self._async_update_attrs()
super()._handle_coordinator_update()

@callback
def _async_update_attrs(self) -> None:
"""Handle coordinator updates."""
self._attr_is_on = self.coordinator.async_get_hev_cycle_state()
@@ -29,6 +29,15 @@ IDENTIFY_WAVEFORM = {
IDENTIFY = "identify"
RESTART = "restart"

ATTR_DURATION = "duration"
ATTR_INDICATION = "indication"
ATTR_INFRARED = "infrared"
ATTR_POWER = "power"
ATTR_REMAINING = "remaining"
ATTR_ZONES = "zones"

HEV_CYCLE_STATE = "hev_cycle_state"

DATA_LIFX_MANAGER = "lifx_manager"

_LOGGER = logging.getLogger(__name__)
_LOGGER = logging.getLogger(__package__)
@@ -15,6 +15,7 @@ from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, Upda

from .const import (
_LOGGER,
ATTR_REMAINING,
IDENTIFY_WAVEFORM,
MESSAGE_RETRIES,
MESSAGE_TIMEOUT,
@@ -24,6 +25,7 @@ from .const import (
from .util import async_execute_lifx, get_real_mac_addr, lifx_features

REQUEST_REFRESH_DELAY = 0.35
LIFX_IDENTIFY_DELAY = 3.0


class LIFXUpdateCoordinator(DataUpdateCoordinator):
@@ -91,7 +93,7 @@ class LIFXUpdateCoordinator(DataUpdateCoordinator):
# Turn the bulb on first, flash for 3 seconds, then turn off
await self.async_set_power(state=True, duration=1)
await self.async_set_waveform_optional(value=IDENTIFY_WAVEFORM)
await asyncio.sleep(3)
await asyncio.sleep(LIFX_IDENTIFY_DELAY)
await self.async_set_power(state=False, duration=1)

async def _async_update_data(self) -> None:
@@ -101,26 +103,25 @@ class LIFXUpdateCoordinator(DataUpdateCoordinator):
self.device.get_hostfirmware()
if self.device.product is None:
self.device.get_version()
try:
response = await async_execute_lifx(self.device.get_color)
except asyncio.TimeoutError as ex:
raise UpdateFailed(
f"Failed to fetch state from device: {self.device.ip_addr}"
) from ex
response = await async_execute_lifx(self.device.get_color)

if self.device.product is None:
raise UpdateFailed(
f"Failed to fetch get version from device: {self.device.ip_addr}"
)

# device.mac_addr is not the mac_address, its the serial number
if self.device.mac_addr == TARGET_ANY:
self.device.mac_addr = response.target_addr

if lifx_features(self.device)["multizone"]:
try:
await self.async_update_color_zones()
except asyncio.TimeoutError as ex:
raise UpdateFailed(
f"Failed to fetch zones from device: {self.device.ip_addr}"
) from ex
await self.async_update_color_zones()

if lifx_features(self.device)["hev"]:
if self.device.hev_cycle_configuration is None:
self.device.get_hev_configuration()

await self.async_get_hev_cycle()

async def async_update_color_zones(self) -> None:
"""Get updated color information for each zone."""
@@ -138,6 +139,17 @@ class LIFXUpdateCoordinator(DataUpdateCoordinator):
if zone == top - 1:
zone -= 1

def async_get_hev_cycle_state(self) -> bool | None:
"""Return the current HEV cycle state."""
if self.device.hev_cycle is None:
return None
return bool(self.device.hev_cycle.get(ATTR_REMAINING, 0) > 0)

async def async_get_hev_cycle(self) -> None:
"""Update the HEV cycle status from a LIFX Clean bulb."""
if lifx_features(self.device)["hev"]:
await async_execute_lifx(self.device.get_hev_cycle)

async def async_set_waveform_optional(
self, value: dict[str, Any], rapid: bool = False
) -> None:
@@ -28,7 +28,7 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.event import async_track_point_in_utc_time
import homeassistant.util.color as color_util

from .const import DATA_LIFX_MANAGER, DOMAIN
from .const import ATTR_INFRARED, ATTR_POWER, ATTR_ZONES, DATA_LIFX_MANAGER, DOMAIN
from .coordinator import LIFXUpdateCoordinator
from .entity import LIFXEntity
from .manager import (
@@ -39,13 +39,7 @@ from .manager import (
)
from .util import convert_8_to_16, convert_16_to_8, find_hsbk, lifx_features, merge_hsbk

SERVICE_LIFX_SET_STATE = "set_state"

COLOR_ZONE_POPULATE_DELAY = 0.3

ATTR_INFRARED = "infrared"
ATTR_ZONES = "zones"
ATTR_POWER = "power"
LIFX_STATE_SETTLE_DELAY = 0.3

SERVICE_LIFX_SET_STATE = "set_state"

@@ -225,18 +219,16 @@ class LIFXLight(LIFXEntity, LightEntity):
elif power_on:
await self.set_power(True, duration=fade)
else:
if power_on:
await self.set_power(True)
if hsbk:
await self.set_color(hsbk, kwargs, duration=fade)
# The response from set_color will tell us if the
# bulb is actually on or not, so we don't need to
# call power_on if its already on
if power_on and self.bulb.power_level == 0:
await self.set_power(True)
elif power_on:
await self.set_power(True)
if power_off:
await self.set_power(False, duration=fade)

# Avoid state ping-pong by holding off updates as the state settles
await asyncio.sleep(LIFX_STATE_SETTLE_DELAY)

# Update when the transition starts and ends
await self.update_during_transition(fade)

@@ -344,7 +336,7 @@ class LIFXStrip(LIFXColor):
# Zone brightness is not reported when powered off
if not self.is_on and hsbk[HSBK_BRIGHTNESS] is None:
await self.set_power(True)
await asyncio.sleep(COLOR_ZONE_POPULATE_DELAY)
await asyncio.sleep(LIFX_STATE_SETTLE_DELAY)
await self.update_color_zones()
await self.set_power(False)
@@ -3,7 +3,7 @@
|
||||
"name": "Litter-Robot",
|
||||
"config_flow": true,
|
||||
"documentation": "https://www.home-assistant.io/integrations/litterrobot",
|
||||
"requirements": ["pylitterbot==2022.8.2"],
|
||||
"requirements": ["pylitterbot==2022.9.1"],
|
||||
"codeowners": ["@natekspencer", "@tkdrob"],
|
||||
"dhcp": [{ "hostname": "litter-robot4" }],
|
||||
"iot_class": "cloud_polling",
|
||||
|
||||
@@ -48,10 +48,7 @@ class MelnorSwitch(MelnorBluetoothBaseEntity, SwitchEntity):
super().__init__(coordinator)
self._valve_index = valve_index

self._attr_unique_id = (
f"switch-{self._attr_unique_id}-zone{self._valve().id}-manual"
)

self._attr_unique_id = f"{self._attr_unique_id}-zone{self._valve().id}-manual"
self._attr_name = f"{self._device.name} Zone {self._valve().id+1}"

@property

@@ -3,7 +3,7 @@
"name": "Motion Blinds",
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/motion_blinds",
"requirements": ["motionblinds==0.6.12"],
"requirements": ["motionblinds==0.6.13"],
"dependencies": ["network"],
"dhcp": [
{ "registered_devices": true },

@@ -30,6 +30,7 @@ from homeassistant.helpers import (
)
from homeassistant.helpers.device_registry import DeviceEntry
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.entity_platform import async_get_platforms
from homeassistant.helpers.reload import (
async_integration_yaml_config,
async_reload_integration_platforms,
@@ -65,13 +66,7 @@ from .const import ( # noqa: F401
CONF_TLS_VERSION,
CONF_TOPIC,
CONF_WILL_MESSAGE,
CONFIG_ENTRY_IS_SETUP,
DATA_MQTT,
DATA_MQTT_CONFIG,
DATA_MQTT_RELOAD_DISPATCHERS,
DATA_MQTT_RELOAD_ENTRY,
DATA_MQTT_RELOAD_NEEDED,
DATA_MQTT_UPDATED_CONFIG,
DEFAULT_ENCODING,
DEFAULT_QOS,
DEFAULT_RETAIN,
@@ -81,7 +76,7 @@ from .const import ( # noqa: F401
PLATFORMS,
RELOADABLE_PLATFORMS,
)
from .mixins import async_discover_yaml_entities
from .mixins import MqttData
from .models import ( # noqa: F401
MqttCommandTemplate,
MqttValueTemplate,
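The import changes above drop the individual DATA_MQTT_* keys that used to live in hass.data and pull in a single MqttData container from .mixins instead. The field names in the sketch below are taken from the attributes this diff accesses later (client, config, updated_config, reload_entry, reload_needed, reload_dispatchers, reload_handlers, subscriptions_to_restore, discovery_registry_hooks); the types and defaults are assumptions, not the real class definition:

from dataclasses import dataclass, field
from typing import Any, Callable


@dataclass
class MqttData:
    """Sketch of a per-instance MQTT state container (not the actual class)."""

    client: Any | None = None                      # the connected MQTT wrapper
    config: dict[str, Any] | None = None           # merged yaml / entry config
    updated_config: dict[str, Any] = field(default_factory=dict)
    reload_entry: bool = False                     # re-fetch yaml on next entry setup
    reload_needed: bool = False                    # reload manual items once re-enabled
    reload_dispatchers: list[Callable[[], None]] = field(default_factory=list)
    reload_handlers: dict[str, Callable[[], Any]] = field(default_factory=dict)
    subscriptions_to_restore: list[Any] = field(default_factory=list)
    discovery_registry_hooks: dict[Any, Callable[[], None]] = field(default_factory=dict)
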
@@ -169,6 +164,8 @@ async def _async_setup_discovery(

async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
"""Start the MQTT protocol service."""
mqtt_data: MqttData = hass.data.setdefault(DATA_MQTT, MqttData())

conf: ConfigType | None = config.get(DOMAIN)

websocket_api.async_register_command(hass, websocket_subscribe)
@@ -177,7 +174,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:

if conf:
conf = dict(conf)
hass.data[DATA_MQTT_CONFIG] = conf
mqtt_data.config = conf

if (mqtt_entry_status := mqtt_config_entry_enabled(hass)) is None:
# Create an import flow if the user has yaml configured entities etc.
@@ -189,12 +186,12 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
context={"source": config_entries.SOURCE_INTEGRATION_DISCOVERY},
data={},
)
hass.data[DATA_MQTT_RELOAD_NEEDED] = True
mqtt_data.reload_needed = True
elif mqtt_entry_status is False:
_LOGGER.info(
"MQTT will be not available until the config entry is enabled",
)
hass.data[DATA_MQTT_RELOAD_NEEDED] = True
mqtt_data.reload_needed = True

return True

@@ -252,33 +249,34 @@ async def _async_config_entry_updated(hass: HomeAssistant, entry: ConfigEntry) -

Causes for this is config entry options changing.
"""
mqtt_client = hass.data[DATA_MQTT]
mqtt_data: MqttData = hass.data[DATA_MQTT]
assert (client := mqtt_data.client) is not None

if (conf := hass.data.get(DATA_MQTT_CONFIG)) is None:
if (conf := mqtt_data.config) is None:
conf = CONFIG_SCHEMA_BASE(dict(entry.data))

mqtt_client.conf = _merge_extended_config(entry, conf)
await mqtt_client.async_disconnect()
mqtt_client.init_client()
await mqtt_client.async_connect()
mqtt_data.config = _merge_extended_config(entry, conf)
await client.async_disconnect()
client.init_client()
await client.async_connect()

await discovery.async_stop(hass)
if mqtt_client.conf.get(CONF_DISCOVERY):
await _async_setup_discovery(hass, mqtt_client.conf, entry)
if client.conf.get(CONF_DISCOVERY):
await _async_setup_discovery(hass, cast(ConfigType, mqtt_data.config), entry)


async def async_fetch_config(hass: HomeAssistant, entry: ConfigEntry) -> dict | None:
"""Fetch fresh MQTT yaml config from the hass config when (re)loading the entry."""
if DATA_MQTT_RELOAD_ENTRY in hass.data:
mqtt_data: MqttData = hass.data[DATA_MQTT]
if mqtt_data.reload_entry:
hass_config = await conf_util.async_hass_config_yaml(hass)
mqtt_config = CONFIG_SCHEMA_BASE(hass_config.get(DOMAIN, {}))
hass.data[DATA_MQTT_CONFIG] = mqtt_config
mqtt_data.config = CONFIG_SCHEMA_BASE(hass_config.get(DOMAIN, {}))

# Remove unknown keys from config entry data
_filter_entry_config(hass, entry)

# Merge basic configuration, and add missing defaults for basic options
_merge_basic_config(hass, entry, hass.data.get(DATA_MQTT_CONFIG, {}))
_merge_basic_config(hass, entry, mqtt_data.config or {})
# Bail out if broker setting is missing
if CONF_BROKER not in entry.data:
_LOGGER.error("MQTT broker is not configured, please configure it")
@@ -286,7 +284,7 @@ async def async_fetch_config(hass: HomeAssistant, entry: ConfigEntry) -> dict |

# If user doesn't have configuration.yaml config, generate default values
# for options not in config entry data
if (conf := hass.data.get(DATA_MQTT_CONFIG)) is None:
if (conf := mqtt_data.config) is None:
conf = CONFIG_SCHEMA_BASE(dict(entry.data))

# User has configuration.yaml config, warn about config entry overrides
@@ -309,15 +307,20 @@ async def async_fetch_config(hass: HomeAssistant, entry: ConfigEntry) -> dict |

async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Load a config entry."""
mqtt_data: MqttData = hass.data.setdefault(DATA_MQTT, MqttData())

# Merge basic configuration, and add missing defaults for basic options
if (conf := await async_fetch_config(hass, entry)) is None:
# Bail out
return False

hass.data[DATA_MQTT] = MQTT(hass, entry, conf)
mqtt_data.client = MQTT(hass, entry, conf)
# Restore saved subscriptions
if mqtt_data.subscriptions_to_restore:
mqtt_data.client.subscriptions = mqtt_data.subscriptions_to_restore
mqtt_data.subscriptions_to_restore = []
entry.add_update_listener(_async_config_entry_updated)

await hass.data[DATA_MQTT].async_connect()
await mqtt_data.client.async_connect()

async def async_publish_service(call: ServiceCall) -> None:
"""Handle MQTT publish service calls."""
@@ -366,7 +369,8 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
)
return

await hass.data[DATA_MQTT].async_publish(msg_topic, payload, qos, retain)
assert mqtt_data.client is not None and msg_topic is not None
await mqtt_data.client.async_publish(msg_topic, payload, qos, retain)

hass.services.async_register(
DOMAIN, SERVICE_PUBLISH, async_publish_service, schema=MQTT_PUBLISH_SCHEMA
@@ -407,7 +411,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
)

# setup platforms and discovery
hass.data[CONFIG_ENTRY_IS_SETUP] = set()

async def async_setup_reload_service() -> None:
"""Create the reload service for the MQTT domain."""
@@ -420,13 +423,25 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
await async_reload_integration_platforms(hass, DOMAIN, RELOADABLE_PLATFORMS)

# Reload the modern yaml platforms
mqtt_platforms = async_get_platforms(hass, DOMAIN)
tasks = [
entity.async_remove()
for mqtt_platform in mqtt_platforms
for entity in mqtt_platform.entities.values()
if not entity._discovery_data # type: ignore[attr-defined] # pylint: disable=protected-access
if mqtt_platform.config_entry
and mqtt_platform.domain in RELOADABLE_PLATFORMS
]
await asyncio.gather(*tasks)

config_yaml = await async_integration_yaml_config(hass, DOMAIN) or {}
hass.data[DATA_MQTT_UPDATED_CONFIG] = config_yaml.get(DOMAIN, {})
mqtt_data.updated_config = config_yaml.get(DOMAIN, {})
await asyncio.gather(
*(
[
async_discover_yaml_entities(hass, component)
mqtt_data.reload_handlers[component]()
for component in RELOADABLE_PLATFORMS
if component in mqtt_data.reload_handlers
]
)
)
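In the rebuilt reload service the per-platform async_discover_yaml_entities() calls are replaced by callbacks looked up in mqtt_data.reload_handlers, one per reloadable platform. A minimal sketch of that registry pattern; the register_reload_handler() helper is hypothetical, since the diff does not show where the handlers are registered:

import asyncio
from typing import Any, Awaitable, Callable

# Stand-in for mqtt_data.reload_handlers: platform name -> reload coroutine factory.
reload_handlers: dict[str, Callable[[], Awaitable[Any]]] = {}


def register_reload_handler(platform: str, handler: Callable[[], Awaitable[Any]]) -> None:
    """Let a platform register the callback that re-reads its yaml config."""
    reload_handlers[platform] = handler


async def reload_platforms(reloadable_platforms: list[str]) -> None:
    """Run every registered handler for the platforms that support reload."""
    await asyncio.gather(
        *(
            reload_handlers[platform]()
            for platform in reloadable_platforms
            if platform in reload_handlers
        )
    )
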
@@ -438,6 +453,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:

async def async_forward_entry_setup_and_setup_discovery(config_entry):
"""Forward the config entry setup to the platforms and set up discovery."""
reload_manual_setup: bool = False
# Local import to avoid circular dependencies
# pylint: disable-next=import-outside-toplevel
from . import device_automation, tag
@@ -460,8 +476,17 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
await _async_setup_discovery(hass, conf, entry)
# Setup reload service after all platforms have loaded
await async_setup_reload_service()
if DATA_MQTT_RELOAD_NEEDED in hass.data:
hass.data.pop(DATA_MQTT_RELOAD_NEEDED)
# When the entry is reloaded, also reload manual set up items to enable MQTT
if mqtt_data.reload_entry:
mqtt_data.reload_entry = False
reload_manual_setup = True

# When the entry was disabled before, reload manual set up items to enable MQTT again
if mqtt_data.reload_needed:
mqtt_data.reload_needed = False
reload_manual_setup = True

if reload_manual_setup:
await async_reload_manual_mqtt_items(hass)

await async_forward_entry_setup_and_setup_discovery(entry)
@@ -568,7 +593,9 @@ def async_subscribe_connection_status(

def is_connected(hass: HomeAssistant) -> bool:
"""Return if MQTT client is connected."""
return hass.data[DATA_MQTT].connected
mqtt_data: MqttData = hass.data[DATA_MQTT]
assert mqtt_data.client is not None
return mqtt_data.client.connected


async def async_remove_config_entry_device(
@@ -584,6 +611,10 @@ async def async_remove_config_entry_device(

async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Unload MQTT dump and publish service when the config entry is unloaded."""
mqtt_data: MqttData = hass.data[DATA_MQTT]
assert mqtt_data.client is not None
mqtt_client = mqtt_data.client

# Unload publish and dump services.
hass.services.async_remove(
DOMAIN,
@@ -596,7 +627,6 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:

# Stop the discovery
await discovery.async_stop(hass)
mqtt_client: MQTT = hass.data[DATA_MQTT]
# Unload the platforms
await asyncio.gather(
*(
@@ -606,23 +636,30 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
)
await hass.async_block_till_done()
# Unsubscribe reload dispatchers
while reload_dispatchers := hass.data.setdefault(DATA_MQTT_RELOAD_DISPATCHERS, []):
while reload_dispatchers := mqtt_data.reload_dispatchers:
reload_dispatchers.pop()()
hass.data[CONFIG_ENTRY_IS_SETUP] = set()
# Cleanup listeners
mqtt_client.cleanup()

# Trigger reload manual MQTT items at entry setup
# Reload the legacy yaml platform
await async_reload_integration_platforms(hass, DOMAIN, RELOADABLE_PLATFORMS)
if (mqtt_entry_status := mqtt_config_entry_enabled(hass)) is False:
# The entry is disabled reload legacy manual items when the entry is enabled again
hass.data[DATA_MQTT_RELOAD_NEEDED] = True
mqtt_data.reload_needed = True
elif mqtt_entry_status is True:
# The entry is reloaded:
# Trigger re-fetching the yaml config at entry setup
hass.data[DATA_MQTT_RELOAD_ENTRY] = True
# Stop the loop
mqtt_data.reload_entry = True
# Reload the legacy yaml platform to make entities unavailable
await async_reload_integration_platforms(hass, DOMAIN, RELOADABLE_PLATFORMS)
# Cleanup entity registry hooks
registry_hooks = mqtt_data.discovery_registry_hooks
while registry_hooks:
registry_hooks.popitem()[1]()
# Wait for all ACKs and stop the loop
await mqtt_client.async_disconnect()
# Store remaining subscriptions to be able to restore or reload them
# when the entry is set up again
if mqtt_client.subscriptions:
mqtt_data.subscriptions_to_restore = mqtt_client.subscriptions

return True

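Unloading now parks the client's remaining subscriptions on mqtt_data.subscriptions_to_restore, and async_setup_entry (earlier in this diff) hands them to the freshly created client before connecting, then clears the list. A small sketch of that round-trip; mqtt_data and the client objects are treated as plain attribute holders here:

from typing import Any


def store_subscriptions(mqtt_data: Any, old_client: Any) -> None:
    """On unload: remember the live subscriptions so a later setup can restore them."""
    if old_client.subscriptions:
        mqtt_data.subscriptions_to_restore = old_client.subscriptions


def restore_subscriptions(mqtt_data: Any, new_client: Any) -> None:
    """On setup: seed the new client with the saved subscriptions, then clear the store."""
    if mqtt_data.subscriptions_to_restore:
        new_client.subscriptions = mqtt_data.subscriptions_to_restore
        mqtt_data.subscriptions_to_restore = []
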
@@ -44,7 +44,6 @@ from .debug_info import log_messages
from .mixins import (
MQTT_ENTITY_COMMON_SCHEMA,
MqttEntity,
async_discover_yaml_entities,
async_setup_entry_helper,
async_setup_platform_helper,
warn_for_legacy_schema,
@@ -146,9 +145,6 @@ async def async_setup_entry(
async_add_entities: AddEntitiesCallback,
) -> None:
"""Set up MQTT alarm control panel through configuration.yaml and dynamically through MQTT discovery."""
# load and initialize platform config from configuration.yaml
await async_discover_yaml_entities(hass, alarm.DOMAIN)
# setup for discovery
setup = functools.partial(
_async_setup_entity, hass, async_add_entities, config_entry=config_entry
)

@@ -42,7 +42,6 @@ from .mixins import (
MQTT_ENTITY_COMMON_SCHEMA,
MqttAvailability,
MqttEntity,
async_discover_yaml_entities,
async_setup_entry_helper,
async_setup_platform_helper,
warn_for_legacy_schema,
@@ -102,9 +101,6 @@ async def async_setup_entry(
async_add_entities: AddEntitiesCallback,
) -> None:
"""Set up MQTT binary sensor through configuration.yaml and dynamically through MQTT discovery."""
# load and initialize platform config from configuration.yaml
await async_discover_yaml_entities(hass, binary_sensor.DOMAIN)
# setup for discovery
setup = functools.partial(
_async_setup_entity, hass, async_add_entities, config_entry=config_entry
)

@@ -25,7 +25,6 @@ from .const import (
from .mixins import (
MQTT_ENTITY_COMMON_SCHEMA,
MqttEntity,
async_discover_yaml_entities,
async_setup_entry_helper,
async_setup_platform_helper,
warn_for_legacy_schema,
@@ -81,9 +80,6 @@ async def async_setup_entry(
async_add_entities: AddEntitiesCallback,
) -> None:
"""Set up MQTT button through configuration.yaml and dynamically through MQTT discovery."""
# load and initialize platform config from configuration.yaml
await async_discover_yaml_entities(hass, button.DOMAIN)
# setup for discovery
setup = functools.partial(
_async_setup_entity, hass, async_add_entities, config_entry=config_entry
)

Some files were not shown because too many files have changed in this diff.