mirror of https://github.com/home-assistant/core.git
synced 2026-01-08 16:47:42 +01:00
Compare commits
106 Commits
| SHA1 |
|---|
| 9fdab64e8b |
| 7be494f845 |
| 1c8e8419b6 |
| da7f206414 |
| 6bd72c3ff5 |
| 82c137d69b |
| a404c51797 |
| ff39cd753c |
| fe056f518e |
| 0ebeb161e1 |
| f953454374 |
| 016834185a |
| 34c0e0f58d |
| 95e6969912 |
| 297e5300b4 |
| 6cccd87318 |
| 532c624d01 |
| ed17a81f50 |
| 44be190378 |
| b1ac920505 |
| 1901cc962e |
| babaf48867 |
| b3e2426967 |
| f1ee7fed4c |
| 3efda8d1d2 |
| a5f00d1db2 |
| ddbadb1e26 |
| cf81a5c09a |
| a8b6464d7f |
| 85ae63c656 |
| 68cc34df6f |
| 9955e7e5e1 |
| ab8ef1c9e1 |
| 7e6d64a24c |
| e582caccc9 |
| 1eb8035122 |
| 57b7ed6a07 |
| d35f06ac15 |
| bf741c1b26 |
| 2df709c7d0 |
| e9b355bd8a |
| ef279b125d |
| 152b380a2f |
| 8a39bea761 |
| d37fe1fbb6 |
| 33b56b0cf9 |
| 0383030266 |
| b8fe0c6c3a |
| 7cb0c98c03 |
| 58c6702080 |
| f77b3d4714 |
| f5aee6b886 |
| 6f26722f69 |
| 13cfd1bae1 |
| 5271a3eb1e |
| 78022bf145 |
| 99a57f5a4e |
| a9e220c96b |
| f7d7765d5e |
| 9ffcf35b23 |
| d3a59652bb |
| c62a6cd779 |
| f1169120ae |
| b28dbe20b6 |
| 8dde59be02 |
| abca177894 |
| d3bb2e5e16 |
| 7f8a89838b |
| 39c4b338f1 |
| 4518335a56 |
| b856b0e15d |
| 5d518b5365 |
| ce86112612 |
| e095120023 |
| 3ef3d848f7 |
| 610a327b52 |
| 81436fb688 |
| 24fe9cdd5a |
| e5c499c22e |
| 99a8604601 |
| 3ef821d62f |
| a38e047e83 |
| e0fcf9b648 |
| 0e823b566b |
| a9d24c2cd5 |
| 7a7cad39eb |
| 1a76a953c7 |
| db27079fa8 |
| ef1649383c |
| afde5a7ece |
| 30b8565548 |
| a971b92899 |
| 4ee7cdc8a0 |
| 4c2788a13c |
| 8b4e193614 |
| f0ce65af7d |
| b81c61dd99 |
| 30ef7a5e88 |
| 5a6492b76d |
| b19fe17e76 |
| 47326b2295 |
| 951c373110 |
| b9b76b3519 |
| da6885af6c |
| bc2173747c |
| d0e6b3e268 |
@@ -656,11 +656,6 @@ omit =
 homeassistant/components/plaato/*
 homeassistant/components/plex/media_player.py
 homeassistant/components/plex/sensor.py
-homeassistant/components/plugwise/__init__.py
-homeassistant/components/plugwise/binary_sensor.py
-homeassistant/components/plugwise/climate.py
-homeassistant/components/plugwise/sensor.py
-homeassistant/components/plugwise/switch.py
 homeassistant/components/plum_lightpad/light.py
 homeassistant/components/pocketcasts/sensor.py
 homeassistant/components/point/*

@@ -466,7 +466,7 @@ homeassistant/components/velbus/* @Cereal2nd @brefra
 homeassistant/components/velux/* @Julius2342
 homeassistant/components/vera/* @vangorra
 homeassistant/components/versasense/* @flamm3blemuff1n
-homeassistant/components/version/* @fabaff
+homeassistant/components/version/* @fabaff @ludeeus
 homeassistant/components/vesync/* @markperdue @webdjoe @thegardenmonkey
 homeassistant/components/vicare/* @oischinger
 homeassistant/components/vilfo/* @ManneW

@@ -47,7 +47,7 @@ jobs:
 - template: templates/azp-job-wheels.yaml@azure
 parameters:
 builderVersion: '$(versionWheels)'
-builderApk: 'build-base;cmake;git;linux-headers;bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;autoconf;automake;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev'
+builderApk: 'build-base;cmake;git;linux-headers;libexecinfo-dev;bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;autoconf;automake;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev'
 builderPip: 'Cython;numpy;scikit-build'
 builderEnvFile: true
 skipBinary: 'aiohttp'

@@ -94,7 +94,7 @@ jobs:

 # Write env for build settings
 (
-echo "GRPC_BUILD_WITH_BORING_SSL_ASM=0"
+echo "GRPC_BUILD_WITH_BORING_SSL_ASM="
 echo "GRPC_PYTHON_BUILD_SYSTEM_OPENSSL=1"
 ) > .env_file
 displayName: 'Prepare requirements files for Home Assistant wheels'

build.json
@@ -1,11 +1,11 @@
 {
 "image": "homeassistant/{arch}-homeassistant",
 "build_from": {
-"aarch64": "homeassistant/aarch64-homeassistant-base:8.3.0",
-"armhf": "homeassistant/armhf-homeassistant-base:8.3.0",
-"armv7": "homeassistant/armv7-homeassistant-base:8.3.0",
-"amd64": "homeassistant/amd64-homeassistant-base:8.3.0",
-"i386": "homeassistant/i386-homeassistant-base:8.3.0"
+"aarch64": "homeassistant/aarch64-homeassistant-base:8.4.0",
+"armhf": "homeassistant/armhf-homeassistant-base:8.4.0",
+"armv7": "homeassistant/armv7-homeassistant-base:8.4.0",
+"amd64": "homeassistant/amd64-homeassistant-base:8.4.0",
+"i386": "homeassistant/i386-homeassistant-base:8.4.0"
 },
 "labels": {
 "io.hass.type": "core"

@@ -2,7 +2,7 @@
 "domain": "accuweather",
 "name": "AccuWeather",
 "documentation": "https://www.home-assistant.io/integrations/accuweather/",
-"requirements": ["accuweather==0.0.10"],
+"requirements": ["accuweather==0.0.11"],
 "codeowners": ["@bieniu"],
 "config_flow": true,
 "quality_scale": "platinum"

@@ -4,7 +4,7 @@
 "documentation": "https://www.home-assistant.io/integrations/androidtv",
 "requirements": [
 "adb-shell[async]==0.2.1",
-"androidtv[async]==0.0.49",
+"androidtv[async]==0.0.50",
 "pure-python-adb[async]==0.3.0.dev0"
 ],
 "codeowners": ["@JeffLIrion"]

@@ -380,7 +380,7 @@ def adb_decorator(override_available=False):
 # An unforeseen exception occurred. Close the ADB connection so that
 # it doesn't happen over and over again, then raise the exception.
 await self.aftv.adb_close()
-self._available = False # pylint: disable=protected-access
+self._available = False
 raise

 return _adb_exception_catcher

@@ -29,8 +29,11 @@ PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
 def get_service(hass, config, discovery_info=None):
 """Get the Apprise notification service."""

-# Create our object
-a_obj = apprise.Apprise()
+# Create our Apprise Asset Object
+asset = apprise.AppriseAsset(async_mode=False)
+
+# Create our Apprise Instance (reference our asset)
+a_obj = apprise.Apprise(asset=asset)

 if config.get(CONF_FILE):
 # Sourced from a Configuration File

@@ -3,11 +3,11 @@
 "name": "Axis",
 "config_flow": true,
 "documentation": "https://www.home-assistant.io/integrations/axis",
-"requirements": ["axis==35"],
+"requirements": ["axis==37"],
 "zeroconf": [
-{"type":"_axis-video._tcp.local.","macaddress":"00408C*"},
-{"type":"_axis-video._tcp.local.","macaddress":"ACCC8E*"},
-{"type":"_axis-video._tcp.local.","macaddress":"B8A44F*"}
+{ "type": "_axis-video._tcp.local.", "macaddress": "00408C*" },
+{ "type": "_axis-video._tcp.local.", "macaddress": "ACCC8E*" },
+{ "type": "_axis-video._tcp.local.", "macaddress": "B8A44F*" }
 ],
 "after_dependencies": ["mqtt"],
 "codeowners": ["@Kane610"]

@@ -182,6 +182,7 @@ class BayesianBinarySensor(BinarySensorEntity):
 entity = event.data.get("entity_id")

 self.current_observations.update(self._record_entity_observations(entity))
+self.async_set_context(event.context)
 self._recalculate_and_write_state()

 self.async_on_remove(

@@ -220,6 +221,8 @@ class BayesianBinarySensor(BinarySensorEntity):
 obs_entry = None
 self.current_observations[obs["id"]] = obs_entry

+if event:
+self.async_set_context(event.context)
 self._recalculate_and_write_state()

 for template in self.observations_by_template:

@@ -54,7 +54,9 @@ class BOMWeather(WeatherEntity):
 @property
 def condition(self):
 """Return the current condition."""
-return self.bom_data.get_reading("weather")
+return self.bom_data.get_reading("weather") or self.bom_data.get_reading(
+"cloud"
+)

 # Now implement the WeatherEntity interface

@@ -11,7 +11,7 @@ from broadlink.exceptions import (
 )
 import voluptuous as vol

-from homeassistant import config_entries
+from homeassistant import config_entries, data_entry_flow
 from homeassistant.const import CONF_HOST, CONF_MAC, CONF_NAME, CONF_TIMEOUT, CONF_TYPE
 from homeassistant.helpers import config_validation as cv

@@ -20,6 +20,7 @@ from .const import ( # pylint: disable=unused-import
 DEFAULT_PORT,
 DEFAULT_TIMEOUT,
 DOMAIN,
+DOMAINS_AND_TYPES,
 )
 from .helpers import format_mac

@@ -36,6 +37,19 @@ class BroadlinkFlowHandler(config_entries.ConfigFlow, domain=DOMAIN):

 async def async_set_device(self, device, raise_on_progress=True):
 """Define a device for the config flow."""
+supported_types = {
+device_type
+for _, device_types in DOMAINS_AND_TYPES
+for device_type in device_types
+}
+if device.type not in supported_types:
+LOGGER.error(
+"Unsupported device: %s. If it worked before, please open "
+"an issue at https://github.com/home-assistant/core/issues",
+hex(device.devtype),
+)
+raise data_entry_flow.AbortFlow("not_supported")
+
 await self.async_set_unique_id(
 device.mac.hex(), raise_on_progress=raise_on_progress
 )

@@ -35,6 +35,7 @@
 "already_in_progress": "There is already a configuration flow in progress for this device",
 "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
 "invalid_host": "Invalid hostname or IP address",
+"not_supported": "Device not supported",
 "unknown": "[%key:common::config_flow::error::unknown%]"
 },
 "error": {

@@ -5,6 +5,7 @@
 "already_in_progress": "There is already a configuration flow in progress for this device",
 "cannot_connect": "Failed to connect",
 "invalid_host": "Invalid hostname or IP address",
+"not_supported": "Device not supported",
 "unknown": "Unexpected error"
 },
 "error": {

@@ -1,12 +1,15 @@
 """Support for fetching data from Broadlink devices."""
 from abc import ABC, abstractmethod
 from datetime import timedelta
+from functools import partial
 import logging

 import broadlink as blk
 from broadlink.exceptions import (
 AuthorizationError,
 BroadlinkException,
 CommandNotSupportedError,
+DeviceOfflineError,
 StorageError,
 )

@@ -18,6 +21,9 @@ _LOGGER = logging.getLogger(__name__)

 def get_update_manager(device):
 """Return an update manager for a given Broadlink device."""
+if device.api.model.startswith("RM mini"):
+return BroadlinkRMMini3UpdateManager(device)
+
 update_managers = {
 "A1": BroadlinkA1UpdateManager,
 "MP1": BroadlinkMP1UpdateManager,

@@ -95,6 +101,22 @@ class BroadlinkMP1UpdateManager(BroadlinkUpdateManager):
 return await self.device.async_request(self.device.api.check_power)


+class BroadlinkRMMini3UpdateManager(BroadlinkUpdateManager):
+"""Manages updates for Broadlink RM mini 3 devices."""
+
+async def async_fetch_data(self):
+"""Fetch data from the device."""
+hello = partial(
+blk.discover,
+discover_ip_address=self.device.api.host[0],
+timeout=self.device.api.timeout,
+)
+devices = await self.device.hass.async_add_executor_job(hello)
+if not devices:
+raise DeviceOfflineError("The device is offline")
+return {}
+
+
 class BroadlinkRMUpdateManager(BroadlinkUpdateManager):
 """Manages updates for Broadlink RM2 and RM4 devices."""

@@ -375,9 +375,9 @@ class CastDevice(MediaPlayerEntity):
 if tts_base_url and media_status.content_id.startswith(tts_base_url):
 url_description = f" from tts.base_url ({tts_base_url})"
 if external_url and media_status.content_id.startswith(external_url):
-url_description = " from external_url ({external_url})"
+url_description = f" from external_url ({external_url})"
 if internal_url and media_status.content_id.startswith(internal_url):
-url_description = " from internal_url ({internal_url})"
+url_description = f" from internal_url ({internal_url})"

 _LOGGER.error(
 "Failed to cast media %s%s. Please make sure the URL is: "

@@ -3,6 +3,6 @@
 "name": "CoolMasterNet",
 "config_flow": true,
 "documentation": "https://www.home-assistant.io/integrations/coolmaster",
-"requirements": ["pycoolmasternet-async==0.1.1"],
+"requirements": ["pycoolmasternet-async==0.1.2"],
 "codeowners": ["@OnFreund"]
 }

@@ -3,7 +3,7 @@
 "name": "deCONZ",
 "config_flow": true,
 "documentation": "https://www.home-assistant.io/integrations/deconz",
-"requirements": ["pydeconz==72"],
+"requirements": ["pydeconz==73"],
 "ssdp": [
 {
 "manufacturer": "Royal Philips Electronics"

@@ -27,7 +27,7 @@ async def async_setup_entry(

 for device in hass.data[DOMAIN]["homecontrol"].multi_level_switch_devices:
 for multi_level_switch in device.multi_level_switch_property:
-if device.deviceModelUID in [
+if device.device_model_uid in [
 "devolo.model.Thermostat:Valve",
 "devolo.model.Room:Thermostat",
 ]:

@@ -48,9 +48,9 @@ class DSMRConnection:
 """Test if we can validate connection with the device."""

 def update_telegram(telegram):
-self._telegram = telegram
-
-transport.close()
+if obis_ref.EQUIPMENT_IDENTIFIER in telegram:
+self._telegram = telegram
+transport.close()

 if self._host is None:
 reader_factory = partial(

@@ -1,5 +1,5 @@
 """Support for displaying weather info from Ecobee API."""
-from datetime import datetime
+from datetime import timedelta

 from pyecobee.const import ECOBEE_STATE_UNKNOWN

@@ -13,6 +13,7 @@ from homeassistant.components.weather import (
 WeatherEntity,
 )
 from homeassistant.const import TEMP_FAHRENHEIT
+from homeassistant.util import dt as dt_util

 from .const import (
 _LOGGER,

@@ -165,10 +166,13 @@ class EcobeeWeather(WeatherEntity):
 return None

 forecasts = []
-for day in range(1, 5):
+date = dt_util.utcnow()
+for day in range(0, 5):
 forecast = _process_forecast(self.weather["forecasts"][day])
 if forecast is None:
 continue
+forecast[ATTR_FORECAST_TIME] = date.isoformat()
+date += timedelta(days=1)
 forecasts.append(forecast)

 if forecasts:

@@ -186,9 +190,6 @@ def _process_forecast(json):
 """Process a single ecobee API forecast to return expected values."""
 forecast = {}
 try:
-forecast[ATTR_FORECAST_TIME] = datetime.strptime(
-json["dateTime"], "%Y-%m-%d %H:%M:%S"
-).isoformat()
 forecast[ATTR_FORECAST_CONDITION] = ECOBEE_WEATHER_SYMBOL_TO_HASS[
 json["weatherSymbol"]
 ]

@@ -66,6 +66,7 @@ async def async_setup_entry(hass: HomeAssistantType, entry: ConfigEntry) -> bool
 host = entry.data[CONF_HOST]
 port = entry.data[CONF_PORT]
 password = entry.data[CONF_PASSWORD]
+device_id = None

 zeroconf_instance = await zeroconf.async_get_instance(hass)

@@ -129,6 +130,15 @@ async def async_setup_entry(hass: HomeAssistantType, entry: ConfigEntry) -> bool
 "Can only generate events under esphome domain! (%s)", host
 )
 return

+# Call native tag scan
+if service_name == "tag_scanned":
+tag_id = service_data["tag_id"]
+hass.async_create_task(
+hass.components.tag.async_scan_tag(tag_id, device_id)
+)
+return
+
 hass.bus.async_fire(service.service, service_data)
 else:
 hass.async_create_task(

@@ -166,10 +176,13 @@ async def async_setup_entry(hass: HomeAssistantType, entry: ConfigEntry) -> bool

 async def on_login() -> None:
 """Subscribe to states and list entities on successful API login."""
+nonlocal device_id
 try:
 entry_data.device_info = await cli.device_info()
 entry_data.available = True
-await _async_setup_device_registry(hass, entry, entry_data.device_info)
+device_id = await _async_setup_device_registry(
+hass, entry, entry_data.device_info
+)
 entry_data.async_update_device_state(hass)

 entity_infos, services = await cli.list_entities_services()

@@ -265,7 +278,7 @@ async def _async_setup_device_registry(
 if device_info.compilation_time:
 sw_version += f" ({device_info.compilation_time})"
 device_registry = await dr.async_get_registry(hass)
-device_registry.async_get_or_create(
+entry = device_registry.async_get_or_create(
 config_entry_id=entry.entry_id,
 connections={(dr.CONNECTION_NETWORK_MAC, device_info.mac_address)},
 name=device_info.name,

@@ -273,6 +286,7 @@
 model=device_info.model,
 sw_version=sw_version,
 )
+return entry.id


 async def _register_service(

@@ -6,7 +6,5 @@
 "requirements": ["aioesphomeapi==2.6.3"],
 "zeroconf": ["_esphomelib._tcp.local."],
 "codeowners": ["@OttoWinter"],
-"after_dependencies": [
-"zeroconf"
-]
+"after_dependencies": ["zeroconf", "tag"]
 }

@@ -146,11 +146,12 @@ class FreeboxCallSensor(FreeboxSensor):
 def async_update_state(self) -> None:
 """Update the Freebox call sensor."""
 self._call_list_for_type = []
-for call in self._router.call_list:
-if not call["new"]:
-continue
-if call["type"] == self._sensor_type:
-self._call_list_for_type.append(call)
+if self._router.call_list:
+for call in self._router.call_list:
+if not call["new"]:
+continue
+if call["type"] == self._sensor_type:
+self._call_list_for_type.append(call)

 self._state = len(self._call_list_for_type)

@@ -2,7 +2,7 @@
 "domain": "frontend",
 "name": "Home Assistant Frontend",
 "documentation": "https://www.home-assistant.io/integrations/frontend",
-"requirements": ["home-assistant-frontend==20200909.0"],
+"requirements": ["home-assistant-frontend==20200918.2"],
 "dependencies": [
 "api",
 "auth",

@@ -3,7 +3,7 @@
 "name": "Gogogate2 and iSmartGate",
 "config_flow": true,
 "documentation": "https://www.home-assistant.io/integrations/gogogate2",
-"requirements": ["gogogate2-api==2.0.1"],
+"requirements": ["gogogate2-api==2.0.3"],
 "codeowners": ["@vangorra"],
 "homekit": {
 "models": [

@@ -39,7 +39,7 @@ from homeassistant.const import (
 STATE_OPEN,
 STATE_OPENING,
 )
-from homeassistant.core import State
+from homeassistant.core import CoreState, State
 import homeassistant.helpers.config_validation as cv
 from homeassistant.helpers.event import async_track_state_change_event

@@ -162,6 +162,10 @@ class CoverGroup(GroupEntity, CoverEntity):
 self.hass, self._entities, self._update_supported_features_event
 )
 )

+if self.hass.state == CoreState.running:
+await self.async_update()
+return
 await super().async_added_to_hass()

 @property

@@ -36,7 +36,7 @@ from homeassistant.const import (
 STATE_ON,
 STATE_UNAVAILABLE,
 )
-from homeassistant.core import State
+from homeassistant.core import CoreState, State
 import homeassistant.helpers.config_validation as cv
 from homeassistant.helpers.event import async_track_state_change_event
 from homeassistant.helpers.typing import ConfigType, HomeAssistantType

@@ -111,6 +111,11 @@ class LightGroup(GroupEntity, light.LightEntity):
 self.hass, self._entity_ids, async_state_changed_listener
 )
 )

+if self.hass.state == CoreState.running:
+await self.async_update()
+return
+
 await super().async_added_to_hass()

 @property

@@ -4,7 +4,7 @@
 "config_flow": true,
 "documentation": "https://www.home-assistant.io/integrations/hangouts",
 "requirements": [
-"hangups==0.4.10"
+"hangups==0.4.11"
 ],
 "codeowners": []
 }

@@ -35,15 +35,15 @@ def _convert_states(states):
 """Convert state definitions to State objects."""
 result = {}

-for entity_id in states:
+for entity_id, info in states.items():
 entity_id = cv.entity_id(entity_id)

-if isinstance(states[entity_id], dict):
-entity_attrs = states[entity_id].copy()
+if isinstance(info, dict):
+entity_attrs = info.copy()
 state = entity_attrs.pop(ATTR_STATE, None)
 attributes = entity_attrs
 else:
-state = states[entity_id]
+state = info
 attributes = {}

 # YAML translates 'on' to a boolean

@@ -145,7 +145,7 @@ async def async_attach_trigger(
 else:
 cur_value = new_st.attributes.get(attribute)

-if CONF_TO not in config:
+if CONF_FROM in config and CONF_TO not in config:
 return cur_value != old_value

 return cur_value == new_value

@@ -36,7 +36,7 @@ class TimePattern:
 if isinstance(value, str) and value.startswith("/"):
 number = int(value[1:])
 else:
-number = int(value)
+value = number = int(value)

 if not (0 <= number <= self.maximum):
 raise vol.Invalid(f"must be a value between 0 and {self.maximum}")

@@ -24,6 +24,10 @@ from homeassistant.const import (
 STATE_ON,
 )
 from homeassistant.core import callback
+from homeassistant.util.color import (
+color_temperature_mired_to_kelvin,
+color_temperature_to_hs,
+)

 from .accessories import TYPES, HomeAccessory
 from .const import (

@@ -64,8 +68,6 @@ class Light(HomeAccessory):
 if self._features & SUPPORT_COLOR:
 self.chars.append(CHAR_HUE)
 self.chars.append(CHAR_SATURATION)
-self._hue = None
-self._saturation = None
 elif self._features & SUPPORT_COLOR_TEMP:
 # ColorTemperature and Hue characteristic should not be
 # exposed both. Both states are tracked separately in HomeKit,

@@ -179,7 +181,16 @@ class Light(HomeAccessory):

 # Handle Color
 if CHAR_SATURATION in self.chars and CHAR_HUE in self.chars:
-hue, saturation = new_state.attributes.get(ATTR_HS_COLOR, (None, None))
+if ATTR_HS_COLOR in new_state.attributes:
+hue, saturation = new_state.attributes[ATTR_HS_COLOR]
+elif ATTR_COLOR_TEMP in new_state.attributes:
+hue, saturation = color_temperature_to_hs(
+color_temperature_mired_to_kelvin(
+new_state.attributes[ATTR_COLOR_TEMP]
+)
+)
+else:
+hue, saturation = None, None
 if isinstance(hue, (int, float)) and isinstance(saturation, (int, float)):
 hue = round(hue, 0)
 saturation = round(saturation, 0)

@@ -8,12 +8,19 @@ import voluptuous as vol
 from homeassistant import config_entries
 from homeassistant.components import zeroconf
 from homeassistant.core import callback
+from homeassistant.helpers.device_registry import (
+CONNECTION_NETWORK_MAC,
+async_get_registry as async_get_device_registry,
+)

 from .connection import get_accessory_name, get_bridge_information
 from .const import DOMAIN, KNOWN_DEVICES

 HOMEKIT_IGNORE = ["Home Assistant Bridge"]
 HOMEKIT_DIR = ".homekit"
+HOMEKIT_BRIDGE_DOMAIN = "homekit"
+HOMEKIT_BRIDGE_SERIAL_NUMBER = "homekit.bridge"
+HOMEKIT_BRIDGE_MODEL = "Home Assistant HomeKit Bridge"

 PAIRING_FILE = "pairing.json"

 PIN_FORMAT = re.compile(r"^(\d{3})-{0,1}(\d{2})-{0,1}(\d{3})$")

@@ -141,6 +148,17 @@ class HomekitControllerFlowHandler(config_entries.ConfigFlow):

 return self.async_abort(reason="no_devices")

+async def _hkid_is_homekit_bridge(self, hkid):
+"""Determine if the device is a homekit bridge."""
+dev_reg = await async_get_device_registry(self.hass)
+device = dev_reg.async_get_device(
+identifiers=set(), connections={(CONNECTION_NETWORK_MAC, hkid)}
+)
+
+if device is None:
+return False
+return device.model == HOMEKIT_BRIDGE_MODEL
+
 async def async_step_zeroconf(self, discovery_info):
 """Handle a discovered HomeKit accessory.

@@ -153,6 +171,12 @@ class HomekitControllerFlowHandler(config_entries.ConfigFlow):
 key.lower(): value for (key, value) in discovery_info["properties"].items()
 }

+if "id" not in properties:
+_LOGGER.warning(
+"HomeKit device %s: id not exposed, in violation of spec", properties
+)
+return self.async_abort(reason="invalid_properties")
+
 # The hkid is a unique random number that looks like a pairing code.
 # It changes if a device is factory reset.
 hkid = properties["id"]

@@ -208,7 +232,7 @@ class HomekitControllerFlowHandler(config_entries.ConfigFlow):
 # Devices in HOMEKIT_IGNORE have native local integrations - users
 # should be encouraged to use native integration and not confused
 # by alternative HK API.
-if model in HOMEKIT_IGNORE:
+if await self._hkid_is_homekit_bridge(hkid):
 return self.async_abort(reason="ignored_model")

 self.model = model

@@ -280,9 +304,8 @@ class HomekitControllerFlowHandler(config_entries.ConfigFlow):
 # Its possible that the first try may have been busy so
 # we always check to see if self.finish_paring has been
 # set.
-discovery = await self.controller.find_ip_by_device_id(self.hkid)

 try:
+discovery = await self.controller.find_ip_by_device_id(self.hkid)
 self.finish_pairing = await discovery.start_pairing(self.hkid)
 except aiohomekit.BusyError:

@@ -3,8 +3,16 @@
 "name": "HomeKit Controller",
 "config_flow": true,
 "documentation": "https://www.home-assistant.io/integrations/homekit_controller",
-"requirements": ["aiohomekit[IP]==0.2.49"],
-"zeroconf": ["_hap._tcp.local."],
-"after_dependencies": ["zeroconf"],
-"codeowners": ["@Jc2k"]
+"requirements": [
+"aiohomekit==0.2.53"
+],
+"zeroconf": [
+"_hap._tcp.local."
+],
+"after_dependencies": [
+"zeroconf"
+],
+"codeowners": [
+"@Jc2k"
+]
 }

@@ -44,6 +44,7 @@
 "already_configured": "Accessory is already configured with this controller.",
 "invalid_config_entry": "This device is showing as ready to pair but there is already a conflicting configuration entry for it in Home Assistant that must first be removed.",
 "accessory_not_found_error": "Cannot add pairing as device can no longer be found.",
+"invalid_properties": "Invalid properties announced by device.",
 "already_in_progress": "Config flow for device is already in progress."
 }
 }

@@ -2,7 +2,7 @@
 "domain": "insteon",
 "name": "Insteon",
 "documentation": "https://www.home-assistant.io/integrations/insteon",
-"requirements": ["pyinsteon==1.0.7"],
+"requirements": ["pyinsteon==1.0.8"],
 "codeowners": ["@teharris1"],
 "config_flow": true
 }

@@ -116,6 +116,9 @@ class KodiConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
 }
 )

+# pylint: disable=no-member # https://github.com/PyCQA/pylint/issues/3167
+self.context.update({"title_placeholders": {CONF_NAME: self._name}})
+
 try:
 await validate_http(self.hass, self._get_data())
 await validate_ws(self.hass, self._get_data())

@@ -129,8 +132,6 @@ class KodiConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
 _LOGGER.exception("Unexpected exception")
 return self.async_abort(reason="unknown")

-# pylint: disable=no-member # https://github.com/PyCQA/pylint/issues/3167
-self.context.update({"title_placeholders": {CONF_NAME: self._name}})
 return await self.async_step_discovery_confirm()

 async def async_step_discovery_confirm(self, user_input=None):

@@ -201,6 +202,10 @@ class KodiConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
 if user_input is not None:
 self._ws_port = user_input.get(CONF_WS_PORT)

+# optional ints return 0 rather than None when empty
+if self._ws_port == 0:
+self._ws_port = None
+
 try:
 await validate_ws(self.hass, self._get_data())
 except WSCannotConnect:

@@ -714,7 +714,7 @@ class KodiEntity(MediaPlayerEntity):
 _LOGGER.debug("Run API method %s, kwargs=%s", method, kwargs)
 result_ok = False
 try:
-result = self._kodi.call_method(method, **kwargs)
+result = await self._kodi.call_method(method, **kwargs)
 result_ok = True
 except jsonrpc_base.jsonrpc.ProtocolError as exc:
 result = exc.args[2]["error"]

@@ -94,6 +94,12 @@ class LuciDeviceScanner(DeviceScanner):

 last_results = []
 for device in result:
-last_results.append(device)
+if (
+not hasattr(self.router.router.owrt_version, "release")
+or not self.router.router.owrt_version.release
+or self.router.router.owrt_version.release[0] < 19
+or device.reachable
+):
+last_results.append(device)

 self.last_results = last_results

@@ -15,4 +15,6 @@ MEDIA_CLASS_MAP = {
 "image": MEDIA_CLASS_IMAGE,
 }
 URI_SCHEME = "media-source://"
-URI_SCHEME_REGEX = re.compile(r"^media-source://(?P<domain>[^/]+)?(?P<identifier>.+)?")
+URI_SCHEME_REGEX = re.compile(
+r"^media-source:\/\/(?:(?P<domain>(?!.+__)(?!_)[\da-z_]+(?<!_))(?:\/(?P<identifier>(?!\/).+))?)?$"
+)

@@ -21,26 +21,7 @@ def async_setup(hass: HomeAssistant):
 """Set up local media source."""
 source = LocalSource(hass)
 hass.data[DOMAIN][DOMAIN] = source
-hass.http.register_view(LocalMediaView(hass))
-
-
-@callback
-def async_parse_identifier(item: MediaSourceItem) -> Tuple[str, str]:
-"""Parse identifier."""
-if not item.identifier:
-source_dir_id = "media"
-location = ""
-
-else:
-source_dir_id, location = item.identifier.lstrip("/").split("/", 1)
-
-if source_dir_id != "media":
-raise Unresolvable("Unknown source directory.")
-
-if location != sanitize_path(location):
-raise Unresolvable("Invalid path.")
-
-return source_dir_id, location
+hass.http.register_view(LocalMediaView(hass, source))


 class LocalSource(MediaSource):

@@ -56,22 +37,41 @@ class LocalSource(MediaSource):
 @callback
 def async_full_path(self, source_dir_id, location) -> Path:
 """Return full path."""
-return self.hass.config.path("media", location)
+return Path(self.hass.config.media_dirs[source_dir_id], location)

+@callback
+def async_parse_identifier(self, item: MediaSourceItem) -> Tuple[str, str]:
+"""Parse identifier."""
+if not item.identifier:
+# Empty source_dir_id and location
+return "", ""
+
+source_dir_id, location = item.identifier.split("/", 1)
+if source_dir_id not in self.hass.config.media_dirs:
+raise Unresolvable("Unknown source directory.")
+
+if location != sanitize_path(location):
+raise Unresolvable("Invalid path.")
+
+return source_dir_id, location
+
 async def async_resolve_media(self, item: MediaSourceItem) -> str:
 """Resolve media to a url."""
-source_dir_id, location = async_parse_identifier(item)
+source_dir_id, location = self.async_parse_identifier(item)
+if source_dir_id == "" or source_dir_id not in self.hass.config.media_dirs:
+raise Unresolvable("Unknown source directory.")
+
 mime_type, _ = mimetypes.guess_type(
-self.async_full_path(source_dir_id, location)
+str(self.async_full_path(source_dir_id, location))
 )
-return PlayMedia(item.identifier, mime_type)
+return PlayMedia(f"/media/{item.identifier}", mime_type)

 async def async_browse_media(
 self, item: MediaSourceItem, media_types: Tuple[str] = MEDIA_MIME_TYPES
 ) -> BrowseMediaSource:
 """Return media."""
 try:
-source_dir_id, location = async_parse_identifier(item)
+source_dir_id, location = self.async_parse_identifier(item)
 except Unresolvable as err:
 raise BrowseError(str(err)) from err

@@ -79,9 +79,37 @@ class LocalSource(MediaSource):
 self._browse_media, source_dir_id, location
 )

-def _browse_media(self, source_dir_id, location):
+def _browse_media(self, source_dir_id: str, location: Path):
 """Browse media."""
-full_path = Path(self.hass.config.path("media", location))
+# If only one media dir is configured, use that as the local media root
+if source_dir_id == "" and len(self.hass.config.media_dirs) == 1:
+source_dir_id = list(self.hass.config.media_dirs)[0]
+
+# Multiple folder, root is requested
+if source_dir_id == "":
+if location:
+raise BrowseError("Folder not found.")
+
+base = BrowseMediaSource(
+domain=DOMAIN,
+identifier="",
+media_class=MEDIA_CLASS_DIRECTORY,
+media_content_type=None,
+title=self.name,
+can_play=False,
+can_expand=True,
+children_media_class=MEDIA_CLASS_DIRECTORY,
+)
+
+base.children = [
+self._browse_media(source_dir_id, "")
+for source_dir_id in self.hass.config.media_dirs
+]
+
+return base
+
+full_path = Path(self.hass.config.media_dirs[source_dir_id], location)

 if not full_path.exists():
 if location == "":

@@ -118,7 +146,7 @@ class LocalSource(MediaSource):

 media = BrowseMediaSource(
 domain=DOMAIN,
-identifier=f"{source_dir_id}/{path.relative_to(self.hass.config.path('media'))}",
+identifier=f"{source_dir_id}/{path.relative_to(self.hass.config.media_dirs[source_dir_id])}",
 media_class=media_class,
 media_content_type=mime_type or "",
 title=title,

@@ -149,19 +177,25 @@ class LocalMediaView(HomeAssistantView):

 Returns media files in config/media.
 """

-url = "/media/{location:.*}"
+url = "/media/{source_dir_id}/{location:.*}"
 name = "media"

-def __init__(self, hass: HomeAssistant):
+def __init__(self, hass: HomeAssistant, source: LocalSource):
 """Initialize the media view."""
 self.hass = hass
+self.source = source

-async def get(self, request: web.Request, location: str) -> web.FileResponse:
+async def get(
+self, request: web.Request, source_dir_id: str, location: str
+) -> web.FileResponse:
 """Start a GET request."""
 if location != sanitize_path(location):
-return web.HTTPNotFound()
+raise web.HTTPNotFound()

-media_path = Path(self.hass.config.path("media", location))
+if source_dir_id not in self.hass.config.media_dirs:
+raise web.HTTPNotFound()
+
+media_path = self.source.async_full_path(source_dir_id, location)

 # Check that the file exists
 if not media_path.is_file():

@@ -5,6 +5,8 @@ import voluptuous as vol

 from homeassistant.components.weather import (
 ATTR_FORECAST_CONDITION,
+ATTR_FORECAST_TEMP,
+ATTR_FORECAST_TIME,
 ATTR_WEATHER_HUMIDITY,
 ATTR_WEATHER_PRESSURE,
 ATTR_WEATHER_TEMPERATURE,

@@ -209,13 +211,17 @@ class MetWeather(CoordinatorEntity, WeatherEntity):
 met_forecast = self.coordinator.data.hourly_forecast
 else:
 met_forecast = self.coordinator.data.daily_forecast
+required_keys = {ATTR_FORECAST_TEMP, ATTR_FORECAST_TIME}
 ha_forecast = []
 for met_item in met_forecast:
+if not set(met_item).issuperset(required_keys):
+continue
 ha_item = {
 k: met_item[v] for k, v in FORECAST_MAP.items() if met_item.get(v)
 }
-ha_item[ATTR_FORECAST_CONDITION] = format_condition(
-ha_item[ATTR_FORECAST_CONDITION]
-)
+if ha_item.get(ATTR_FORECAST_CONDITION):
+ha_item[ATTR_FORECAST_CONDITION] = format_condition(
+ha_item[ATTR_FORECAST_CONDITION]
+)
 ha_forecast.append(ha_item)
 return ha_forecast

@@ -158,19 +158,22 @@ class ModbusCoilSwitch(ToggleEntity, RestoreEntity):
 """Update the state of the switch."""
 self._is_on = self._read_coil(self._coil)

-def _read_coil(self, coil) -> Optional[bool]:
+def _read_coil(self, coil) -> bool:
 """Read coil using the Modbus hub slave."""
 try:
 result = self._hub.read_coils(self._slave, coil, 1)
 except ConnectionException:
 self._available = False
-return
+return False

 if isinstance(result, (ModbusException, ExceptionResponse)):
 self._available = False
-return
+return False

 self._available = True
+# bits[0] select the lowest bit in result,
+# is_on for a binary_sensor is true if the bit are 1
+# The other bits are not considered.
 return bool(result.bits[0])

 def _write_coil(self, coil, value):

@@ -138,12 +138,12 @@ class MpdDevice(MediaPlayerEntity):
 if position is None:
 position = self._status.get("time")

-if position is not None and ":" in position:
+if isinstance(position, str) and ":" in position:
 position = position.split(":")[0]

 if position is not None and self._media_position != position:
 self._media_position_updated_at = dt_util.utcnow()
-self._media_position = int(position)
+self._media_position = int(float(position))

 self._update_playlists()

@@ -159,8 +159,9 @@ class MpdDevice(MediaPlayerEntity):
 self._connect()

 self._fetch_status()
-except (mpd.ConnectionError, OSError, BrokenPipeError, ValueError):
+except (mpd.ConnectionError, OSError, BrokenPipeError, ValueError) as error:
 # Cleanly disconnect in case connection is not in valid state
+_LOGGER.debug("Error updating status: %s", error)
 self._disconnect()

 @property

@@ -60,6 +60,7 @@ from .const import (
 CONF_RETAIN,
 CONF_STATE_TOPIC,
 CONF_WILL_MESSAGE,
+DATA_MQTT_CONFIG,
 DEFAULT_BIRTH,
 DEFAULT_DISCOVERY,
 DEFAULT_PAYLOAD_AVAILABLE,

@@ -88,7 +89,6 @@ _LOGGER = logging.getLogger(__name__)
 DOMAIN = "mqtt"

 DATA_MQTT = "mqtt"
-DATA_MQTT_CONFIG = "mqtt_config"

 SERVICE_PUBLISH = "publish"
 SERVICE_DUMP = "dump"

@@ -134,7 +134,7 @@ CONNECTION_FAILED = "connection_failed"
 CONNECTION_FAILED_RECOVERABLE = "connection_failed_recoverable"

 DISCOVERY_COOLDOWN = 2
-TIMEOUT_ACK = 1
+TIMEOUT_ACK = 10

 PLATFORMS = [
 "alarm_control_panel",

@@ -24,6 +24,7 @@ from .const import (
 CONF_BROKER,
 CONF_DISCOVERY,
 CONF_WILL_MESSAGE,
+DATA_MQTT_CONFIG,
 DEFAULT_BIRTH,
 DEFAULT_DISCOVERY,
 DEFAULT_WILL,

@@ -162,6 +163,7 @@ class MQTTOptionsFlowHandler(config_entries.OptionsFlow):
 """Manage the MQTT options."""
 errors = {}
 current_config = self.config_entry.data
+yaml_config = self.hass.data.get(DATA_MQTT_CONFIG, {})
 if user_input is not None:
 can_connect = await self.hass.async_add_executor_job(
 try_connection,

@@ -178,20 +180,22 @@ class MQTTOptionsFlowHandler(config_entries.OptionsFlow):
 errors["base"] = "cannot_connect"

 fields = OrderedDict()
-fields[vol.Required(CONF_BROKER, default=current_config[CONF_BROKER])] = str
-fields[vol.Required(CONF_PORT, default=current_config[CONF_PORT])] = vol.Coerce(
-int
-)
+current_broker = current_config.get(CONF_BROKER, yaml_config.get(CONF_BROKER))
+current_port = current_config.get(CONF_PORT, yaml_config.get(CONF_PORT))
+current_user = current_config.get(CONF_USERNAME, yaml_config.get(CONF_USERNAME))
+current_pass = current_config.get(CONF_PASSWORD, yaml_config.get(CONF_PASSWORD))
+fields[vol.Required(CONF_BROKER, default=current_broker)] = str
+fields[vol.Required(CONF_PORT, default=current_port)] = vol.Coerce(int)
 fields[
 vol.Optional(
 CONF_USERNAME,
-description={"suggested_value": current_config.get(CONF_USERNAME)},
+description={"suggested_value": current_user},
 )
 ] = str
 fields[
 vol.Optional(
 CONF_PASSWORD,
-description={"suggested_value": current_config.get(CONF_PASSWORD)},
+description={"suggested_value": current_pass},
 )
 ] = str

@@ -205,6 +209,7 @@ class MQTTOptionsFlowHandler(config_entries.OptionsFlow):
 """Manage the MQTT options."""
 errors = {}
 current_config = self.config_entry.data
+yaml_config = self.hass.data.get(DATA_MQTT_CONFIG, {})
 options_config = {}
 if user_input is not None:
 bad_birth = False

@@ -253,16 +258,24 @@ class MQTTOptionsFlowHandler(config_entries.OptionsFlow):
 )
 return self.async_create_entry(title="", data=None)

-birth = {**DEFAULT_BIRTH, **current_config.get(CONF_BIRTH_MESSAGE, {})}
-will = {**DEFAULT_WILL, **current_config.get(CONF_WILL_MESSAGE, {})}
+birth = {
+**DEFAULT_BIRTH,
+**current_config.get(
+CONF_BIRTH_MESSAGE, yaml_config.get(CONF_BIRTH_MESSAGE, {})
+),
+}
+will = {
+**DEFAULT_WILL,
+**current_config.get(
+CONF_WILL_MESSAGE, yaml_config.get(CONF_WILL_MESSAGE, {})
+),
+}
+discovery = current_config.get(
+CONF_DISCOVERY, yaml_config.get(CONF_DISCOVERY, DEFAULT_DISCOVERY)
+)

 fields = OrderedDict()
-fields[
-vol.Optional(
-CONF_DISCOVERY,
-default=current_config.get(CONF_DISCOVERY, DEFAULT_DISCOVERY),
-)
-] = bool
+fields[vol.Optional(CONF_DISCOVERY, default=discovery)] = bool

 # Birth message is disabled if CONF_BIRTH_MESSAGE = {}
 fields[

@@ -17,6 +17,8 @@ CONF_RETAIN = ATTR_RETAIN
 CONF_STATE_TOPIC = "state_topic"
 CONF_WILL_MESSAGE = "will_message"

+DATA_MQTT_CONFIG = "mqtt_config"
+
 DEFAULT_PREFIX = "homeassistant"
 DEFAULT_BIRTH_WILL_TOPIC = DEFAULT_PREFIX + "/status"
 DEFAULT_DISCOVERY = False

@@ -284,9 +284,9 @@ class NetatmoCamera(NetatmoBase, Camera):
 self._data.events.get(self._id, {})
 )
 elif self._model == "NOC": # Smart Outdoor Camera
-self.hass.data[DOMAIN][DATA_EVENTS][
-self._id
-] = self._data.outdoor_events.get(self._id, {})
+self.hass.data[DOMAIN][DATA_EVENTS][self._id] = self.process_events(
+self._data.outdoor_events.get(self._id, {})
+)

 def process_events(self, events):
 """Add meta data to events."""

@@ -80,8 +80,20 @@ class NetatmoSource(MediaSource):
 ) -> BrowseMediaSource:
 if event_id and event_id in self.events[camera_id]:
 created = dt.datetime.fromtimestamp(event_id)
-thumbnail = self.events[camera_id][event_id].get("snapshot", {}).get("url")
-message = remove_html_tags(self.events[camera_id][event_id]["message"])
+if self.events[camera_id][event_id]["type"] == "outdoor":
+thumbnail = (
+self.events[camera_id][event_id]["event_list"][0]
+.get("snapshot", {})
+.get("url")
+)
+message = remove_html_tags(
+self.events[camera_id][event_id]["event_list"][0]["message"]
+)
+else:
+thumbnail = (
+self.events[camera_id][event_id].get("snapshot", {}).get("url")
+)
+message = remove_html_tags(self.events[camera_id][event_id]["message"])
 title = f"{created} - {message}"
 else:
 title = self.hass.data[DOMAIN][DATA_CAMERAS].get(camera_id, MANUFACTURER)

@@ -111,6 +111,7 @@ def setup(hass, config):
 return False

 hass.data[DOMAIN] = get_data_points(ncm.data)
+hass.data[DOMAIN]["instance"] = conf[CONF_URL]

 # Update sensors on time interval
 track_time_interval(hass, nextcloud_update, conf[CONF_SCAN_INTERVAL])

@@ -38,8 +38,8 @@ def validate_input(hass: HomeAssistantType, data: dict) -> Dict[str, Any]:
 """
 nzbget_api = NZBGetAPI(
 data[CONF_HOST],
-data[CONF_USERNAME] if data[CONF_USERNAME] != "" else None,
-data[CONF_PASSWORD] if data[CONF_PASSWORD] != "" else None,
+data.get(CONF_USERNAME),
+data.get(CONF_PASSWORD),
 data[CONF_SSL],
 data[CONF_VERIFY_SSL],
 data[CONF_PORT],

@@ -29,8 +29,8 @@ class NZBGetDataUpdateCoordinator(DataUpdateCoordinator):
 """Initialize global NZBGet data updater."""
 self.nzbget = NZBGetAPI(
 config[CONF_HOST],
-config[CONF_USERNAME] if config[CONF_USERNAME] != "" else None,
-config[CONF_PASSWORD] if config[CONF_PASSWORD] != "" else None,
+config.get(CONF_USERNAME),
+config.get(CONF_PASSWORD),
 config[CONF_SSL],
 config[CONF_VERIFY_SSL],
 config[CONF_PORT],

@@ -91,7 +91,7 @@ class OpenWeatherMapConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
 if CONF_LONGITUDE not in config:
 config[CONF_LONGITUDE] = self.hass.config.longitude
 if CONF_MODE not in config:
-config[CONF_MODE] = DEFAULT_LANGUAGE
+config[CONF_MODE] = DEFAULT_FORECAST_MODE
 if CONF_LANGUAGE not in config:
 config[CONF_LANGUAGE] = DEFAULT_LANGUAGE
 return await self.async_step_user(config)

@@ -210,7 +210,8 @@ class PingDataSubProcess(PingData):
 out_error,
 )

-if pinger.returncode != 0:
+if pinger.returncode > 1:
+# returncode of 1 means the host is unreachable
 _LOGGER.exception(
 "Error running command: `%s`, return code: %s",
 " ".join(self._ping_cmd),

@@ -205,6 +205,7 @@ def special_library_payload(parent_payload, special_type):
 media_content_type=parent_payload.media_content_type,
 can_play=False,
 can_expand=True,
+children_media_class=parent_payload.children_media_class,
 )

@@ -94,6 +94,10 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:

 api.get_all_devices()

+if entry.unique_id is None:
+if api.smile_version[0] != "1.8.0":
+hass.config_entries.async_update_entry(entry, unique_id=api.smile_hostname)
+
 undo_listener = entry.add_update_listener(_update_listener)

 hass.data.setdefault(DOMAIN, {})[entry.entry_id] = {

@@ -96,6 +96,10 @@ class PlugwiseConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
 if self.discovery_info:
 user_input[CONF_HOST] = self.discovery_info[CONF_HOST]

+for entry in self._async_current_entries():
+if entry.data.get(CONF_HOST) == user_input[CONF_HOST]:
+return self.async_abort(reason="already_configured")
+
 try:
 api = await validate_input(self.hass, user_input)

@@ -106,9 +110,10 @@ class PlugwiseConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
 except Exception: # pylint: disable=broad-except
 _LOGGER.exception("Unexpected exception")
 errors["base"] = "unknown"

 if not errors:
-await self.async_set_unique_id(api.gateway_id)
+await self.async_set_unique_id(
+api.smile_hostname or api.gateway_id, raise_on_progress=False
+)
 self._abort_if_unique_id_configured()

 return self.async_create_entry(title=api.smile_name, data=user_input)

@@ -77,6 +77,8 @@ def _precheck_image(image, opts):
 if imgfmt not in ("PNG", "JPEG"):
 _LOGGER.warning("Image is of unsupported type: %s", imgfmt)
 raise ValueError()
+if not img.mode == "RGB":
+img = img.convert("RGB")
 return img

@@ -20,4 +20,5 @@ class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
 if entry and import_config.items() != entry.data.items():
 self.hass.config_entries.async_update_entry(entry, data=import_config)
 return self.async_abort(reason="already_configured")
+self._abort_if_unique_id_configured()
 return self.async_create_entry(title="RFXTRX", data=import_config)

@@ -4,7 +4,7 @@
 "config_flow": true,
 "documentation": "https://www.home-assistant.io/integrations/risco",
 "requirements": [
-"pyrisco==0.3.0"
+"pyrisco==0.3.1"
 ],
 "codeowners": [
 "@OnFreund"

@@ -86,7 +86,7 @@ class ShellyDeviceWrapper(update_coordinator.DataUpdateCoordinator):
 try:
 async with async_timeout.timeout(5):
 return await self.device.update()
-except aiocoap_error.Error as err:
+except (aiocoap_error.Error, OSError) as err:
 raise update_coordinator.UpdateFailed("Error fetching data") from err

 @property

@@ -42,11 +42,11 @@ async def async_setup_entry_attribute_entities(
 if not blocks:
 return

-counts = Counter([item[0].type for item in blocks])
+counts = Counter([item[1] for item in blocks])

 async_add_entities(
 [
-sensor_class(wrapper, block, sensor_id, description, counts[block.type])
+sensor_class(wrapper, block, sensor_id, description, counts[sensor_id])
 for block, sensor_id, description in blocks
 ]
 )

@@ -3,7 +3,7 @@
 "name": "Shelly",
 "config_flow": true,
 "documentation": "https://www.home-assistant.io/integrations/shelly",
-"requirements": ["aioshelly==0.3.1"],
+"requirements": ["aioshelly==0.3.2"],
 "zeroconf": [{ "type": "_http._tcp.local.", "name": "shelly*" }],
 "codeowners": ["@balloob", "@bieniu"]
 }

@@ -14,7 +14,10 @@ async def async_setup_entry(hass, config_entry, async_add_entities):
 wrapper = hass.data[DOMAIN][config_entry.entry_id]

 # In roller mode the relay blocks exist but do not contain required info
-if wrapper.model == "SHSW-25" and wrapper.device.settings["mode"] != "relay":
+if (
+wrapper.model in ["SHSW-21", "SHSW-25"]
+and wrapper.device.settings["mode"] != "relay"
+):
 return

 relay_blocks = [block for block in wrapper.device.blocks if block.type == "relay"]

@@ -202,22 +202,23 @@ class SlackNotificationService(BaseNotificationService):
 self, targets, message, title, blocks, username, icon
 ):
 """Send a text-only message."""
-if self._icon.lower().startswith(("http://", "https://")):
-icon_type = "url"
-else:
-icon_type = "emoji"
+message_dict = {
+"blocks": blocks,
+"link_names": True,
+"text": message,
+"username": username,
+}
+
+if self._icon:
+if self._icon.lower().startswith(("http://", "https://")):
+icon_type = "url"
+else:
+icon_type = "emoji"
+
+message_dict[f"icon_{icon_type}"] = icon

 tasks = {
-target: self._client.chat_postMessage(
-**{
-"blocks": blocks,
-"channel": target,
-"link_names": True,
-"text": message,
-"username": username,
-f"icon_{icon_type}": icon,
-}
-)
+target: self._client.chat_postMessage(**message_dict, channel=target)
 for target in targets
 }

@@ -267,7 +267,8 @@ class SolarEdgeStorageLevelSensor(SolarEdgeSensor):
 """Get the latest inventory data and update state and attributes."""
 self.data_service.update()
 attr = self.data_service.attributes.get(self._json_key)
-self._state = attr["soc"]
+if attr and "soc" in attr:
+self._state = attr["soc"]


 class SolarEdgeDataService:

@@ -1547,6 +1547,13 @@ def library_payload(media_library):

 Used by async_browse_media.
 """
+if not media_library.browse_by_idstring(
+"tracks",
+"",
+max_items=1,
+):
+raise BrowseError("Local library not found")
+
 children = []
 for item in media_library.browse():
 try:

@@ -143,9 +143,12 @@ class SpeedTestDataCoordinator(DataUpdateCoordinator):

 self.servers[DEFAULT_SERVER] = {}
 for server in sorted(
-server_list.values(), key=lambda server: server[0]["country"]
+server_list.values(),
+key=lambda server: server[0]["country"] + server[0]["sponsor"],
 ):
-self.servers[f"{server[0]['country']} - {server[0]['sponsor']}"] = server[0]
+self.servers[
+f"{server[0]['country']} - {server[0]['sponsor']} - {server[0]['name']}"
+] = server[0]

 def update_data(self):
 """Get the latest data from speedtest.net."""

@@ -122,6 +122,7 @@ class SQLSensor(Entity):
|
||||
def update(self):
|
||||
"""Retrieve sensor data from the query."""
|
||||
|
||||
data = None
|
||||
try:
|
||||
sess = self.sessionmaker()
|
||||
result = sess.execute(self._query)
|
||||
@@ -147,7 +148,7 @@ class SQLSensor(Entity):
|
||||
finally:
|
||||
sess.close()
|
||||
|
||||
if self._template is not None:
|
||||
if data is not None and self._template is not None:
|
||||
self._state = self._template.async_render_with_possible_json_value(
|
||||
data, None
|
||||
)
|
||||
|
||||
@@ -148,7 +148,8 @@ class HlsStreamOutput(StreamOutput):
|
||||
def container_options(self) -> Callable[[int], dict]:
|
||||
"""Return Callable which takes a sequence number and returns container options."""
|
||||
return lambda sequence: {
|
||||
"movflags": "frag_custom+empty_moov+default_base_moof+skip_sidx+frag_discont",
|
||||
# Removed skip_sidx - see https://github.com/home-assistant/core/pull/39970
|
||||
"movflags": "frag_custom+empty_moov+default_base_moof+frag_discont",
|
||||
"avoid_negative_ts": "make_non_negative",
|
||||
"fragment_index": str(sequence),
|
||||
}
|
||||
|
||||
@@ -77,6 +77,9 @@ def _stream_worker_internal(hass, stream, quit_event):
# compatible with empty_moov and manual bitstream filters not in PyAV
if container.format.name in {"hls", "mpegts"}:
audio_stream = None
# Some audio streams do not have a profile and throw errors when remuxing
if audio_stream and audio_stream.profile is None:
audio_stream = None

# The presentation timestamps of the first packet in each stream we receive
# Use to adjust before muxing or outputting, but we don't adjust internally
@@ -113,7 +116,11 @@ def _stream_worker_internal(hass, stream, quit_event):
# Get to first video keyframe
while first_packet[video_stream] is None:
packet = next(container.demux())
if packet.stream == video_stream and packet.is_keyframe:
if (
packet.stream == video_stream
and packet.is_keyframe
and packet.dts is not None
):
first_packet[video_stream] = packet
initial_packets.append(packet)
# Get first_pts from subsequent frame to first keyframe
@@ -121,6 +128,8 @@ def _stream_worker_internal(hass, stream, quit_event):
[pts is None for pts in {**first_packet, **first_pts}.values()]
) and (len(initial_packets) < PACKETS_TO_WAIT_FOR_AUDIO):
packet = next(container.demux((video_stream, audio_stream)))
if packet.dts is None:
continue # Discard packets with no dts
if (
first_packet[packet.stream] is None
): # actually video already found above so only for audio
@@ -70,7 +70,7 @@ class TagStorageCollection(collection.StorageCollection):
data[TAG_ID] = str(uuid.uuid4())
# make last_scanned JSON serializeable
if LAST_SCANNED in data:
data[LAST_SCANNED] = str(data[LAST_SCANNED])
data[LAST_SCANNED] = data[LAST_SCANNED].isoformat()
return data

@callback
@@ -82,8 +82,8 @@ class TagStorageCollection(collection.StorageCollection):
"""Return a new updated data object."""
data = {**data, **self.UPDATE_SCHEMA(update_data)}
# make last_scanned JSON serializeable
if LAST_SCANNED in data:
data[LAST_SCANNED] = str(data[LAST_SCANNED])
if LAST_SCANNED in update_data:
data[LAST_SCANNED] = data[LAST_SCANNED].isoformat()
return data


@@ -100,6 +100,7 @@ async def async_setup(hass: HomeAssistant, config: dict):
collection.StorageCollectionWebsocket(
storage_collection, DOMAIN, DOMAIN, CREATE_FIELDS, UPDATE_FIELDS
).async_setup(hass)

return True
@@ -145,8 +145,9 @@ class UniversalMediaPlayer(MediaPlayerEntity):
"""Subscribe to children and template state changes."""

@callback
def _async_on_dependency_update(*_):
def _async_on_dependency_update(event):
"""Update ha state when dependencies update."""
self.async_set_context(event.context)
self.async_schedule_update_ha_state(True)

@callback
@@ -158,6 +159,10 @@ class UniversalMediaPlayer(MediaPlayerEntity):
self._state_template_result = None
else:
self._state_template_result = result

if event:
self.async_set_context(event.context)

self.async_schedule_update_ha_state(True)

if self._state_template is not None:
@@ -67,7 +67,7 @@ class Device:
"""Create UPnP/IGD device."""
# build async_upnp_client requester
session = async_get_clientsession(hass)
requester = AiohttpSessionRequester(session, True)
requester = AiohttpSessionRequester(session, True, 10)

# create async_upnp_client device
factory = UpnpFactory(requester, disable_state_variable_validation=True)
@@ -2,6 +2,6 @@
"domain": "velux",
"name": "Velux",
"documentation": "https://www.home-assistant.io/integrations/velux",
"requirements": ["pyvlx==0.2.16"],
"requirements": ["pyvlx==0.2.17"],
"codeowners": ["@Julius2342"]
}

@@ -3,6 +3,6 @@
"name": "Vera",
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/vera",
"requirements": ["pyvera==0.3.9"],
"requirements": ["pyvera==0.3.10"],
"codeowners": ["@vangorra"]
}

@@ -2,7 +2,7 @@
"domain": "version",
"name": "Version",
"documentation": "https://www.home-assistant.io/integrations/version",
"requirements": ["pyhaversion==3.3.0"],
"codeowners": ["@fabaff"],
"requirements": ["pyhaversion==3.4.2"],
"codeowners": ["@fabaff", "@ludeeus"],
"quality_scale": "internal"
}
@@ -35,6 +35,7 @@ ALL_IMAGES = [
"raspberrypi4-64",
"tinker",
"odroid-c2",
"odroid-n2",
"odroid-xu",
]
ALL_SOURCES = ["local", "pypi", "hassio", "docker", "haio"]
@@ -94,7 +94,7 @@ async def async_unload_entry(
and entry.data[CONF_DEVICE_CLASS] == DEVICE_CLASS_TV
for entry in hass.config_entries.async_entries(DOMAIN)
):
hass.data[DOMAIN].pop(CONF_APPS)
hass.data[DOMAIN].pop(CONF_APPS, None)

if not hass.data[DOMAIN]:
hass.data.pop(DOMAIN)
@@ -305,7 +305,9 @@ class LgWebOSMediaPlayerEntity(MediaPlayerEntity):
"""Flag media player features that are supported."""
supported = SUPPORT_WEBOSTV

if self._client.sound_output == "external_arc":
if (self._client.sound_output == "external_arc") or (
self._client.sound_output == "external_speaker"
):
supported = supported | SUPPORT_WEBOSTV_VOLUME
elif self._client.sound_output != "lineout":
supported = supported | SUPPORT_WEBOSTV_VOLUME | SUPPORT_VOLUME_SET
@@ -60,7 +60,7 @@ class DiscoverYandexTransport(Entity):
self._name = name
self._attrs = None

async def async_update(self):
async def async_update(self, *, tries=0):
"""Get the latest data from maps.yandex.ru and update the states."""
attrs = {}
closer_time = None
@@ -73,8 +73,12 @@ class DiscoverYandexTransport(Entity):
key_error,
yandex_reply,
)
if tries > 0:
return
await self.requester.set_new_session()
data = (await self.requester.get_stop_info(self._stop_id))["data"]
await self.async_update(tries=tries + 1)
return

stop_name = data["name"]
transport_list = data["transports"]
for transport in transport_list:
@@ -4,12 +4,12 @@
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/zha",
"requirements": [
"bellows==0.20.1",
"bellows==0.20.2",
"pyserial==3.4",
"zha-quirks==0.0.44",
"zigpy-cc==0.5.2",
"zigpy-deconz==0.9.2",
"zigpy==0.23.2",
"zigpy-deconz==0.10.0",
"zigpy==0.24.1",
"zigpy-xbee==0.13.0",
"zigpy-zigate==0.6.2",
"zigpy-znp==0.1.1"
@@ -33,6 +33,7 @@ from homeassistant.const import (
CONF_INTERNAL_URL,
CONF_LATITUDE,
CONF_LONGITUDE,
CONF_MEDIA_DIRS,
CONF_NAME,
CONF_PACKAGES,
CONF_TEMPERATURE_UNIT,
@@ -221,6 +222,8 @@ CORE_CONFIG_SCHEMA = CUSTOMIZE_CONFIG_SCHEMA.extend(
],
_no_duplicate_auth_mfa_module,
),
# pylint: disable=no-value-for-parameter
vol.Optional(CONF_MEDIA_DIRS): cv.schema_with_slug_keys(vol.IsDir()),
}
)

@@ -485,6 +488,7 @@ async def async_process_ha_core_config(hass: HomeAssistant, config: Dict) -> Non
CONF_UNIT_SYSTEM,
CONF_EXTERNAL_URL,
CONF_INTERNAL_URL,
CONF_MEDIA_DIRS,
]
):
hac.config_source = SOURCE_YAML
@@ -496,6 +500,7 @@ async def async_process_ha_core_config(hass: HomeAssistant, config: Dict) -> Non
(CONF_ELEVATION, "elevation"),
(CONF_INTERNAL_URL, "internal_url"),
(CONF_EXTERNAL_URL, "external_url"),
(CONF_MEDIA_DIRS, "media_dirs"),
):
if key in config:
setattr(hac, attr, config[key])
@@ -503,8 +508,14 @@ async def async_process_ha_core_config(hass: HomeAssistant, config: Dict) -> Non
if CONF_TIME_ZONE in config:
hac.set_time_zone(config[CONF_TIME_ZONE])

if CONF_MEDIA_DIRS not in config:
if is_docker_env():
hac.media_dirs = {"local": "/media"}
else:
hac.media_dirs = {"local": hass.config.path("media")}

# Init whitelist external dir
hac.allowlist_external_dirs = {hass.config.path("www"), hass.config.path("media")}
hac.allowlist_external_dirs = {hass.config.path("www"), *hac.media_dirs.values()}
if CONF_ALLOWLIST_EXTERNAL_DIRS in config:
hac.allowlist_external_dirs.update(set(config[CONF_ALLOWLIST_EXTERNAL_DIRS]))
@@ -1,7 +1,7 @@
"""Constants used by Home Assistant components."""
MAJOR_VERSION = 0
MINOR_VERSION = 115
PATCH_VERSION = "0b7"
PATCH_VERSION = "3"
__short_version__ = f"{MAJOR_VERSION}.{MINOR_VERSION}"
__version__ = f"{__short_version__}.{PATCH_VERSION}"
REQUIRED_PYTHON_VER = (3, 7, 1)
@@ -116,6 +116,7 @@ CONF_LIGHTS = "lights"
CONF_LONGITUDE = "longitude"
CONF_MAC = "mac"
CONF_MAXIMUM = "maximum"
CONF_MEDIA_DIRS = "media_dirs"
CONF_METHOD = "method"
CONF_MINIMUM = "minimum"
CONF_MODE = "mode"
@@ -1390,6 +1390,9 @@ class Config:
# List of allowed external URLs that integrations may use
self.allowlist_external_urls: Set[str] = set()

# Dictionary of Media folders that integrations may use
self.media_dirs: Dict[str, str] = {}

# If Home Assistant is running in safe mode
self.safe_mode: bool = False
@@ -649,13 +649,16 @@ async def async_validate_condition_config(


@callback
def async_extract_entities(config: ConfigType) -> Set[str]:
def async_extract_entities(config: Union[ConfigType, Template]) -> Set[str]:
"""Extract entities from a condition."""
referenced: Set[str] = set()
to_process = deque([config])

while to_process:
config = to_process.popleft()
if isinstance(config, Template):
continue

condition = config[CONF_CONDITION]

if condition in ("and", "not", "or"):
@@ -674,13 +677,16 @@ def async_extract_entities(config: ConfigType) -> Set[str]:


@callback
def async_extract_devices(config: ConfigType) -> Set[str]:
def async_extract_devices(config: Union[ConfigType, Template]) -> Set[str]:
"""Extract devices from a condition."""
referenced = set()
to_process = deque([config])

while to_process:
config = to_process.popleft()
if isinstance(config, Template):
continue

condition = config[CONF_CONDITION]

if condition in ("and", "not", "or"):
@@ -525,6 +525,11 @@ class _TrackTemplateResultInfo:

self._last_info = self._info.copy()
self._create_listeners()
_LOGGER.debug(
"Template group %s listens for %s",
self._track_templates,
self.listeners,
)

@property
def listeners(self) -> Dict:
@@ -683,6 +688,10 @@ class _TrackTemplateResultInfo:
):
continue

_LOGGER.debug(
"Template update %s triggered by event: %s", template.template, event
)

self._info[template] = template.async_render_to_info(
track_template_.variables
)
@@ -708,6 +717,11 @@ class _TrackTemplateResultInfo:

if info_changed:
self._update_listeners()
_LOGGER.debug(
"Template group %s listens for %s",
self._track_templates,
self.listeners,
)
self._last_info = self._info.copy()

if not updates:
@@ -13,7 +13,7 @@ defusedxml==0.6.0
distro==1.5.0
emoji==0.5.4
hass-nabucasa==0.37.0
home-assistant-frontend==20200909.0
home-assistant-frontend==20200918.2
importlib-metadata==1.6.0;python_version<'3.8'
jinja2>=2.11.2
netdisco==2.8.2
@@ -102,7 +102,7 @@ YesssSMS==0.4.1
abodepy==1.1.0

# homeassistant.components.accuweather
accuweather==0.0.10
accuweather==0.0.11

# homeassistant.components.mcp23017
adafruit-blinka==3.9.0
@@ -178,7 +178,7 @@ aioguardian==1.0.1
aioharmony==0.2.6

# homeassistant.components.homekit_controller
aiohomekit[IP]==0.2.49
aiohomekit==0.2.53

# homeassistant.components.emulated_hue
# homeassistant.components.http
@@ -221,7 +221,7 @@ aiopvpc==2.0.2
aiopylgtv==0.3.3

# homeassistant.components.shelly
aioshelly==0.3.1
aioshelly==0.3.2

# homeassistant.components.switcher_kis
aioswitcher==1.2.1
@@ -248,7 +248,7 @@ ambiclimate==0.2.1
amcrest==1.7.0

# homeassistant.components.androidtv
androidtv[async]==0.0.49
androidtv[async]==0.0.50

# homeassistant.components.anel_pwrctrl
anel_pwrctrl-homeassistant==0.0.1.dev2
@@ -309,7 +309,7 @@ av==8.0.2
avri-api==0.1.7

# homeassistant.components.axis
axis==35
axis==37

# homeassistant.components.azure_event_hub
azure-eventhub==5.1.0
@@ -339,7 +339,7 @@ beautifulsoup4==4.9.1
# beewi_smartclim==0.0.7

# homeassistant.components.zha
bellows==0.20.1
bellows==0.20.2

# homeassistant.components.bmw_connected_drive
bimmer_connected==0.7.7
@@ -669,7 +669,7 @@ glances_api==0.2.0
gntp==1.0.3

# homeassistant.components.gogogate2
gogogate2-api==2.0.1
gogogate2-api==2.0.3

# homeassistant.components.google
google-api-python-client==1.6.4
@@ -717,7 +717,7 @@ ha-philipsjs==0.0.8
habitipy==0.2.0

# homeassistant.components.hangouts
hangups==0.4.10
hangups==0.4.11

# homeassistant.components.cloud
hass-nabucasa==0.37.0
@@ -747,7 +747,7 @@ hole==0.5.1
holidays==0.10.3

# homeassistant.components.frontend
home-assistant-frontend==20200909.0
home-assistant-frontend==20200918.2

# homeassistant.components.zwave
homeassistant-pyozw==0.1.10
@@ -1277,7 +1277,7 @@ pycocotools==2.0.1
pycomfoconnect==0.3

# homeassistant.components.coolmaster
pycoolmasternet-async==0.1.1
pycoolmasternet-async==0.1.2

# homeassistant.components.avri
pycountry==19.8.18
@@ -1295,7 +1295,7 @@ pydaikin==2.3.1
pydanfossair==0.1.0

# homeassistant.components.deconz
pydeconz==72
pydeconz==73

# homeassistant.components.delijn
pydelijn==0.6.1
@@ -1377,7 +1377,7 @@ pygtfs==0.1.5
pygti==0.6.0

# homeassistant.components.version
pyhaversion==3.3.0
pyhaversion==3.4.2

# homeassistant.components.heos
pyheos==0.6.0
@@ -1401,7 +1401,7 @@ pyialarm==0.3
pyicloud==0.9.7

# homeassistant.components.insteon
pyinsteon==1.0.7
pyinsteon==1.0.8

# homeassistant.components.intesishome
pyintesishome==1.7.5
@@ -1595,7 +1595,7 @@ pyrecswitch==1.0.2
pyrepetier==3.0.5

# homeassistant.components.risco
pyrisco==0.3.0
pyrisco==0.3.1

# homeassistant.components.sabnzbd
pysabnzbd==1.1.0
@@ -1825,7 +1825,7 @@ pyuptimerobot==0.0.5
# pyuserinput==0.1.11

# homeassistant.components.vera
pyvera==0.3.9
pyvera==0.3.10

# homeassistant.components.versasense
pyversasense==0.0.6
@@ -1837,7 +1837,7 @@ pyvesync==1.1.0
pyvizio==0.1.56

# homeassistant.components.velux
pyvlx==0.2.16
pyvlx==0.2.17

# homeassistant.components.volumio
pyvolumio==0.1.2
@@ -2302,7 +2302,7 @@ ziggo-mediabox-xl==1.1.0
zigpy-cc==0.5.2

# homeassistant.components.zha
zigpy-deconz==0.9.2
zigpy-deconz==0.10.0

# homeassistant.components.zha
zigpy-xbee==0.13.0
@@ -2314,7 +2314,7 @@ zigpy-zigate==0.6.2
zigpy-znp==0.1.1

# homeassistant.components.zha
zigpy==0.23.2
zigpy==0.24.1

# homeassistant.components.zoneminder
zm-py==0.4.0
@@ -7,6 +7,7 @@
asynctest==0.13.0
codecov==2.1.0
coverage==5.2.1
jsonpickle==1.4.1
mock-open==1.4.0
mypy==0.780
pre-commit==2.7.1
@@ -45,7 +45,7 @@ YesssSMS==0.4.1
abodepy==1.1.0

# homeassistant.components.accuweather
accuweather==0.0.10
accuweather==0.0.11

# homeassistant.components.androidtv
adb-shell[async]==0.2.1
@@ -103,7 +103,7 @@ aioguardian==1.0.1
aioharmony==0.2.6

# homeassistant.components.homekit_controller
aiohomekit[IP]==0.2.49
aiohomekit==0.2.53

# homeassistant.components.emulated_hue
# homeassistant.components.http
@@ -131,7 +131,7 @@ aiopvpc==2.0.2
aiopylgtv==0.3.3

# homeassistant.components.shelly
aioshelly==0.3.1
aioshelly==0.3.2

# homeassistant.components.switcher_kis
aioswitcher==1.2.1
@@ -149,7 +149,7 @@ airly==0.0.2
ambiclimate==0.2.1

# homeassistant.components.androidtv
androidtv[async]==0.0.49
androidtv[async]==0.0.50

# homeassistant.components.apns
apns2==0.3.0
@@ -174,7 +174,7 @@ av==8.0.2
avri-api==0.1.7

# homeassistant.components.axis
axis==35
axis==37

# homeassistant.components.azure_event_hub
azure-eventhub==5.1.0
@@ -183,7 +183,7 @@ azure-eventhub==5.1.0
base36==0.1.1

# homeassistant.components.zha
bellows==0.20.1
bellows==0.20.2

# homeassistant.components.blebox
blebox_uniapi==1.3.2
@@ -334,7 +334,7 @@ gios==0.1.4
glances_api==0.2.0

# homeassistant.components.gogogate2
gogogate2-api==2.0.1
gogogate2-api==2.0.3

# homeassistant.components.google
google-api-python-client==1.6.4
@@ -349,7 +349,7 @@ griddypower==0.1.0
ha-ffmpeg==2.0

# homeassistant.components.hangouts
hangups==0.4.10
hangups==0.4.11

# homeassistant.components.cloud
hass-nabucasa==0.37.0
@@ -370,7 +370,7 @@ hole==0.5.1
holidays==0.10.3

# homeassistant.components.frontend
home-assistant-frontend==20200909.0
home-assistant-frontend==20200918.2

# homeassistant.components.zwave
homeassistant-pyozw==0.1.10
@@ -616,7 +616,7 @@ pybotvac==0.0.17
pychromecast==7.2.1

# homeassistant.components.coolmaster
pycoolmasternet-async==0.1.1
pycoolmasternet-async==0.1.2

# homeassistant.components.avri
pycountry==19.8.18
@@ -625,7 +625,7 @@ pycountry==19.8.18
pydaikin==2.3.1

# homeassistant.components.deconz
pydeconz==72
pydeconz==73

# homeassistant.components.dexcom
pydexcom==0.2.0
@@ -662,7 +662,7 @@ pygatt[GATTTOOL]==4.0.5
pygti==0.6.0

# homeassistant.components.version
pyhaversion==3.3.0
pyhaversion==3.4.2

# homeassistant.components.heos
pyheos==0.6.0
@@ -674,7 +674,7 @@ pyhomematic==0.1.68
pyicloud==0.9.7

# homeassistant.components.insteon
pyinsteon==1.0.7
pyinsteon==1.0.8

# homeassistant.components.ipma
pyipma==2.0.5
@@ -769,7 +769,7 @@ pyps4-2ndscreen==1.1.1
pyqwikswitch==0.93

# homeassistant.components.risco
pyrisco==0.3.0
pyrisco==0.3.1

# homeassistant.components.acer_projector
# homeassistant.components.zha
@@ -854,7 +854,7 @@ pytraccar==0.9.0
pytradfri[async]==7.0.2

# homeassistant.components.vera
pyvera==0.3.9
pyvera==0.3.10

# homeassistant.components.vesync
pyvesync==1.1.0
@@ -1062,7 +1062,7 @@ zha-quirks==0.0.44
zigpy-cc==0.5.2

# homeassistant.components.zha
zigpy-deconz==0.9.2
zigpy-deconz==0.10.0

# homeassistant.components.zha
zigpy-xbee==0.13.0
@@ -1074,4 +1074,4 @@ zigpy-zigate==0.6.2
zigpy-znp==0.1.1

# homeassistant.components.zha
zigpy==0.23.2
zigpy==0.24.1
@@ -205,6 +205,7 @@ async def async_test_home_assistant(loop):
hass.config.elevation = 0
hass.config.time_zone = date_util.get_time_zone("US/Pacific")
hass.config.units = METRIC_SYSTEM
hass.config.media_dirs = {"local": get_test_config_dir("media")}
hass.config.skip_pip = True

hass.config_entries = config_entries.ConfigEntries(hass, {})
@@ -9,7 +9,14 @@ from homeassistant.components.homeassistant import (
DOMAIN as HA_DOMAIN,
SERVICE_UPDATE_ENTITY,
)
from homeassistant.const import ATTR_ENTITY_ID, SERVICE_RELOAD, STATE_UNKNOWN
from homeassistant.const import (
ATTR_ENTITY_ID,
SERVICE_RELOAD,
STATE_OFF,
STATE_ON,
STATE_UNKNOWN,
)
from homeassistant.core import Context, callback
from homeassistant.setup import async_setup_component, setup_component

from tests.async_mock import patch
@@ -686,3 +693,79 @@ async def test_reload(hass):

def _get_fixtures_base_path():
return path.dirname(path.dirname(path.dirname(__file__)))


async def test_template_triggers(hass):
"""Test sensor with template triggers."""
hass.states.async_set("input_boolean.test", STATE_OFF)
config = {
"binary_sensor": {
"name": "Test_Binary",
"platform": "bayesian",
"observations": [
{
"platform": "template",
"value_template": "{{ states.input_boolean.test.state }}",
"prob_given_true": 1999.9,
},
],
"prior": 0.2,
"probability_threshold": 0.32,
}
}

await async_setup_component(hass, "binary_sensor", config)
await hass.async_block_till_done()

assert hass.states.get("binary_sensor.test_binary").state == STATE_OFF

events = []
hass.helpers.event.async_track_state_change_event(
"binary_sensor.test_binary", callback(lambda event: events.append(event))
)

context = Context()
hass.states.async_set("input_boolean.test", STATE_ON, context=context)
await hass.async_block_till_done()
await hass.async_block_till_done()

assert events[0].context == context

async def test_state_triggers(hass):
"""Test sensor with state triggers."""
hass.states.async_set("sensor.test_monitored", STATE_OFF)

config = {
"binary_sensor": {
"name": "Test_Binary",
"platform": "bayesian",
"observations": [
{
"platform": "state",
"entity_id": "sensor.test_monitored",
"to_state": "off",
"prob_given_true": 999.9,
"prob_given_false": 999.4,
},
],
"prior": 0.2,
"probability_threshold": 0.32,
}
}
await async_setup_component(hass, "binary_sensor", config)
await hass.async_block_till_done()

assert hass.states.get("binary_sensor.test_binary").state == STATE_OFF

events = []
hass.helpers.event.async_track_state_change_event(
"binary_sensor.test_binary", callback(lambda event: events.append(event))
)

context = Context()
hass.states.async_set("sensor.test_monitored", STATE_ON, context=context)
await hass.async_block_till_done()
await hass.async_block_till_done()

assert events[0].context == context
@@ -56,6 +56,16 @@ BROADLINK_DEVICES = {
20025,
5,
),
"Kitchen": ( # Not supported.
"192.168.0.64",
"34ea34b61d2c",
"LB1",
"Broadlink",
"SmartBulb",
0x504E,
57,
5,
),
}


@@ -85,6 +95,9 @@ class BroadlinkDevice:
with patch(
"homeassistant.components.broadlink.device.blk.gendevice",
return_value=mock_api,
), patch(
"homeassistant.components.broadlink.updater.blk.discover",
return_value=[mock_api],
):
await hass.config_entries.async_setup(mock_entry.entry_id)
await hass.async_block_till_done()
@@ -12,13 +12,16 @@ from . import get_device

from tests.async_mock import call, patch

DEVICE_DISCOVERY = "homeassistant.components.broadlink.config_flow.blk.discover"
DEVICE_FACTORY = "homeassistant.components.broadlink.config_flow.blk.gendevice"


@pytest.fixture(autouse=True)
def broadlink_setup_fixture():
"""Mock broadlink entry setup."""
with patch(
"homeassistant.components.broadlink.async_setup_entry", return_value=True
):
"homeassistant.components.broadlink.async_setup", return_value=True
), patch("homeassistant.components.broadlink.async_setup_entry", return_value=True):
yield
@@ -38,7 +41,7 @@ async def test_flow_user_works(hass):
assert result["step_id"] == "user"
assert result["errors"] == {}

with patch("broadlink.discover", return_value=[mock_api]) as mock_discover:
with patch(DEVICE_DISCOVERY, return_value=[mock_api]) as mock_discover:
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{"host": device.host, "timeout": device.timeout},
@@ -69,7 +72,7 @@ async def test_flow_user_already_in_progress(hass):
DOMAIN, context={"source": config_entries.SOURCE_USER}
)

with patch("broadlink.discover", return_value=[device.get_mock_api()]):
with patch(DEVICE_DISCOVERY, return_value=[device.get_mock_api()]):
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{"host": device.host, "timeout": device.timeout},
@@ -79,7 +82,7 @@ async def test_flow_user_already_in_progress(hass):
DOMAIN, context={"source": config_entries.SOURCE_USER}
)

with patch("broadlink.discover", return_value=[device.get_mock_api()]):
with patch(DEVICE_DISCOVERY, return_value=[device.get_mock_api()]):
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{"host": device.host, "timeout": device.timeout},
@@ -106,7 +109,7 @@ async def test_flow_user_mac_already_configured(hass):
device.timeout = 20
mock_api = device.get_mock_api()

with patch("broadlink.discover", return_value=[mock_api]):
with patch(DEVICE_DISCOVERY, return_value=[mock_api]):
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{"host": device.host, "timeout": device.timeout},
@@ -125,7 +128,7 @@ async def test_flow_user_invalid_ip_address(hass):
DOMAIN, context={"source": config_entries.SOURCE_USER}
)

with patch("broadlink.discover", side_effect=OSError(errno.EINVAL, None)):
with patch(DEVICE_DISCOVERY, side_effect=OSError(errno.EINVAL, None)):
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{"host": "0.0.0.1"},
@@ -142,7 +145,7 @@ async def test_flow_user_invalid_hostname(hass):
DOMAIN, context={"source": config_entries.SOURCE_USER}
)

with patch("broadlink.discover", side_effect=OSError(socket.EAI_NONAME, None)):
with patch(DEVICE_DISCOVERY, side_effect=OSError(socket.EAI_NONAME, None)):
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{"host": "pancakemaster.local"},
@@ -161,7 +164,7 @@ async def test_flow_user_device_not_found(hass):
DOMAIN, context={"source": config_entries.SOURCE_USER}
)

with patch("broadlink.discover", return_value=[]):
with patch(DEVICE_DISCOVERY, return_value=[]):
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{"host": device.host},
@@ -172,13 +175,32 @@ async def test_flow_user_device_not_found(hass):
assert result["errors"] == {"base": "cannot_connect"}


async def test_flow_user_device_not_supported(hass):
"""Test we handle a device not supported in the user step."""
device = get_device("Kitchen")
mock_api = device.get_mock_api()

result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)

with patch(DEVICE_DISCOVERY, return_value=[mock_api]):
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{"host": device.host},
)

assert result["type"] == "abort"
assert result["reason"] == "not_supported"


async def test_flow_user_network_unreachable(hass):
"""Test we handle a network unreachable in the user step."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)

with patch("broadlink.discover", side_effect=OSError(errno.ENETUNREACH, None)):
with patch(DEVICE_DISCOVERY, side_effect=OSError(errno.ENETUNREACH, None)):
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{"host": "192.168.1.32"},
@@ -195,7 +217,7 @@ async def test_flow_user_os_error(hass):
DOMAIN, context={"source": config_entries.SOURCE_USER}
)

with patch("broadlink.discover", side_effect=OSError()):
with patch(DEVICE_DISCOVERY, side_effect=OSError()):
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{"host": "192.168.1.32"},
@@ -216,7 +238,7 @@ async def test_flow_auth_authentication_error(hass):
DOMAIN, context={"source": config_entries.SOURCE_USER}
)

with patch("broadlink.discover", return_value=[mock_api]):
with patch(DEVICE_DISCOVERY, return_value=[mock_api]):
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{"host": device.host, "timeout": device.timeout},
@@ -237,7 +259,7 @@ async def test_flow_auth_device_offline(hass):
DOMAIN, context={"source": config_entries.SOURCE_USER}
)

with patch("broadlink.discover", return_value=[mock_api]):
with patch(DEVICE_DISCOVERY, return_value=[mock_api]):
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{"host": device.host},
@@ -258,7 +280,7 @@ async def test_flow_auth_firmware_error(hass):
DOMAIN, context={"source": config_entries.SOURCE_USER}
)

with patch("broadlink.discover", return_value=[mock_api]):
with patch(DEVICE_DISCOVERY, return_value=[mock_api]):
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{"host": device.host},
@@ -279,7 +301,7 @@ async def test_flow_auth_network_unreachable(hass):
DOMAIN, context={"source": config_entries.SOURCE_USER}
)

with patch("broadlink.discover", return_value=[mock_api]):
with patch(DEVICE_DISCOVERY, return_value=[mock_api]):
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{"host": device.host},
@@ -300,7 +322,7 @@ async def test_flow_auth_os_error(hass):
DOMAIN, context={"source": config_entries.SOURCE_USER}
)

with patch("broadlink.discover", return_value=[mock_api]):
with patch(DEVICE_DISCOVERY, return_value=[mock_api]):
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{"host": device.host},
@@ -321,13 +343,13 @@ async def test_flow_reset_works(hass):
DOMAIN, context={"source": config_entries.SOURCE_USER}
)

with patch("broadlink.discover", return_value=[mock_api]):
with patch(DEVICE_DISCOVERY, return_value=[mock_api]):
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{"host": device.host, "timeout": device.timeout},
)

with patch("broadlink.discover", return_value=[device.get_mock_api()]):
with patch(DEVICE_DISCOVERY, return_value=[device.get_mock_api()]):
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{"host": device.host, "timeout": device.timeout},
@@ -353,7 +375,7 @@ async def test_flow_unlock_works(hass):
DOMAIN, context={"source": config_entries.SOURCE_USER}
)

with patch("broadlink.discover", return_value=[mock_api]):
with patch(DEVICE_DISCOVERY, return_value=[mock_api]):
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{"host": device.host, "timeout": device.timeout},
@@ -392,7 +414,7 @@ async def test_flow_unlock_device_offline(hass):
DOMAIN, context={"source": config_entries.SOURCE_USER}
)

with patch("broadlink.discover", return_value=[mock_api]):
with patch(DEVICE_DISCOVERY, return_value=[mock_api]):
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{"host": device.host, "timeout": device.timeout},
@@ -419,7 +441,7 @@ async def test_flow_unlock_firmware_error(hass):
DOMAIN, context={"source": config_entries.SOURCE_USER}
)

with patch("broadlink.discover", return_value=[mock_api]):
with patch(DEVICE_DISCOVERY, return_value=[mock_api]):
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{"host": device.host, "timeout": device.timeout},
@@ -446,7 +468,7 @@ async def test_flow_unlock_network_unreachable(hass):
DOMAIN, context={"source": config_entries.SOURCE_USER}
)

with patch("broadlink.discover", return_value=[mock_api]):
with patch(DEVICE_DISCOVERY, return_value=[mock_api]):
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{"host": device.host, "timeout": device.timeout},
@@ -473,7 +495,7 @@ async def test_flow_unlock_os_error(hass):
DOMAIN, context={"source": config_entries.SOURCE_USER}
)

with patch("broadlink.discover", return_value=[mock_api]):
with patch(DEVICE_DISCOVERY, return_value=[mock_api]):
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{"host": device.host, "timeout": device.timeout},
@@ -499,7 +521,7 @@ async def test_flow_do_not_unlock(hass):
DOMAIN, context={"source": config_entries.SOURCE_USER}
)

with patch("broadlink.discover", return_value=[mock_api]):
with patch(DEVICE_DISCOVERY, return_value=[mock_api]):
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{"host": device.host, "timeout": device.timeout},
@@ -527,7 +549,7 @@ async def test_flow_import_works(hass):
device = get_device("Living Room")
mock_api = device.get_mock_api()

with patch("broadlink.discover", return_value=[mock_api]) as mock_discover:
with patch(DEVICE_DISCOVERY, return_value=[mock_api]) as mock_discover:
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": config_entries.SOURCE_IMPORT},
@@ -558,12 +580,12 @@ async def test_flow_import_already_in_progress(hass):
device = get_device("Living Room")
data = {"host": device.host}

with patch("broadlink.discover", return_value=[device.get_mock_api()]):
with patch(DEVICE_DISCOVERY, return_value=[device.get_mock_api()]):
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=data
)

with patch("broadlink.discover", return_value=[device.get_mock_api()]):
with patch(DEVICE_DISCOVERY, return_value=[device.get_mock_api()]):
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=data
)
@@ -579,7 +601,7 @@ async def test_flow_import_host_already_configured(hass):
mock_entry.add_to_hass(hass)
mock_api = device.get_mock_api()

with patch("broadlink.discover", return_value=[mock_api]):
with patch(DEVICE_DISCOVERY, return_value=[mock_api]):
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": config_entries.SOURCE_IMPORT},
@@ -602,7 +624,7 @@ async def test_flow_import_mac_already_configured(hass):
device.host = "192.168.1.16"
mock_api = device.get_mock_api()

with patch("broadlink.discover", return_value=[mock_api]):
with patch(DEVICE_DISCOVERY, return_value=[mock_api]):
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": config_entries.SOURCE_IMPORT},
@@ -620,7 +642,7 @@ async def test_flow_import_mac_already_configured(hass):

async def test_flow_import_device_not_found(hass):
"""Test we handle a device not found in the import step."""
with patch("broadlink.discover", return_value=[]):
with patch(DEVICE_DISCOVERY, return_value=[]):
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": config_entries.SOURCE_IMPORT},
@@ -631,9 +653,25 @@ async def test_flow_import_device_not_found(hass):
assert result["reason"] == "cannot_connect"

async def test_flow_import_device_not_supported(hass):
"""Test we handle a device not supported in the import step."""
device = get_device("Kitchen")
mock_api = device.get_mock_api()

with patch(DEVICE_DISCOVERY, return_value=[mock_api]):
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": config_entries.SOURCE_IMPORT},
data={"host": device.host},
)

assert result["type"] == "abort"
assert result["reason"] == "not_supported"


async def test_flow_import_invalid_ip_address(hass):
"""Test we handle an invalid IP address in the import step."""
with patch("broadlink.discover", side_effect=OSError(errno.EINVAL, None)):
with patch(DEVICE_DISCOVERY, side_effect=OSError(errno.EINVAL, None)):
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": config_entries.SOURCE_IMPORT},
@@ -646,7 +684,7 @@ async def test_flow_import_invalid_ip_address(hass):

async def test_flow_import_invalid_hostname(hass):
"""Test we handle an invalid hostname in the import step."""
with patch("broadlink.discover", side_effect=OSError(socket.EAI_NONAME, None)):
with patch(DEVICE_DISCOVERY, side_effect=OSError(socket.EAI_NONAME, None)):
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": config_entries.SOURCE_IMPORT},
@@ -659,7 +697,7 @@ async def test_flow_import_invalid_hostname(hass):

async def test_flow_import_network_unreachable(hass):
"""Test we handle a network unreachable in the import step."""
with patch("broadlink.discover", side_effect=OSError(errno.ENETUNREACH, None)):
with patch(DEVICE_DISCOVERY, side_effect=OSError(errno.ENETUNREACH, None)):
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": config_entries.SOURCE_IMPORT},
@@ -672,7 +710,7 @@ async def test_flow_import_network_unreachable(hass):

async def test_flow_import_os_error(hass):
"""Test we handle an OS error in the import step."""
with patch("broadlink.discover", side_effect=OSError()):
with patch(DEVICE_DISCOVERY, side_effect=OSError()):
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": config_entries.SOURCE_IMPORT},
@@ -692,7 +730,7 @@ async def test_flow_reauth_works(hass):
mock_api.auth.side_effect = blke.AuthenticationError()
data = {"name": device.name, **device.get_entry_data()}

with patch("broadlink.gendevice", return_value=mock_api):
with patch(DEVICE_FACTORY, return_value=mock_api):
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": "reauth"}, data=data
)
@@ -702,7 +740,7 @@ async def test_flow_reauth_works(hass):

mock_api = device.get_mock_api()

with patch("broadlink.discover", return_value=[mock_api]) as mock_discover:
with patch(DEVICE_DISCOVERY, return_value=[mock_api]) as mock_discover:
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{"host": device.host, "timeout": device.timeout},
@@ -728,7 +766,7 @@ async def test_flow_reauth_invalid_host(hass):
mock_api.auth.side_effect = blke.AuthenticationError()
data = {"name": device.name, **device.get_entry_data()}

with patch("broadlink.gendevice", return_value=mock_api):
with patch(DEVICE_FACTORY, return_value=mock_api):
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": "reauth"}, data=data
)
@@ -736,7 +774,7 @@ async def test_flow_reauth_invalid_host(hass):
device.mac = get_device("Office").mac
mock_api = device.get_mock_api()

with patch("broadlink.discover", return_value=[mock_api]) as mock_discover:
with patch(DEVICE_DISCOVERY, return_value=[mock_api]) as mock_discover:
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{"host": device.host, "timeout": device.timeout},
@@ -762,7 +800,7 @@ async def test_flow_reauth_valid_host(hass):
mock_api.auth.side_effect = blke.AuthenticationError()
data = {"name": device.name, **device.get_entry_data()}

with patch("broadlink.gendevice", return_value=mock_api):
with patch(DEVICE_FACTORY, return_value=mock_api):
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": "reauth"}, data=data
)
@@ -770,7 +808,7 @@ async def test_flow_reauth_valid_host(hass):
device.host = "192.168.1.128"
mock_api = device.get_mock_api()

with patch("broadlink.discover", return_value=[mock_api]) as mock_discover:
with patch(DEVICE_DISCOVERY, return_value=[mock_api]) as mock_discover:
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{"host": device.host, "timeout": device.timeout},
@@ -179,7 +179,7 @@ async def test_device_setup_update_authorization_error(hass):

async def test_device_setup_update_authentication_error(hass):
"""Test we handle an authentication error in the update step."""
device = get_device("Living Room")
device = get_device("Garage")
mock_api = device.get_mock_api()
mock_api.check_sensors.side_effect = blke.AuthorizationError()
mock_api.auth.side_effect = (None, blke.AuthenticationError())
@@ -207,7 +207,7 @@ async def test_device_setup_update_authentication_error(hass):

async def test_device_setup_update_broadlink_exception(hass):
"""Test we handle a Broadlink exception in the update step."""
device = get_device("Living Room")
device = get_device("Garage")
mock_api = device.get_mock_api()
mock_api.check_sensors.side_effect = blke.BroadlinkException()
mock_entry = device.get_mock_entry()
@@ -67,6 +67,15 @@ LIGHTS = {
"type": "On and Off light",
"uniqueid": "00:00:00:00:00:00:00:03-00",
},
"5": {
"ctmax": 1000,
"ctmin": 0,
"id": "Tunable white light with bad maxmin values id",
"name": "Tunable white light with bad maxmin values",
"state": {"on": True, "colormode": "ct", "ct": 2500, "reachable": True},
"type": "Tunable white light",
"uniqueid": "00:00:00:00:00:00:00:04-00",
},
}
@@ -101,7 +110,7 @@ async def test_lights_and_groups(hass):
assert "light.on_off_switch" not in gateway.deconz_ids
assert "light.on_off_light" in gateway.deconz_ids

assert len(hass.states.async_all()) == 5
assert len(hass.states.async_all()) == 6

rgb_light = hass.states.get("light.rgb_light")
assert rgb_light.state == "on"
@@ -117,6 +126,15 @@ async def test_lights_and_groups(hass):
assert tunable_white_light.attributes["min_mireds"] == 155
assert tunable_white_light.attributes["supported_features"] == 2

tunable_white_light_bad_maxmin = hass.states.get(
"light.tunable_white_light_with_bad_maxmin_values"
)
assert tunable_white_light_bad_maxmin.state == "on"
assert tunable_white_light_bad_maxmin.attributes["color_temp"] == 2500
assert tunable_white_light_bad_maxmin.attributes["max_mireds"] == 650
assert tunable_white_light_bad_maxmin.attributes["min_mireds"] == 140
assert tunable_white_light_bad_maxmin.attributes["supported_features"] == 2

on_off_light = hass.states.get("light.on_off_light")
assert on_off_light.state == "on"
assert on_off_light.attributes["supported_features"] == 0
@@ -256,7 +274,7 @@ async def test_disable_light_groups(hass):
assert "light.empty_group" not in gateway.deconz_ids
assert "light.on_off_switch" not in gateway.deconz_ids
# 3 entities
assert len(hass.states.async_all()) == 4
assert len(hass.states.async_all()) == 5

rgb_light = hass.states.get("light.rgb_light")
assert rgb_light is not None
@@ -281,7 +299,7 @@ async def test_disable_light_groups(hass):
assert "light.empty_group" not in gateway.deconz_ids
assert "light.on_off_switch" not in gateway.deconz_ids
# 3 entities
assert len(hass.states.async_all()) == 5
assert len(hass.states.async_all()) == 6

hass.config_entries.async_update_entry(
gateway.config_entry, options={deconz.gateway.CONF_ALLOW_DECONZ_GROUPS: False}
@@ -294,4 +312,4 @@ async def test_disable_light_groups(hass):
assert "light.empty_group" not in gateway.deconz_ids
assert "light.on_off_switch" not in gateway.deconz_ids
# 3 entities
assert len(hass.states.async_all()) == 4
assert len(hass.states.async_all()) == 5
@@ -110,6 +110,7 @@ async def test_import(
camera=False,
events=2,
temperature=None,
voltage=40,
),
door2=GogoGate2Door(
door_id=2,
@@ -123,6 +124,7 @@ async def test_import(
camera=False,
events=0,
temperature=None,
voltage=40,
),
door3=GogoGate2Door(
door_id=3,
@@ -136,6 +138,7 @@ async def test_import(
camera=False,
events=0,
temperature=None,
voltage=40,
),
outputs=Outputs(output1=True, output2=False, output3=True),
network=Network(ip=""),
@@ -170,6 +173,7 @@ async def test_import(
enabled=True,
apicode="apicode0",
customimage=False,
voltage=40,
),
door2=ISmartGateDoor(
door_id=1,
@@ -186,6 +190,7 @@ async def test_import(
enabled=True,
apicode="apicode0",
customimage=False,
voltage=40,
),
door3=ISmartGateDoor(
door_id=1,
@@ -202,6 +207,7 @@ async def test_import(
enabled=True,
apicode="apicode0",
customimage=False,
voltage=40,
),
network=Network(ip=""),
wifi=Wifi(SSID="", linkquality="", signal=""),
@@ -268,6 +274,7 @@ async def test_open_close_update(gogogat2api_mock, hass: HomeAssistant) -> None:
camera=False,
events=2,
temperature=None,
voltage=40,
),
door2=GogoGate2Door(
door_id=2,
@@ -281,6 +288,7 @@ async def test_open_close_update(gogogat2api_mock, hass: HomeAssistant) -> None:
camera=False,
events=0,
temperature=None,
voltage=40,
),
door3=GogoGate2Door(
door_id=3,
@@ -294,6 +302,7 @@ async def test_open_close_update(gogogat2api_mock, hass: HomeAssistant) -> None:
camera=False,
events=0,
temperature=None,
voltage=40,
),
outputs=Outputs(output1=True, output2=False, output3=True),
network=Network(ip=""),
@@ -381,6 +390,7 @@ async def test_availability(ismartgateapi_mock, hass: HomeAssistant) -> None:
enabled=True,
apicode="apicode0",
customimage=False,
voltage=40,
),
door2=ISmartGateDoor(
door_id=2,
@@ -397,6 +407,7 @@ async def test_availability(ismartgateapi_mock, hass: HomeAssistant) -> None:
enabled=True,
apicode="apicode0",
customimage=False,
voltage=40,
),
door3=ISmartGateDoor(
door_id=3,
@@ -413,6 +424,7 @@ async def test_availability(ismartgateapi_mock, hass: HomeAssistant) -> None:
enabled=True,
apicode="apicode0",
customimage=False,
voltage=40,
),
network=Network(ip=""),
wifi=Wifi(SSID="", linkquality="", signal=""),
@@ -737,6 +737,11 @@ async def test_reload_with_base_integration_platform_not_setup(hass):
},
)
await hass.async_block_till_done()
hass.states.async_set("light.master_hall_lights", STATE_ON)
hass.states.async_set("light.master_hall_lights_2", STATE_OFF)

hass.states.async_set("light.outside_patio_lights", STATE_OFF)
hass.states.async_set("light.outside_patio_lights_2", STATE_OFF)

yaml_path = path.join(
_get_fixtures_base_path(),
@@ -755,6 +760,8 @@ async def test_reload_with_base_integration_platform_not_setup(hass):
assert hass.states.get("light.light_group") is None
assert hass.states.get("light.master_hall_lights_g") is not None
assert hass.states.get("light.outside_patio_lights_g") is not None
assert hass.states.get("light.master_hall_lights_g").state == STATE_ON
assert hass.states.get("light.outside_patio_lights_g").state == STATE_OFF


def _get_fixtures_base_path():
Some files were not shown because too many files have changed in this diff.